#![cfg_attr(not(feature = "std"), no_std)]
#[cfg(feature = "std")]
use serde::Serialize;
#[cfg(feature = "runtime-benchmarks")]
use sp_runtime::traits::TrailingZeroInput;
use sp_runtime::{
generic,
traits::{
self, AtLeast32Bit, AtLeast32BitUnsigned, BadOrigin, BlockNumberProvider, Bounded,
CheckEqual, Dispatchable, Hash, Lookup, LookupError, MaybeDisplay,
MaybeSerializeDeserialize, Member, One, Saturating, SimpleBitOps, StaticLookup, Zero,
},
DispatchError, RuntimeDebug,
};
#[cfg(any(feature = "std", test))]
use sp_std::map;
use sp_std::{fmt::Debug, marker::PhantomData, prelude::*};
use sp_version::RuntimeVersion;
use codec::{Decode, Encode, EncodeLike, FullCodec, MaxEncodedLen};
use frame_support::{
dispatch::{
extract_actual_pays_fee, extract_actual_weight, DispatchClass, DispatchInfo,
DispatchResult, DispatchResultWithPostInfo, PerDispatchClass,
},
storage::{self, StorageStreamIter},
traits::{
ConstU32, Contains, EnsureOrigin, Get, HandleLifetime, OnKilledAccount, OnNewAccount,
OriginTrait, PalletInfo, SortedMembers, StoredMap, TypedGet,
},
Parameter,
};
use scale_info::TypeInfo;
use sp_core::storage::well_known_keys;
use sp_weights::{RuntimeDbWeight, Weight};
#[cfg(feature = "std")]
use frame_support::traits::GenesisBuild;
#[cfg(any(feature = "std", test))]
use sp_io::TestExternalities;
pub mod limits;
#[cfg(test)]
pub(crate) mod mock;
pub mod offchain;
mod extensions;
#[cfg(feature = "std")]
pub mod mocking;
#[cfg(test)]
mod tests;
pub mod weights;
pub mod migrations;
pub use extensions::{
check_genesis::CheckGenesis, check_mortality::CheckMortality,
check_non_zero_sender::CheckNonZeroSender, check_nonce::CheckNonce,
check_spec_version::CheckSpecVersion, check_tx_version::CheckTxVersion,
check_weight::CheckWeight,
};
pub use extensions::check_mortality::CheckMortality as CheckEra;
pub use frame_support::dispatch::RawOrigin;
pub use weights::WeightInfo;
const LOG_TARGET: &str = "runtime::system";
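/// Compute the trie root of a list of extrinsics.
///
/// Each extrinsic is SCALE-encoded and the ordered trie root is computed using state
/// version `V0`.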
pub fn extrinsics_root<H: Hash, E: codec::Encode>(extrinsics: &[E]) -> H::Output {
extrinsics_data_root::<H>(extrinsics.iter().map(codec::Encode::encode).collect())
}
pub fn extrinsics_data_root<H: Hash>(xts: Vec<Vec<u8>>) -> H::Output {
H::ordered_trie_root(xts, sp_core::storage::StateVersion::V0)
}
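/// An object to track the weight consumed in the current block, per dispatch class.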
pub type ConsumedWeight = PerDispatchClass<Weight>;
pub use pallet::*;
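/// What to do when the runtime code (`:code`) should be updated.
///
/// The default implementation `()` writes the new code to storage via
/// [`Pallet::update_code_in_storage`].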
pub trait SetCode<T: Config> {
fn set_code(code: Vec<u8>) -> DispatchResult;
}
impl<T: Config> SetCode<T> for () {
fn set_code(code: Vec<u8>) -> DispatchResult {
<Pallet<T>>::update_code_in_storage(&code)?;
Ok(())
}
}
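/// Limits on the number of consumer references an account may hold.
///
/// Implemented for `ConstU32<N>` (both limits are `N`) and for a `(MaxNormal, MaxOverflow)`
/// pair of `Get<u32>` types.
///
/// A minimal, illustrative sketch of wiring this into a runtime's `frame_system::Config`;
/// the value `16` is an arbitrary example, not a recommendation:
///
/// ```ignore
/// type MaxConsumers = frame_support::traits::ConstU32<16>;
/// ```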
pub trait ConsumerLimits {
fn max_consumers() -> RefCount;
fn max_overflow() -> RefCount;
}
impl<const Z: u32> ConsumerLimits for ConstU32<Z> {
fn max_consumers() -> RefCount {
Z
}
fn max_overflow() -> RefCount {
Z
}
}
impl<MaxNormal: Get<u32>, MaxOverflow: Get<u32>> ConsumerLimits for (MaxNormal, MaxOverflow) {
fn max_consumers() -> RefCount {
MaxNormal::get()
}
fn max_overflow() -> RefCount {
MaxOverflow::get()
}
}
#[frame_support::pallet]
pub mod pallet {
use crate::{self as frame_system, pallet_prelude::*, *};
use frame_support::pallet_prelude::*;
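/// System configuration trait, implemented by the runtime.
///
/// Unlike every other pallet, this trait deliberately has no `frame_system::Config`
/// supertrait requirement (see the `disable_frame_system_supertrait_check` attribute below).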
#[pallet::config]
#[pallet::disable_frame_system_supertrait_check]
pub trait Config: 'static + Eq + Clone {
type BaseCallFilter: Contains<Self::RuntimeCall>;
#[pallet::constant]
type BlockWeights: Get<limits::BlockWeights>;
#[pallet::constant]
type BlockLength: Get<limits::BlockLength>;
type RuntimeOrigin: Into<Result<RawOrigin<Self::AccountId>, Self::RuntimeOrigin>>
+ From<RawOrigin<Self::AccountId>>
+ Clone
+ OriginTrait<Call = Self::RuntimeCall>;
type RuntimeCall: Parameter
+ Dispatchable<RuntimeOrigin = Self::RuntimeOrigin>
+ Debug
+ From<Call<Self>>;
type Index: Parameter
+ Member
+ MaybeSerializeDeserialize
+ Debug
+ Default
+ MaybeDisplay
+ AtLeast32Bit
+ Copy
+ MaxEncodedLen;
type BlockNumber: Parameter
+ Member
+ MaybeSerializeDeserialize
+ Debug
+ MaybeDisplay
+ AtLeast32BitUnsigned
+ Default
+ Bounded
+ Copy
+ sp_std::hash::Hash
+ sp_std::str::FromStr
+ MaxEncodedLen
+ TypeInfo;
type Hash: Parameter
+ Member
+ MaybeSerializeDeserialize
+ Debug
+ MaybeDisplay
+ SimpleBitOps
+ Ord
+ Default
+ Copy
+ CheckEqual
+ sp_std::hash::Hash
+ AsRef<[u8]>
+ AsMut<[u8]>
+ MaxEncodedLen;
type Hashing: Hash<Output = Self::Hash> + TypeInfo;
type AccountId: Parameter
+ Member
+ MaybeSerializeDeserialize
+ Debug
+ MaybeDisplay
+ Ord
+ MaxEncodedLen;
type Lookup: StaticLookup<Target = Self::AccountId>;
type Header: Parameter + traits::Header<Number = Self::BlockNumber, Hash = Self::Hash>;
type RuntimeEvent: Parameter
+ Member
+ From<Event<Self>>
+ Debug
+ IsType<<Self as frame_system::Config>::RuntimeEvent>;
#[pallet::constant]
type BlockHashCount: Get<Self::BlockNumber>;
#[pallet::constant]
type DbWeight: Get<RuntimeDbWeight>;
#[pallet::constant]
type Version: Get<RuntimeVersion>;
type PalletInfo: PalletInfo;
type AccountData: Member + FullCodec + Clone + Default + TypeInfo + MaxEncodedLen;
type OnNewAccount: OnNewAccount<Self::AccountId>;
type OnKilledAccount: OnKilledAccount<Self::AccountId>;
type SystemWeightInfo: WeightInfo;
#[pallet::constant]
type SS58Prefix: Get<u16>;
type OnSetCode: SetCode<Self>;
type MaxConsumers: ConsumerLimits;
}
#[pallet::pallet]
#[pallet::generate_store(pub(super) trait Store)]
pub struct Pallet<T>(_);
#[pallet::hooks]
impl<T: Config> Hooks<BlockNumberFor<T>> for Pallet<T> {
#[cfg(feature = "std")]
fn integrity_test() {
sp_io::TestExternalities::default().execute_with(|| {
T::BlockWeights::get().validate().expect("The weights are invalid.");
});
}
}
#[pallet::call]
impl<T: Config> Pallet<T> {
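/// Make some on-chain remark.
///
/// Can be dispatched by any signed origin or by root; the remark itself is not stored.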
#[pallet::call_index(0)]
#[pallet::weight(T::SystemWeightInfo::remark(_remark.len() as u32))]
pub fn remark(origin: OriginFor<T>, _remark: Vec<u8>) -> DispatchResultWithPostInfo {
ensure_signed_or_root(origin)?;
Ok(().into())
}
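/// Set the number of pages in the WebAssembly environment's heap. Root only.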
#[pallet::call_index(1)]
#[pallet::weight((T::SystemWeightInfo::set_heap_pages(), DispatchClass::Operational))]
pub fn set_heap_pages(origin: OriginFor<T>, pages: u64) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
storage::unhashed::put_raw(well_known_keys::HEAP_PAGES, &pages.encode());
Self::deposit_log(generic::DigestItem::RuntimeEnvironmentUpdated);
Ok(().into())
}
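/// Set the new runtime code. Root only.
///
/// The code is validated by [`Pallet::can_set_code`]: the `spec_name` must stay the same
/// and the `spec_version` must strictly increase.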
#[pallet::call_index(2)]
#[pallet::weight((T::BlockWeights::get().max_block, DispatchClass::Operational))]
pub fn set_code(origin: OriginFor<T>, code: Vec<u8>) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
Self::can_set_code(&code)?;
T::OnSetCode::set_code(code)?;
Ok(().into())
}
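/// Set the new runtime code without any version checks. Root only.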
#[pallet::call_index(3)]
#[pallet::weight((T::BlockWeights::get().max_block, DispatchClass::Operational))]
pub fn set_code_without_checks(
origin: OriginFor<T>,
code: Vec<u8>,
) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
T::OnSetCode::set_code(code)?;
Ok(().into())
}
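/// Set some items of storage. Root only.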
#[pallet::call_index(4)]
#[pallet::weight((
T::SystemWeightInfo::set_storage(items.len() as u32),
DispatchClass::Operational,
))]
pub fn set_storage(
origin: OriginFor<T>,
items: Vec<KeyValue>,
) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
for i in &items {
storage::unhashed::put_raw(&i.0, &i.1);
}
Ok(().into())
}
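/// Kill some items from storage. Root only.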
#[pallet::call_index(5)]
#[pallet::weight((
T::SystemWeightInfo::kill_storage(keys.len() as u32),
DispatchClass::Operational,
))]
pub fn kill_storage(origin: OriginFor<T>, keys: Vec<Key>) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
for key in &keys {
storage::unhashed::kill(key);
}
Ok(().into())
}
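/// Kill all storage items with a key that starts with the given prefix. Root only.
///
/// `_subkeys` is only used to weigh the call; it should be an upper bound on the number
/// of keys removed under the prefix.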
#[pallet::call_index(6)]
#[pallet::weight((
T::SystemWeightInfo::kill_prefix(_subkeys.saturating_add(1)),
DispatchClass::Operational,
))]
pub fn kill_prefix(
origin: OriginFor<T>,
prefix: Key,
_subkeys: u32,
) -> DispatchResultWithPostInfo {
ensure_root(origin)?;
let _ = storage::unhashed::clear_prefix(&prefix, None, None);
Ok(().into())
}
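/// Make some on-chain remark and emit an [`Event::Remarked`] event carrying its hash.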
#[pallet::call_index(7)]
#[pallet::weight(T::SystemWeightInfo::remark_with_event(remark.len() as u32))]
pub fn remark_with_event(
origin: OriginFor<T>,
remark: Vec<u8>,
) -> DispatchResultWithPostInfo {
let who = ensure_signed(origin)?;
let hash = T::Hashing::hash(&remark[..]);
Self::deposit_event(Event::Remarked { sender: who, hash });
Ok(().into())
}
}
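/// Events emitted by the System pallet.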
#[pallet::event]
pub enum Event<T: Config> {
/// An extrinsic completed successfully.
ExtrinsicSuccess { dispatch_info: DispatchInfo },
/// An extrinsic failed.
ExtrinsicFailed { dispatch_error: DispatchError, dispatch_info: DispatchInfo },
/// The runtime code (`:code`) was updated.
CodeUpdated,
/// A new account was created.
NewAccount { account: T::AccountId },
/// An account was reaped.
KilledAccount { account: T::AccountId },
/// An on-chain remark happened.
Remarked { sender: T::AccountId, hash: T::Hash },
}
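/// Errors returned by the System pallet.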
#[pallet::error]
pub enum Error<T> {
/// The name of the specification does not match the current runtime's.
InvalidSpecName,
/// The specification version is not allowed to decrease.
SpecVersionNeedsToIncrease,
/// Failed to extract the runtime version from the new runtime code.
FailedToExtractRuntimeVersion,
/// The account's composite data is not the default value, so it cannot be reset.
NonDefaultComposite,
/// There is a non-zero reference count preventing the account from being purged.
NonZeroRefCount,
/// The origin filter prevents the call from being dispatched.
CallFiltered,
}
#[pallet::origin]
pub type Origin<T> = RawOrigin<<T as Config>::AccountId>;
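/// The full account information for a particular account ID.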
#[pallet::storage]
#[pallet::getter(fn account)]
pub type Account<T: Config> = StorageMap<
_,
Blake2_128Concat,
T::AccountId,
AccountInfo<T::Index, T::AccountData>,
ValueQuery,
>;
#[pallet::storage]
pub(super) type ExtrinsicCount<T: Config> = StorageValue<_, u32>;
#[pallet::storage]
#[pallet::whitelist_storage]
#[pallet::getter(fn block_weight)]
pub(super) type BlockWeight<T: Config> = StorageValue<_, ConsumedWeight, ValueQuery>;
#[pallet::storage]
pub(super) type AllExtrinsicsLen<T: Config> = StorageValue<_, u32>;
#[pallet::storage]
#[pallet::getter(fn block_hash)]
pub type BlockHash<T: Config> =
StorageMap<_, Twox64Concat, T::BlockNumber, T::Hash, ValueQuery>;
#[pallet::storage]
#[pallet::getter(fn extrinsic_data)]
#[pallet::unbounded]
pub(super) type ExtrinsicData<T: Config> =
StorageMap<_, Twox64Concat, u32, Vec<u8>, ValueQuery>;
#[pallet::storage]
#[pallet::whitelist_storage]
#[pallet::getter(fn block_number)]
pub(super) type Number<T: Config> = StorageValue<_, T::BlockNumber, ValueQuery>;
#[pallet::storage]
#[pallet::getter(fn parent_hash)]
pub(super) type ParentHash<T: Config> = StorageValue<_, T::Hash, ValueQuery>;
#[pallet::storage]
#[pallet::unbounded]
#[pallet::getter(fn digest)]
pub(super) type Digest<T: Config> = StorageValue<_, generic::Digest, ValueQuery>;
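/// Events deposited for the current block.
///
/// NOTE: This storage item is unbounded and explicitly not meant to be read on-chain;
/// use [`Pallet::read_events_no_consensus`] from client-side or test code instead.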
#[pallet::storage]
#[pallet::whitelist_storage]
#[pallet::unbounded]
pub(super) type Events<T: Config> =
StorageValue<_, Vec<Box<EventRecord<T::RuntimeEvent, T::Hash>>>, ValueQuery>;
#[pallet::storage]
#[pallet::whitelist_storage]
#[pallet::getter(fn event_count)]
pub(super) type EventCount<T: Config> = StorageValue<_, EventIndex, ValueQuery>;
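/// Mapping between a topic (a hash) and the `(block_number, event_index)` pairs of the
/// events deposited under that topic.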
#[pallet::storage]
#[pallet::unbounded]
#[pallet::getter(fn event_topics)]
pub(super) type EventTopics<T: Config> =
StorageMap<_, Blake2_128Concat, T::Hash, Vec<(T::BlockNumber, EventIndex)>, ValueQuery>;
#[pallet::storage]
#[pallet::unbounded]
pub type LastRuntimeUpgrade<T: Config> = StorageValue<_, LastRuntimeUpgradeInfo>;
#[pallet::storage]
pub(super) type UpgradedToU32RefCount<T: Config> = StorageValue<_, bool, ValueQuery>;
#[pallet::storage]
pub(super) type UpgradedToTripleRefCount<T: Config> = StorageValue<_, bool, ValueQuery>;
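/// The execution phase of the block.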
#[pallet::storage]
#[pallet::whitelist_storage]
pub(super) type ExecutionPhase<T: Config> = StorageValue<_, Phase>;
#[cfg_attr(feature = "std", derive(Default))]
#[pallet::genesis_config]
pub struct GenesisConfig {
#[serde(with = "sp_core::bytes")]
pub code: Vec<u8>,
}
#[pallet::genesis_build]
impl<T: Config> GenesisBuild<T> for GenesisConfig {
fn build(&self) {
<BlockHash<T>>::insert::<_, T::Hash>(T::BlockNumber::zero(), hash69());
<ParentHash<T>>::put::<T::Hash>(hash69());
<LastRuntimeUpgrade<T>>::put(LastRuntimeUpgradeInfo::from(T::Version::get()));
<UpgradedToU32RefCount<T>>::put(true);
<UpgradedToTripleRefCount<T>>::put(true);
sp_io::storage::set(well_known_keys::CODE, &self.code);
sp_io::storage::set(well_known_keys::EXTRINSIC_INDEX, &0u32.encode());
}
}
}
#[cfg(feature = "std")]
impl GenesisConfig {
pub fn build_storage<T: Config>(&self) -> Result<sp_runtime::Storage, String> {
<Self as GenesisBuild<T>>::build_storage(self)
}
pub fn assimilate_storage<T: Config>(
&self,
storage: &mut sp_runtime::Storage,
) -> Result<(), String> {
<Self as GenesisBuild<T>>::assimilate_storage(self, storage)
}
}
pub type Key = Vec<u8>;
pub type KeyValue = (Vec<u8>, Vec<u8>);
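/// The phase of block execution in which an event was deposited.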
#[derive(Encode, Decode, RuntimeDebug, TypeInfo, MaxEncodedLen)]
#[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))]
pub enum Phase {
ApplyExtrinsic(u32),
Finalization,
Initialization,
}
impl Default for Phase {
fn default() -> Self {
Self::Initialization
}
}
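/// A record of an event, together with the phase it happened in and the topics it was
/// indexed under.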
#[derive(Encode, Decode, RuntimeDebug, TypeInfo)]
#[cfg_attr(feature = "std", derive(Serialize, PartialEq, Eq, Clone))]
pub struct EventRecord<E: Parameter + Member, T> {
pub phase: Phase,
pub event: E,
pub topics: Vec<T>,
}
#[cfg(feature = "std")]
fn hash69<T: AsMut<[u8]> + Default>() -> T {
let mut h = T::default();
h.as_mut().iter_mut().for_each(|byte| *byte = 69);
h
}
type EventIndex = u32;
pub type RefCount = u32;
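/// Information of an account.
///
/// * `nonce`: the number of extrinsics the account has already dispatched.
/// * `consumers`: references from other pallets that depend on the account existing; the
///   account cannot lose its last provider while this is non-zero.
/// * `providers`: references that allow the account to exist.
/// * `sufficients`: references that allow the account to exist for its own sake,
///   independently of any providers.
/// * `data`: extra per-account data configured by the runtime (`Config::AccountData`).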
#[derive(Clone, Eq, PartialEq, Default, RuntimeDebug, Encode, Decode, TypeInfo, MaxEncodedLen)]
pub struct AccountInfo<Index, AccountData> {
pub nonce: Index,
pub consumers: RefCount,
pub providers: RefCount,
pub sufficients: RefCount,
pub data: AccountData,
}
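/// The `spec_version` and `spec_name` recorded at the last runtime upgrade, as stored in
/// [`LastRuntimeUpgrade`].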
#[derive(sp_runtime::RuntimeDebug, Encode, Decode, TypeInfo)]
#[cfg_attr(feature = "std", derive(PartialEq))]
pub struct LastRuntimeUpgradeInfo {
pub spec_version: codec::Compact<u32>,
pub spec_name: sp_runtime::RuntimeString,
}
impl LastRuntimeUpgradeInfo {
pub fn was_upgraded(&self, current: &sp_version::RuntimeVersion) -> bool {
current.spec_version > self.spec_version.0 || current.spec_name != self.spec_name
}
}
impl From<sp_version::RuntimeVersion> for LastRuntimeUpgradeInfo {
fn from(version: sp_version::RuntimeVersion) -> Self {
Self { spec_version: version.spec_version.into(), spec_name: version.spec_name }
}
}
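/// An origin check that succeeds (with `Success = ()`) only for the root origin.
///
/// A minimal, illustrative sketch of how this is commonly wired into a pallet's
/// configuration; `ForceOrigin` and `AccountId` are hypothetical names, not part of this
/// crate:
///
/// ```ignore
/// // Only root may dispatch calls guarded by `ForceOrigin`.
/// type ForceOrigin = frame_system::EnsureRoot<AccountId>;
/// ```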
pub struct EnsureRoot<AccountId>(sp_std::marker::PhantomData<AccountId>);
impl<O: Into<Result<RawOrigin<AccountId>, O>> + From<RawOrigin<AccountId>>, AccountId>
EnsureOrigin<O> for EnsureRoot<AccountId>
{
type Success = ();
fn try_origin(o: O) -> Result<Self::Success, O> {
o.into().and_then(|o| match o {
RawOrigin::Root => Ok(()),
r => Err(O::from(r)),
})
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
Ok(O::from(RawOrigin::Root))
}
}
pub struct EnsureRootWithSuccess<AccountId, Success>(
sp_std::marker::PhantomData<(AccountId, Success)>,
);
impl<
O: Into<Result<RawOrigin<AccountId>, O>> + From<RawOrigin<AccountId>>,
AccountId,
Success: TypedGet,
> EnsureOrigin<O> for EnsureRootWithSuccess<AccountId, Success>
{
type Success = Success::Type;
fn try_origin(o: O) -> Result<Self::Success, O> {
o.into().and_then(|o| match o {
RawOrigin::Root => Ok(Success::get()),
r => Err(O::from(r)),
})
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
Ok(O::from(RawOrigin::Root))
}
}
pub struct EnsureWithSuccess<Ensure, AccountId, Success>(
sp_std::marker::PhantomData<(Ensure, AccountId, Success)>,
);
impl<
O: Into<Result<RawOrigin<AccountId>, O>> + From<RawOrigin<AccountId>>,
Ensure: EnsureOrigin<O>,
AccountId,
Success: TypedGet,
> EnsureOrigin<O> for EnsureWithSuccess<Ensure, AccountId, Success>
{
type Success = Success::Type;
fn try_origin(o: O) -> Result<Self::Success, O> {
Ensure::try_origin(o).map(|_| Success::get())
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
Ensure::try_successful_origin()
}
}
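/// An origin check that succeeds for any signed origin, yielding the signer's account ID.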
pub struct EnsureSigned<AccountId>(sp_std::marker::PhantomData<AccountId>);
impl<O: Into<Result<RawOrigin<AccountId>, O>> + From<RawOrigin<AccountId>>, AccountId: Decode>
EnsureOrigin<O> for EnsureSigned<AccountId>
{
type Success = AccountId;
fn try_origin(o: O) -> Result<Self::Success, O> {
o.into().and_then(|o| match o {
RawOrigin::Signed(who) => Ok(who),
r => Err(O::from(r)),
})
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
let zero_account_id =
AccountId::decode(&mut TrailingZeroInput::zeroes()).map_err(|_| ())?;
Ok(O::from(RawOrigin::Signed(zero_account_id)))
}
}
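/// An origin check that succeeds for a signed origin whose account is a member of the
/// sorted set `Who`.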
pub struct EnsureSignedBy<Who, AccountId>(sp_std::marker::PhantomData<(Who, AccountId)>);
impl<
O: Into<Result<RawOrigin<AccountId>, O>> + From<RawOrigin<AccountId>>,
Who: SortedMembers<AccountId>,
AccountId: PartialEq + Clone + Ord + Decode,
> EnsureOrigin<O> for EnsureSignedBy<Who, AccountId>
{
type Success = AccountId;
fn try_origin(o: O) -> Result<Self::Success, O> {
o.into().and_then(|o| match o {
RawOrigin::Signed(ref who) if Who::contains(who) => Ok(who.clone()),
r => Err(O::from(r)),
})
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
let zero_account_id =
AccountId::decode(&mut TrailingZeroInput::zeroes()).map_err(|_| ())?;
let members = Who::sorted_members();
let first_member = match members.get(0) {
Some(account) => account.clone(),
None => zero_account_id,
};
Ok(O::from(RawOrigin::Signed(first_member)))
}
}
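/// An origin check that succeeds only for the `None` (unsigned) origin.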
pub struct EnsureNone<AccountId>(sp_std::marker::PhantomData<AccountId>);
impl<O: Into<Result<RawOrigin<AccountId>, O>> + From<RawOrigin<AccountId>>, AccountId>
EnsureOrigin<O> for EnsureNone<AccountId>
{
type Success = ();
fn try_origin(o: O) -> Result<Self::Success, O> {
o.into().and_then(|o| match o {
RawOrigin::None => Ok(()),
r => Err(O::from(r)),
})
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
Ok(O::from(RawOrigin::None))
}
}
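/// An origin check that never succeeds.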
pub struct EnsureNever<T>(sp_std::marker::PhantomData<T>);
impl<O, T> EnsureOrigin<O> for EnsureNever<T> {
type Success = T;
fn try_origin(o: O) -> Result<Self::Success, O> {
Err(o)
}
#[cfg(feature = "runtime-benchmarks")]
fn try_successful_origin() -> Result<O, ()> {
Err(())
}
}
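/// Ensure that the origin `o` represents a signed extrinsic (i.e. a transaction).
/// Returns `Ok` with the account that signed the extrinsic, or `Err(BadOrigin)` otherwise.
///
/// A sketch of typical usage inside a dispatchable; the function name and body are
/// illustrative only:
///
/// ```ignore
/// pub fn do_something(origin: OriginFor<T>) -> DispatchResult {
///     // Reject anything that is not a signed transaction.
///     let who = ensure_signed(origin)?;
///     // ... act on behalf of `who` ...
///     Ok(())
/// }
/// ```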
pub fn ensure_signed<OuterOrigin, AccountId>(o: OuterOrigin) -> Result<AccountId, BadOrigin>
where
OuterOrigin: Into<Result<RawOrigin<AccountId>, OuterOrigin>>,
{
match o.into() {
Ok(RawOrigin::Signed(t)) => Ok(t),
_ => Err(BadOrigin),
}
}
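/// Ensure that the origin `o` is either signed (returning `Some(account)`) or root
/// (returning `None`); any other origin yields `Err(BadOrigin)`.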
pub fn ensure_signed_or_root<OuterOrigin, AccountId>(
o: OuterOrigin,
) -> Result<Option<AccountId>, BadOrigin>
where
OuterOrigin: Into<Result<RawOrigin<AccountId>, OuterOrigin>>,
{
match o.into() {
Ok(RawOrigin::Root) => Ok(None),
Ok(RawOrigin::Signed(t)) => Ok(Some(t)),
_ => Err(BadOrigin),
}
}
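/// Ensure that the origin `o` represents the root origin. Returns `Ok` or `Err(BadOrigin)`.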
pub fn ensure_root<OuterOrigin, AccountId>(o: OuterOrigin) -> Result<(), BadOrigin>
where
OuterOrigin: Into<Result<RawOrigin<AccountId>, OuterOrigin>>,
{
match o.into() {
Ok(RawOrigin::Root) => Ok(()),
_ => Err(BadOrigin),
}
}
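/// Ensure that the origin `o` represents an unsigned extrinsic. Returns `Ok` or
/// `Err(BadOrigin)`.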
pub fn ensure_none<OuterOrigin, AccountId>(o: OuterOrigin) -> Result<(), BadOrigin>
where
OuterOrigin: Into<Result<RawOrigin<AccountId>, OuterOrigin>>,
{
match o.into() {
Ok(RawOrigin::None) => Ok(()),
_ => Err(BadOrigin),
}
}
#[derive(RuntimeDebug)]
pub enum RefStatus {
Referenced,
Unreferenced,
}
#[derive(Eq, PartialEq, RuntimeDebug)]
pub enum IncRefStatus {
Created,
Existed,
}
#[derive(Eq, PartialEq, RuntimeDebug)]
pub enum DecRefStatus {
Reaped,
Exists,
}
impl<T: Config> Pallet<T> {
pub fn account_exists(who: &T::AccountId) -> bool {
Account::<T>::contains_key(who)
}
pub fn update_code_in_storage(code: &[u8]) -> DispatchResult {
storage::unhashed::put_raw(well_known_keys::CODE, code);
Self::deposit_log(generic::DigestItem::RuntimeEnvironmentUpdated);
Self::deposit_event(Event::CodeUpdated);
Ok(())
}
#[deprecated = "Use `inc_consumers` instead"]
pub fn inc_ref(who: &T::AccountId) {
let _ = Self::inc_consumers(who);
}
#[deprecated = "Use `dec_consumers` instead"]
pub fn dec_ref(who: &T::AccountId) {
let _ = Self::dec_consumers(who);
}
#[deprecated = "Use `consumers` instead"]
pub fn refs(who: &T::AccountId) -> RefCount {
Self::consumers(who)
}
#[deprecated = "Use `!is_provider_required` instead"]
pub fn allow_death(who: &T::AccountId) -> bool {
!Self::is_provider_required(who)
}
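/// Increment the provider reference counter on an account, creating the account (and
/// emitting [`Event::NewAccount`]) if it did not exist before.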
pub fn inc_providers(who: &T::AccountId) -> IncRefStatus {
Account::<T>::mutate(who, |a| {
if a.providers == 0 && a.sufficients == 0 {
a.providers = 1;
Self::on_created_account(who.clone(), a);
IncRefStatus::Created
} else {
a.providers = a.providers.saturating_add(1);
IncRefStatus::Existed
}
})
}
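/// Decrement the provider reference counter on an account.
///
/// Errors with `DispatchError::ConsumerRemaining` if this would remove the last provider
/// while consumer references remain; reaps the account when the last provider is removed
/// and there are no consumers or sufficients left.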
pub fn dec_providers(who: &T::AccountId) -> Result<DecRefStatus, DispatchError> {
Account::<T>::try_mutate_exists(who, |maybe_account| {
if let Some(mut account) = maybe_account.take() {
if account.providers == 0 {
log::error!(
target: LOG_TARGET,
"Logic error: Unexpected underflow in reducing provider",
);
account.providers = 1;
}
match (account.providers, account.consumers, account.sufficients) {
(1, 0, 0) => {
Pallet::<T>::on_killed_account(who.clone());
Ok(DecRefStatus::Reaped)
},
(1, c, _) if c > 0 => {
Err(DispatchError::ConsumerRemaining)
},
(x, _, _) => {
account.providers = x - 1;
*maybe_account = Some(account);
Ok(DecRefStatus::Exists)
},
}
} else {
log::error!(
target: LOG_TARGET,
"Logic error: Account already dead when reducing provider",
);
Ok(DecRefStatus::Reaped)
}
})
}
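/// Increment the self-sufficient reference counter on an account, creating it if needed.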
pub fn inc_sufficients(who: &T::AccountId) -> IncRefStatus {
Account::<T>::mutate(who, |a| {
if a.providers == 0 && a.sufficients == 0 {
a.sufficients = 1;
Self::on_created_account(who.clone(), a);
IncRefStatus::Created
} else {
a.sufficients = a.sufficients.saturating_add(1);
IncRefStatus::Existed
}
})
}
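/// Decrement the self-sufficient reference counter on an account, reaping it once no
/// providers or sufficients remain.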
pub fn dec_sufficients(who: &T::AccountId) -> DecRefStatus {
Account::<T>::mutate_exists(who, |maybe_account| {
if let Some(mut account) = maybe_account.take() {
if account.sufficients == 0 {
log::error!(
target: LOG_TARGET,
"Logic error: Unexpected underflow in reducing sufficients",
);
}
match (account.sufficients, account.providers) {
(0, 0) | (1, 0) => {
Pallet::<T>::on_killed_account(who.clone());
DecRefStatus::Reaped
},
(x, _) => {
// Saturate so the underflow warned about above cannot additionally panic.
account.sufficients = x.saturating_sub(1);
*maybe_account = Some(account);
DecRefStatus::Exists
},
}
} else {
log::error!(
target: LOG_TARGET,
"Logic error: Account already dead when reducing provider",
);
DecRefStatus::Reaped
}
})
}
pub fn providers(who: &T::AccountId) -> RefCount {
Account::<T>::get(who).providers
}
pub fn sufficients(who: &T::AccountId) -> RefCount {
Account::<T>::get(who).sufficients
}
pub fn reference_count(who: &T::AccountId) -> RefCount {
let a = Account::<T>::get(who);
a.providers + a.sufficients
}
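/// Increment the consumer reference counter on an account.
///
/// Errors with `NoProviders` if the account has no provider reference, or with
/// `TooManyConsumers` if `T::MaxConsumers::max_consumers()` has been reached.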
pub fn inc_consumers(who: &T::AccountId) -> Result<(), DispatchError> {
Account::<T>::try_mutate(who, |a| {
if a.providers > 0 {
if a.consumers < T::MaxConsumers::max_consumers() {
a.consumers = a.consumers.saturating_add(1);
Ok(())
} else {
Err(DispatchError::TooManyConsumers)
}
} else {
Err(DispatchError::NoProviders)
}
})
}
pub fn inc_consumers_without_limit(who: &T::AccountId) -> Result<(), DispatchError> {
Account::<T>::try_mutate(who, |a| {
if a.providers > 0 {
a.consumers = a.consumers.saturating_add(1);
Ok(())
} else {
Err(DispatchError::NoProviders)
}
})
}
pub fn dec_consumers(who: &T::AccountId) {
Account::<T>::mutate(who, |a| {
if a.consumers > 0 {
a.consumers -= 1;
} else {
log::error!(
target: LOG_TARGET,
"Logic error: Unexpected underflow in reducing consumer",
);
}
})
}
pub fn consumers(who: &T::AccountId) -> RefCount {
Account::<T>::get(who).consumers
}
pub fn is_provider_required(who: &T::AccountId) -> bool {
Account::<T>::get(who).consumers != 0
}
pub fn can_dec_provider(who: &T::AccountId) -> bool {
let a = Account::<T>::get(who);
a.consumers == 0 || a.providers > 1
}
pub fn can_inc_consumer(who: &T::AccountId) -> bool {
let a = Account::<T>::get(who);
a.providers > 0 && a.consumers < T::MaxConsumers::max_consumers()
}
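/// Deposit an event into this block's event record.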
pub fn deposit_event(event: impl Into<T::RuntimeEvent>) {
Self::deposit_event_indexed(&[], event.into());
}
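/// Deposit an event into this block's event record, indexing it under the given topics.
///
/// Events are not deposited while the block number is zero (i.e. during genesis).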
pub fn deposit_event_indexed(topics: &[T::Hash], event: T::RuntimeEvent) {
let block_number = Self::block_number();
if block_number.is_zero() {
return
}
let phase = ExecutionPhase::<T>::get().unwrap_or_default();
let event = EventRecord { phase, event, topics: topics.to_vec() };
let event_idx = {
let old_event_count = EventCount::<T>::get();
let new_event_count = match old_event_count.checked_add(1) {
None => return,
Some(nc) => nc,
};
EventCount::<T>::put(new_event_count);
old_event_count
};
Events::<T>::append(event);
for topic in topics {
<EventTopics<T>>::append(topic, &(block_number, event_idx));
}
}
pub fn extrinsic_index() -> Option<u32> {
storage::unhashed::get(well_known_keys::EXTRINSIC_INDEX)
}
pub fn extrinsic_count() -> u32 {
ExtrinsicCount::<T>::get().unwrap_or_default()
}
pub fn all_extrinsics_len() -> u32 {
AllExtrinsicsLen::<T>::get().unwrap_or_default()
}
pub fn register_extra_weight_unchecked(weight: Weight, class: DispatchClass) {
BlockWeight::<T>::mutate(|current_weight| {
current_weight.add(weight, class);
});
}
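/// Start the execution of a particular block: set the block number, digest and parent
/// hash, record the parent hash in `BlockHash`, and reset the block weight tracker.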
pub fn initialize(number: &T::BlockNumber, parent_hash: &T::Hash, digest: &generic::Digest) {
ExecutionPhase::<T>::put(Phase::Initialization);
storage::unhashed::put(well_known_keys::EXTRINSIC_INDEX, &0u32);
<Number<T>>::put(number);
<Digest<T>>::put(digest);
<ParentHash<T>>::put(parent_hash);
<BlockHash<T>>::insert(*number - One::one(), parent_hash);
BlockWeight::<T>::kill();
}
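/// Remove temporary "environment" entries from storage, compute the extrinsics and
/// storage roots, prune the oldest `BlockHash` entry beyond `BlockHashCount`, and return
/// the resulting header for this block.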
pub fn finalize() -> T::Header {
log::debug!(
target: LOG_TARGET,
"[{:?}] {} extrinsics, length: {} (normal {}%, op: {}%, mandatory {}%) / normal weight:\
{} ({}%) op weight {} ({}%) / mandatory weight {} ({}%)",
Self::block_number(),
Self::extrinsic_index().unwrap_or_default(),
Self::all_extrinsics_len(),
sp_runtime::Percent::from_rational(
Self::all_extrinsics_len(),
*T::BlockLength::get().max.get(DispatchClass::Normal)
).deconstruct(),
sp_runtime::Percent::from_rational(
Self::all_extrinsics_len(),
*T::BlockLength::get().max.get(DispatchClass::Operational)
).deconstruct(),
sp_runtime::Percent::from_rational(
Self::all_extrinsics_len(),
*T::BlockLength::get().max.get(DispatchClass::Mandatory)
).deconstruct(),
Self::block_weight().get(DispatchClass::Normal),
sp_runtime::Percent::from_rational(
Self::block_weight().get(DispatchClass::Normal).ref_time(),
T::BlockWeights::get().get(DispatchClass::Normal).max_total.unwrap_or(Bounded::max_value()).ref_time()
).deconstruct(),
Self::block_weight().get(DispatchClass::Operational),
sp_runtime::Percent::from_rational(
Self::block_weight().get(DispatchClass::Operational).ref_time(),
T::BlockWeights::get().get(DispatchClass::Operational).max_total.unwrap_or(Bounded::max_value()).ref_time()
).deconstruct(),
Self::block_weight().get(DispatchClass::Mandatory),
sp_runtime::Percent::from_rational(
Self::block_weight().get(DispatchClass::Mandatory).ref_time(),
T::BlockWeights::get().get(DispatchClass::Mandatory).max_total.unwrap_or(Bounded::max_value()).ref_time()
).deconstruct(),
);
ExecutionPhase::<T>::kill();
AllExtrinsicsLen::<T>::kill();
let number = <Number<T>>::get();
let parent_hash = <ParentHash<T>>::get();
let digest = <Digest<T>>::get();
let extrinsics = (0..ExtrinsicCount::<T>::take().unwrap_or_default())
.map(ExtrinsicData::<T>::take)
.collect();
let extrinsics_root = extrinsics_data_root::<T::Hashing>(extrinsics);
let block_hash_count = T::BlockHashCount::get();
let to_remove = number.saturating_sub(block_hash_count).saturating_sub(One::one());
if !to_remove.is_zero() {
<BlockHash<T>>::remove(to_remove);
}
let version = T::Version::get().state_version();
let storage_root = T::Hash::decode(&mut &sp_io::storage::root(version)[..])
.expect("Node is configured to use the same hash; qed");
<T::Header as traits::Header>::new(
number,
extrinsics_root,
storage_root,
parent_hash,
digest,
)
}
pub fn deposit_log(item: generic::DigestItem) {
<Digest<T>>::append(item);
}
#[cfg(any(feature = "std", test))]
pub fn externalities() -> TestExternalities {
TestExternalities::new(sp_core::storage::Storage {
top: map![
<BlockHash<T>>::hashed_key_for(T::BlockNumber::zero()) => [69u8; 32].encode(),
<Number<T>>::hashed_key().to_vec() => T::BlockNumber::one().encode(),
<ParentHash<T>>::hashed_key().to_vec() => [69u8; 32].encode()
],
children_default: map![],
})
}
#[cfg(any(feature = "std", feature = "runtime-benchmarks", test))]
pub fn events() -> Vec<EventRecord<T::RuntimeEvent, T::Hash>> {
Self::read_events_no_consensus().map(|e| *e).collect()
}
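/// Get the current events deposited by the runtime, decoded lazily as a stream.
///
/// Intended for client-side, benchmarking and test code only: the underlying `Events`
/// item is unbounded and must not be read during on-chain execution.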
pub fn read_events_no_consensus(
) -> impl sp_std::iter::Iterator<Item = Box<EventRecord<T::RuntimeEvent, T::Hash>>> {
Events::<T>::stream_iter()
}
#[cfg(any(feature = "std", feature = "runtime-benchmarks", test))]
pub fn set_block_number(n: T::BlockNumber) {
<Number<T>>::put(n);
}
#[cfg(any(feature = "std", test))]
pub fn set_extrinsic_index(extrinsic_index: u32) {
storage::unhashed::put(well_known_keys::EXTRINSIC_INDEX, &extrinsic_index)
}
#[cfg(any(feature = "std", test))]
pub fn set_parent_hash(n: T::Hash) {
<ParentHash<T>>::put(n);
}
#[cfg(any(feature = "std", test))]
pub fn set_block_consumed_resources(weight: Weight, len: usize) {
BlockWeight::<T>::mutate(|current_weight| {
current_weight.set(weight, DispatchClass::Normal)
});
AllExtrinsicsLen::<T>::put(len as u32);
}
pub fn reset_events() {
<Events<T>>::kill();
EventCount::<T>::kill();
let _ = <EventTopics<T>>::clear(u32::max_value(), None);
}
#[cfg(any(feature = "std", feature = "runtime-benchmarks", test))]
pub fn assert_has_event(event: T::RuntimeEvent) {
assert!(Self::events().iter().any(|record| record.event == event))
}
#[cfg(any(feature = "std", feature = "runtime-benchmarks", test))]
pub fn assert_last_event(event: T::RuntimeEvent) {
assert_eq!(Self::events().last().expect("events expected").event, event);
}
pub fn runtime_version() -> RuntimeVersion {
T::Version::get()
}
pub fn account_nonce(who: impl EncodeLike<T::AccountId>) -> T::Index {
Account::<T>::get(who).nonce
}
pub fn inc_account_nonce(who: impl EncodeLike<T::AccountId>) {
Account::<T>::mutate(who, |a| a.nonce += T::Index::one());
}
pub fn note_extrinsic(encoded_xt: Vec<u8>) {
ExtrinsicData::<T>::insert(Self::extrinsic_index().unwrap_or_default(), encoded_xt);
}
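/// To be called immediately after an extrinsic has been applied.
///
/// Adds the class-specific base extrinsic weight to the actual dispatch weight, emits
/// `ExtrinsicSuccess` or `ExtrinsicFailed`, and advances the extrinsic index and
/// execution phase.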
pub fn note_applied_extrinsic(r: &DispatchResultWithPostInfo, mut info: DispatchInfo) {
info.weight = extract_actual_weight(r, &info)
.saturating_add(T::BlockWeights::get().get(info.class).base_extrinsic);
info.pays_fee = extract_actual_pays_fee(r, &info);
Self::deposit_event(match r {
Ok(_) => Event::ExtrinsicSuccess { dispatch_info: info },
Err(err) => {
log::trace!(
target: LOG_TARGET,
"Extrinsic failed at block({:?}): {:?}",
Self::block_number(),
err,
);
Event::ExtrinsicFailed { dispatch_error: err.error, dispatch_info: info }
},
});
let next_extrinsic_index = Self::extrinsic_index().unwrap_or_default() + 1u32;
storage::unhashed::put(well_known_keys::EXTRINSIC_INDEX, &next_extrinsic_index);
ExecutionPhase::<T>::put(Phase::ApplyExtrinsic(next_extrinsic_index));
}
pub fn note_finished_extrinsics() {
let extrinsic_index: u32 =
storage::unhashed::take(well_known_keys::EXTRINSIC_INDEX).unwrap_or_default();
ExtrinsicCount::<T>::put(extrinsic_index);
ExecutionPhase::<T>::put(Phase::Finalization);
}
pub fn note_finished_initialize() {
ExecutionPhase::<T>::put(Phase::ApplyExtrinsic(0))
}
pub fn on_created_account(who: T::AccountId, _a: &mut AccountInfo<T::Index, T::AccountData>) {
T::OnNewAccount::on_new_account(&who);
Self::deposit_event(Event::NewAccount { account: who });
}
fn on_killed_account(who: T::AccountId) {
T::OnKilledAccount::on_killed_account(&who);
Self::deposit_event(Event::KilledAccount { account: who });
}
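/// Determine whether the given code blob can be set as the new runtime code: its
/// `spec_name` must match the current one and its `spec_version` must strictly increase.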
pub fn can_set_code(code: &[u8]) -> Result<(), sp_runtime::DispatchError> {
let current_version = T::Version::get();
let new_version = sp_io::misc::runtime_version(code)
.and_then(|v| RuntimeVersion::decode(&mut &v[..]).ok())
.ok_or(Error::<T>::FailedToExtractRuntimeVersion)?;
if new_version.spec_name != current_version.spec_name {
return Err(Error::<T>::InvalidSpecName.into())
}
if new_version.spec_version <= current_version.spec_version {
return Err(Error::<T>::SpecVersionNeedsToIncrease.into())
}
Ok(())
}
}
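/// Account lifetime handler that maintains a *provider* reference for the accounts it
/// manages (see also [`SelfSufficient`] and [`Consumer`]).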
pub struct Provider<T>(PhantomData<T>);
impl<T: Config> HandleLifetime<T::AccountId> for Provider<T> {
fn created(t: &T::AccountId) -> Result<(), DispatchError> {
Pallet::<T>::inc_providers(t);
Ok(())
}
fn killed(t: &T::AccountId) -> Result<(), DispatchError> {
Pallet::<T>::dec_providers(t).map(|_| ())
}
}
pub struct SelfSufficient<T>(PhantomData<T>);
impl<T: Config> HandleLifetime<T::AccountId> for SelfSufficient<T> {
fn created(t: &T::AccountId) -> Result<(), DispatchError> {
Pallet::<T>::inc_sufficients(t);
Ok(())
}
fn killed(t: &T::AccountId) -> Result<(), DispatchError> {
Pallet::<T>::dec_sufficients(t);
Ok(())
}
}
pub struct Consumer<T>(PhantomData<T>);
impl<T: Config> HandleLifetime<T::AccountId> for Consumer<T> {
fn created(t: &T::AccountId) -> Result<(), DispatchError> {
Pallet::<T>::inc_consumers(t)
}
fn killed(t: &T::AccountId) -> Result<(), DispatchError> {
Pallet::<T>::dec_consumers(t);
Ok(())
}
}
impl<T: Config> BlockNumberProvider for Pallet<T> {
type BlockNumber = <T as Config>::BlockNumber;
fn current_block_number() -> Self::BlockNumber {
Pallet::<T>::block_number()
}
}
fn is_providing<T: Default + Eq>(d: &T) -> bool {
d != &T::default()
}
impl<T: Config> StoredMap<T::AccountId, T::AccountData> for Pallet<T> {
fn get(k: &T::AccountId) -> T::AccountData {
Account::<T>::get(k).data
}
fn try_mutate_exists<R, E: From<DispatchError>>(
k: &T::AccountId,
f: impl FnOnce(&mut Option<T::AccountData>) -> Result<R, E>,
) -> Result<R, E> {
let account = Account::<T>::get(k);
let was_providing = is_providing(&account.data);
let mut some_data = if was_providing { Some(account.data) } else { None };
let result = f(&mut some_data)?;
let is_providing = some_data.is_some();
if !was_providing && is_providing {
Self::inc_providers(k);
} else if was_providing && !is_providing {
match Self::dec_providers(k)? {
DecRefStatus::Reaped => return Ok(result),
DecRefStatus::Exists => {
},
}
} else if !was_providing && !is_providing {
return Ok(result)
}
Account::<T>::mutate(k, |a| a.data = some_data.unwrap_or_default());
Ok(result)
}
}
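/// Split an `Option<T>` into a pair of `Option`s using the given splitter function;
/// `None` maps to `(None, None)`.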
pub fn split_inner<T, R, S>(
option: Option<T>,
splitter: impl FnOnce(T) -> (R, S),
) -> (Option<R>, Option<S>) {
match option {
Some(inner) => {
let (r, s) = splitter(inner);
(Some(r), Some(s))
},
None => (None, None),
}
}
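/// Blockchain context for address lookup, delegating to the runtime's configured
/// `Lookup`.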
pub struct ChainContext<T>(PhantomData<T>);
impl<T> Default for ChainContext<T> {
fn default() -> Self {
ChainContext(PhantomData)
}
}
impl<T: Config> Lookup for ChainContext<T> {
type Source = <T::Lookup as StaticLookup>::Source;
type Target = <T::Lookup as StaticLookup>::Target;
fn lookup(&self, s: Self::Source) -> Result<Self::Target, LookupError> {
<T::Lookup as StaticLookup>::lookup(s)
}
}
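/// Prelude to be used with pallets: re-exports the `ensure_*` origin helpers and the
/// `OriginFor` / `BlockNumberFor` type aliases.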
pub mod pallet_prelude {
pub use crate::{ensure_none, ensure_root, ensure_signed, ensure_signed_or_root};
pub type OriginFor<T> = <T as crate::Config>::RuntimeOrigin;
pub type BlockNumberFor<T> = <T as crate::Config>::BlockNumber;
}