Skip to content
Snippets Groups Projects
Commit 97f532f3 authored by Hugo Trentesaux's avatar Hugo Trentesaux
Browse files

refac data flow

parent 05e12df1
Branches
Tags
No related merge requests found
Showing
with 653 additions and 104 deletions
# Duniter-Squid # Duniter-Squid
A [Squid](https://subsquid.io)-based indexer. A [Squid](https://subsquid.io)-based indexer.
It takes [ĞDev](https://kusama.network) data and serves it via GraphQL API. It takes [ĞDev](https://forum.duniter.org/c/currencies/gdev/63) data and serves it via GraphQL API.
## Dev requirements ## Dev requirements
...@@ -107,9 +107,7 @@ See https://duniter.org/wiki/duniter-v2/indexers/duniter-squid/ ...@@ -107,9 +107,7 @@ See https://duniter.org/wiki/duniter-v2/indexers/duniter-squid/
## TODO ## TODO
- [ ] Refac way to process data from events (new, fetch needed...) - [x] Refac way to process data from events (new, fetch needed...)
- [ ] Use cache abilities
- [ ] Look if it is possible to add relation without fetching endpoints
- [ ] Add events: - [ ] Add events:
- [ ] identity.IdtyCreated - [ ] identity.IdtyCreated
- [ ] identity.IdtyConfirmed - [ ] identity.IdtyConfirmed
......
...@@ -111,7 +111,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) { ...@@ -111,7 +111,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) {
identities.set( identities.set(
idty.index, idty.index,
new Identity({ new Identity({
id: String(idty.name), id: String(idty.index),
index: idty.index, index: idty.index,
account: accounts.get(idty.value.owner_key), account: accounts.get(idty.value.owner_key),
name: idty.name, name: idty.name,
...@@ -177,6 +177,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) { ...@@ -177,6 +177,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) {
await ctx.store.insert(smithCerts); await ctx.store.insert(smithCerts);
await ctx.store.insert(memberships); await ctx.store.insert(memberships);
await ctx.store.insert(smithMemberships); await ctx.store.insert(smithMemberships);
// await ctx.store.flush(); // do not flush otherwise we lose cache
ctx.log.info("Genesis saved"); ctx.log.info("Genesis saved");
...@@ -208,7 +209,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) { ...@@ -208,7 +209,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) {
// const unknown_wallets = wallets.difference(genesis_wallets) // const unknown_wallets = wallets.difference(genesis_wallets)
let unknown_wallets = new Set([...all_wallets].filter((x) => !accounts.has(x))); let unknown_wallets = new Set([...all_wallets].filter((x) => !accounts.has(x)));
ctx.log.info(`There are ${accounts.size} genesis wallets, ${all_wallets.size} total wallets, ${unknown_wallets.size} unknown wallets`); ctx.log.info(`There are ${accounts.size} genesis wallets, ${all_wallets.size} total wallets, ${unknown_wallets.size} unknown wallets`);
if (unknown_wallets.size != 0) { // create accounts for unknown wallets
for (const address of unknown_wallets) { for (const address of unknown_wallets) {
let account = new Account({ let account = new Account({
id: address, id: address,
...@@ -216,7 +217,6 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) { ...@@ -216,7 +217,6 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) {
accounts.set(address, account); accounts.set(address, account);
other_accounts.set(address, account); other_accounts.set(address, account);
} }
}
// add txs // add txs
let genesis_tx_counter = 0; let genesis_tx_counter = 0;
...@@ -246,6 +246,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) { ...@@ -246,6 +246,7 @@ export async function saveGenesis(ctx: ProcessorContext<StoreWithCache>) {
ctx.log.info("Saving transaction history"); ctx.log.info("Saving transaction history");
await ctx.store.insert([...other_accounts.values()]); await ctx.store.insert([...other_accounts.values()]);
await ctx.store.insert(genesis_transfers); await ctx.store.insert(genesis_transfers);
await ctx.store.flush();
ctx.log.info("Saved transaction history"); ctx.log.info("Saved transaction history");
ctx.log.info("======================"); ctx.log.info("======================");
ctx.log.info("Starting blockchain indexing"); ctx.log.info("Starting blockchain indexing");
......
// import { TypeormDatabase, Store } from "@subsquid/typeorm-store"; // import { TypeormDatabase, Store } from "@subsquid/typeorm-store";
import { StoreWithCache, TypeormDatabaseWithCache } from "@belopash/typeorm-store"; import { StoreWithCache, TypeormDatabaseWithCache } from "@belopash/typeorm-store";
import { In } from "typeorm";
import * as ss58 from "@subsquid/ss58"; import * as ss58 from "@subsquid/ss58";
import assert from "assert"; import assert from "assert";
import { processor, ProcessorContext } from "./processor"; import { processor, ProcessorContext } from "./processor";
import { Account, Cert, Identity, Transfer } from "./model"; import { Account, Cert, Identity, Transfer } from "./model";
import { events as events_t, calls as calls_t } from "./types"; import { events as events_t, calls as calls_t } from "./types";
import { string } from "./model/generated/marshal";
import { saveBlock, saveExtrinsic, saveCall, saveEvent } from "./giant-squid"; import { saveBlock, saveExtrinsic, saveCall, saveEvent } from "./giant-squid";
import { saveGenesis } from "./genesis"; import { saveGenesis } from "./genesis";
...@@ -38,26 +36,26 @@ processor.run(new TypeormDatabaseWithCache(), async (ctx) => { ...@@ -38,26 +36,26 @@ processor.run(new TypeormDatabaseWithCache(), async (ctx) => {
} }
} }
// collect new data from events
let ndata: NewData = { let ndata: NewData = {
accounts: [], accounts: [],
identitiesCreated: [], identities: [],
idtyChangedOwnerKey: [], idtyChangedOwnerKey: [],
transfers: [], transfers: [],
certs: [], certs: [],
}; };
getNewData(ctx, ndata); getNewData(ctx, ndata);
// transform new data to objects
let prepared: PreparedData = { let prepared: PreparedData = {
accounts: new Map(), accounts: new Map(),
newAccounts: [],
identities: new Map(), identities: new Map(),
newIdentities: [],
changedIdentities: [],
transfers: [], transfers: [],
certs: [], certs: [],
}; };
await prepareData(ctx, ndata, prepared); await prepareData(ctx, ndata, prepared);
// store data
await storeData(ctx, prepared); await storeData(ctx, prepared);
}); });
...@@ -67,9 +65,10 @@ export type IdtyIndex = number; ...@@ -67,9 +65,10 @@ export type IdtyIndex = number;
type BlockNumber = number; type BlockNumber = number;
// a way to group data returned from events // a way to group data returned from events
// this contains partial data to be turned into types
interface NewData { interface NewData {
accounts: Address[]; accounts: Address[];
identitiesCreated: IdtyCreatedEvent[]; identities: IdtyCreatedEvent[];
idtyChangedOwnerKey: IdtyChangedOwnerKeyEvent[]; idtyChangedOwnerKey: IdtyChangedOwnerKeyEvent[];
transfers: TransferEvent[]; transfers: TransferEvent[];
certs: CertEvent[]; certs: CertEvent[];
...@@ -101,31 +100,38 @@ interface IdtyChangedOwnerKeyEvent { ...@@ -101,31 +100,38 @@ interface IdtyChangedOwnerKeyEvent {
// a way to group data prepared for database insertion // a way to group data prepared for database insertion
interface PreparedData { interface PreparedData {
// all accounts that other entities point to
accounts: Map<Address, Account>; accounts: Map<Address, Account>;
// non existing accounts to add in the database before transactions and identites
newAccounts: Account[];
// all identities that other entities point to or which might have changed
identities: Map<IdtyIndex, Identity>; identities: Map<IdtyIndex, Identity>;
// non existing identities to add in the database before certifications and memberships
newIdentities: Identity[];
// existing identities that have changed
changedIdentities: Identity[];
// transfers
transfers: Transfer[]; transfers: Transfer[];
// certifications
certs: Cert[]; certs: Cert[];
} }
/// fill data with data collected from events /// fill data with data collected from events
function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) { function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) {
const silence_events = [
events_t.system.extrinsicSuccess.name,
events_t.system.killedAccount.name,
events_t.cert.removedCert.name,
events_t.session.newSession.name,
events_t.imOnline.allGood.name,
events_t.membership.membershipExpired.name,
events_t.universalDividend.udsAutoPaidAtRemoval.name,
events_t.universalDividend.newUdCreated.name,
events_t.balances.withdraw.name,
events_t.balances.deposit.name,
events_t.transactionPayment.transactionFeePaid.name,
events_t.identity.idtyRemoved.name,
events_t.quota.refunded.name,
];
// ===== Blocks ===== // ===== Blocks =====
for (let block of ctx.blocks) { for (let block of ctx.blocks) {
// ===== Events ===== // ===== Events =====
for (let event of block.events) { for (let event of block.events) {
ctx.log.debug(event.name); if (!silence_events.includes(event.name)) {
ctx.log.info("" + block.header.height + " " + event.name);
}
switch (event.name) { switch (event.name) {
// ===== System.NewAccount // ===== System.NewAccount
case events_t.system.newAccount.name: case events_t.system.newAccount.name:
...@@ -165,7 +171,7 @@ function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) { ...@@ -165,7 +171,7 @@ function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) {
} else { } else {
throw new Error("Unsupported spec"); throw new Error("Unsupported spec");
} }
ndata.identitiesCreated.push({ ndata.identities.push({
id: event.id, id: event.id,
index: newI.idtyIndex, index: newI.idtyIndex,
account: ss58.codec(42).encode(newI.ownerKey), account: ss58.codec(42).encode(newI.ownerKey),
...@@ -231,19 +237,18 @@ async function prepareData(ctx: ProcessorContext<StoreWithCache>, newData: NewDa ...@@ -231,19 +237,18 @@ async function prepareData(ctx: ProcessorContext<StoreWithCache>, newData: NewDa
await createAccounts(ctx, newData, data); await createAccounts(ctx, newData, data);
await createIdentities(ctx, newData, data); await createIdentities(ctx, newData, data);
await changeIdtyOwnerKey(ctx, newData, data); await changeIdtyOwnerKey(ctx, newData, data);
createTransfers(newData, data); await createTransfers(ctx, newData, data);
createCerts(newData, data); await createCerts(ctx, newData, data);
} }
/// store prepared data into database /// store prepared data into database
async function storeData(ctx: ProcessorContext<StoreWithCache>, data: PreparedData) { async function storeData(ctx: ProcessorContext<StoreWithCache>, data: PreparedData) {
// UPSERT = update or insert if not existing // UPSERT = update or insert if not existing
// account can have already existed, been killed, and recreated // account can have already existed, been killed, and recreated
await ctx.store.upsert(data.newAccounts); await ctx.store.upsert([...data.accounts.values()]);
// identities which have changed // identities can have been changed (change owner key)
await ctx.store.upsert([...data.changedIdentities.values()]); await ctx.store.upsert([...data.identities.values()]);
// INSERT // INSERT
await ctx.store.insert([...data.newIdentities.values()]);
await ctx.store.insert(data.transfers); await ctx.store.insert(data.transfers);
await ctx.store.insert(data.certs); await ctx.store.insert(data.certs);
} }
...@@ -251,89 +256,40 @@ async function storeData(ctx: ProcessorContext<StoreWithCache>, data: PreparedDa ...@@ -251,89 +256,40 @@ async function storeData(ctx: ProcessorContext<StoreWithCache>, data: PreparedDa
// ===== create new instances if not existing in database ===== // ===== create new instances if not existing in database =====
async function createAccounts(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) { async function createAccounts(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
// collect all accounts from transfers that we need to fetch
// accounts can only be created from a transfer
const accountIds = new Set<Address>();
for (let t of newData.transfers) {
accountIds.add(t.from);
accountIds.add(t.to);
}
// system will tell when accounts are created (but this should be added above) // system will tell when accounts are created (but this should be added above)
for (let a of newData.accounts) { for (let id of newData.accounts) {
accountIds.add(a);
}
// fill the map of accounts with those that are already stored
await ctx.store.findBy(Account, { id: In([...accountIds]) }).then((accounts) => {
accounts.map((a) => data.accounts.set(a.id, a));
});
// for each account, add an account if not already existing (not found above)
for (let a of accountIds) {
updateAccounts(a);
}
// function to add inexisting accounts to the map
function updateAccounts(id: Address): void {
const acc = data.accounts.get(id);
if (acc == null) {
let newA = new Account({ id }); let newA = new Account({ id });
data.accounts.set(id, newA); data.accounts.set(id, newA);
data.newAccounts.push(newA); ctx.log.info(`added account ${id}`);
}
} }
} }
async function createIdentities(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) { async function createIdentities(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
const idtyIds = new Set<IdtyIndex>(); for (let i of newData.identities) {
for (let c of newData.certs) { let account = await ctx.store.getOrFail(Account, i.account);
idtyIds.add(c.issuer);
idtyIds.add(c.receiver);
}
await ctx.store.findBy(Identity, { index: In([...idtyIds]) }).then((idties) => {
idties.map((i) => data.identities.set(i.index, i));
});
for (let i of newData.identitiesCreated) {
let account = data.accounts.get(i.account);
let newI = new Identity({ let newI = new Identity({
id: String(i.index), id: String(i.index),
index: i.index, index: i.index,
account, account,
name: "<created>",
}); });
data.identities.set(i.index, newI); data.identities.set(i.index, newI);
data.newIdentities.push(newI);
} }
} }
async function changeIdtyOwnerKey(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) { async function changeIdtyOwnerKey(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
const idtyIds = new Set<IdtyIndex>(); for (let icok of newData.idtyChangedOwnerKey) {
const accountIds = new Set<Address>(); let idty = await ctx.store.getOrFail(Identity, String(icok.index));
for (let i of newData.idtyChangedOwnerKey) { let account = await ctx.store.getOrFail(Account, icok.account);
idtyIds.add(i.index);
accountIds.add(i.account);
}
// TODO avoid fetching multiple times
await ctx.store.findBy(Identity, { index: In([...idtyIds]) }).then((idties) => {
idties.map((i) => data.identities.set(i.index, i));
});
await ctx.store.findBy(Account, { id: In([...accountIds]) }).then((accounts) => {
accounts.map((a) => data.accounts.set(a.id, a));
});
for (let i of newData.idtyChangedOwnerKey) {
let idty = data.identities.get(i.index);
let account = data.accounts.get(i.account);
if (idty && account) {
idty.account = account; idty.account = account;
data.changedIdentities.push(idty); data.identities.set(idty.index, idty);
} else {
ctx.log.error(`changed owner key to non existing account ${account} or identity ${idty}`);
}
} }
} }
function createTransfers(newData: NewData, data: PreparedData) { async function createTransfers(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
for (let t of newData.transfers) { for (let t of newData.transfers) {
let { id, blockNumber, timestamp, amount } = t; let { id, blockNumber, timestamp, amount } = t;
let from = data.accounts.get(t.from); let from = data.accounts.get(t.from) ?? (await ctx.store.getOrFail(Account, t.from));
let to = data.accounts.get(t.to); let to = data.accounts.get(t.to) ?? (await ctx.store.getOrFail(Account, t.to));
data.transfers.push( data.transfers.push(
new Transfer({ new Transfer({
id, id,
...@@ -347,11 +303,11 @@ function createTransfers(newData: NewData, data: PreparedData) { ...@@ -347,11 +303,11 @@ function createTransfers(newData: NewData, data: PreparedData) {
} }
} }
function createCerts(newData: NewData, data: PreparedData) { async function createCerts(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
for (let c of newData.certs) { for (let c of newData.certs) {
let { id, blockNumber, issuer, receiver } = c; let { id, blockNumber, issuer, receiver } = c;
let issuerIdty = data.identities.get(issuer); let issuerIdty = data.identities.get(issuer) ?? await ctx.store.getOrFail(Identity, String(issuer));
let receiverIdty = data.identities.get(receiver); let receiverIdty = data.identities.get(receiver) ?? await ctx.store.getOrFail(Identity, String(receiver));
data.certs.push( data.certs.push(
new Cert({ new Cert({
id, id,
......
...@@ -2,6 +2,9 @@ export * as system from './system/calls' ...@@ -2,6 +2,9 @@ export * as system from './system/calls'
export * as timestamp from './timestamp/calls' export * as timestamp from './timestamp/calls'
export * as balances from './balances/calls' export * as balances from './balances/calls'
export * as authorityMembers from './authority-members/calls' export * as authorityMembers from './authority-members/calls'
export * as session from './session/calls'
export * as imOnline from './im-online/calls'
export * as universalDividend from './universal-dividend/calls'
export * as identity from './identity/calls' export * as identity from './identity/calls'
export * as membership from './membership/calls' export * as membership from './membership/calls'
export * as cert from './cert/calls' export * as cert from './cert/calls'
......
export * as system from './system/constants' export * as system from './system/constants'
export * as timestamp from './timestamp/constants' export * as timestamp from './timestamp/constants'
export * as balances from './balances/constants' export * as balances from './balances/constants'
export * as transactionPayment from './transaction-payment/constants'
export * as quota from './quota/constants'
export * as authorityMembers from './authority-members/constants' export * as authorityMembers from './authority-members/constants'
export * as imOnline from './im-online/constants'
export * as universalDividend from './universal-dividend/constants'
export * as identity from './identity/constants' export * as identity from './identity/constants'
export * as membership from './membership/constants' export * as membership from './membership/constants'
export * as cert from './cert/constants' export * as cert from './cert/constants'
......
export * as system from './system/events' export * as system from './system/events'
export * as balances from './balances/events' export * as balances from './balances/events'
export * as transactionPayment from './transaction-payment/events'
export * as quota from './quota/events'
export * as authorityMembers from './authority-members/events' export * as authorityMembers from './authority-members/events'
export * as session from './session/events'
export * as imOnline from './im-online/events'
export * as universalDividend from './universal-dividend/events'
export * as identity from './identity/events' export * as identity from './identity/events'
export * as membership from './membership/events' export * as membership from './membership/events'
export * as cert from './cert/events' export * as cert from './cert/events'
......
// Typed call bindings for the `ImOnline` pallet (runtime spec v700).
// NOTE(review): this file looks auto-generated by @subsquid/substrate-typegen —
// prefer regenerating from chain metadata over hand-editing.
import {sts, Block, Bytes, Option, Result, CallType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// `ImOnline.heartbeat`: extrinsic submitted by validators to signal liveness.
export const heartbeat = {
name: 'ImOnline.heartbeat',
/**
* ## Complexity:
* - `O(K + E)` where K is length of `Keys` (heartbeat.validators_len) and E is length of
* `heartbeat.network_state.external_address`
* - `O(K)`: decoding of length `K`
* - `O(E)`: decoding/encoding of length `E`
*/
v700: new CallType(
'ImOnline.heartbeat',
sts.struct({
heartbeat: v700.Heartbeat,
signature: sts.bytes(),
})
),
}
// Typed constant bindings for the `ImOnline` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, ConstantType, RuntimeCtx} from '../support'
// `ImOnline.UnsignedPriority` runtime constant, exposed as a bigint.
export const unsignedPriority = {
/**
* A configuration for base priority of unsigned transactions.
*
* This is exposed so that it can be tuned for particular runtime, when
* multiple pallets send unsigned transactions.
*/
v700: new ConstantType(
'ImOnline.UnsignedPriority',
sts.bigint()
),
}
// Typed event bindings for the `ImOnline` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, EventType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// `ImOnline.HeartbeatReceived`: emitted per heartbeat, carries the authority id.
export const heartbeatReceived = {
name: 'ImOnline.HeartbeatReceived',
/**
* A new heartbeat was received from `AuthorityId`.
*/
v700: new EventType(
'ImOnline.HeartbeatReceived',
sts.struct({
authorityId: sts.bytes(),
})
),
}
// `ImOnline.AllGood`: end-of-session event with no payload (unit).
export const allGood = {
name: 'ImOnline.AllGood',
/**
* At the end of the session, no offence was committed.
*/
v700: new EventType(
'ImOnline.AllGood',
sts.unit()
),
}
// `ImOnline.SomeOffline`: end-of-session event listing offline validators
// as (account, full identification) tuples.
export const someOffline = {
name: 'ImOnline.SomeOffline',
/**
* At the end of the session, at least one validator was found to be offline.
*/
v700: new EventType(
'ImOnline.SomeOffline',
sts.struct({
offline: sts.array(() => sts.tuple(() => [v700.AccountId32, v700.ValidatorFullIdentification])),
})
),
}
// Typed storage bindings for the `ImOnline` pallet (runtime spec v700).
// Each `export const` wraps a storage item; the paired interface describes
// the accessor surface ('Default' items expose getDefault, 'Optional' do not).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, StorageType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// Plain value storage: `ImOnline.HeartbeatAfter` (block number, with default).
export const heartbeatAfter = {
/**
* The block number after which it's ok to send heartbeats in the current
* session.
*
* At the beginning of each session we set this to a value that should fall
* roughly in the middle of the session duration. The idea is to first wait for
* the validators to produce a block in the current session, so that the
* heartbeat later on will not be necessary.
*
* This value will only be used as a fallback if we fail to get a proper session
* progress estimate from `NextSessionRotation`, as those estimates should be
* more accurate then the value we calculate for `HeartbeatAfter`.
*/
v700: new StorageType('ImOnline.HeartbeatAfter', 'Default', [], sts.number()) as HeartbeatAfterV700,
}
/**
* The block number after which it's ok to send heartbeats in the current
* session.
*
* At the beginning of each session we set this to a value that should fall
* roughly in the middle of the session duration. The idea is to first wait for
* the validators to produce a block in the current session, so that the
* heartbeat later on will not be necessary.
*
* This value will only be used as a fallback if we fail to get a proper session
* progress estimate from `NextSessionRotation`, as those estimates should be
* more accurate then the value we calculate for `HeartbeatAfter`.
*/
export interface HeartbeatAfterV700 {
is(block: RuntimeCtx): boolean
getDefault(block: Block): number
get(block: Block): Promise<(number | undefined)>
}
// Plain value storage: `ImOnline.Keys` (array of raw key bytes, with default).
export const keys = {
/**
* The current set of keys that may issue a heartbeat.
*/
v700: new StorageType('ImOnline.Keys', 'Default', [], sts.array(() => sts.bytes())) as KeysV700,
}
/**
* The current set of keys that may issue a heartbeat.
*/
export interface KeysV700 {
is(block: RuntimeCtx): boolean
getDefault(block: Block): Bytes[]
get(block: Block): Promise<(Bytes[] | undefined)>
}
// Double-map storage: `ImOnline.ReceivedHeartbeats`, keyed by
// (SessionIndex, AuthIndex); 'Optional', so values may be undefined.
export const receivedHeartbeats = {
/**
* For each session index, we keep a mapping of `SessionIndex` and `AuthIndex` to
* `WrapperOpaque<BoundedOpaqueNetworkState>`.
*/
v700: new StorageType('ImOnline.ReceivedHeartbeats', 'Optional', [sts.number(), sts.number()], v700.WrapperOpaque) as ReceivedHeartbeatsV700,
}
/**
* For each session index, we keep a mapping of `SessionIndex` and `AuthIndex` to
* `WrapperOpaque<BoundedOpaqueNetworkState>`.
*/
export interface ReceivedHeartbeatsV700 {
is(block: RuntimeCtx): boolean
get(block: Block, key1: number, key2: number): Promise<(v700.WrapperOpaque | undefined)>
getMany(block: Block, keys: [number, number][]): Promise<(v700.WrapperOpaque | undefined)[]>
getKeys(block: Block): Promise<[number, number][]>
getKeys(block: Block, key1: number): Promise<[number, number][]>
getKeys(block: Block, key1: number, key2: number): Promise<[number, number][]>
getKeysPaged(pageSize: number, block: Block): AsyncIterable<[number, number][]>
getKeysPaged(pageSize: number, block: Block, key1: number): AsyncIterable<[number, number][]>
getKeysPaged(pageSize: number, block: Block, key1: number, key2: number): AsyncIterable<[number, number][]>
getPairs(block: Block): Promise<[k: [number, number], v: (v700.WrapperOpaque | undefined)][]>
getPairs(block: Block, key1: number): Promise<[k: [number, number], v: (v700.WrapperOpaque | undefined)][]>
getPairs(block: Block, key1: number, key2: number): Promise<[k: [number, number], v: (v700.WrapperOpaque | undefined)][]>
getPairsPaged(pageSize: number, block: Block): AsyncIterable<[k: [number, number], v: (v700.WrapperOpaque | undefined)][]>
getPairsPaged(pageSize: number, block: Block, key1: number): AsyncIterable<[k: [number, number], v: (v700.WrapperOpaque | undefined)][]>
getPairsPaged(pageSize: number, block: Block, key1: number, key2: number): AsyncIterable<[k: [number, number], v: (v700.WrapperOpaque | undefined)][]>
}
// Double-map storage: `ImOnline.AuthoredBlocks`, keyed by
// (SessionIndex, ValidatorId); 'Default', value is a block count.
export const authoredBlocks = {
/**
* For each session index, we keep a mapping of `ValidatorId<T>` to the
* number of blocks authored by the given authority.
*/
v700: new StorageType('ImOnline.AuthoredBlocks', 'Default', [sts.number(), v700.AccountId32], sts.number()) as AuthoredBlocksV700,
}
/**
* For each session index, we keep a mapping of `ValidatorId<T>` to the
* number of blocks authored by the given authority.
*/
export interface AuthoredBlocksV700 {
is(block: RuntimeCtx): boolean
getDefault(block: Block): number
get(block: Block, key1: number, key2: v700.AccountId32): Promise<(number | undefined)>
getMany(block: Block, keys: [number, v700.AccountId32][]): Promise<(number | undefined)[]>
getKeys(block: Block): Promise<[number, v700.AccountId32][]>
getKeys(block: Block, key1: number): Promise<[number, v700.AccountId32][]>
getKeys(block: Block, key1: number, key2: v700.AccountId32): Promise<[number, v700.AccountId32][]>
getKeysPaged(pageSize: number, block: Block): AsyncIterable<[number, v700.AccountId32][]>
getKeysPaged(pageSize: number, block: Block, key1: number): AsyncIterable<[number, v700.AccountId32][]>
getKeysPaged(pageSize: number, block: Block, key1: number, key2: v700.AccountId32): AsyncIterable<[number, v700.AccountId32][]>
getPairs(block: Block): Promise<[k: [number, v700.AccountId32], v: (number | undefined)][]>
getPairs(block: Block, key1: number): Promise<[k: [number, v700.AccountId32], v: (number | undefined)][]>
getPairs(block: Block, key1: number, key2: v700.AccountId32): Promise<[k: [number, v700.AccountId32], v: (number | undefined)][]>
getPairsPaged(pageSize: number, block: Block): AsyncIterable<[k: [number, v700.AccountId32], v: (number | undefined)][]>
getPairsPaged(pageSize: number, block: Block, key1: number): AsyncIterable<[k: [number, v700.AccountId32], v: (number | undefined)][]>
getPairsPaged(pageSize: number, block: Block, key1: number, key2: v700.AccountId32): AsyncIterable<[k: [number, v700.AccountId32], v: (number | undefined)][]>
}
// Typed constant bindings for the `Quota` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, ConstantType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// `Quota.RefundAccount` runtime constant: the account used to refund fees.
export const refundAccount = {
/**
* Account used to refund fee
*/
v700: new ConstantType(
'Quota.RefundAccount',
v700.AccountId32
),
}
// Typed event bindings for the `Quota` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, EventType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// `Quota.Refunded`: fees refunded to `who`, linked to an identity index.
export const refunded = {
name: 'Quota.Refunded',
/**
* Refunded fees to an account
*/
v700: new EventType(
'Quota.Refunded',
sts.struct({
who: v700.AccountId32,
identity: sts.number(),
amount: sts.bigint(),
})
),
}
// `Quota.NoQuotaForIdty`: payload is the identity index (plain number).
export const noQuotaForIdty = {
name: 'Quota.NoQuotaForIdty',
/**
* No quota for identity
*/
v700: new EventType(
'Quota.NoQuotaForIdty',
sts.number()
),
}
// `Quota.NoMoreCurrencyForRefund`: no payload (unit).
export const noMoreCurrencyForRefund = {
name: 'Quota.NoMoreCurrencyForRefund',
/**
* No more currency available for refund
*/
v700: new EventType(
'Quota.NoMoreCurrencyForRefund',
sts.unit()
),
}
// `Quota.RefundFailed`: payload is the account whose refund failed.
export const refundFailed = {
name: 'Quota.RefundFailed',
/**
* Refund failed
*/
v700: new EventType(
'Quota.RefundFailed',
v700.AccountId32
),
}
// `Quota.RefundQueueFull`: no payload (unit).
export const refundQueueFull = {
name: 'Quota.RefundQueueFull',
/**
* Refund queue full
*/
v700: new EventType(
'Quota.RefundQueueFull',
sts.unit()
),
}
// Typed storage bindings for the `Quota` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, StorageType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// Map storage: `Quota.IdtyQuota`, keyed by identity index; 'Optional' value.
export const idtyQuota = {
/**
* maps identity index to quota
*/
v700: new StorageType('Quota.IdtyQuota', 'Optional', [sts.number()], v700.Quota) as IdtyQuotaV700,
}
/**
* maps identity index to quota
*/
export interface IdtyQuotaV700 {
is(block: RuntimeCtx): boolean
get(block: Block, key: number): Promise<(v700.Quota | undefined)>
getMany(block: Block, keys: number[]): Promise<(v700.Quota | undefined)[]>
getKeys(block: Block): Promise<number[]>
getKeys(block: Block, key: number): Promise<number[]>
getKeysPaged(pageSize: number, block: Block): AsyncIterable<number[]>
getKeysPaged(pageSize: number, block: Block, key: number): AsyncIterable<number[]>
getPairs(block: Block): Promise<[k: number, v: (v700.Quota | undefined)][]>
getPairs(block: Block, key: number): Promise<[k: number, v: (v700.Quota | undefined)][]>
getPairsPaged(pageSize: number, block: Block): AsyncIterable<[k: number, v: (v700.Quota | undefined)][]>
getPairsPaged(pageSize: number, block: Block, key: number): AsyncIterable<[k: number, v: (v700.Quota | undefined)][]>
}
// Plain value storage: `Quota.RefundQueue` (array of pending refunds, with default).
export const refundQueue = {
/**
* fees waiting for refund
*/
v700: new StorageType('Quota.RefundQueue', 'Default', [], sts.array(() => v700.Refund)) as RefundQueueV700,
}
/**
* fees waiting for refund
*/
export interface RefundQueueV700 {
is(block: RuntimeCtx): boolean
getDefault(block: Block): v700.Refund[]
get(block: Block): Promise<(v700.Refund[] | undefined)>
}
// Typed call bindings for the `Session` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, CallType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// `Session.set_keys`: register the caller's session keys (with ownership proof).
export const setKeys = {
name: 'Session.set_keys',
/**
* Sets the session key(s) of the function caller to `keys`.
* Allows an account to set its session key prior to becoming a validator.
* This doesn't take effect until the next session.
*
* The dispatch origin of this function must be signed.
*
* ## Complexity
* - `O(1)`. Actual cost depends on the number of length of `T::Keys::key_ids()` which is
* fixed.
*/
v700: new CallType(
'Session.set_keys',
sts.struct({
keys: v700.SessionKeys,
proof: sts.bytes(),
})
),
}
// `Session.purge_keys`: remove the caller's session keys; no arguments (unit).
export const purgeKeys = {
name: 'Session.purge_keys',
/**
* Removes any session key(s) of the function caller.
*
* This doesn't take effect until the next session.
*
* The dispatch origin of this function must be Signed and the account must be either be
* convertible to a validator ID using the chain's typical addressing system (this usually
* means being a controller account) or directly convertible into a validator ID (which
* usually means being a stash account).
*
* ## Complexity
* - `O(1)` in number of key types. Actual cost depends on the number of length of
* `T::Keys::key_ids()` which is fixed.
*/
v700: new CallType(
'Session.purge_keys',
sts.unit()
),
}
// Typed event bindings for the `Session` pallet (runtime spec v700).
// NOTE(review): typegen-style generated file — prefer regenerating over hand-editing.
import {sts, Block, Bytes, Option, Result, EventType, RuntimeCtx} from '../support'
// `Session.NewSession`: carries the session index (not a block number).
export const newSession = {
name: 'Session.NewSession',
/**
* New session has happened. Note that the argument is the session index, not the
* block number as the type might suggest.
*/
v700: new EventType(
'Session.NewSession',
sts.struct({
sessionIndex: sts.number(),
})
),
}
import {sts, Block, Bytes, Option, Result, StorageType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// Typed accessor for the `Session.Validators` storage item (runtime v700).
export const validators = {
    /**
     * The current set of validators.
     */
    // Keyless item with the 'Default' modifier — a decoded default value is
    // available via `getDefault` when the entry is absent.
    v700: new StorageType('Session.Validators', 'Default', [], sts.array(() => v700.AccountId32)) as ValidatorsV700,
}
/**
 * The current set of validators.
 */
export interface ValidatorsV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): v700.AccountId32[]
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<(v700.AccountId32[] | undefined)>
}
// Typed accessor for the `Session.CurrentIndex` storage item (runtime v700).
export const currentIndex = {
    /**
     * Current index of the session.
     */
    // Keyless item with the 'Default' modifier — see `getDefault`.
    v700: new StorageType('Session.CurrentIndex', 'Default', [], sts.number()) as CurrentIndexV700,
}
/**
 * Current index of the session.
 */
export interface CurrentIndexV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): number
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<(number | undefined)>
}
// Typed accessor for the `Session.QueuedChanged` storage item (runtime v700).
export const queuedChanged = {
    /**
     * True if the underlying economic identities or weighting behind the validators
     * has changed in the queued validator set.
     */
    v700: new StorageType('Session.QueuedChanged', 'Default', [], sts.boolean()) as QueuedChangedV700,
}
/**
 * True if the underlying economic identities or weighting behind the validators
 * has changed in the queued validator set.
 */
export interface QueuedChangedV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): boolean
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<(boolean | undefined)>
}
// Typed accessor for the `Session.QueuedKeys` storage item (runtime v700).
export const queuedKeys = {
    /**
     * The queued keys for the next session. When the next session begins, these keys
     * will be used to determine the validator's session keys.
     */
    // Stored as (validator account, session keys) pairs.
    v700: new StorageType('Session.QueuedKeys', 'Default', [], sts.array(() => sts.tuple(() => [v700.AccountId32, v700.SessionKeys]))) as QueuedKeysV700,
}
/**
 * The queued keys for the next session. When the next session begins, these keys
 * will be used to determine the validator's session keys.
 */
export interface QueuedKeysV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): [v700.AccountId32, v700.SessionKeys][]
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<([v700.AccountId32, v700.SessionKeys][] | undefined)>
}
// Typed accessor for the `Session.DisabledValidators` storage item (runtime v700).
export const disabledValidators = {
    /**
     * Indices of disabled validators.
     *
     * The vec is always kept sorted so that we can find whether a given validator is
     * disabled using binary search. It gets cleared when `on_session_ending` returns
     * a new set of identities.
     */
    v700: new StorageType('Session.DisabledValidators', 'Default', [], sts.array(() => sts.number())) as DisabledValidatorsV700,
}
/**
 * Indices of disabled validators.
 *
 * The vec is always kept sorted so that we can find whether a given validator is
 * disabled using binary search. It gets cleared when `on_session_ending` returns
 * a new set of identities.
 */
export interface DisabledValidatorsV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): number[]
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<(number[] | undefined)>
}
// Typed accessor for the `Session.NextKeys` storage map (runtime v700).
export const nextKeys = {
    /**
     * The next session keys for a validator.
     */
    // Keyed by AccountId32; 'Optional' modifier, so there is no default value
    // and reads may yield `undefined`.
    v700: new StorageType('Session.NextKeys', 'Optional', [v700.AccountId32], v700.SessionKeys) as NextKeysV700,
}
/**
 * The next session keys for a validator.
 */
export interface NextKeysV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // Single-key lookup.
    get(block: Block, key: v700.AccountId32): Promise<(v700.SessionKeys | undefined)>
    // Batched lookup for several keys at once.
    getMany(block: Block, keys: v700.AccountId32[]): Promise<(v700.SessionKeys | undefined)[]>
    // Enumerate stored keys; the one-key overload restricts to that key.
    getKeys(block: Block): Promise<v700.AccountId32[]>
    getKeys(block: Block, key: v700.AccountId32): Promise<v700.AccountId32[]>
    // As above, but paginated (pages of up to `pageSize` keys).
    getKeysPaged(pageSize: number, block: Block): AsyncIterable<v700.AccountId32[]>
    getKeysPaged(pageSize: number, block: Block, key: v700.AccountId32): AsyncIterable<v700.AccountId32[]>
    // Enumerate [key, value] pairs, optionally restricted to one key.
    getPairs(block: Block): Promise<[k: v700.AccountId32, v: (v700.SessionKeys | undefined)][]>
    getPairs(block: Block, key: v700.AccountId32): Promise<[k: v700.AccountId32, v: (v700.SessionKeys | undefined)][]>
    // As above, but paginated.
    getPairsPaged(pageSize: number, block: Block): AsyncIterable<[k: v700.AccountId32, v: (v700.SessionKeys | undefined)][]>
    getPairsPaged(pageSize: number, block: Block, key: v700.AccountId32): AsyncIterable<[k: v700.AccountId32, v: (v700.SessionKeys | undefined)][]>
}
// Typed accessor for the `Session.KeyOwner` storage map (runtime v700).
export const keyOwner = {
    /**
     * The owner of a key. The key is the `KeyTypeId` + the encoded key.
     */
    // Keyed by a (KeyTypeId, encoded-key bytes) tuple; 'Optional' modifier,
    // so there is no default value and reads may yield `undefined`.
    v700: new StorageType('Session.KeyOwner', 'Optional', [sts.tuple(() => [v700.KeyTypeId, sts.bytes()])], v700.AccountId32) as KeyOwnerV700,
}
/**
 * The owner of a key. The key is the `KeyTypeId` + the encoded key.
 */
export interface KeyOwnerV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // Single-key lookup.
    get(block: Block, key: [v700.KeyTypeId, Bytes]): Promise<(v700.AccountId32 | undefined)>
    // Batched lookup for several keys at once.
    getMany(block: Block, keys: [v700.KeyTypeId, Bytes][]): Promise<(v700.AccountId32 | undefined)[]>
    // Enumerate stored keys; the one-key overload restricts to that key.
    getKeys(block: Block): Promise<[v700.KeyTypeId, Bytes][]>
    getKeys(block: Block, key: [v700.KeyTypeId, Bytes]): Promise<[v700.KeyTypeId, Bytes][]>
    // As above, but paginated (pages of up to `pageSize` keys).
    getKeysPaged(pageSize: number, block: Block): AsyncIterable<[v700.KeyTypeId, Bytes][]>
    getKeysPaged(pageSize: number, block: Block, key: [v700.KeyTypeId, Bytes]): AsyncIterable<[v700.KeyTypeId, Bytes][]>
    // Enumerate [key, value] pairs, optionally restricted to one key.
    getPairs(block: Block): Promise<[k: [v700.KeyTypeId, Bytes], v: (v700.AccountId32 | undefined)][]>
    getPairs(block: Block, key: [v700.KeyTypeId, Bytes]): Promise<[k: [v700.KeyTypeId, Bytes], v: (v700.AccountId32 | undefined)][]>
    // As above, but paginated.
    getPairsPaged(pageSize: number, block: Block): AsyncIterable<[k: [v700.KeyTypeId, Bytes], v: (v700.AccountId32 | undefined)][]>
    getPairsPaged(pageSize: number, block: Block, key: [v700.KeyTypeId, Bytes]): AsyncIterable<[k: [v700.KeyTypeId, Bytes], v: (v700.AccountId32 | undefined)][]>
}
export * as system from './system/storage' export * as system from './system/storage'
export * as timestamp from './timestamp/storage' export * as timestamp from './timestamp/storage'
export * as balances from './balances/storage' export * as balances from './balances/storage'
export * as transactionPayment from './transaction-payment/storage'
export * as quota from './quota/storage'
export * as authorityMembers from './authority-members/storage' export * as authorityMembers from './authority-members/storage'
export * as session from './session/storage'
export * as imOnline from './im-online/storage'
export * as universalDividend from './universal-dividend/storage'
export * as identity from './identity/storage' export * as identity from './identity/storage'
export * as membership from './membership/storage' export * as membership from './membership/storage'
export * as cert from './cert/storage' export * as cert from './cert/storage'
......
import {sts, Block, Bytes, Option, Result, ConstantType, RuntimeCtx} from '../support'
/**
 * `TransactionPayment.OperationalFeeMultiplier` runtime constant.
 *
 * A fee multiplier for `Operational` extrinsics: it is applied to the
 * `final_fee` to derive a "virtual tip" that is later added to the tip
 * component in the regular `priority` calculation, boosting the priority
 * of `Operational` extrinsics.
 *
 * ```rust,ignore
 * // For `Normal`
 * let priority = priority_calc(tip);
 *
 * // For `Operational`
 * let virtual_tip = (inclusion_fee + tip) * OperationalFeeMultiplier;
 * let priority = priority_calc(tip + virtual_tip);
 * ```
 *
 * Consequently a `Normal` transaction can front-run a similarly-sized
 * `Operational` extrinsic (with no tip) by attaching a tip larger than the
 * virtual tip. Since `final_fee` already includes any regular `tip`, the
 * multiplier also amplifies the impact of tips attached to `Operational`
 * transactions.
 */
export const operationalFeeMultiplier = {
    v700: new ConstantType('TransactionPayment.OperationalFeeMultiplier', sts.number()),
}
import {sts, Block, Bytes, Option, Result, EventType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
/**
 * `TransactionPayment.TransactionFeePaid` event wrapper.
 *
 * Emitted when `who` has paid a transaction fee of `actualFee`, of which
 * `tip` was added on top of the minimum inclusion fee.
 */
const transactionFeePaidPayloadV700 = sts.struct({
    who: v700.AccountId32,
    actualFee: sts.bigint(),
    tip: sts.bigint(),
})

export const transactionFeePaid = {
    name: 'TransactionPayment.TransactionFeePaid',
    v700: new EventType('TransactionPayment.TransactionFeePaid', transactionFeePaidPayloadV700),
}
import {sts, Block, Bytes, Option, Result, StorageType, RuntimeCtx} from '../support'
import * as v700 from '../v700'
// Typed accessor for the `TransactionPayment.NextFeeMultiplier` storage item
// (runtime v700). NOTE(review): presumably the fee multiplier to be applied
// by the fee-adjustment mechanism — confirm against pallet_transaction_payment docs.
export const nextFeeMultiplier = {
    // Keyless item with the 'Default' modifier — see `getDefault`.
    v700: new StorageType('TransactionPayment.NextFeeMultiplier', 'Default', [], v700.FixedU128) as NextFeeMultiplierV700,
}
// Typed interface for `TransactionPayment.NextFeeMultiplier` reads.
export interface NextFeeMultiplierV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): v700.FixedU128
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<(v700.FixedU128 | undefined)>
}
// Typed accessor for the `TransactionPayment.StorageVersion` storage item
// (runtime v700). NOTE(review): presumably the pallet's storage-migration
// version marker (`Releases`) — confirm against pallet_transaction_payment docs.
export const storageVersion = {
    // Keyless item with the 'Default' modifier — see `getDefault`.
    v700: new StorageType('TransactionPayment.StorageVersion', 'Default', [], v700.Releases) as StorageVersionV700,
}
// Typed interface for `TransactionPayment.StorageVersion` reads.
export interface StorageVersionV700 {
    // True when this storage item exists with this shape at `block`'s runtime.
    is(block: RuntimeCtx): boolean
    // The decoded default value for the item.
    getDefault(block: Block): v700.Releases
    // The value stored at `block`, or `undefined` if not set.
    get(block: Block): Promise<(v700.Releases | undefined)>
}
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Please register or to comment