diff --git a/.npmrc b/.npmrc
index 4fd021952d5a1d74ce03afc79643268a0480adfb..c389796540d15a26422becbb9c6f7e3c227051af 100644
--- a/.npmrc
+++ b/.npmrc
@@ -1 +1,2 @@
-engine-strict=true
\ No newline at end of file
+engine-strict=true
+# loglevel=notice
\ No newline at end of file
diff --git a/docker-compose.duniter-squid.yml b/docker-compose.duniter-squid.yml
index a6b49386012986d403450ca82720896faaad2f8e..7cd15979842da88c82352dee94e28eb0c31e0dd4 100644
--- a/docker-compose.duniter-squid.yml
+++ b/docker-compose.duniter-squid.yml
@@ -3,11 +3,11 @@ version: "3"
 services:
   db:
     image: postgres:15
+    volumes:
+      - postgres-data:/var/lib/postgresql/data
     environment:
       POSTGRES_DB: squid
       POSTGRES_PASSWORD: postgres
-    ports:
-      - "5432:5432"
     # Uncomment for logging all SQL statements
     # command: ["postgres", "-c", "log_statement=all"]
   api:
@@ -20,7 +20,7 @@ services:
       - GQL_PORT=4350
     ports:
       # GraphQL endpoint at port 4350
-      - "4350:4350"
+      - "127.0.0.1:4350:4350"
     command: ["sqd", "serve:prod"]
     depends_on:
       - db
@@ -42,6 +42,8 @@ services:
       - default
       - duniter
 
+volumes:
+  postgres-data:
 
 networks:
   duniter:
diff --git a/src/main.ts b/src/main.ts
index b55f1785393f5a4df1eed5282ca6e01bec27a575..6aa6ae27297d49c5b765dc8c7ca985a2ba99804b 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -24,19 +24,15 @@ processor.run(new TypeormDatabaseWithCache(), async (ctx) => {
     if (header.height == 0) {
       await saveGenesis(ctx);
     }
-    // save all extrinsics of the block (except Timestamp.Set)
+    // save all extrinsics of the block (except Timestamp.Set when no UD)
     for (const extrinsic of extrinsics) {
-      if (!(extrinsic.call && extrinsic.call.name == calls_t.timestamp.set.name)) {
-        await saveExtrinsic(ctx, extrinsic);
-      }
+      await saveExtrinsic(ctx, extrinsic);
     }
-    // save all calls of the block (except Timestamp.Set)
+    // save all calls of the block (except Timestamp.Set when no UD)
     for (const call of calls.reverse()) {
-      if (call.name != calls_t.timestamp.set.name) {
-        await saveCall(ctx, call);
-      }
+      await saveCall(ctx, call);
     }
-    // save all events of the block
+    // save all events of the block (except System.ExtrinsicSuccess of Timestamp.Set)
     for (const event of events) {
       await saveEvent(ctx, event);
     }
@@ -45,6 +41,7 @@ processor.run(new TypeormDatabaseWithCache(), async (ctx) => {
   let ndata: NewData = {
     accounts: [],
     identitiesCreated: [],
+    idtyChangedOwnerKey: [],
     transfers: [],
     certs: [],
   };
@@ -55,6 +52,7 @@ processor.run(new TypeormDatabaseWithCache(), async (ctx) => {
     newAccounts: [],
     identities: new Map(),
     newIdentities: [],
+    changedIdentities: [],
     transfers: [],
     certs: [],
   };
@@ -72,6 +70,7 @@ type BlockNumber = number;
 interface NewData {
   accounts: Address[];
   identitiesCreated: IdtyCreatedEvent[];
+  idtyChangedOwnerKey: IdtyChangedOwnerKeyEvent[];
   transfers: TransferEvent[];
   certs: CertEvent[];
 }
@@ -94,6 +93,11 @@ interface IdtyCreatedEvent {
   index: IdtyIndex;
   account: Address;
 }
+interface IdtyChangedOwnerKeyEvent {
+  id: string;
+  index: IdtyIndex;
+  account: Address;
+}
 
 // a way to group data prepared for database insertion
 interface PreparedData {
@@ -101,10 +105,12 @@
   accounts: Map<Address, Account>;
   // non existing accounts to add in the database before transactions and identites
   newAccounts: Account[];
-  // all identities that other entities point to
+  // all identities that other entities point to or which might have changed
   identities: Map<IdtyIndex, Identity>;
   // non existing identities to add in the database before certifications and memberships
   newIdentities: Identity[];
+  // existing identities that have changed
+  changedIdentities: Identity[];
   // transfers
   transfers: Transfer[];
   // certifications
@@ -119,16 +125,18 @@ function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) {
 
   // ===== Events =====
   for (let event of block.events) {
+    ctx.log.debug(event.name);
     switch (event.name) {
       // ===== System.NewAccount
       case events_t.system.newAccount.name:
         let newA: { account: Address };
-        if (events_t.balances.transfer.v700.is(event)) {
+        if (events_t.system.newAccount.v700.is(event)) {
           newA = events_t.system.newAccount.v700.decode(event);
         } else {
           throw new Error("Unsupported spec");
         }
-        ndata.accounts.push(newA.account);
+        ndata.accounts.push(ss58.codec(42).encode(newA.account));
+        break;
 
       // ===== Balances.Transfer
       case events_t.balances.transfer.name:
@@ -160,8 +168,24 @@ function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) {
         ndata.identitiesCreated.push({
           id: event.id,
           index: newI.idtyIndex,
-          account: newI.ownerKey,
+          account: ss58.codec(42).encode(newI.ownerKey),
         });
+        break;
+
+      // ===== Identity.ChangeOwnerKey
+      case events_t.identity.idtyChangedOwnerKey.name:
+        let chok: { idtyIndex: IdtyIndex; newOwnerKey: Address };
+        if (events_t.identity.idtyChangedOwnerKey.v700.is(event)) {
+          chok = events_t.identity.idtyChangedOwnerKey.v700.decode(event);
+        } else {
+          throw new Error("Unsupported spec");
+        }
+        ndata.idtyChangedOwnerKey.push({
+          id: event.id,
+          index: chok.idtyIndex,
+          account: ss58.codec(42).encode(chok.newOwnerKey),
+        });
+        break;
 
       // ===== Cert.NewCert
       case events_t.cert.newCert.name:
@@ -194,6 +218,7 @@ function getNewData(ctx: ProcessorContext<StoreWithCache>, ndata: NewData) {
           receiver: recert.receiver,
         });
         break;
+
       default:
       // ctx.log.info(`Unhandled event ${event.name}`)
     }
@@ -205,6 +230,7 @@
 async function prepareData(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
   await createAccounts(ctx, newData, data);
   await createIdentities(ctx, newData, data);
+  await changeIdtyOwnerKey(ctx, newData, data);
   createTransfers(newData, data);
   createCerts(newData, data);
 }
@@ -212,8 +238,10 @@ async function prepareData(ctx: ProcessorContext<StoreWithCache>, newData: NewDa
 /// store prepared data into database
 async function storeData(ctx: ProcessorContext<StoreWithCache>, data: PreparedData) {
   // UPSERT = update or insert if not existing
-  // account can have already existed, been killed, and recreated
-  await ctx.store.upsert([...data.newAccounts.values()]);
+  // account can have already existed, been killed, and recreated
+  await ctx.store.upsert(data.newAccounts);
+  // identities which have changed
+  await ctx.store.upsert([...data.changedIdentities.values()]);
   // INSERT
   await ctx.store.insert([...data.newIdentities.values()]);
   await ctx.store.insert(data.transfers);
@@ -230,11 +258,15 @@ async function createAccounts(ctx: ProcessorContext<StoreWithCache>, newData: Ne
     accountIds.add(t.from);
     accountIds.add(t.to);
   }
+  // system will tell when accounts are created (but this should be added above)
+  for (let a of newData.accounts) {
+    accountIds.add(a);
+  }
   // fill the map of accounts with those that are already stored
   await ctx.store.findBy(Account, { id: In([...accountIds]) }).then((accounts) => {
     accounts.map((a) => data.accounts.set(a.id, a));
   });
-  // for each account, add an account if not already existing
+  // for each account, add an account if not already existing (not found above)
   for (let a of accountIds) {
     updateAccounts(a);
   }
@@ -255,7 +287,7 @@ async function createIdentities(ctx: ProcessorContext<StoreWithCache>, newData:
     idtyIds.add(c.issuer);
     idtyIds.add(c.receiver);
   }
-  await ctx.store.findBy(Identity, { id: In([...idtyIds]) }).then((idties) => {
+  await ctx.store.findBy(Identity, { index: In([...idtyIds]) }).then((idties) => {
     idties.map((i) => data.identities.set(i.index, i));
   });
   for (let i of newData.identitiesCreated) {
@@ -271,6 +303,32 @@ async function createIdentities(ctx: ProcessorContext<StoreWithCache>, newData:
   }
 }
 
+async function changeIdtyOwnerKey(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
+  const idtyIds = new Set<IdtyIndex>();
+  const accountIds = new Set<Address>();
+  for (let i of newData.idtyChangedOwnerKey) {
+    idtyIds.add(i.index);
+    accountIds.add(i.account);
+  }
+  // TODO avoid fetching multiple times
+  await ctx.store.findBy(Identity, { index: In([...idtyIds]) }).then((idties) => {
+    idties.map((i) => data.identities.set(i.index, i));
+  });
+  await ctx.store.findBy(Account, { id: In([...accountIds]) }).then((accounts) => {
+    accounts.map((a) => data.accounts.set(a.id, a));
+  });
+  for (let i of newData.idtyChangedOwnerKey) {
+    let idty = data.identities.get(i.index);
+    let account = data.accounts.get(i.account);
+    if (idty && account) {
+      idty.account = account;
+      data.changedIdentities.push(idty);
+    } else {
+      ctx.log.error(`changed owner key to non existing account ${account} or identity ${idty}`);
+    }
+  }
+}
+
 function createTransfers(newData: NewData, data: PreparedData) {
   for (let t of newData.transfers) {
     let { id, blockNumber, timestamp, amount } = t;
diff --git a/src/processor.ts b/src/processor.ts
index f28137fb86a356db734423d9093af542d9bd0571..f54bb00ab868d18412e9155d6a94f286d67c2edf 100644
--- a/src/processor.ts
+++ b/src/processor.ts
@@ -1,5 +1,4 @@
 import {assertNotNull} from '@subsquid/util-internal'
-import {lookupArchive} from '@subsquid/archive-registry'
 import {
     BlockHeader,
     DataHandlerContext,
@@ -28,14 +27,14 @@ export const processor = new SubstrateBatchProcessor()
     })
     .addEvent({
         // name: [],
-        name: [events.balances.transfer.name, events.cert.newCert.name, events.cert.renewedCert.name, events.cert.removedCert.name],
+        // name: [events.balances.transfer.name, events.cert.newCert.name, events.cert.renewedCert.name, events.cert.removedCert.name],
         // extrinsic: true,
         // https://github.com/subsquid-labs/giant-squid-explorer/blob/main/src/processor.ts
         call: true,
         extrinsic: true,
     })
     .addCall({
-        // name: []
+        // name: [],
         // events: true,
         // https://github.com/subsquid-labs/giant-squid-explorer/blob/main/src/processor.ts
         extrinsic: true,
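
A note on the ss58.codec(42).encode(...) calls this patch introduces in src/main.ts: the typed event decoders return account keys as raw 32-byte public keys, while the schema's Account ids are SS58 address strings, so every decoded key is re-encoded with the generic Substrate prefix 42 before being pushed, which keeps the ids comparable across the System.NewAccount, Identity.IdtyCreated and Identity.ChangedOwnerKey handlers. A minimal sketch, assuming @subsquid/ss58 (already in use above) and that decoded fields arrive as 0x-prefixed hex; the ownerKey value below is a made-up example:

import * as ss58 from "@subsquid/ss58";

// hypothetical raw public key, in the hex form the event decoders emit
const ownerKey = "0x8eaf04151687736326c9fea17e25fc5287613693c912909cb226aa4794f26a48";

// prefix 42 is the generic Substrate network; the result is the string
// form used as an Account id, so map lookups like data.accounts.get(...)
// in changeIdtyOwnerKey can match rows written by createAccounts
const address: string = ss58.codec(42).encode(ownerKey);

Without this normalization, changeIdtyOwnerKey would look accounts up by raw hex while createAccounts stored them under SS58 ids, and the idty && account check would miss, which appears to be why the patch encodes at every push site.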
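
The new changeIdtyOwnerKey carries a "TODO avoid fetching multiple times" marker: it and createIdentities each issue their own findBy round-trips to fill the same shared maps. A hedged sketch of one way to resolve it, using a hypothetical prefetch helper; the helper name and the ./model import path are assumptions, not part of the patch, and whether StoreWithCache already deduplicates these lookups internally is not established here:

import { In } from "typeorm";
import { Account, Identity } from "./model";

// hypothetical helper: collect every identity index and account id the
// whole batch will need, then hit the store once per entity type
async function prefetch(ctx: ProcessorContext<StoreWithCache>, newData: NewData, data: PreparedData) {
  const idtyIds = new Set<IdtyIndex>();
  const accountIds = new Set<Address>();
  for (const e of newData.idtyChangedOwnerKey) {
    idtyIds.add(e.index);
    accountIds.add(e.account);
  }
  for (const c of newData.certs) {
    idtyIds.add(c.issuer);
    idtyIds.add(c.receiver);
  }
  // one round-trip per entity type instead of one per handler
  const [idties, accounts] = await Promise.all([
    ctx.store.findBy(Identity, { index: In([...idtyIds]) }),
    ctx.store.findBy(Account, { id: In([...accountIds]) }),
  ]);
  // fill the shared maps once; createIdentities and changeIdtyOwnerKey
  // could then read data.identities / data.accounts without refetching
  idties.forEach((i) => data.identities.set(i.index, i));
  accounts.forEach((a) => data.accounts.set(a.id, a));
}

Under this sketch, prepareData would call prefetch once before the create* steps and the per-handler findBy blocks could be dropped; this remains an optimization sketch, not a change the patch makes.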