diff --git a/assets/blocks-v1.json b/assets/blocks-v1.json
new file mode 100644
index 0000000000000000000000000000000000000000..b58ffa9d553af42029f8a5d7d8c5c458b5167483
Binary files /dev/null and b/assets/blocks-v1.json differ
diff --git a/db/migrations/1713861445705-Data.js b/db/migrations/1713972569027-Data.js
similarity index 96%
rename from db/migrations/1713861445705-Data.js
rename to db/migrations/1713972569027-Data.js
index d229bdd484c24d4880b817ee9348ce55b05fdb03..3355e0a4d6eb9dd62eceb7c2f851797590ff925f 100644
--- a/db/migrations/1713861445705-Data.js
+++ b/db/migrations/1713972569027-Data.js
@@ -1,5 +1,5 @@
-module.exports = class Data1713861445705 {
-    name = 'Data1713861445705'
+module.exports = class Data1713972569027 {
+    name = 'Data1713972569027'
 
     async up(db) {
         await db.query(`CREATE TABLE "event" ("id" character varying NOT NULL, "index" integer NOT NULL, "phase" text NOT NULL, "pallet" text NOT NULL, "name" text NOT NULL, "args" jsonb, "args_str" text array, "block_id" character varying, "extrinsic_id" character varying, "call_id" character varying, CONSTRAINT "PK_30c2f3bbaf6d34a55f8ae6e4614" PRIMARY KEY ("id"))`)
@@ -42,10 +42,11 @@ module.exports = class Data1713861445705 {
         await db.query(`CREATE INDEX "IDX_d2bc74ffa9c2571da03670f2c8" ON "cert_event" ("cert_id") `)
         await db.query(`CREATE INDEX "IDX_411c2bb1469a8a96762ceb00ae" ON "cert_event" ("event_id") `)
         await db.query(`CREATE INDEX "IDX_53154146094ec7b030dbc31388" ON "cert_event" ("block_number") `)
-        await db.query(`CREATE TABLE "cert" ("id" character varying NOT NULL, "is_active" boolean NOT NULL, "created_on" integer NOT NULL, "expire_on" integer NOT NULL, "issuer_id" character varying, "receiver_id" character varying, "created_in_id" character varying, CONSTRAINT "PK_6a0ce80cc860598b4f16c00998c" PRIMARY KEY ("id"))`)
+        await db.query(`CREATE TABLE "cert" ("id" character varying NOT NULL, "is_active" boolean NOT NULL, "created_on" integer NOT NULL, "updated_on" integer NOT NULL, "expire_on" integer NOT NULL, "issuer_id" character varying, "receiver_id" character varying, "created_in_id" character varying, "updated_in_id" character varying, CONSTRAINT "PK_6a0ce80cc860598b4f16c00998c" PRIMARY KEY ("id"))`)
         await db.query(`CREATE INDEX "IDX_70592e488b2e75cd8a2fa79826" ON "cert" ("issuer_id") `)
         await db.query(`CREATE INDEX "IDX_262e29ab91c8ebc727cc518f2f" ON "cert" ("receiver_id") `)
         await db.query(`CREATE INDEX "IDX_ad35ca166ad24ecea43d7ebfca" ON "cert" ("created_in_id") `)
+        await db.query(`CREATE INDEX "IDX_5fbefe3a497e898aff45e44f50" ON "cert" ("updated_in_id") `)
         await db.query(`CREATE TABLE "smith_cert" ("id" character varying NOT NULL, "created_on" integer NOT NULL, "issuer_id" character varying, "receiver_id" character varying, CONSTRAINT "PK_ae2ef36c9f6d40348c86230fd35" PRIMARY KEY ("id"))`)
         await db.query(`CREATE INDEX "IDX_ae67cbd087fcea0e1ec2f70cd0" ON "smith_cert" ("issuer_id") `)
         await db.query(`CREATE INDEX "IDX_5e414c1d12af16165881a16b63" ON "smith_cert" ("receiver_id") `)
@@ -86,6 +87,7 @@ module.exports = class Data1713861445705 {
         await db.query(`ALTER TABLE "cert" ADD CONSTRAINT "FK_70592e488b2e75cd8a2fa798261" FOREIGN KEY ("issuer_id") REFERENCES "identity"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
         await db.query(`ALTER TABLE "cert" ADD CONSTRAINT "FK_262e29ab91c8ebc727cc518f2fb" FOREIGN KEY ("receiver_id") REFERENCES "identity"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
         await db.query(`ALTER TABLE "cert" ADD CONSTRAINT "FK_ad35ca166ad24ecea43d7ebfca9" FOREIGN KEY ("created_in_id") REFERENCES "event"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
+        await db.query(`ALTER TABLE "cert" ADD CONSTRAINT "FK_5fbefe3a497e898aff45e44f504" FOREIGN KEY ("updated_in_id") REFERENCES "event"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
         await db.query(`ALTER TABLE "smith_cert" ADD CONSTRAINT "FK_ae67cbd087fcea0e1ec2f70cd04" FOREIGN KEY ("issuer_id") REFERENCES "identity"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
         await db.query(`ALTER TABLE "smith_cert" ADD CONSTRAINT "FK_5e414c1d12af16165881a16b638" FOREIGN KEY ("receiver_id") REFERENCES "identity"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
         await db.query(`ALTER TABLE "membership_event" ADD CONSTRAINT "FK_fbbd75d84ab6cc2aafeaf37a03f" FOREIGN KEY ("identity_id") REFERENCES "identity"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`)
@@ -146,6 +148,7 @@ module.exports = class Data1713861445705 {
         await db.query(`DROP INDEX "public"."IDX_70592e488b2e75cd8a2fa79826"`)
         await db.query(`DROP INDEX "public"."IDX_262e29ab91c8ebc727cc518f2f"`)
         await db.query(`DROP INDEX "public"."IDX_ad35ca166ad24ecea43d7ebfca"`)
+        await db.query(`DROP INDEX "public"."IDX_5fbefe3a497e898aff45e44f50"`)
         await db.query(`DROP TABLE "smith_cert"`)
         await db.query(`DROP INDEX "public"."IDX_ae67cbd087fcea0e1ec2f70cd0"`)
         await db.query(`DROP INDEX "public"."IDX_5e414c1d12af16165881a16b63"`)
@@ -186,6 +189,7 @@ module.exports = class Data1713861445705 {
         await db.query(`ALTER TABLE "cert" DROP CONSTRAINT "FK_70592e488b2e75cd8a2fa798261"`)
         await db.query(`ALTER TABLE "cert" DROP CONSTRAINT "FK_262e29ab91c8ebc727cc518f2fb"`)
         await db.query(`ALTER TABLE "cert" DROP CONSTRAINT "FK_ad35ca166ad24ecea43d7ebfca9"`)
+        await db.query(`ALTER TABLE "cert" DROP CONSTRAINT "FK_5fbefe3a497e898aff45e44f504"`)
         await db.query(`ALTER TABLE "smith_cert" DROP CONSTRAINT "FK_ae67cbd087fcea0e1ec2f70cd04"`)
         await db.query(`ALTER TABLE "smith_cert" DROP CONSTRAINT "FK_5e414c1d12af16165881a16b638"`)
         await db.query(`ALTER TABLE "membership_event" DROP CONSTRAINT "FK_fbbd75d84ab6cc2aafeaf37a03f"`)
diff --git a/db/migrations/1713861565754-EnumsMigration.js b/db/migrations/1713972689052-EnumsMigration.js
similarity index 72%
rename from db/migrations/1713861565754-EnumsMigration.js
rename to db/migrations/1713972689052-EnumsMigration.js
index 35ff70627b7becff85ec426ad7a5b00ae2101295..2273e65808937a1e5356a3175e58ac9e7b36f14d 100644
--- a/db/migrations/1713861565754-EnumsMigration.js
+++ b/db/migrations/1713972689052-EnumsMigration.js
@@ -1,7 +1,7 @@
 const fs = require("fs");
 
-module.exports = class EnumsMigration1713861565754 {
-  name = "EnumsMigration1713861565754";
+module.exports = class EnumsMigration1713972689052 {
+  name = "EnumsMigration1713972689052";
 
   async up(db) {
     await db.query(fs.readFileSync("assets/sql/EnumsMigration_up.sql", "utf8"));
diff --git a/db/migrations/1713861565754-udHistoryFunction.js b/db/migrations/1713972689052-udHistoryFunction.js
similarity index 71%
rename from db/migrations/1713861565754-udHistoryFunction.js
rename to db/migrations/1713972689052-udHistoryFunction.js
index d1bdafba7f095e9c0b5c6a894adf15947c189f13..148169d16bb7bc48e35b1e1720e3872b2c9d688c 100644
--- a/db/migrations/1713861565754-udHistoryFunction.js
+++ b/db/migrations/1713972689052-udHistoryFunction.js
@@ -1,7 +1,7 @@
 const fs = require("fs");
 
-module.exports = class udHistoryFunction1713861565754 {
-  name = "udHistoryFunction1713861565754";
+module.exports = class udHistoryFunction1713972689052 {
+  name = "udHistoryFunction1713972689052";
 
   async up(db) {
     await db.query(fs.readFileSync("assets/sql/udHistoryFunction_up.sql", "utf8"));
diff --git a/hasura/metadata/databases/default/tables/public_cert.yaml b/hasura/metadata/databases/default/tables/public_cert.yaml
index 88f4078c0ef063317295cf37234c53a5fd7beca5..740b170385e143a2a2ef1f211e7a6fa0bfb49ba5 100644
--- a/hasura/metadata/databases/default/tables/public_cert.yaml
+++ b/hasura/metadata/databases/default/tables/public_cert.yaml
@@ -11,6 +11,9 @@ object_relationships:
   - name: createdIn
     using:
       foreign_key_constraint_on: created_in_id
+  - name: updatedIn
+    using:
+      foreign_key_constraint_on: updated_in_id
 array_relationships:
   - name: certHistory
     using:
diff --git a/package-lock.json b/package-lock.json
index fe3b48d00eedc10ac488fa25ded77a63e10710ba..f7aaca58aa22958cdbcb00b2bb818b390aa9d760 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
 {
   "name": "squid",
-  "version": "0.1.3",
+  "version": "0.2.0",
   "lockfileVersion": 3,
   "requires": true,
   "packages": {
     "": {
       "name": "squid",
-      "version": "0.1.3",
+      "version": "0.2.0",
       "dependencies": {
         "@belopash/typeorm-store": "^1.2.2",
         "@subsquid/archive-registry": "^3.3.0",
@@ -15,9 +15,12 @@
         "@subsquid/substrate-processor": "^8.2.1",
         "@subsquid/typeorm-migration": "^1.3.0",
         "@subsquid/typeorm-store": "^1.2.6",
+        "axios": "^1.6.8",
+        "bs58": "^5.0.0",
         "dotenv": "^16.4.5",
         "node-fetch": "^3.3.2",
         "pg": "8.11.3",
+        "stream-json": "^1.8.0",
         "type-graphql": "^1.2.0-rc.1",
         "typeorm": "^0.3.20"
       },
@@ -28,6 +31,7 @@
         "@subsquid/typeorm-codegen": "^1.3.3",
         "@types/js-yaml": "^4.0.9",
         "@types/node": "^20.11.25",
+        "@types/stream-json": "^1.7.7",
         "copyfiles": "^2.4.1",
         "js-yaml": "^4.1.0",
         "typescript": "^5.4.2"
@@ -1337,6 +1341,25 @@
         "@types/node": "*"
       }
     },
+    "node_modules/@types/stream-chain": {
+      "version": "2.0.4",
+      "resolved": "https://registry.npmjs.org/@types/stream-chain/-/stream-chain-2.0.4.tgz",
+      "integrity": "sha512-V7TsWLHrx79KumkHqSD7F8eR6POpEuWb6PuXJ7s/dRHAf3uVst3Jkp1yZ5XqIfECZLQ4a28vBVstTErmsMBvaQ==",
+      "dev": true,
+      "dependencies": {
+        "@types/node": "*"
+      }
+    },
+    "node_modules/@types/stream-json": {
+      "version": "1.7.7",
+      "resolved": "https://registry.npmjs.org/@types/stream-json/-/stream-json-1.7.7.tgz",
+      "integrity": "sha512-hHG7cLQ09H/m9i0jzL6UJAeLLxIWej90ECn0svO4T8J0nGcl89xZDQ2ujT4WKlvg0GWkcxJbjIDzW/v7BYUM6Q==",
+      "dev": true,
+      "dependencies": {
+        "@types/node": "*",
+        "@types/stream-chain": "*"
+      }
+    },
     "node_modules/@types/validator": {
       "version": "13.11.9",
       "resolved": "https://registry.npmjs.org/@types/validator/-/validator-13.11.9.tgz",
@@ -1687,6 +1710,11 @@
         "retry": "0.13.1"
       }
     },
+    "node_modules/asynckit": {
+      "version": "0.4.0",
+      "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz",
+      "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q=="
+    },
     "node_modules/available-typed-arrays": {
       "version": "1.0.7",
       "resolved": "https://registry.npmjs.org/available-typed-arrays/-/available-typed-arrays-1.0.7.tgz",
@@ -1701,6 +1729,16 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/axios": {
+      "version": "1.6.8",
+      "resolved": "https://registry.npmjs.org/axios/-/axios-1.6.8.tgz",
+      "integrity": "sha512-v/ZHtJDU39mDpyBoFVkETcd/uNdxrWRrg3bKpOKzXFA6Bvqopts6ALSMU3y6ijYxbw2B+wPrIv46egTzJXCLGQ==",
+      "dependencies": {
+        "follow-redirects": "^1.15.6",
+        "form-data": "^4.0.0",
+        "proxy-from-env": "^1.1.0"
+      }
+    },
     "node_modules/b4a": {
       "version": "1.6.6",
       "resolved": "https://registry.npmjs.org/b4a/-/b4a-1.6.6.tgz",
@@ -1790,6 +1828,14 @@
         "concat-map": "0.0.1"
       }
     },
+    "node_modules/bs58": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/bs58/-/bs58-5.0.0.tgz",
+      "integrity": "sha512-r+ihvQJvahgYT50JD05dyJNKlmmSlMoOGwn1lCcEzanPglg7TxYjioQUYehQ9mAR/+hOSd2jRc/Z2y5UxBymvQ==",
+      "dependencies": {
+        "base-x": "^4.0.0"
+      }
+    },
     "node_modules/buffer": {
       "version": "6.0.3",
       "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz",
@@ -1971,6 +2017,17 @@
       "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
       "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
     },
+    "node_modules/combined-stream": {
+      "version": "1.0.8",
+      "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz",
+      "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==",
+      "dependencies": {
+        "delayed-stream": "~1.0.0"
+      },
+      "engines": {
+        "node": ">= 0.8"
+      }
+    },
     "node_modules/commander": {
       "version": "10.0.1",
       "resolved": "https://registry.npmjs.org/commander/-/commander-10.0.1.tgz",
@@ -2201,6 +2258,14 @@
         "url": "https://github.com/sponsors/ljharb"
       }
     },
+    "node_modules/delayed-stream": {
+      "version": "1.0.0",
+      "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz",
+      "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==",
+      "engines": {
+        "node": ">=0.4.0"
+      }
+    },
     "node_modules/denque": {
       "version": "2.1.0",
       "resolved": "https://registry.npmjs.org/denque/-/denque-2.1.0.tgz",
@@ -2492,6 +2557,25 @@
         "node": ">= 0.8"
       }
     },
+    "node_modules/follow-redirects": {
+      "version": "1.15.6",
+      "resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.6.tgz",
+      "integrity": "sha512-wWN62YITEaOpSK584EZXJafH1AGpO8RVgElfkuXbTOrPX4fIfOyEpW/CsiNd8JdYrAoOvafRTOEnvsO++qCqFA==",
+      "funding": [
+        {
+          "type": "individual",
+          "url": "https://github.com/sponsors/RubenVerborgh"
+        }
+      ],
+      "engines": {
+        "node": ">=4.0"
+      },
+      "peerDependenciesMeta": {
+        "debug": {
+          "optional": true
+        }
+      }
+    },
     "node_modules/for-each": {
       "version": "0.3.3",
       "resolved": "https://registry.npmjs.org/for-each/-/for-each-0.3.3.tgz",
@@ -2515,6 +2599,19 @@
         "url": "https://github.com/sponsors/isaacs"
       }
     },
+    "node_modules/form-data": {
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/form-data/-/form-data-4.0.0.tgz",
+      "integrity": "sha512-ETEklSGi5t0QMZuiXoA/Q6vcnxcLQP5vdugSpuAyi6SVGi2clPPp+xgEhuMaHC+zGgn31Kd235W35f7Hykkaww==",
+      "dependencies": {
+        "asynckit": "^0.4.0",
+        "combined-stream": "^1.0.8",
+        "mime-types": "^2.1.12"
+      },
+      "engines": {
+        "node": ">= 6"
+      }
+    },
     "node_modules/formdata-polyfill": {
       "version": "4.0.10",
       "resolved": "https://registry.npmjs.org/formdata-polyfill/-/formdata-polyfill-4.0.10.tgz",
@@ -3723,6 +3820,11 @@
         "node": ">= 0.10"
       }
     },
+    "node_modules/proxy-from-env": {
+      "version": "1.1.0",
+      "resolved": "https://registry.npmjs.org/proxy-from-env/-/proxy-from-env-1.1.0.tgz",
+      "integrity": "sha512-D+zkORCbA9f1tdWRK0RaCR3GPv50cMxcrz4X8k5LTSUD1Dkw47mKJEZQNunItRTkWwgtaUSo1RVFRIG9ZXiFYg=="
+    },
     "node_modules/qs": {
       "version": "6.11.0",
       "resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
@@ -4063,6 +4165,19 @@
         "npm": ">=6"
       }
     },
+    "node_modules/stream-chain": {
+      "version": "2.2.5",
+      "resolved": "https://registry.npmjs.org/stream-chain/-/stream-chain-2.2.5.tgz",
+      "integrity": "sha512-1TJmBx6aSWqZ4tx7aTpBDXK0/e2hhcNSTV8+CbFJtDjbb+I1mZ8lHit0Grw9GRT+6JbIrrDd8esncgBi8aBXGA=="
+    },
+    "node_modules/stream-json": {
+      "version": "1.8.0",
+      "resolved": "https://registry.npmjs.org/stream-json/-/stream-json-1.8.0.tgz",
+      "integrity": "sha512-HZfXngYHUAr1exT4fxlbc1IOce1RYxp2ldeaf97LYCOPSoOqY/1Psp7iGvpb+6JIOgkra9zDYnPX01hGAHzEPw==",
+      "dependencies": {
+        "stream-chain": "^2.2.5"
+      }
+    },
     "node_modules/string_decoder": {
       "version": "0.10.31",
       "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-0.10.31.tgz",
diff --git a/package.json b/package.json
index 0f4560f192962bc5517c01feb84e2ff9b5a5b614..6fac9d039e0a4649dab9a83eb39a030ea6c30d47 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "squid",
-  "version": "0.2.0",
+  "version": "0.2.1",
   "private": true,
   "engines": {
     "node": ">=20"
@@ -16,9 +16,12 @@
     "@subsquid/substrate-processor": "^8.2.1",
     "@subsquid/typeorm-migration": "^1.3.0",
     "@subsquid/typeorm-store": "^1.2.6",
+    "axios": "^1.6.8",
+    "bs58": "^5.0.0",
     "dotenv": "^16.4.5",
     "node-fetch": "^3.3.2",
     "pg": "8.11.3",
+    "stream-json": "^1.8.0",
     "type-graphql": "^1.2.0-rc.1",
     "typeorm": "^0.3.20"
   },
@@ -29,6 +32,7 @@
     "@subsquid/typeorm-codegen": "^1.3.3",
     "@types/js-yaml": "^4.0.9",
     "@types/node": "^20.11.25",
+    "@types/stream-json": "^1.7.7",
     "copyfiles": "^2.4.1",
     "js-yaml": "^4.1.0",
     "typescript": "^5.4.2"
diff --git a/schema.graphql b/schema.graphql
index 79fb7d027eb2977ac520309d308709806af8b511..db72348b86713b58aeff9b155fd9ed8c238b0806 100644
--- a/schema.graphql
+++ b/schema.graphql
@@ -16,11 +16,9 @@ type Block @entity {
   implVersion: Int!
   timestamp: DateTime! @index
   validator: Bytes @index
-
   extrinsicsCount: Int!
   callsCount: Int!
   eventsCount: Int!
-
   extrinsics: [Extrinsic]!
     @derivedFrom(field: "block")
     @cardinality(value: 1000)
@@ -219,11 +217,16 @@ type Cert @entity {
   receiver: Identity! @index
   "whether the certification is currently active or not" # helper to avoid compare expireOn to current block number
   isActive: Boolean!
-  "the last createdOn value" # helper field to avoid looking for all CertCreation
+  "the first block number of the certification creation" # helper field to avoid looking for all CertCreation
   createdOn: Int!
+  "the event corresponding to the first certification creation"
+  createdIn: Event!
+  "the last block number of the certification renewal"
+  updatedOn: Int!
+  "the event corresponding to the last certification renewal"
+  updatedIn: Event!
   "the current expireOn value" # helper field to avoid looking for all CertRenewal and search for the last one
   expireOn: Int!
-  createdIn: Event!
   certHistory: [CertEvent!] @derivedFrom(field: "cert")
 }
 
diff --git a/src/data_handler.ts b/src/data_handler.ts
index 4fbe7054e5b55070f3b9747d467690b655f2c6e2..2af056f7cfe9e939c8c30aa6f9f745e975df9d17 100644
--- a/src/data_handler.ts
+++ b/src/data_handler.ts
@@ -226,7 +226,7 @@ export class DataHandler {
         where: { issuer: { index: issuerId }, receiver: { index: receiverId } },
       });
 
-      const eventType = await ctx.store.getOrFail(Event, event.id);
+      const createdIn = await ctx.store.getOrFail(Event, event.id);
       if (cert == null) {
         const issuer = await this.getIdtyByIndexOrFail(ctx, issuerId);
         const receiver = await this.getIdtyByIndexOrFail(ctx, receiverId);
@@ -236,16 +236,18 @@ export class DataHandler {
           issuer,
           receiver,
           createdOn,
+          createdIn,
+          updatedOn: createdOn,
+          updatedIn: createdIn,
           expireOn,
-          createdIn: eventType,
         });
         // the cert has already existed, expired, and is created again
         // we update it accordingly
       } else {
         cert.isActive = true;
-        cert.createdOn = createdOn;
+        cert.updatedOn = createdOn;
+        cert.updatedIn = createdIn;
         cert.expireOn = expireOn;
-        cert.createdIn = eventType;
       }
 
       // update cert and add event
@@ -270,10 +272,11 @@ export class DataHandler {
         relations: { issuer: true, receiver: true },
         where: { issuer: { index: issuerId }, receiver: { index: receiverId } },
       });
-      const eventType = await ctx.store.getOrFail(Event, event.id);
+      const createdIn = await ctx.store.getOrFail(Event, event.id);
       // update expiration date
       cert.expireOn = expireOn;
-      cert.createdIn = eventType;
+      cert.updatedOn = blockNumber;
+      cert.updatedIn = createdIn;
       this.data.certification.set([issuerId, receiverId], cert);
       this.data.certEvent.push(
         new CertEvent({
@@ -291,7 +294,7 @@ export class DataHandler {
       const { issuerId, receiverId, blockNumber, event } = c;
       // should never fail because cert removal can only happen on existing cert
       // and cert should not be removed at their creation block
-      const eventType = await ctx.store.getOrFail(Event, event.id);
+      const createdIn = await ctx.store.getOrFail(Event, event.id);
       const cert = await ctx.store.findOneOrFail(Cert, {
         relations: { issuer: true, receiver: true },
         where: { issuer: { index: issuerId }, receiver: { index: receiverId } },
@@ -299,7 +302,6 @@ export class DataHandler {
       // update cert
       cert.isActive = false;
       cert.expireOn = blockNumber;
-      cert.createdIn = eventType;
       this.data.certification.set([issuerId, receiverId], cert);
 
       this.data.certEvent.push(
@@ -402,7 +404,7 @@ export class DataHandler {
     // Process universal dividend
     for (const ud of newData.universalDividend) {
       const { blockNumber, amount, monetaryMass, membersCount, event, timestamp } = ud;
-      const eventType = await ctx.store.getOrFail(Event, event.id);
+      const createdIn = await ctx.store.getOrFail(Event, event.id);
       this.data.universalDividend.push(new UniversalDividend({
         id: event.id,
         blockNumber,
@@ -410,7 +412,7 @@ export class DataHandler {
         amount: Number(amount),
         monetaryMass,
         membersCount: Number(membersCount),
-        event: eventType,
+        event: createdIn,
       }));
     }
 
@@ -418,7 +420,7 @@ export class DataHandler {
     for (const udReeval of newData.udReeval) {
       const { blockNumber, newUdAmount, monetaryMass, membersCount, event, timestamp } =
         udReeval;
-      const eventType = await ctx.store.getOrFail(Event, event.id);
+      const createdIn = await ctx.store.getOrFail(Event, event.id);
       this.data.udReeval.push(new UdReeval({
         id: event.id,
         blockNumber,
@@ -426,7 +428,7 @@ export class DataHandler {
         newUdAmount: Number(newUdAmount),
         monetaryMass,
         membersCount: Number(membersCount),
-        event: eventType,
+        event: createdIn,
       }));
     }
   }
diff --git a/src/genesis.ts b/src/genesis/genesis.ts
similarity index 86%
rename from src/genesis.ts
rename to src/genesis/genesis.ts
index c522be527685059cb742b93f6c308b06082cfe73..77d44c92d2bad1b264273c0851704c687eaf6f7f 100644
--- a/src/genesis.ts
+++ b/src/genesis/genesis.ts
@@ -1,8 +1,10 @@
 import { readFileSync } from "fs";
 import path from "path/posix";
-import { Account, Block, Cert, ChangeOwnerKey, Event, EventType, Identity, MembershipEvent, SmithCert, SmithStatus, Transfer } from "./model";
-import type { Address, Ctx, Genesis, IdtyIndex, TransactionHistory, Tx } from "./types_custom";
-import { bytesToString } from "./utils";
+import { Account, Block, Cert, CertEvent, ChangeOwnerKey, Event, EventType, Identity, MembershipEvent, SmithCert, SmithStatus, Transfer } from "../model";
+import type { Address, Ctx, Genesis, IdtyIndex, TransactionHistory, Tx } from "../types_custom";
+import { bytesToString } from "../utils";
+import { AccountId32 } from "../types/v800";
+import { BlocksV1Reader } from "./genesis_update_blockv1";
 
 export async function saveGenesis(ctx: Ctx, block: Block) {
   const genesis_path = `./assets/${process.env.GENESIS_FILE}`;
@@ -15,8 +17,10 @@ export async function saveGenesis(ctx: Ctx, block: Block) {
 
   const accounts: Map<Address, Account> = new Map();
   const identities: Map<IdtyIndex, Identity> = new Map();
+  const identitiesMap: Map<AccountId32, IdtyIndex> = new Map();
   const chok: ChangeOwnerKey[] = [];
-  const certs: Cert[] = [];
+  const certs: Map<string, Cert> = new Map();
+  const certsEvents: CertEvent[] = [];
   const smithCerts: SmithCert[] = [];
   const membershipsEvents: MembershipEvent[] = [];
 
@@ -59,6 +63,7 @@ export async function saveGenesis(ctx: Ctx, block: Block) {
 
     // add identity to list
     identities.set(idty.index, the_identity);
+    identitiesMap.set(idty.value.owner_key, idty.index);
 
     // if changed owner key, also add event
     if (idty.value.old_owner_key != null) {
@@ -100,17 +105,20 @@ export async function saveGenesis(ctx: Ctx, block: Block) {
   // collect certifications
   for (const [receiver_index, certs_received] of Object.entries(genesis.certification.certsByReceiver)) {
     for (const [issuer_index, expiration_block] of Object.entries(certs_received)) {
-      certs.push(
+
+      certs.set(`${issuer_index}-${receiver_index}`,
         new Cert({
           // cert id is different in genesis than in the rest of blockchain
           id: `genesis-cert-${issuer_index}-${receiver_index}`,
           isActive: true,
           issuer: identities.get(parseInt(issuer_index)),
           receiver: identities.get(parseInt(receiver_index)),
-          //TODO: Set negative block number for genesis certs
           createdOn: 0,
+          updatedOn: 0,
           createdIn: genesis_event,
+          updatedIn: genesis_event,
           expireOn: expiration_block as number,
+          certHistory: [],
         })
       );
     }
@@ -135,12 +143,23 @@ export async function saveGenesis(ctx: Ctx, block: Block) {
     }
   }
 
+  // collect g1v1 blocks to update events and certifications
+  const blocksV1Reader = new BlocksV1Reader({
+    ctx,
+    identitiesMap,
+    certs,
+    identities,
+    certsEvents,
+  });
+  await blocksV1Reader.readBlocksV1();
+
   ctx.log.info("Saving genesis");
 
   // insert everything in storage
   await ctx.store.insert([...accounts.values()]);
   await ctx.store.insert([...identities.values()]);
-  await ctx.store.insert(certs);
+  await ctx.store.insert([...certs.values()]);
+  await ctx.store.insert(certsEvents);
   await ctx.store.insert(chok);
   await ctx.store.insert(smithCerts);
   await ctx.store.insert(membershipsEvents);
@@ -213,8 +232,10 @@ export async function saveGenesis(ctx: Ctx, block: Block) {
   ctx.log.info("Saving transaction history");
   await ctx.store.insert([...other_accounts.values()]);
   await ctx.store.insert(genesis_transfers);
+
+  ctx.log.info("Flushing changes to storage, this can take a while...");
   await ctx.store.flush();
-  ctx.log.info("Saved transaction history");
-  ctx.log.info("======================");
+  ctx.log.info("Genesis history saved!");
+  ctx.log.info("=====================");
   ctx.log.info("Starting blockchain indexing");
 }
diff --git a/src/genesis/genesis_chunks.ts b/src/genesis/genesis_chunks.ts
new file mode 100644
index 0000000000000000000000000000000000000000..9f8fc7de1d448f93af0455bf4b0caec323dcf42e
--- /dev/null
+++ b/src/genesis/genesis_chunks.ts
@@ -0,0 +1,119 @@
+import fs from 'fs';
+import { decode as base58Decode } from 'bs58';
+import axios from 'axios';
+import { hexToUint8Array, pubkeyToAddress } from "../utils";
+import { BlockParsed, Certification, Chunk } from '../types_custom';
+
+async function fetchCurrentBlockNumber(): Promise<number> {
+    // const apiUrl = 'https://git.duniter.org/api/v4/projects/nodes%2Frust%2Fduniter-v2s/repository/files/resources%2Fg1-data.json/raw?ref=master';
+    // const response = await axios.get(apiUrl);
+    // const data = response.data;
+    // return data.current_block.number;
+
+    // For gdev 801
+    return 700935;
+}
+
+function parseCertification(certificationString: string): Certification {
+    const [issuer, receiver] = certificationString.split(':');
+    return {
+        issuer: pubkeyToAddress(issuer),
+        receiver: pubkeyToAddress(receiver),
+    };
+}
+
+async function fetchChunk(url: string): Promise<Chunk> {
+    const response = await axios.get(url);
+    return response.data;
+}
+
+export async function processChunks() {
+    const baseUrl = 'https://files.datajune.coinduf.eu/chunks/';
+    const outputFile = 'assets/blocks-v1.json';
+
+    const processedBlocks = new Set<number>();
+    const blockBuffer: BlockParsed[] = [];
+    const chunkBatchSize = 20;
+
+    const fileExists = fs.existsSync(outputFile);
+    const writeStream = fs.createWriteStream(outputFile, { flags: fileExists ? 'a' : 'w' });
+
+    if (!fileExists) {
+        writeStream.write('[');
+    }
+
+    try {
+        const genesisBlockNumber = await fetchCurrentBlockNumber();
+        console.log(`Genesis block number: ${genesisBlockNumber.toLocaleString('en-US')}\n`);
+
+        const response = await axios.get(baseUrl);
+        const html = response.data;
+
+        const chunkFileRegex = /chunk_(\d+)-250\.json/g;
+        const chunkFiles = [];
+
+        let match: RegExpExecArray | null;
+        const chunkNumbers: number[] = [];
+        while ((match = chunkFileRegex.exec(html)) !== null) {
+            const chunkNumber = parseInt(match[1]);
+            if (chunkNumbers.includes(chunkNumber)) {
+                continue;
+            }
+            chunkNumbers.push(chunkNumber);
+            chunkFiles.push({ number: chunkNumber, fileName: match[0] });
+        }
+
+        chunkFiles.sort((a, b) => a.number - b.number);
+
+        if (!fs.existsSync(outputFile)) {
+            fs.writeFileSync(outputFile, '[]');
+        }
+
+        chunkLoop: for (let i = 0; i < chunkFiles.length; i++) {
+            const { fileName } = chunkFiles[i];
+            const chunkUrl = baseUrl + fileName;
+            const chunk: Chunk = await fetchChunk(chunkUrl);
+
+            for (const block of chunk.blocks) {
+                if (block.number > genesisBlockNumber) {
+                    console.log(`Importing v1 block ${(block.number - 1).toLocaleString('en-US')}`);
+                    writeStream.write(JSON.stringify(blockBuffer).slice(1, -1));
+                    writeStream.write(']');
+                    break chunkLoop;
+                }
+                if (block.number % 10000 === 0 && !processedBlocks.has(block.number)) {
+                    console.log(`Importing v1 block ${block.number.toLocaleString('en-US')}`);
+                    processedBlocks.add(block.number);
+                }
+
+                const { version, issuer, hash, previousHash, number, medianTime } = block;
+
+                const newBlock: BlockParsed = {
+                    height: (genesisBlockNumber - number + 1) * -1, // Reverse block numbering
+                    version,
+                    validator: base58Decode(issuer),
+                    hash: hexToUint8Array(hash),
+                    parentHash: hexToUint8Array(previousHash ?? hash),
+                    timestamp: medianTime
+                } as BlockParsed;
+                newBlock.certifications = block.certifications.map(parseCertification);
+
+                if (newBlock.certifications.length > 0 || block.transactions.length > 0) {
+                    newBlock.hasEvents = true;
+                }
+
+                blockBuffer.push(newBlock);
+            }
+
+            if ((i + 1) % chunkBatchSize === 0) {
+                writeStream.write(JSON.stringify(blockBuffer).slice(1, -1));
+                writeStream.write(',\n');
+                blockBuffer.length = 0;
+            }
+        }
+        console.log(`Blocks written to ${outputFile}`);
+    } catch (error) {
+        console.error('An error occurred:', error);
+    }
+}
+
diff --git a/src/genesis/genesis_update_blockv1.ts b/src/genesis/genesis_update_blockv1.ts
new file mode 100644
index 0000000000000000000000000000000000000000..da3ae1321dd3da7e47bf06e13586c97121a50836
--- /dev/null
+++ b/src/genesis/genesis_update_blockv1.ts
@@ -0,0 +1,144 @@
+import { createReadStream, existsSync } from "fs";
+import path from "path";
+import { parser } from "stream-json";
+import { pipeline, streamArray } from "stream-json/streamers/StreamArray";
+import { BlockParsed, BlocksV1ReaderConfig, Ctx, IdtyIndex } from "../types_custom";
+import { Block, Cert, Event, CertEvent, EventType, Identity } from "../model";
+import { promisify } from "util";
+import { AccountId32 } from "../types/v800";
+import { processChunks } from "./genesis_chunks";
+
+export class BlocksV1Reader {
+    private ctx: Ctx;
+    private identitiesMap: Map<AccountId32, IdtyIndex>;
+    private certs: Map<string, Cert>;
+    private identities: Map<IdtyIndex, Identity>;
+    private certsEvents: CertEvent[];
+
+    constructor(config: BlocksV1ReaderConfig) {
+        this.ctx = config.ctx;
+        this.identitiesMap = config.identitiesMap;
+        this.certs = config.certs;
+        this.identities = config.identities;
+        this.certsEvents = config.certsEvents;
+    }
+
+    /**
+     * Read blocks v1 from the json file and insert them into the database.
+     * Also, create the corresponding events and certifications.
+     */
+    async readBlocksV1(): Promise<void> {
+        const blocksV1Path = 'assets/blocks-v1.json';
+        const filePath = path.resolve(process.cwd(), blocksV1Path);
+        if (!existsSync(filePath)) {
+            await processChunks();
+        }
+        const jsonStream = createReadStream(filePath);
+
+        const parserStream = parser();
+        const streamer = streamArray();
+
+        streamer.on('data', ({ value }) => {
+            const blockV1 = value as BlockParsed;
+            if (blockV1.height % 10000 === 0) {
+                this.ctx.log.info(`Processing v1 block ${blockV1.height.toLocaleString('en-US')}`);
+            }
+
+            const newBlock = this.createBlock(blockV1);
+            const eventV1 = this.createEvent(newBlock);
+
+            this.ctx.store.insert(newBlock).then(() => this.ctx.store.insert(eventV1));
+
+            this.processCertifications(blockV1, eventV1);
+        });
+
+        await promisify(pipeline)(jsonStream, parserStream, streamer);
+    }
+
+    private createBlock(blockV1: BlockParsed): Block {
+        return new Block({
+            id: `genesis-block-${blockV1.height}`,
+            height: blockV1.height,
+            timestamp: new Date(blockV1.timestamp * 1000),
+            hash: blockV1.hash,
+            parentHash: blockV1.parentHash,
+            eventsCount: blockV1.hasEvents ? 1 : 0,
+            validator: blockV1.validator,
+            stateRoot: new Uint8Array(),
+            extrinsicsicRoot: new Uint8Array(),
+            specVersion: blockV1.version,
+            extrinsicsCount: 0,
+            implVersion: blockV1.version,
+            specName: "",
+            implName: "",
+            callsCount: 0,
+            extrinsics: [],
+        });
+    }
+
+    private createEvent(newBlock: Block): Event {
+        return new Event({
+            id: `genesis-event-${newBlock.height}`,
+            index: newBlock.height,
+            block: newBlock,
+            phase: 'genesis-phase',
+            pallet: 'genesis-cert',
+            name: 'genesis-cert',
+        });
+    }
+
+    private processCertifications(blockV1: BlockParsed, eventV1: Event): void {
+        for (const certv1 of blockV1.certifications) {
+            const issuerIdtyIndex = this.identitiesMap.get(certv1.issuer);
+            const receiverIdtyIndex = this.identitiesMap.get(certv1.receiver);
+            const cert = this.certs.get(`${issuerIdtyIndex}-${receiverIdtyIndex}`);
+
+            if (cert) {
+                this.updateCert(cert, blockV1, eventV1);
+            } else {
+                this.createCert(issuerIdtyIndex!, receiverIdtyIndex!, blockV1, eventV1);
+            }
+        }
+    }
+
+    private updateCert(cert: Cert, blockV1: BlockParsed, eventV1: Event): void {
+        if (cert.createdOn === 0) {
+            cert.createdOn = blockV1.height;
+            cert.createdIn = eventV1;
+        }
+        cert.updatedOn = blockV1.height;
+        cert.updatedIn = eventV1;
+        const certEvent = new CertEvent({
+            id: `${cert.id}-${blockV1.height}`,
+            cert,
+            blockNumber: blockV1.height,
+            eventType: EventType.Renewal,
+            event: eventV1,
+        });
+        this.certsEvents.push(certEvent);
+    }
+
+    private createCert(issuerIdtyIndex: number, receiverIdtyIndex: number, blockV1: BlockParsed, eventV1: Event): void {
+        const cert = new Cert({
+            id: `genesis-cert-${issuerIdtyIndex}-${receiverIdtyIndex}`,
+            isActive: false,
+            issuer: this.identities.get(issuerIdtyIndex),
+            receiver: this.identities.get(receiverIdtyIndex),
+            createdOn: blockV1.height,
+            updatedOn: blockV1.height,
+            createdIn: eventV1,
+            updatedIn: eventV1,
+            expireOn: 0,
+            certHistory: [],
+        });
+        const certEvent = new CertEvent({
+            id: `${cert.id}-${blockV1.height}`,
+            cert,
+            blockNumber: blockV1.height,
+            eventType: EventType.Creation,
+            event: eventV1,
+        });
+        this.certsEvents.push(certEvent);
+        this.certs.set(`${issuerIdtyIndex}-${receiverIdtyIndex}`, cert);
+    }
+}
diff --git a/src/main.ts b/src/main.ts
index 525b8772eb2840ee87f7fd0832cbc387f678d943..d60f073fc88012a66aef7940839a926cdf3e90dd 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -1,12 +1,12 @@
 import { TypeormDatabaseWithCache } from "@belopash/typeorm-store";
 import assert from "assert";
 import { DataHandler } from "./data_handler";
-import { saveGenesis } from "./genesis";
+import { saveGenesis } from "./genesis/genesis";
 import { saveBlock, saveCall, saveEvent, saveExtrinsic } from "./giant-squid";
 import { processor } from "./processor";
 import { constants, events as events_t } from "./types";
 import { Ctx, NewData } from "./types_custom";
-import { ss58encode } from "./utils";
+import { pubkeyToAddress } from "./utils";
 
 // main processor loop able to manage a batch of blocks
 processor.run(new TypeormDatabaseWithCache(), async (ctx) => {
@@ -101,7 +101,7 @@ function collectDataFromEvents(ctx: Ctx, newData: NewData) {
       switch (event.name) {
         case events_t.system.newAccount.name: {
           const evt = events_t.system.newAccount.v800.decode(event);
-          newData.accounts.push(ss58encode(evt.account));
+          newData.accounts.push(pubkeyToAddress(evt.account));
           break;
         }
 
@@ -109,7 +109,7 @@ function collectDataFromEvents(ctx: Ctx, newData: NewData) {
           const evt =
             events_t.account.accountLinked.v800.decode(event);
           newData.accountLink.push({
-            accountId: ss58encode(evt.who),
+            accountId: pubkeyToAddress(evt.who),
             index: evt.identity,
           });
           break;
@@ -119,7 +119,7 @@ function collectDataFromEvents(ctx: Ctx, newData: NewData) {
           const evt =
             events_t.account.accountUnlinked.v800.decode(event);
           newData.accountUnlink.push({
-            accountId: ss58encode(evt),
+            accountId: pubkeyToAddress(evt),
           });
           break;
         }
@@ -134,8 +134,8 @@ function collectDataFromEvents(ctx: Ctx, newData: NewData) {
             id: event.id,
             blockNumber: block.header.height,
             timestamp: new Date(block.header.timestamp),
-            from: ss58encode(evt.from),
-            to: ss58encode(evt.to),
+            from: pubkeyToAddress(evt.from),
+            to: pubkeyToAddress(evt.to),
             amount: evt.amount,
           });
           break;
@@ -145,7 +145,7 @@ function collectDataFromEvents(ctx: Ctx, newData: NewData) {
           const evt = events_t.identity.idtyCreated.v800.decode(event);
           newData.identitiesCreated.push({
             index: evt.idtyIndex,
-            accountId: ss58encode(evt.ownerKey),
+            accountId: pubkeyToAddress(evt.ownerKey),
             blockNumber: block.header.height,
             expireOn:
               block.header.height +
@@ -218,7 +218,7 @@ function collectDataFromEvents(ctx: Ctx, newData: NewData) {
           newData.idtyChangedOwnerKey.push({
             id: event.id,
             index: evt.idtyIndex,
-            accountId: ss58encode(evt.newOwnerKey),
+            accountId: pubkeyToAddress(evt.newOwnerKey),
             blockNumber: block.header.height,
             expireOn:
               block.header.height +
diff --git a/src/model/generated/cert.model.ts b/src/model/generated/cert.model.ts
index add3ff0e04e81ae48eabcd1f3c2dba05c0097eb1..4b5f9b16f8c88e84394bab53634f551ecc3c1f45 100644
--- a/src/model/generated/cert.model.ts
+++ b/src/model/generated/cert.model.ts
@@ -30,20 +30,36 @@ export class Cert {
     isActive!: boolean
 
     /**
-     * the last createdOn value
+     * the first block number of the certification creation
      */
     @Column_("int4", {nullable: false})
     createdOn!: number
 
     /**
-     * the current expireOn value
+     * the event corresponding to the first certification creation
+     */
+    @Index_()
+    @ManyToOne_(() => Event, {nullable: true})
+    createdIn!: Event
+
+    /**
+     * the last block number of the certification renewal
      */
     @Column_("int4", {nullable: false})
-    expireOn!: number
+    updatedOn!: number
 
+    /**
+     * the event corresponding to the last certification renewal
+     */
     @Index_()
     @ManyToOne_(() => Event, {nullable: true})
-    createdIn!: Event
+    updatedIn!: Event
+
+    /**
+     * the current expireOn value
+     */
+    @Column_("int4", {nullable: false})
+    expireOn!: number
 
     @OneToMany_(() => CertEvent, e => e.cert)
     certHistory!: CertEvent[]
diff --git a/src/types_custom.ts b/src/types_custom.ts
index 22ee964bc502b6a12b5492936693ec301f8f1348..f99c1de7fb56abe44a24aed34f95e23ab7133777 100644
--- a/src/types_custom.ts
+++ b/src/types_custom.ts
@@ -13,7 +13,7 @@ import {
   UniversalDividend,
 } from "./model";
 import { Event, ProcessorContext } from "./processor";
-import { MembershipRemovalReason, RemovalReason, RevocationReason } from "./types/v800";
+import { AccountId32, MembershipRemovalReason, RemovalReason, RevocationReason } from "./types/v800";
 
 // type aliases
 type BlockNumber = number;
@@ -109,6 +109,70 @@ interface GenInitialAuthorities {
   initialAuthorities: Map<number, Array<any>>;
 }
 
/**
 * A certification parsed from a v1 block, identifying both parties by
 * their v1 (base58-encoded) public keys.
 */
export interface Certification {
  issuer: string;
  receiver: string;
}
+
/**
 * Raw shape of a block as found in the v1 chunk dumps, before parsing.
 * Field names mirror the v1 JSON verbatim; only the fields consumed by
 * the importer (number, hash, previousHash, issuer, medianTime, version,
 * certifications, transactions) are relied upon here.
 */
export interface BlockV1 {
  wrong: boolean;
  version: number;
  // sequential v1 block number (0-based)
  number: number;
  currency: string;
  // hex-encoded block hash
  hash: string;
  // hex-encoded parent hash; absent/undefined on the genesis block
  previousHash: string;
  // base58-encoded public key of the block author
  issuer: string;
  previousIssuer: string;
  dividend: null;
  time: number;
  powMin: number;
  unitbase: number;
  membersCount: number;
  issuersCount: number;
  issuersFrame: number;
  issuersFrameVar: number;
  identities: any[];
  joiners: any[];
  actives: any[];
  leavers: any[];
  revoked: any[];
  excluded: any[];
  // raw certification lines; converted to Certification by parseCertification
  certifications: any[];
  transactions: any[];
  // median time of the block, in seconds since epoch
  medianTime: number;
  parameters: string;
  inner_hash: string;
  signature: string;
  nonce: number;
  monetaryMass: number;
  writtenOn: number;
  written_on: string;
}
+
/**
 * A v1 block converted to the intermediate form written to
 * assets/blocks-v1.json and consumed by BlocksV1Reader.
 */
export interface BlockParsed {
  version: number;
  // decoded (raw-byte) public key of the block author
  validator: Uint8Array;
  hash: Uint8Array;
  parentHash: Uint8Array;
  // true when the block carries certifications or transactions
  hasEvents: boolean;
  // negative height: v1 blocks are numbered backwards from genesis
  height: number;
  // seconds since epoch (v1 medianTime)
  timestamp: number;
  identities: any[];
  certifications: Certification[];
}
+
/**
 * One chunk file of the v1 dump: a batch of consecutive raw blocks.
 */
export interface Chunk {
  blocks: BlockV1[];
}
+
/**
 * Dependencies injected into BlocksV1Reader. The maps and the certsEvents
 * array are shared with (and mutated for) the caller.
 */
export interface BlocksV1ReaderConfig {
  ctx: Ctx;
  // v1 public key -> identity index
  identitiesMap: Map<AccountId32, IdtyIndex>;
  // "<issuerIdx>-<receiverIdx>" -> certification entity
  certs: Map<string, Cert>;
  identities: Map<IdtyIndex, Identity>;
  // accumulator the reader appends certification history events to
  certsEvents: CertEvent[];
}
+
 // =========================== DataHandler =========================== //
 
 // a way to group data prepared for database insertion
diff --git a/src/utils.ts b/src/utils.ts
index 7c4159ff24ca49d2d05074f4a5aabf6ee312d641..bc5b1d7ff815288a25f5b076aa33b8b358e6be42 100644
--- a/src/utils.ts
+++ b/src/utils.ts
@@ -1,7 +1,7 @@
 import * as ss58 from "@subsquid/ss58";
-import fs from 'fs';
-import path from 'path';
+import fs, { createReadStream, readFileSync } from 'fs';
 import { Address } from "./types_custom";
+import bs58 from 'bs58';
 
 /**
  * Define ss58 encoding with custom prefix
@@ -9,10 +9,16 @@ import { Address } from "./types_custom";
 const SS58_PREFIX = 42;
 
 /**
- * Transform hex encoded address to ss58 encoded address
+ * Transform a public key to an ss58 encoded address
  */
-export function ss58encode(hex_encoded_address: string): Address {
-  return ss58.codec(SS58_PREFIX).encode(hex_encoded_address);
+export function pubkeyToAddress(pubkey: string | Uint8Array): Address {
+
+  // if the pubkey is not a hex string, it's a base58 encoded string
+  if (typeof pubkey === "string" && !pubkey.startsWith("0x")) {
+    pubkey = bs58.decode(pubkey);
+  }
+
+  return ss58.codec(SS58_PREFIX).encode(pubkey);
 }
 
 /**
@@ -41,3 +47,21 @@ export function ensureDirectoryExists(dirPath: string) {
     fs.mkdirSync(dirPath, { recursive: true });
   }
 }
+
+/**
+ * Converts a hexadecimal string to a Uint8Array.
+ */
+export function hexToUint8Array(hex: string): Uint8Array {
+  if (hex.length % 2 !== 0) {
+    throw new Error("The hexadecimal string must have an even length.");
+  }
+
+  const numBytes = hex.length / 2;
+  const uint8Array = new Uint8Array(numBytes);
+
+  for (let i = 0; i < numBytes; i++) {
+    uint8Array[i] = parseInt(hex.substring(i * 2, 2), 16);
+  }
+
+  return uint8Array;
+}