diff --git a/app/lib/dal/sqliteDAL/BlockDAL.ts b/app/lib/dal/sqliteDAL/BlockDAL.ts
new file mode 100644
index 0000000000000000000000000000000000000000..48cbd32878254d4e08c8b6bd3fff39d263545ab2
--- /dev/null
+++ b/app/lib/dal/sqliteDAL/BlockDAL.ts
@@ -0,0 +1,175 @@
+// Source file from duniter: Crypto-currency software to manage libre currency such as Ğ1
+// Copyright (C) 2018  Cedric Moreau <cem.moreau@gmail.com>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+
+import {AbstractSQLite} from "./AbstractSQLite"
+import {SQLiteDriver} from "../drivers/SQLiteDriver"
+import {DBBlock} from "../../db/DBBlock"
+
+const constants = require('../../constants');
+
+const IS_FORK = true;
+const IS_NOT_FORK = false;
+
+export class BlockDAL extends AbstractSQLite<DBBlock> {
+
+  private current: DBBlock|null
+
+  constructor(driver:SQLiteDriver) {
+    super(
+      driver,
+      'block',
+      // PK fields
+      ['number','hash'],
+      // Fields
+      ['fork', 'hash', 'inner_hash', 'signature', 'currency', 'issuer', 'issuersCount', 'issuersFrame', 'issuersFrameVar', 'parameters', 'previousHash', 'previousIssuer', 'version', 'membersCount', 'monetaryMass', 'UDTime', 'medianTime', 'dividend', 'unitbase', 'time', 'powMin', 'number', 'nonce', 'transactions', 'certifications', 'identities', 'joiners', 'actives', 'leavers', 'revoked', 'excluded', 'len', 'legacy'],
+      // Arrays
+      ['identities','certifications','actives','revoked','excluded','leavers','joiners','transactions'],
+      // Booleans
+      ['wrong', 'legacy'],
+      // BigIntegers
+      ['monetaryMass'],
+      // Transient
+      []
+    )
+
+    /**
+     * Periodically cleans the current block cache.
+     * The cache does not always seem to stay correct and can get the node stuck, so it is preferable to reset it periodically.
+     */
+    setInterval(() => this.cleanCache(), constants.CURRENT_BLOCK_CACHE_DURATION);
+  }
+
+  async init() {
+    await this.exec('BEGIN;' +
+      'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' +
+      'fork BOOLEAN NOT NULL,' +
+      'legacy BOOLEAN NOT NULL,' +
+      'hash VARCHAR(64) NOT NULL,' +
+      'inner_hash VARCHAR(64) NOT NULL,' +
+      'signature VARCHAR(100) NOT NULL,' +
+      'currency VARCHAR(50) NOT NULL,' +
+      'issuer VARCHAR(50) NOT NULL,' +
+      'issuersFrame INTEGER NULL,' +
+      'issuersFrameVar INTEGER NULL,' +
+      'issuersCount INTEGER NULL,' +
+      'len INTEGER NULL,' +
+      'parameters VARCHAR(255),' +
+      'previousHash VARCHAR(64),' +
+      'previousIssuer VARCHAR(50),' +
+      'version INTEGER NOT NULL,' +
+      'membersCount INTEGER NOT NULL,' +
+      'monetaryMass VARCHAR(100) DEFAULT \'0\',' +
+      'UDTime DATETIME,' +
+      'medianTime DATETIME NOT NULL,' +
+      'dividend INTEGER DEFAULT \'0\',' +
+      'unitbase INTEGER NULL,' +
+      'time DATETIME NOT NULL,' +
+      'powMin INTEGER NOT NULL,' +
+      'number INTEGER NOT NULL,' +
+      'nonce INTEGER NOT NULL,' +
+      'transactions TEXT,' +
+      'certifications TEXT,' +
+      'identities TEXT,' +
+      'joiners TEXT,' +
+      'actives TEXT,' +
+      'leavers TEXT,' +
+      'revoked TEXT,' +
+      'excluded TEXT,' +
+      'created DATETIME DEFAULT NULL,' +
+      'updated DATETIME DEFAULT NULL,' +
+      'PRIMARY KEY (number,hash)' +
+      ');' +
+      'CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);' +
+      'CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);' +
+      'COMMIT;')
+  }
+
+  cleanCache() {
+    this.current = null
+  }
+
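+  /** Returns the current HEAD (highest non-fork block). The result is cached until cleanCache() runs or a newer block is saved. */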
+  async getCurrent() {
+    if (!this.current) {
+      this.current = (await this.query('SELECT * FROM block WHERE NOT fork ORDER BY number DESC LIMIT 1'))[0];
+    }
+    return this.current
+  }
+
+  async getBlock(number:string | number): Promise<DBBlock|null> {
+    return (await this.query('SELECT * FROM block WHERE number = ? and NOT fork', [parseInt(String(number))]))[0];
+  }
+
+  async getAbsoluteBlock(number:number, hash:string): Promise<DBBlock|null> {
+    return (await this.query('SELECT * FROM block WHERE number = ? and hash = ?', [number, hash]))[0];
+  }
+
+  getBlocks(start:number, end:number) {
+    return this.query('SELECT * FROM block WHERE number BETWEEN ? and ? and NOT fork ORDER BY number ASC', [start, end]);
+  }
+
+  async lastBlockOfIssuer(issuer:string) {
+    return (await this.query('SELECT * FROM block WHERE issuer = ? and NOT fork ORDER BY number DESC LIMIT 1', [issuer]))[0]
+  }
+
+  async getCountOfBlocksIssuedBy(issuer:string) {
+    let res: any = await this.query('SELECT COUNT(*) as quantity FROM block WHERE issuer = ? and NOT fork', [issuer]);
+    return res[0].quantity;
+  }
+
+  getPotentialForkBlocks(numberStart:number, medianTimeStart:number, maxNumber:number) {
+    return this.query('SELECT * FROM block WHERE fork AND number >= ? AND number <= ? AND medianTime >= ? ORDER BY number DESC', [numberStart, maxNumber, medianTimeStart]);
+  }
+
+  getPotentialRoots() {
+    return this.query('SELECT * FROM block WHERE fork AND number = ?', [0])
+  }
+
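+  /** Inserts all the given blocks with a single multi-row INSERT statement, then resets the current-block cache. */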
+  async saveBunch(blocks:DBBlock[]) {
+    let queries = "INSERT INTO block (" + this.fields.join(',') + ") VALUES ";
+    for (let i = 0, len = blocks.length; i < len; i++) {
+      let block = blocks[i];
+      queries += this.toInsertValues(block);
+      if (i + 1 < len) {
+        queries += ",\n";
+      }
+    }
+    await this.exec(queries);
+    this.cleanCache();
+  }
+
+  async saveBlock(block:DBBlock) {
+    let saved = await this.saveBlockAs(block, IS_NOT_FORK);
+    if (!this.current || this.current.number < block.number) {
+      this.current = block;
+    }
+    return saved;
+  }
+
+  saveSideBlock(block:DBBlock) {
+    return this.saveBlockAs(block, IS_FORK)
+  }
+
+  private async saveBlockAs(block:DBBlock, fork:boolean) {
+    block.fork = fork;
+    return await this.saveEntity(block);
+  }
+
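+  /** Flags the block(s) at the given number as fork blocks and replaces the cached current block with the given previous block. */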
+  async setSideBlock(number:number, previousBlock:DBBlock|null) {
+    await this.query('UPDATE block SET fork = ? WHERE number = ?', [true, number]);
+    this.current = previousBlock;
+  }
+
+  getNextForkBlocks(number:number, hash:string) {
+    return this.query('SELECT * FROM block WHERE fork AND number = ? AND previousHash like ? ORDER BY number', [number + 1, hash]);
+  }
+}
diff --git a/app/lib/dal/sqliteDAL/index/IIndexDAL.ts b/app/lib/dal/sqliteDAL/index/IIndexDAL.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4f5dfdf2d58d44fd39894cc51710881bbbd45869
--- /dev/null
+++ b/app/lib/dal/sqliteDAL/index/IIndexDAL.ts
@@ -0,0 +1,201 @@
+// Source file from duniter: Crypto-currency software to manage libre currency such as Ğ1
+// Copyright (C) 2018  Cedric Moreau <cem.moreau@gmail.com>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+
+import {SQLiteDriver} from "../../drivers/SQLiteDriver"
+import {FullIindexEntry, IindexEntry, Indexer} from "../../../indexer"
+import {AbstractSQLite} from "../AbstractSQLite"
+
+const _ = require('underscore');
+
+export interface OldIindexEntry extends IindexEntry {
+  pubkey: string
+  buid: string | null
+  revocation_sig:string | null
+}
+
+export class IIndexDAL extends AbstractSQLite<IindexEntry> {
+
+  constructor(driver:SQLiteDriver) {
+    super(
+      driver,
+      'i_index',
+      // PK fields
+      ['op', 'pub', 'created_on', 'written_on'],
+      // Fields
+      [
+        'op',
+        'uid',
+        'pub',
+        'hash',
+        'sig',
+        'created_on',
+        'written_on',
+        'writtenOn',
+        'member',
+        'wasMember',
+        'kick',
+        'wotb_id',
+        'legacy'
+      ],
+      // Arrays
+      [],
+      // Booleans
+      ['member', 'wasMember', 'kick', 'legacy'],
+      // BigIntegers
+      [],
+      // Transient
+      []
+    )
+  }
+
+  init() {
+    return this.exec('BEGIN;' +
+      'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' +
+      'op VARCHAR(10) NOT NULL,' +
+      'uid VARCHAR(100) NULL,' +
+      'pub VARCHAR(50) NOT NULL,' +
+      'hash VARCHAR(80) NULL,' +
+      'sig VARCHAR(80) NULL,' +
+      'created_on VARCHAR(80) NULL,' +
+      'written_on VARCHAR(80) NOT NULL,' +
+      'writtenOn INTEGER NOT NULL,' +
+      'member BOOLEAN NULL,' +
+      'wasMember BOOLEAN NULL,' +
+      'kick BOOLEAN NULL,' +
+      'legacy BOOLEAN NOT NULL,' +
+      'wotb_id INTEGER NULL,' +
+      'PRIMARY KEY (op,pub,created_on,written_on)' +
+      ');' +
+      'CREATE INDEX IF NOT EXISTS idx_iindex_pub ON i_index (pub);' +
+      'COMMIT;')
+  }
+
+  async getMembers() {
+    // Get the list of all known pubkeys (one result per pubkey)
+    const pubkeys = await this.query('SELECT DISTINCT(pub) FROM ' + this.table);
+    // We get the full, reduced representation for each pubkey
+    const reduced = await Promise.all(pubkeys.map(async (entry) => {
+      const reducable = await this.reducable(entry.pub);
+      return Indexer.DUP_HELPERS.reduce(reducable);
+    }));
+    // Keep only the current members, returned in the old entity format
+    const filtered = _.filter(reduced, (entry:IindexEntry) => entry.member);
+    return filtered.map((t:IindexEntry) => this.toCorrectEntity(t))
+  }
+
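+  /**
+   * Returns the pubkeys whose most recent i_index row carrying a member flag
+   * (highest written_on) still has member = true.
+   */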
+  getMembersPubkeys() {
+    return this.query('SELECT i1.pub ' +
+      'FROM i_index i1 ' +
+      'WHERE i1.member ' +
+      'AND CAST(i1.written_on as int) = (' +
+      ' SELECT MAX(CAST(i2.written_on as int)) ' +
+      ' FROM i_index i2 ' +
+      ' WHERE i1.pub = i2.pub ' +
+      ' AND i2.member IS NOT NULL' +
+      ')')
+  }
+
+  async getToBeKickedPubkeys() {
+    // All those who have been, or currently are, subject to kicking. Make one result per pubkey.
+    const reducables = Indexer.DUP_HELPERS.reduceBy(await this.sqlFind({ kick: true }), ['pub']);
+    // We get the full representation for each member
+    const reduced = await Promise.all(reducables.map(async (entry) => {
+      const reducable = await this.reducable(entry.pub);
+      return Indexer.DUP_HELPERS.reduce(reducable);
+    }))
+    // Filter on those to be kicked, return their pubkey
+    return _.filter(reduced, (entry:IindexEntry) => entry.kick).map((entry:IindexEntry) => entry.pub);
+  }
+
+  async searchThoseMatching(search:string) {
+    const reducables = Indexer.DUP_HELPERS.reduceBy(await this.sqlFindLikeAny({
+      pub: "%" + search + "%",
+      uid: "%" + search + "%"
+    }), ['pub']);
+    // We get the full representation for each member
+    return await Promise.all(reducables.map(async (entry) => {
+      return this.toCorrectEntity(Indexer.DUP_HELPERS.reduce(await this.reducable(entry.pub)))
+    }))
+  }
+
+  getFromPubkey(pubkey:string) {
+    return this.entityOrNull('pub', pubkey) as Promise<FullIindexEntry|null>
+  }
+
+  getFromUID(uid:string, retrieveOnPubkey = false) {
+    return this.entityOrNull('uid', uid, retrieveOnPubkey)
+  }
+
+  getFullFromPubkey(pub:string): Promise<FullIindexEntry> {
+    return this.entityOrNull('pub', pub) as Promise<FullIindexEntry>
+  }
+
+  getFullFromUID(uid:string): Promise<FullIindexEntry|null> {
+    return this.entityOrNull('uid', uid, true) as Promise<FullIindexEntry|null>
+  }
+
+  getFullFromHash(hash:string): Promise<FullIindexEntry|null> {
+    return this.entityOrNull('hash', hash, true) as Promise<FullIindexEntry|null>
+  }
+
+  reducable(pub:string) {
+    return this.query('SELECT * FROM ' + this.table + ' WHERE pub = ? ORDER BY CAST(written_on as integer) ASC', [pub])
+  }
+
+  removeBlock(blockstamp:string) {
+    return this.exec('DELETE FROM ' + this.table + ' WHERE written_on = \'' + blockstamp + '\'')
+  }
+
+  private async entityOrNull(field:string, value:any, retrieveOnField:boolean = false) {
+    let reducable = await this.query('SELECT * FROM ' + this.table + ' WHERE ' + field + ' = ?', [value]);
+    if (reducable.length) {
+      if (retrieveOnField) {
+        // Force full retrieval on `pub` field
+        reducable = await this.query('SELECT * FROM ' + this.table + ' WHERE pub = ? ORDER BY CAST(written_on as int) ASC', [reducable[0].pub]);
+      }
+      return this.toCorrectEntity(Indexer.DUP_HELPERS.reduce(reducable));
+    }
+    return null;
+  }
+
+  private toCorrectEntity(row:IindexEntry): OldIindexEntry {
+    // Map to the old entity format (legacy fields: pubkey, buid, revocation_sig)
+    return {
+      pubkey: row.pub,
+      pub: row.pub,
+      buid: row.created_on,
+      revocation_sig: null,
+      uid: row.uid,
+      hash: row.hash,
+      sig: row.sig,
+      created_on: row.created_on,
+      member: row.member,
+      wasMember: row.wasMember,
+      kick: row.kick,
+      wotb_id: row.wotb_id,
+      age: row.age,
+      index: row.index,
+      op: row.op,
+      writtenOn: row.writtenOn,
+      written_on: row.written_on
+    }
+  }
+
+  async getFromPubkeyOrUid(search: string) {
+    const idty = await this.getFromPubkey(search)
+    if (idty) {
+      return idty
+    }
+    return this.getFromUID(search, true) as Promise<FullIindexEntry|null>
+  }
+}
diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts
index 3b8ed67ab22d6616b011df6093b4cd6f99e07411..59c229f19a724175c2e1077d5f7630351e7dda11 100644
--- a/app/lib/dto/ConfDTO.ts
+++ b/app/lib/dto/ConfDTO.ts
@@ -25,6 +25,7 @@ export interface Keypair {
 export interface StorageDTO {
   storage?: {
     transactions?:boolean
+    wotwizard?:boolean
   }
 }
 
@@ -192,7 +193,8 @@ export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, Net
     },
     public powNoSecurity = false,
     public storage = {
-      transactions: false
+      transactions: false,
+      wotwizard: false,
     },
 ) {}
 
diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts
index a0a33e365ce0f38bc548d33fdaeb7e0f1636367c..c0aa31108bbd8432a1ece9c9cc43e67dd4aa41f5 100644
--- a/app/lib/system/directory.ts
+++ b/app/lib/system/directory.ts
@@ -118,6 +118,7 @@ export const MemFS = (initialTree:{ [folder:string]: { [file:string]: string }}
 export const Directory = {
 
   DATA_FILES: ['mindex.db', 'c_mindex.db', 'iindex.db', 'cindex.db', 'sindex.db', 'wallet.db', 'dividend.db', 'txs.db', 'peers.db'],
+  WW_FILES: ['wotwizard-export.db'],
   DATA_DIRS: ['data'],
 
   INSTANCE_NAME: getDomain(opts.mdb),
diff --git a/app/modules/dump.ts b/app/modules/dump.ts
index f6f6775c9cbb6dd88c5d79cd98db3e27931a74da..e03f3465b987b98773d2e08d272302470e739303 100644
--- a/app/modules/dump.ts
+++ b/app/modules/dump.ts
@@ -22,6 +22,32 @@ const Table = require('cli-table')
 
 module.exports = {
   duniter: {
+
+    service: {
+      neutral: (server:Server, conf:ConfDTO) => {
+        return {
+          startService: () => {
+            if (conf.storage && conf.storage.wotwizard) {
+              let fifo: Querable<any> = querablep(Promise.resolve())
+              server
+                .on('bcEvent', (e) => {
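+                  // Trigger a new dump on each HEAD change or fork switch, unless a dump is already running (in that case the event is simply skipped)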
+                  if ((e.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || e.bcEvent === OtherConstants.BC_EVENT.SWITCHED) && fifo.isFulfilled()) {
+                    fifo = querablep(fifo.then(async () => {
+                      try {
+                        await dumpWotWizard(server)
+                      } catch (e) {}
+                    }))
+                  }
+                })
+            }
+          },
+          stopService: () => {
+            // Never stops; it just waits for new blocks
+          }
+        }
+      }
+    },
+
     cli: [{
       name: 'dump [what] [name] [cond]',
       desc: 'Dumps data of the blockchain.',
@@ -32,27 +58,37 @@ module.exports = {
         const what: string = params[0] || ''
         const name: string = params[1] || ''
         const cond: string = params[2] || ''
-        switch (what) {
 
-          case 'current':
-            await dumpCurrent(server)
-            break
+        try {
+
+          switch (what) {
+
+            case 'current':
+              await dumpCurrent(server)
+              break
+
+            case 'volumes':
+              await dumpVolumes(server)
+              break
 
-          case 'volumes':
-            await dumpVolumes(server)
-            break
+            case 'table':
+              await dumpTable(server, name, cond)
+              break
 
-          case 'table':
-            await dumpTable(server, name, cond)
-            break
+            case 'history':
+              await dumpHistory(server, name)
+              break
 
-          case 'history':
-            await dumpHistory(server, name)
-            break
+            case 'wotwizard':
+              await dumpWotWizard(server)
+              break
 
-          default:
-            console.error(`Unknown dump ${what}`)
-            break
+            default:
+              console.error(`Unknown dump ${what}`)
+              break
+          }
+        } catch (e) {
+          console.error(e)
         }
         // Save DB
         await server.disconnect();
diff --git a/app/modules/dump/wotwizard/wotwizard.constants.ts b/app/modules/dump/wotwizard/wotwizard.constants.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6019e22a9a7ca417357b38747879d21b79dc63d4
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.constants.ts
@@ -0,0 +1,5 @@
+export const WotWizardConstants = {
+
+  DB_NAME: 'wotwizard-export.db',
+  BLOCKS_SAVE_BATCH_SIZE: 10,
+}
\ No newline at end of file
diff --git a/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts b/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts
new file mode 100644
index 0000000000000000000000000000000000000000..b8b1d439839c9ecd49880577ad8badd134a8baae
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts
@@ -0,0 +1,33 @@
+import {WotWizardDAL} from "./wotwizard.init.structure"
+import {Server} from "../../../../server"
+import {DBBlock} from "../../../lib/db/DBBlock"
+import {Underscore} from "../../../lib/common-libs/underscore"
+import {NewLogger} from "../../../lib/logger"
+
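+/**
+ * Copies the local mempool documents (pending identities, certifications and
+ * memberships) into the export, together with the blocks their blockstamps
+ * reference, when those blocks are not already present.
+ */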
+export async function copyMemPool(server: Server, wwDAL: WotWizardDAL) {
+
+  const logger = NewLogger()
+
+  const identities = await server.dal.idtyDAL.sqlListAll()
+
+  // Blocks on which the identities are based
+  const blocks = await Promise.all(identities.map(async idty => {
+    let b = await server.dal.getAbsoluteBlockByBlockstamp(idty.buid)
+    if (b) {
+      const b2 = await wwDAL.blockDao.getAbsoluteBlock(b.number, b.hash)
+      if (!b2) {
+        return b
+      }
+    }
+    return null
+  }))
+
+  const toPersist: DBBlock[] = Underscore.uniq(blocks.filter(b => b) as DBBlock[], false, b => [b.number, b.hash].join('-'))
+
+  logger.debug('Persisting %s blocks for identities...', toPersist.length)
+  await wwDAL.blockDao.insertBatch(toPersist.map(b => { (b as any).legacy = true; return b }))
+  await wwDAL.idtyDao.insertBatch(identities)
+  await wwDAL.certDao.insertBatch(await server.dal.certDAL.sqlListAll())
+  await wwDAL.msDao.insertBatch(await server.dal.msDAL.sqlListAll())
+}
\ No newline at end of file
diff --git a/app/modules/dump/wotwizard/wotwizard.delete.ts b/app/modules/dump/wotwizard/wotwizard.delete.ts
new file mode 100644
index 0000000000000000000000000000000000000000..5c3bf33b382bc5f661cfdd7e886c9717bb1b7063
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.delete.ts
@@ -0,0 +1,10 @@
+import {WotWizardDAL} from "./wotwizard.init.structure"
+
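+// Clears everything that gets re-exported on each dump: non-legacy blocks and i_index rows, plus the whole mempool copy.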
+export async function deleteNonLegacy(wwDAL: WotWizardDAL) {
+
+  await wwDAL.iindexDao.exec('DELETE FROM i_index WHERE NOT legacy')
+  await wwDAL.blockDao.exec('DELETE FROM block WHERE NOT legacy')
+  await wwDAL.idtyDao.sqlDeleteAll()
+  await wwDAL.certDao.sqlDeleteAll()
+  await wwDAL.msDao.sqlDeleteAll()
+}
\ No newline at end of file
diff --git a/app/modules/dump/wotwizard/wotwizard.dump.ts b/app/modules/dump/wotwizard/wotwizard.dump.ts
new file mode 100644
index 0000000000000000000000000000000000000000..64628788e19b32fc29c5830be9c7e4365a5dd329
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.dump.ts
@@ -0,0 +1,25 @@
+import {Server} from "../../../../server"
+import {createExportStructure} from "./wotwizard.init.structure"
+import {WotWizardConstants} from "./wotwizard.constants"
+import {addLegacyBlocks} from "./wotwizard.legacy.blocks"
+import {addNewBlocks} from "./wotwizard.new.blocks"
+import {deleteNonLegacy} from "./wotwizard.delete"
+import {copyMemPool} from "./wotwizard.copy.mempool"
+
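+/**
+ * Builds or refreshes the wotwizard-export.db database. This is run either by
+ * the `dump wotwizard` CLI command, or automatically on HEAD changes when
+ * conf.storage.wotwizard is enabled (see app/modules/dump.ts).
+ */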
+export async function dumpWotWizard(server: Server) {
+
+  // 1. Create dump structure if it does not exist
+  const wwDAL = await createExportStructure(WotWizardConstants.DB_NAME)
+
+  // 2. Integrate legacy blocks (= non-forkable)
+  await addLegacyBlocks(server, wwDAL)
+
+  // 3. Delete non-legacy data
+  await deleteNonLegacy(wwDAL)
+
+  // 4. Integrate new blocks (= forkable)
+  await addNewBlocks(server, wwDAL)
+
+  // 5. Copy mempool
+  await copyMemPool(server, wwDAL)
+}
diff --git a/app/modules/dump/wotwizard/wotwizard.init.structure.ts b/app/modules/dump/wotwizard/wotwizard.init.structure.ts
new file mode 100644
index 0000000000000000000000000000000000000000..c0867dc374b7ac31adba73a2b8b624c0b43f0e84
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.init.structure.ts
@@ -0,0 +1,53 @@
+import {Directory} from "../../../lib/system/directory"
+import {IdentityDAL} from "../../../lib/dal/sqliteDAL/IdentityDAL"
+import {MembershipDAL} from "../../../lib/dal/sqliteDAL/MembershipDAL"
+import {CertDAL} from "../../../lib/dal/sqliteDAL/CertDAL"
+import {BlockDAL} from "../../../lib/dal/sqliteDAL/BlockDAL"
+import {IIndexDAL} from "../../../lib/dal/sqliteDAL/index/IIndexDAL"
+
+export interface WotWizardDAL {
+  idtyDao: IdentityDAL
+  certDao: CertDAL
+  msDao: MembershipDAL
+  blockDao: BlockDAL
+  iindexDao: IIndexDAL
+}
+
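+/**
+ * Opens (or creates) the export database in the Duniter home directory and
+ * returns the DAOs used by the dump, creating their tables if needed.
+ */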
+export async function createExportStructure(dbName: string): Promise<WotWizardDAL> {
+  const driver = await Directory.getHomeDB(false, dbName)
+
+  // DAOs
+  const idtyDao = new IdentityDAL(driver)
+  const certDao = new CertDAL(driver)
+  const msDao = new MembershipDAL(driver)
+  const blockDao = new BlockDAL(driver)
+  const iindexDao = new IIndexDAL(driver)
+
+  // Create tables
+  await idtyDao.init()
+  await certDao.init()
+  await msDao.init()
+  await blockDao.init()
+  await iindexDao.init()
+
+  const data = await blockDao.query('SELECT COUNT(*) as count FROM block')
+  const blocksCount = parseInt(String((data[0] as any).count))
+
+  // If first DB initialization
+  if (blocksCount === 0) {
+    // Manual updates (which are normally present in MetaDAL)
+    await idtyDao.exec('ALTER TABLE idty ADD COLUMN expired INTEGER NULL')
+    await idtyDao.exec('ALTER TABLE idty ADD COLUMN revoked_on INTEGER NULL')
+    await idtyDao.exec('ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0')
+    await certDao.exec('ALTER TABLE cert ADD COLUMN expired INTEGER NULL')
+    await msDao.exec('ALTER TABLE membership ADD COLUMN expired INTEGER NULL')
+  }
+
+  return {
+    idtyDao,
+    certDao,
+    msDao,
+    blockDao,
+    iindexDao,
+  }
+}
diff --git a/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts b/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
new file mode 100644
index 0000000000000000000000000000000000000000..0c960e1b9506290f6e5712f52c3efeb270aded98
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
@@ -0,0 +1,47 @@
+import {WotWizardDAL} from "./wotwizard.init.structure"
+import {Server} from "../../../../server"
+import {CommonConstants} from "../../../lib/common-libs/constants"
+import {DBBlock} from "../../../lib/db/DBBlock"
+import {NewLogger} from "../../../lib/logger"
+
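+/**
+ * Exports the non-forkable (legacy) blocks that contain web-of-trust events
+ * (joiners, actives, leavers, revoked, excluded, certifications). They are
+ * flagged legacy = true so that deleteNonLegacy() keeps them across dumps.
+ */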
+export async function addLegacyBlocks(server: Server, wwDAL: WotWizardDAL) {
+
+  const logger = NewLogger()
+
+  const currentWW = await wwDAL.blockDao.getCurrent()
+  const current = await server.dal.blockDAL.getCurrent()
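+  // Only blocks at least 100 below the node's HEAD are treated as non-forkable (legacy) here; more recent blocks are exported by addNewBlocks() instead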
+  const start = currentWW && currentWW.number + 1 || 0
+  const end = current && Math.max(-1, current.number - 100) || -1
+
+  const blocksSaved: DBBlock[] = []
+  logger.debug('Reading blocks...')
+
+  // We loop taking care of archives structure
+  for (let i = start; i <= end; i += CommonConstants.ARCHIVES_BLOCKS_CHUNK) {
+    const blocks = await server.dal.getBlocksBetween(i, Math.min(end, i + CommonConstants.ARCHIVES_BLOCKS_CHUNK) - 1)
+    const filtered = blocks.filter(b => b.joiners.length
+      || b.actives.length
+      || b.leavers.length
+      || b.revoked.length
+      || b.excluded.length
+      || b.certifications.length
+    )
+    if (filtered.length) {
+      const legacies = filtered.map(f => { (f as any).legacy = true; return f })
+      legacies.forEach(l => blocksSaved.push(l))
+    }
+  }
+
+  logger.debug('Saving blocks...')
+  await wwDAL.blockDao.insertBatch(blocksSaved)
+
+  const iindexRows = (await server.dal.iindexDAL.findRawWithOrder({}, [['writtenOn', false], ['wotb_id', false]]))
+    .filter(r => r.hash && r.writtenOn >= start && r.writtenOn <= end)
+
+  logger.debug('Saving %s iindex rows...', iindexRows.length)
+  const legacies = iindexRows.map(f => { (f as any).legacy = true; return f })
+  await wwDAL.iindexDao.insertBatch(legacies)
+}
\ No newline at end of file
diff --git a/app/modules/dump/wotwizard/wotwizard.new.blocks.ts b/app/modules/dump/wotwizard/wotwizard.new.blocks.ts
new file mode 100644
index 0000000000000000000000000000000000000000..83ad6088555e8fed50459c71cb1c80aa174eb4e6
--- /dev/null
+++ b/app/modules/dump/wotwizard/wotwizard.new.blocks.ts
@@ -0,0 +1,38 @@
+import {WotWizardDAL} from "./wotwizard.init.structure"
+import {Server} from "../../../../server"
+import {CommonConstants} from "../../../lib/common-libs/constants"
+import {NewLogger} from "../../../lib/logger"
+
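+/**
+ * Exports the most recent blocks, including potential fork blocks, flagged
+ * legacy = false so that they are wiped and re-exported on the next dump.
+ */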
+export async function addNewBlocks(server: Server, wwDAL: WotWizardDAL) {
+
+  const logger = NewLogger()
+
+  wwDAL.blockDao.cleanCache()
+
+  const currentWW = await wwDAL.blockDao.getCurrent()
+  const current = await server.dal.blockDAL.getCurrent()
+  const start = currentWW && currentWW.number + 1 || 0
+  const end = current && current.number || -1
+
+  const blocksSaved: Promise<any>[] = []
+
+  // We loop taking care of archives structure
+  for (let i = start; i <= end; i += CommonConstants.ARCHIVES_BLOCKS_CHUNK) {
+    const beginAt = i
+    const endAt = Math.min(end, i + CommonConstants.ARCHIVES_BLOCKS_CHUNK) - 1
+    const blocks = await server.dal.getBlocksBetween(beginAt, endAt)
+    const forks = await server.dal.getPotentialForkBlocks(beginAt, 0, endAt)
+    const all = blocks.concat(forks).map(f => { (f as any).legacy = false; return f })
+    logger.debug('Saving %s pending blocks...', all.length)
+    blocksSaved.push(wwDAL.blockDao.insertBatch(all))
+  }
+
+  await Promise.all(blocksSaved)
+
+  const iindexRows = (await server.dal.iindexDAL.findRawWithOrder({}, [['writtenOn', false], ['wotb_id', false]]))
+    .filter(r => r.writtenOn >= start)
+
+  logger.debug('Saving %s iindex rows...', iindexRows.length)
+  const nonLegacy = iindexRows.map(f => { (f as any).legacy = false; return f })
+  await wwDAL.iindexDao.insertBatch(nonLegacy)
+}
\ No newline at end of file
diff --git a/server.ts b/server.ts
index 163b4a8915c0ad8e75e1da02b5bcc98c6bd328fb..dc8cd270f1f75efff7730a9424cea7d9cae6427b 100644
--- a/server.ts
+++ b/server.ts
@@ -393,6 +393,7 @@ export class Server extends stream.Duplex implements HookableServer {
     await this.resetConfigHook()
     const files = ['stats', 'cores', 'current', Directory.DUNITER_DB_NAME, Directory.DUNITER_DB_NAME + '.db', Directory.DUNITER_DB_NAME + '.log', Directory.WOTB_FILE, 'export.zip', 'import.zip', 'conf']
       .concat(Directory.DATA_FILES)
+      .concat(Directory.WW_FILES)
     const dirs  = ['archives', 'loki', 'blocks', 'blockchain', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb']
       .concat(Directory.DATA_DIRS)
     return this.resetFiles(files, dirs, done);
@@ -402,6 +403,7 @@ export class Server extends stream.Duplex implements HookableServer {
     await this.resetDataHook()
     const files = ['stats', 'cores', 'current', Directory.DUNITER_DB_NAME, Directory.DUNITER_DB_NAME + '.db', Directory.DUNITER_DB_NAME + '.log', Directory.WOTB_FILE]
       .concat(Directory.DATA_FILES)
+      .concat(Directory.WW_FILES)
     const dirs  = ['archives', 'loki', 'blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb']
       .concat(Directory.DATA_DIRS)
     await this.resetFiles(files, dirs, done);
@@ -453,15 +455,6 @@ export class Server extends stream.Duplex implements HookableServer {
     })
   }
 
-  async cleanDBData() {
-    await this.dal.cleanCaches();
-    this.dal.wotb.resetWoT();
-    const files = ['stats', 'cores', 'current', Directory.DUNITER_DB_NAME, Directory.DUNITER_DB_NAME + '.db', Directory.DUNITER_DB_NAME + '.log'];
-    const dirs  = ['loki', 'blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb']
-      .concat(Directory.DATA_DIRS)
-    return this.resetFiles(files, dirs);
-  }
-
   private async resetFiles(files:string[], dirs:string[], done:any = null) {
     try {
       const params = await this.paramsP;