diff --git a/index.js b/index.js
index 2144830fa8ce0341195ee36de56cd98cc942a562..769d3aa6bc6b423147f02c8f0f8955dcbdee1951 100755
--- a/index.js
+++ b/index.js
@@ -42,7 +42,8 @@ module.exports = {
       },
 
       cliOptions: [
-        { value: '--monitor', desc: 'Enable performance monitoring of DB access'}
+        { value: '--monitor', desc: 'Enable performance monitoring of DB access'},
+        { value: '--reset-data', desc: 'Forces Monit to reset its indexed data on startup'},
       ],
 
       cli: [{
diff --git a/lib/DataFinder.ts b/lib/DataFinder.ts
index e5d11218fdb860521b3eed2d8eef4cf3d32b10c6..9d70d0f2ba14798f3f1eae35f7ecef802b16b697 100644
--- a/lib/DataFinder.ts
+++ b/lib/DataFinder.ts
@@ -1,59 +1,159 @@
 import {Server} from 'duniter/server'
 import {DBBlock} from 'duniter/app/lib/db/DBBlock'
 import {MonitorExecutionTime} from './MonitorExecutionTime'
+import {LevelDBIindex} from "duniter/app/lib/dal/indexDAL/leveldb/LevelDBIindex";
+import {FullIindexEntry, IindexEntry, Indexer, reduce} from "duniter/app/lib/indexer";
+import {LevelDBBlockchain} from "duniter/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain";
+import {Underscore} from "./underscore";
+import {CFSBlockchainArchive} from "duniter/app/lib/dal/indexDAL/CFSBlockchainArchive";
+import {MonitDBBlock, SqliteBlockchain} from "./SqliteBlockchain";
+import {LevelDBCindex} from "duniter/app/lib/dal/indexDAL/leveldb/LevelDBCindex";
+import {reduceConcat} from "duniter/app/lib/common-libs/reduce";
+import {LevelDBMindex} from "duniter/app/lib/dal/indexDAL/leveldb/LevelDBMindex";
+
+/**
+ * Creates the DB objects + reset data + launches a first indexation
+ * @param duniterServer The server to index blockchain from.
+ */
+export async function initMonitDB(duniterServer: Server, resetData: boolean = false) {
+  DataFinder.createInstance(duniterServer)
+  if (resetData) {
+    await DataFinder.getInstance().resetIndexedData()
+  }
+  await DataFinder.getInstance().index()
+}
 
+/**
+ * Abstraction layer for data access (SQL + LevelDB of Duniter).
+ */
 export class DataFinder {
 
+  private static instance: DataFinder
+  private static reindexing: Promise<void> = Promise.resolve()
+
+  /**
+   * Singleton constructor
+   * @param duniterServer
+   */
+  public static createInstance(duniterServer: Server) {
+    if (!DataFinder.instance) {
+      DataFinder.instance = new DataFinder(duniterServer)
+    }
+  }
+
+  /**
+   * Singleton getter
+   */
+  public static getInstance() {
+    return DataFinder.instance
+  }
+
+  /**
+   * Retrieve the singleton + reindex Monit data if current HEAD is not up-to-date.
+   */
+  public static async getInstanceReindexedIfNecessary() {
+    const currentMonit = await DataFinder.instance.getHighestBlock()
+    const currentDuniter = await DataFinder.instance.blockchainDao.getCurrent()
+    // Wait any already triggered reindexing
+    await DataFinder.reindexing
+    // Index only when opportune
+    if (currentDuniter && (!currentMonit || currentMonit.number < currentDuniter.number)) {
+      console.log('Duniter current = ', currentDuniter.number)
+      console.log('Monit current = ', currentMonit && currentMonit.number || -1)
+      DataFinder.reindexing = DataFinder.instance.index()
+      // Wait end of indexing
+      await DataFinder.reindexing
+    }
+    return DataFinder.instance
+  }
+
+  private dbArchives: SqliteBlockchain;
   private memCache: {
     [cacheName: string]: {
       [k: string]: any
     }
-  } = {}
+  } = {};
+  private dbInited: Promise<any>
+
+  // Cache
+  private intemporalWot: Promise<IindexEntry[]>;
+  private wotmap: Promise<WotMap>;
+
+  private constructor(protected duniterServer: Server) {
+    this.dbArchives = new SqliteBlockchain(duniterServer.dal.getSqliteDB)
+    this.dbInited = this.dbArchives.init()
+  }
+
+  async resetIndexedData() {
+    await this.dbInited
+    console.log('Reseting all Monit data...')
+    await this.dbArchives.deleteAll()
+  }
 
-  constructor(protected duniterServer: Server) {
+  /**
+   * Mirror the Duniter archives for long term storage
+   * Renew periodically the non-archived part (in which forks may have occurred)
+   */
+  async index() {
+    console.log('Reindexing blockchain...')
+    await this.dbInited
+    // 1. Look at first out-of-fork-window block in Duniter: archive in Monit all the blocks < to this number
+    const firstOutOfFork = await this.getFirstOutOfForkBlockInDuniter();
+    const newCeil = await this.archiveBlocksInMonit(firstOutOfFork)
+    // 2. Add all the blocks >= to this number
+    await this.addForkWindowBlocks(newCeil, firstOutOfFork)
+    console.log('Reindexing done.')
   }
 
   @MonitorExecutionTime()
   findPendingMembers() {
-    return this.query('SELECT `buid`,`pubkey`,`uid`,`hash`,`expires_on`,`revocation_sig` FROM identities_pending WHERE `member`=0')
+    return this.duniterServer.dal.idtyDAL.query('SELECT `buid`,`pubkey`,`uid`,`hash`,`expires_on`,`revocation_sig` FROM identities_pending WHERE `member`=0')
   }
 
   @MonitorExecutionTime()
   findPendingCertsToTarget(toPubkey: string, hash: string) {
-    return this.getFromCacheOrDB('findPendingCertsToTarget', [toPubkey, hash].join('-'), () => this.query(
+    return this.getFromCacheOrDB('findPendingCertsToTarget', [toPubkey, hash].join('-'), () => this.duniterServer.dal.certDAL.query(
       'SELECT `from`,`block_number`,`block_hash`,`expires_on` FROM certifications_pending WHERE `to`=\''+toPubkey+'\' AND `target`=\''+hash+'\' ORDER BY `expires_on` DESC'))
   }
 
   @MonitorExecutionTime()
-  getWotexInfos(uid: string) {
-    return this.duniterServer.dal.idtyDAL.query('' +
-      'SELECT hash, uid, pub, wotb_id FROM i_index WHERE uid = ? ' +
-      'UNION ALL ' + 'SELECT hash, uid, pubkey as pub, (SELECT NULL) AS wotb_id FROM idty WHERE uid = ?', [uid, uid])
+  async getWotexInfos(uid: string): Promise<{ hash: string }[]> {
+    const pendingIdentities: { hash: string }[] = await this.duniterServer.dal.idtyDAL.query('' +
+      'SELECT hash, uid, pubkey as pub, (SELECT NULL) AS wotb_id FROM idty WHERE uid = ?', [uid])
+    const eventualMember: { hash: string }|null = await this.iindex.getFromUID(uid)
+    if (eventualMember) {
+      pendingIdentities.push(eventualMember)
+    }
+    return pendingIdentities
   }
 
   @MonitorExecutionTime()
-  async getBlock(block_number: number): Promise<DBBlock|undefined> {
+  async getBlock(block_number: number): Promise<DBBlock|undefined> {
     return (await this.getFromCacheOrDB('getBlock', String(block_number),() => this.duniterServer.dal.getBlock(block_number))) || undefined
   }
 
   @MonitorExecutionTime()
   getUidOfPub(pub: string): Promise<{ uid: string }[]> {
-    return this.getFromCacheOrDB('getUidOfPub', pub, () => this.query('SELECT `uid` FROM i_index WHERE `pub`=\''+pub+'\' LIMIT 1'))
+    return this.getFromCacheOrDB('getUidOfPub', pub, () => this.iindex.getFullFromPubkey(pub))
   }
 
   @MonitorExecutionTime()
   async getWotbIdByIssuerPubkey(issuerPubkey: string) {
-    return this.getFromCacheOrDB('getWotbIdByIssuerPubkey', issuerPubkey, async () => (await this.duniterServer.dal.iindexDAL.query('SELECT wotb_id FROM i_index WHERE pub = ? AND wotb_id IS NOT NULL', [issuerPubkey]))[0].wotb_id)
+    return this.getFromCacheOrDB('getWotbIdByIssuerPubkey', issuerPubkey, async () => (await this.iindex.getFullFromPubkey(issuerPubkey)).wotb_id)
   }
 
   @MonitorExecutionTime()
-  getChainableOnByIssuerPubkey(issuerPubkey: string) {
-    return this.query('SELECT `chainable_on` FROM c_index WHERE `issuer`=\''+issuerPubkey+'\' ORDER BY `chainable_on` DESC LIMIT 1')
+  async getChainableOnByIssuerPubkey(issuerPubkey: string) {
+    const reduced = await this.cindex.reducablesFrom(issuerPubkey);
+    return Underscore.sortBy(reduced, r => -r.chainable_on);
   }
 
   @MonitorExecutionTime()
   getChainableOnByIssuerPubkeyByExpOn(from: string) {
-    return this.getFromCacheOrDB('getChainableOnByIssuerPubkeyByExpOn', from, () => this.query('SELECT `chainable_on` FROM c_index WHERE `issuer`=\''+from+'\' ORDER BY `expires_on` DESC LIMIT 1'))
+    return this.getFromCacheOrDB('getChainableOnByIssuerPubkeyByExpOn', from, async () => {
+      const reduced = await this.cindex.reducablesFrom(from);
+      return Underscore.sortBy(reduced, r => -r.expires_on)[0];
+    })
   }
 
   @MonitorExecutionTime()
@@ -63,47 +163,52 @@ export class DataFinder {
 
   @MonitorExecutionTime()
   findCertsOfIssuer(pub: string, tmpOrder: string) {
-    return this.getFromCacheOrDB('findCertsOfIssuer', [pub, tmpOrder].join('-'), () => this.query(
-      'SELECT `receiver`,`written_on`,`expires_on` FROM c_index WHERE `issuer`=\''+pub+'\' ORDER BY `expires_on` '+tmpOrder))
+    return this.getFromCacheOrDB('findCertsOfIssuer', [pub, tmpOrder].join('-'), async () => {
+      const reduced = await this.cindex.reducablesFrom(pub);
+      return Underscore.sortBy(reduced, r => tmpOrder === 'DESC' ? -r.expires_on : r.expires_on);
+    })
   }
 
   @MonitorExecutionTime()
   findCertsOfReceiver(pub: any, tmpOrder: string) {
-    return this.getFromCacheOrDB('findCertsOfReceiver', [pub, tmpOrder].join('-'), () => this.query(
-      'SELECT `issuer`,`written_on`,`expires_on` FROM c_index WHERE `receiver`=\''+pub+'\' ORDER BY `expires_on` '+tmpOrder))
+    return this.getFromCacheOrDB('findCertsOfReceiver', [pub, tmpOrder].join('-'), async () => {
+      const reduced = await this.reducablesTo(pub);
+      return Underscore.sortBy(reduced, r => tmpOrder === 'DESC' ? -r.expires_on : r.expires_on);
+    })
   }
 
   @MonitorExecutionTime()
   getProtagonist(pub: string) {
-    return this.getFromCacheOrDB('getProtagonist', pub, () => this.query('SELECT `uid`,`wotb_id` FROM i_index WHERE `pub`=\''+pub+'\' LIMIT 1'))
+    return this.getFromCacheOrDB('getProtagonist', pub, async (): Promise<FullIindexEntry> => {
+      return (await this.iindex.getFromPubkey(pub)) as FullIindexEntry;
+    })
   }
 
   @MonitorExecutionTime()
   getCertsPending(pub: string, tmpOrder: string) {
-    return this.getFromCacheOrDB('getCertsPending', [pub, tmpOrder].join('-'), () => this.query(
+    return this.getFromCacheOrDB('getCertsPending', [pub, tmpOrder].join('-'), () => this.duniterServer.dal.certDAL.query(
       'SELECT `from`,`to`,`block_number`,`expires_on` FROM certifications_pending WHERE `from`=\''+pub+'\' ORDER BY `expires_on` '+tmpOrder))
   }
 
   @MonitorExecutionTime()
   getCertsPendingFromTo(pub: any, tmpOrder: string) {
-    return this.getFromCacheOrDB('getCertsPendingFromTo', [pub, tmpOrder].join('-'), () => this.query(
+    return this.getFromCacheOrDB('getCertsPendingFromTo', [pub, tmpOrder].join('-'), () => this.duniterServer.dal.certDAL.query(
       'SELECT `from`,`block_number`,`block_hash`,`expires_on` FROM certifications_pending WHERE `to`=\''+pub+'\' ORDER BY `expires_on` '+tmpOrder))
   }
 
   @MonitorExecutionTime()
   getMembers() {
-    return this.getFromCacheOrDB('getMembers', 'members', () => this.query('SELECT `uid`,`pub`,`member`,`written_on`,`wotb_id` FROM i_index WHERE `member`=1'))
+    return this.getFromCacheOrDB('getMembers', 'members', async () => {
+      const intemporalWot = await this.getIntemporalWot();
+      return intemporalWot.filter(node => node.member)
+    })
   }
 
   @MonitorExecutionTime()
   membershipWrittenOnExpiresOn(pub: string) {
-    return this.getFromCacheOrDB('membershipWrittenOnExpiresOn', pub, () => this.query(
-      'SELECT `written_on`,`expires_on` FROM m_index WHERE `pub`=\''+pub+'\' ORDER BY `expires_on` DESC LIMIT 1'))
-  }
-
-  @MonitorExecutionTime()
-  query(sql: string, params?: any[]) {
-    return this.duniterServer.dal.peerDAL.query(sql, params || [])
+    return this.getFromCacheOrDB('membershipWrittenOnExpiresOn', pub, async () => {
+      return this.mindex.getReducedMS(pub);
+    })
   }
 
   @MonitorExecutionTime()
@@ -123,67 +228,273 @@ export class DataFinder {
   @MonitorExecutionTime()
   getBlockWhereMedianTimeGt(previousBlockchainTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeGt', String(previousBlockchainTime),
-      () => this.query('SELECT `issuer`,`membersCount`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` > '+previousBlockchainTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `issuer`,`membersCount`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` > '+previousBlockchainTime+' ORDER BY `medianTime` ASC'))
   }
 
   @MonitorExecutionTime()
-  getBlockWhereMedianTimeLte(medianTime: number) {
-    return this.getFromCacheOrDB('getBlockWhereMedianTimeLte', [medianTime].join('-'),
-      () => this.query('SELECT `hash`,`membersCount`,`medianTime`,`number`,`certifications`,`issuersCount`,`powMin` FROM block WHERE `fork`=0 AND `medianTime` <= '+medianTime+' ORDER BY `medianTime` ASC'))
+  getBlockWhereMedianTimeLte(newEndTime: number) {
+    return this.getFromCacheOrDB('getBlockWhereMedianTimeLte', [newEndTime].join('-'),
+      () => this.dbArchives.query('SELECT `medianTime`,`number` FROM block WHERE `fork`=0 AND `medianTime` <= \''+newEndTime+'\' ORDER BY `medianTime` DESC LIMIT 1 '))
   }
 
   @MonitorExecutionTime()
   getBlockWhereMedianTimeLteNoLimit(medianTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeLteNoLimit', [medianTime].join('-'),
-      () => this.query('SELECT `hash`,`membersCount`,`medianTime`,`number`,`certifications`,`issuersCount`,`powMin` FROM block WHERE `fork`=0 AND `medianTime` <= '+medianTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `hash`,`membersCount`,`medianTime`,`number`,`certifications`,`issuersCount`,`powMin` FROM block WHERE `fork`=0 AND `medianTime` <= '+medianTime+' ORDER BY `medianTime` ASC'))
   }
 
   @MonitorExecutionTime()
   getIdentityByWotbid(wotb_id: number): Promise<any> {
     return this.getFromCacheOrDB('getIdentityByWotbid', [wotb_id].join('-'),
-      async () => (await this.duniterServer.dal.idtyDAL.query('SELECT * FROM i_index WHERE wotb_id = ?', [wotb_id]))[0])
+      async () => {
+        const matching = (await this.getWotmap())[wotb_id];
+        return matching
+      })
   }
 
   @MonitorExecutionTime()
   getBlockWhereMedianTimeLteAndGtNoLimit(currentBlockTime: number, medianTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeLteAndGtNoLimit', [currentBlockTime, medianTime].join('-'),
-      () => this.query('SELECT `hash`,`membersCount`,`medianTime`,`number`,`certifications`,`joiners`,`actives`,`revoked` FROM block WHERE `fork`=0 AND `medianTime` > '+currentBlockTime+' AND `medianTime` <= '+medianTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `hash`,`membersCount`,`medianTime`,`number`,`certifications`,`joiners`,`actives`,`revoked` FROM block WHERE `fork`=0 AND `medianTime` > '+currentBlockTime+' AND `medianTime` <= '+medianTime+' ORDER BY `medianTime` ASC'))
   }
 
   @MonitorExecutionTime()
   getBlockWhereMedianTimeLteAndGte(endMedianTime: number, beginMedianTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeLteAndGte', [endMedianTime, beginMedianTime].join('-'),
-      () => this.query('SELECT `issuer`,`membersCount`,`monetaryMass`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` <= '+endMedianTime+' AND `medianTime` >= '+beginMedianTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `issuer`,`membersCount`,`monetaryMass`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` <= '+endMedianTime+' AND `medianTime` >= '+beginMedianTime+' ORDER BY `medianTime` ASC'))
   }
 
   @MonitorExecutionTime()
-  getBlockWhereMedianTimeGte(previousBlockchainTime: number) {
-    return this.getFromCacheOrDB('getBlockWhereMedianTimeGte', String(previousBlockchainTime),
-      () => this.query('SELECT `issuer`,`membersCount`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` >= '+previousBlockchainTime+' ORDER BY `medianTime` ASC'))
+  getBlockWhereMedianTimeGte(beginTime: number) {
+    return this.getFromCacheOrDB('getBlockWhereMedianTimeGte', String(beginTime),
+      () => this.dbArchives.query('SELECT `medianTime`,`number` FROM block WHERE `fork`=0 AND `medianTime` >= \''+beginTime+'\' ORDER BY `medianTime` ASC LIMIT 1 '))
   }
 
   @MonitorExecutionTime()
   getBlockWhereMedianTimeLteAndGt(medianTime: number, previousBlockchainTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeLteAndGt', [medianTime, previousBlockchainTime].join('-'),
-      () => this.query('SELECT `issuer`,`membersCount`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` <= '+medianTime+' AND `medianTime` > '+previousBlockchainTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `issuer`,`membersCount`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` <= '+medianTime+' AND `medianTime` > '+previousBlockchainTime+' ORDER BY `medianTime` ASC'))
   }
 
   @MonitorExecutionTime()
   getBlockWhereMedianTimeLteAndGteNoLimit(endMedianTime: number, beginMedianTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeLteAndGteNoLimit', [endMedianTime, beginMedianTime].join('-'),
-      () => this.query('SELECT `issuer`,`membersCount`,`monetaryMass`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` <= '+endMedianTime+' AND `medianTime` >= '+beginMedianTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `issuer`,`membersCount`,`monetaryMass`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` <= '+endMedianTime+' AND `medianTime` >= '+beginMedianTime+' ORDER BY `medianTime` ASC'))
   }
 
   @MonitorExecutionTime()
   getBlockWhereMedianTimeGtNoLimit(beginMedianTime: number) {
     return this.getFromCacheOrDB('getBlockWhereMedianTimeGtNoLimit', String(beginMedianTime),
-      () => this.query('SELECT `issuer`,`membersCount`,`monetaryMass`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` >= '+beginMedianTime+' ORDER BY `medianTime` ASC'))
+      () => this.dbArchives.query('SELECT `issuer`,`membersCount`,`monetaryMass`,`medianTime`,`dividend`,`number`,`nonce` FROM block WHERE `fork`=0 AND `medianTime` >= '+beginMedianTime+' ORDER BY `medianTime` ASC'))
+  }
+
+  searchIdentities(search: string) {
+    return this.duniterServer.dal.searchJustIdentities(search)
+  }
+
+  /**
+   * Get the highest block known by Monit
+   */
+  async getHighestBlock() {
+    const number = await this.dbArchives.getHighestBlockNumber()
+    if (number < 0) {
+      return null
+    }
+    return this.dbArchives.getBlock(number)
+  }
+
+  /**
+   * Get the highest block number known by Monit
+   */
+  async getHighestBlockNumber() {
+    return await this.dbArchives.getHighestBlockNumber()
+  }
+
+  /**
+   * Get the highest archived block number known by Monit
+   */
+  async getHighestArchivedBlockNumber() {
+    return await this.dbArchives.getHighestArchivedBlockNumber()
+  }
+
+  @MonitorExecutionTime()
+  async findRemainingBlocksInForkZone(criteria: (b: DBBlock) => boolean) {
+    const topArchived = await this.getHighestBlock()
+    return await this.blockchainDao.findWhere(block => (!topArchived || block.number > topArchived.number) && criteria(block))
   }
 
-  searchIdentities(search: string) { // TODO: refactor duniterServer in all this class
-    return this.duniterServer.dal.idtyDAL.query('' +
-      'SELECT uid, pub, wotb_id FROM i_index WHERE (uid = ? or pub = ?) ' +
-      'UNION ALL ' +
-      'SELECT uid, pubkey as pub, (SELECT NULL) AS wotb_id FROM idty WHERE (uid = ? or pubkey = ?)', [search, search, search, search])
+  async getFirstOutOfForkBlockInDuniter(): Promise<number> {
+    const current = (await this.blockchainDao.getCurrent())
+    return (current && current.number || -1) - this.duniterServer.conf.forksize
   }
+
+  get blockchainDao() {
+    return this.duniterServer.dal.blockDAL as LevelDBBlockchain
+  }
+
+  get iindex() {
+    return this.duniterServer.dal.iindexDAL as LevelDBIindex
+  }
+
+  get mindex() {
+    return this.duniterServer.dal.mindexDAL as LevelDBMindex
+  }
+
+  get cindex() {
+    return this.duniterServer.dal.cindexDAL as LevelDBCindex
+  }
+
+  /**
+   * Lazily fetch the intemporal WoT (result cached as a promise)
+   */
+  getIntemporalWot() {
+    if (!this.intemporalWot) {
+      this.intemporalWot = this.fetchIntemporalWot()
+    }
+    return this.intemporalWot
+  }
+
+  /**
+   * Lazily fetch the wotmap (result cached as a promise)
+   */
+  getWotmap() {
+    if (!this.wotmap) {
+      this.wotmap = this.fetchWotMap()
+    }
+    return this.wotmap
+  }
+
+  async fetchIntemporalWot() {
+    console.log('Fetching intemporal wot...');
+    return (await this.iindex.findAllValues()).map(reduce);
+  }
+
+  async fetchWotMap() {
+    console.log('Fetching wotmap...');
+    const reducedIdentities = await this.getIntemporalWot();
+    const wotmap: WotMap = {};
+    reducedIdentities.forEach(identity => {
+      wotmap[identity.wotb_id as number] = identity;
+    });
+    return wotmap;
+  }
+
+  // Extracted from Duniter `getValidLinksTo`, adapted to return even non-valid links
+  private async reducablesTo(receiver: any) {
+    const issuers: string[] = ((await this.cindex.getOrNull(receiver)) || { issued: [], received: [] }).received
+    return (await Promise.all(issuers.map(async issuer => {
+      const fullEntries = Indexer.DUP_HELPERS.reduceBy((await this.cindex.get(issuer)).issued, ['issuer', 'receiver'])
+      return fullEntries.filter(e => e.receiver === receiver )
+    }))).reduce(reduceConcat, [])
+  }
+
+  /**
+   * Save as archived blocks in Monit blocks the blocks that are not supposed to change
+   * ever in Duniter (non-fork blocks).
+   */
+  private async archiveBlocksInMonit(targetCeil: number) {
+    console.log(`[Archives] Compiling archives up to #${targetCeil} (first non-forkable block)...`)
+    // Trim all the blocks above the ceil (should be the non-archived blocks)
+    console.log(`[Archives] Removing forkable blocks`)
+    await this.dbArchives.trimNonArchived()
+    // Check what is our new ceil
+    let currentCeil = await this.dbArchives.getHighestBlockNumber()
+    // Copy the blocks available from Duniter archives (they were stored during a sync)
+    currentCeil = await this.copyFromDuniterArchives(currentCeil, targetCeil)
+    // Then copy the blocks available in classical Duniter DB (a part stored during the sync, the other during the node's life)
+    currentCeil = await this.copyFromDuniterDB(currentCeil, targetCeil)
+    return this.dbArchives.getHighestBlockNumber()
+  }
+
+  /**
+   * Save as non-archived blocks in Monit blocks the blocks that are in fork window of Duniter.
+   */
+  private async addForkWindowBlocks(newCeil: number, firstOutOfFork: number) {
+    console.log(`[Forkables] Copying DB blocks from #${newCeil + 1} to #${firstOutOfFork}...`)
+    const current = (await this.blockchainDao.getCurrent()) as DBBlock
+    // Fetch memory blocks above our new ceil
+    const nonArchived: MonitDBBlock[] = await this.blockchainDao.getBlocks(newCeil + 1, firstOutOfFork) as any
+    // Mark them as archived (they are out of the fork window)
+    nonArchived.forEach(b => b.archived = true)
+    console.log(`[Forkables] Copying ${nonArchived.length} blocks.`)
+    await this.dbArchives.insertBatch(nonArchived)
+    console.log(`[Forkables] Copying DB forkable blocks from #${firstOutOfFork + 1} to #${current.number}...`)
+    // Fetch memory blocks above our new ceil
+    const nonArchivedForkable: MonitDBBlock[] = await this.blockchainDao.getBlocks(firstOutOfFork + 1, current.number) as any
+    // Mark them as non-archived because they are forkable
+    nonArchivedForkable.forEach(b => b.archived = false)
+    // And finally store them
+    console.log(`[Forkables] Copying ${nonArchivedForkable.length} blocks.`)
+    await this.dbArchives.insertBatch(nonArchivedForkable)
+  }
+
+  /**
+   * Extract blocks from Duniter archives zone.
+   * @param currentCeil Our current ceil block in dbArchives.
+   * @param targetCeil  Our target block in dbArchives (block to reach).
+   */
+  private async copyFromDuniterArchives(currentCeil: number, targetCeil: number) {
+    console.log(`[Archives] Copying from Duniter archives from #${currentCeil + 1}...#${targetCeil}`)
+    while (currentCeil < targetCeil) {
+      // Get the chunk that contains the block following our current ceil
+      const chunk: MonitDBBlock[]|null = (await (this.duniterServer.dal.blockchainArchiveDAL as CFSBlockchainArchive<DBBlock>).getChunkForBlock(currentCeil + 1)) as any[];
+      const toArchive: MonitDBBlock[] = [];
+      if (!chunk) {
+        // Not in the archives
+        break;
+      }
+      for (const block of chunk) {
+        if (block.number > currentCeil) {
+          // Archive it
+          block.archived = true;
+          toArchive.push(block);
+          currentCeil = block.number
+        }
+      }
+      if (toArchive.length) {
+        console.log(`[Archives] Copying from Duniter archives block #${toArchive[0].number}...#${toArchive[toArchive.length-1].number}`)
+        await this.dbArchives.insertBatch(toArchive)
+        // Force journal writing, otherwise we will have to wait for all the writings later on.
+        // I prefer to wait now, to follow the progress using logs
+        await this.dbArchives.getHighestBlockNumber()
+      }
+    }
+    await this.dbArchives.setArchived(currentCeil)
+    console.log(`[Archives] Copying from Duniter archives done.`)
+    return currentCeil
+  }
+
+  /**
+   * Extract blocks from Duniter database zone.
+   * @param currentCeil Our current ceil block in dbArchives.
+   * @param targetCeil  Our target block in dbArchives (block to reach).
+   */
+  private async copyFromDuniterDB(currentCeil: number, targetCeil: number) {
+    console.log('[Archives] Copying from Duniter DB...')
+    const duniterCurrent = await this.blockchainDao.getCurrent()
+    if (duniterCurrent) {
+      // Get all the remaining blocks
+      console.log(`[Archives] Copying from Duniter DB block #${currentCeil + 1}...#${targetCeil}`)
+      const chunk: MonitDBBlock[]|null = (await this.blockchainDao.getBlocks(currentCeil + 1, targetCeil)) as any[];
+      const toStore: MonitDBBlock[] = [];
+      for (const block of chunk) {
+        if (!block.fork && block.number === currentCeil + 1) {
+          // Store it
+          block.archived = block.number <= duniterCurrent.number;
+          toStore.push(block);
+          currentCeil = block.number
+        }
+      }
+      console.log(`[Archives] Copying ${toStore.length} blocks...`)
+      if (toStore.length) {
+        await this.dbArchives.insertBatch(toStore)
+      }
+    }
+    console.log('[Archives] Copying from Duniter DB done.')
+    return currentCeil
+  }
+}
+
+interface WotMap {
+  [k: number]: IindexEntry
 }
diff --git a/lib/SqliteBlockchain.ts b/lib/SqliteBlockchain.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f0bf86a00856c74561147c67fef0f371a9803f28
--- /dev/null
+++ b/lib/SqliteBlockchain.ts
@@ -0,0 +1,105 @@
+import {SqliteTable} from "duniter/app/lib/dal/indexDAL/sqlite/SqliteTable";
+import {SQLiteDriver} from "duniter/app/lib/dal/drivers/SQLiteDriver";
+import {
+  SqlNotNullableFieldDefinition,
+  SqlNullableFieldDefinition
+} from "duniter/app/lib/dal/indexDAL/sqlite/SqlFieldDefinition";
+import {MonitorExecutionTime} from "./MonitorExecutionTime";
+import {DBBlock} from "duniter/app/lib/db/DBBlock";
+
+export class SqliteBlockchain extends SqliteTable<MonitDBBlock> {
+
+  constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) {
+    super(
+      'monit',
+      {
+        'archived':       new SqlNotNullableFieldDefinition('BOOLEAN', true),
+        'fork':           new SqlNotNullableFieldDefinition('BOOLEAN', true),
+        'hash':           new SqlNotNullableFieldDefinition('VARCHAR', false, 64),
+        'inner_hash':     new SqlNotNullableFieldDefinition('VARCHAR', false, 64),
+        'signature':      new SqlNotNullableFieldDefinition('VARCHAR', false, 100),
+        'currency':       new SqlNotNullableFieldDefinition('VARCHAR', false, 50),
+        'issuer':         new SqlNotNullableFieldDefinition('VARCHAR', false, 50),
+        'version':        new SqlNotNullableFieldDefinition('INT', false),
+        'membersCount':   new SqlNotNullableFieldDefinition('INT', false),
+        'medianTime':     new SqlNotNullableFieldDefinition('INT', true), // DATETIME?
+        'time':           new SqlNotNullableFieldDefinition('INT', false), // DATETIME?
+        'powMin':         new SqlNotNullableFieldDefinition('INT', false),
+        'number':         new SqlNotNullableFieldDefinition('INT', false),
+        'nonce':          new SqlNotNullableFieldDefinition('INT', false),
+        'issuersCount':   new SqlNotNullableFieldDefinition('INT', false),
+        'parameters':     new SqlNullableFieldDefinition('VARCHAR', false, 255),
+        'previousHash':   new SqlNullableFieldDefinition('VARCHAR', false, 64),
+        'previousIssuer': new SqlNullableFieldDefinition('VARCHAR', false, 50),
+        'monetaryMass':   new SqlNullableFieldDefinition('VARCHAR', false, 100),
+        'UDTime':         new SqlNullableFieldDefinition('INT', false), // DATETIME
+        'dividend':       new SqlNullableFieldDefinition('INT', false), // DEFAULT \'0\'
+        'unitbase':       new SqlNullableFieldDefinition('INT', false),
+        'transactions':   new SqlNullableFieldDefinition('TEXT', false),
+        'certifications': new SqlNullableFieldDefinition('TEXT', false),
+        'identities':     new SqlNullableFieldDefinition('TEXT', false),
+        'joiners':        new SqlNullableFieldDefinition('TEXT', false),
+        'actives':        new SqlNullableFieldDefinition('TEXT', false),
+        'leavers':        new SqlNullableFieldDefinition('TEXT', false),
+        'revoked':        new SqlNullableFieldDefinition('TEXT', false),
+        'excluded':       new SqlNullableFieldDefinition('TEXT', false),
+      },
+      getSqliteDB
+    );
+    this.name = 'block'
+  }
+
+  @MonitorExecutionTime()
+  async insertBatch(records: MonitDBBlock[]): Promise<void> {
+    records.forEach((b:any) => {
+      for (const prop of ['joiners', 'actives', 'leavers', 'identities', 'certifications', 'transactions', 'revoked', 'excluded']) {
+        b[prop] = JSON.stringify(b[prop]);
+      }
+      return b
+    });
+    if (records.length) {
+      return this.insertBatchInTable(this.driver, records)
+    }
+  }
+
+  @MonitorExecutionTime()
+  async query(sql: string, params?: any[]): Promise<any> {
+    return this.driver.sqlRead(sql, params || [])
+  }
+
+  async getBlock(number: number): Promise<MonitDBBlock|null> {
+    const blocks = await this.driver.sqlRead('SELECT * FROM block WHERE number = ?', [number])
+    return blocks.length ? blocks[0] : null
+  }
+
+  async getHighestBlock(): Promise<MonitDBBlock|null> {
+    const blocks = await this.driver.sqlRead('SELECT * FROM block ORDER BY number DESC LIMIT 1', [])
+    return blocks.length ? blocks[0] : null
+  }
+
+  async getHighestBlockNumber(): Promise<number> {
+    const block = await this.getHighestBlock()
+    return block && block.number || -1
+  }
+
+  async getHighestArchivedBlockNumber(): Promise<number> {
+    const block = await this.driver.sqlRead('SELECT * FROM block WHERE archived ORDER BY number DESC LIMIT 1', [])
+    return block.length && block[0].number || -1
+  }
+
+  trimNonArchived() {
+    return this.driver.sqlWrite('DELETE FROM block WHERE NOT archived', [])
+  }
+
+  setArchived(currentCeil: number) {
+    return this.driver.sqlWrite('UPDATE block SET archived = ? WHERE number <= ? AND NOT archived', [true, currentCeil])
+  }
+
+  deleteAll() {
+    return this.driver.sqlWrite('DELETE FROM block', [])
+  }
+}
+
+export interface MonitDBBlock extends DBBlock {
+  archived: boolean
+}
diff --git a/lib/constants.js b/lib/constants2.ts
similarity index 91%
rename from lib/constants.js
rename to lib/constants2.ts
index 8bf57365ba91c1312659149860264f90284f0311..80530f207c94cd82bcc138b4b859b3efb393b4af 100755
--- a/lib/constants.js
+++ b/lib/constants2.ts
@@ -1,6 +1,4 @@
-"use strict";
-
-module.exports = {
+export const MonitConstants = {
   USE_WOTB6: false,
   DEFAULT_LANGUAGE: "fr",
   MIN_WILLMEMBERS_UPDATE_FREQ: 180,
@@ -16,4 +14,4 @@ module.exports = {
     GET_SENTRIES_COUNT: 3,
     GET_D_SEN: 3
   }
-};
\ No newline at end of file
+};
diff --git a/lib/main.js b/lib/main.js
index 0877d4da2b411e1f4e10b970921ebf724d7be61e..d6920fb60c7022023998449d51338c1ef833b2af 100755
--- a/lib/main.js
+++ b/lib/main.js
@@ -4,7 +4,7 @@ const co = require('co');
 const os = require('os');
 const fs = require('fs');
 
-const webserver = require(__dirname + '/webserver.js');
+const webserver = require(__dirname + '/webserver2.js');
 const timestampToDatetime = require(__dirname + '/timestampToDatetime.js');
 
 /****************************
@@ -48,7 +48,7 @@ module.exports = (duniterServer, host, port, appParente, program) => co(function
   console.log("module currency-monit started");
   
   // Specialized node's UI
-  let httpServer = webserver(host, port, appParente, duniterServer, monitDatasPath, offset, cache);
+  let httpServer = webserver(host, port, appParente, duniterServer, monitDatasPath, offset, cache, program.resetData);
   yield httpServer.openConnection();
 
 })
diff --git a/lib/updateCache2.ts b/lib/updateCache2.ts
index 91f6d50c09ffe8dca0e50082c5397df4202ff425..851d231add0a4584948d97388b151980e03e927e 100755
--- a/lib/updateCache2.ts
+++ b/lib/updateCache2.ts
@@ -2,9 +2,10 @@
 
 import {DataFinder} from "./DataFinder";
 import {DBBlock} from "duniter/app/lib/db/DBBlock";
+import {MonitConstants} from "./constants2";
+import {Server} from "duniter/server";
 
 const co = require('co');
-const constants = require(__dirname + '/constants')
 
 /**
      * updateCache
@@ -12,9 +13,9 @@ const constants = require(__dirname + '/constants')
      */
 module.exports = async (req:any, res:any, next:any) => {
   
-  var { duniterServer, cache } = req.app.locals
+  var { duniterServer, cache } = req.app.locals as { duniterServer: Server, cache: MonitCache };
 
-	const dataFinder = new DataFinder(duniterServer)
+	const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
 
   try {
 		// Définition des constantes
@@ -24,7 +25,7 @@ module.exports = async (req:any, res:any, next:any) => {
 		let upgradeCache = false;
 		
 		// Cacluler s'il faut mettre à jour le cache ou pas
-		upgradeCache = (Math.floor(Date.now() / 1000) > (cache.lastUptime + constants.MIN_CACHE_UPDATE_FREQ));
+		upgradeCache = (Math.floor(Date.now() / 1000) > (cache.lastUptime + MonitConstants.MIN_CACHE_UPDATE_FREQ));
 		
 		// Si le cache membersCount est dévérouillé, le vérouiller, sinon ne pas réinitialiser le cache
 		if (upgradeCache && !cache.lockMembersCount)
@@ -109,8 +110,8 @@ module.exports = async (req:any, res:any, next:any) => {
     { cache.beginBlock = [await dataFinder.getBlock(0)]; }
     else if (req.query.begin > cache.endBlock[0].number)
     {
-			let beginTime = cache.endBlock[0].medianTime-(parseInt(cache.step)*unitTime*constants.STEP_COUNT_MIN);
-      cache.beginBlock =  [await dataFinder.getBlockWhereMedianTimeGte(beginTime)];
+			let beginTime = cache.endBlock[0].medianTime-(parseInt(cache.step)*unitTime*MonitConstants.STEP_COUNT_MIN);
+      cache.beginBlock =  await dataFinder.getBlockWhereMedianTimeGte(beginTime);
     }
 		else { cache.beginBlock = [await dataFinder.getBlock(req.query.begin)]; }
 
@@ -118,34 +119,36 @@ module.exports = async (req:any, res:any, next:any) => {
 		if ( typeof(req.query.nbMaxPoints) != 'undefined' && req.query.nbMaxPoints > 0 ) {
 			cache.nbMaxPoints = req.query.nbMaxPoints;
 		} else {
-			cache.nbMaxPoints = constants.STEP_COUNT_MAX;
+			cache.nbMaxPoints = MonitConstants.STEP_COUNT_MAX;
 		}
 		if ( typeof(req.query.adaptMaxPoints) != 'undefined' && (req.query.adaptMaxPoints == "step" || req.query.adaptMaxPoints == "end")) {
 			cache.adaptMaxPoints = req.query.adaptMaxPoints;
 		} else {
 			cache.adaptMaxPoints = "begin";
 		}
-		
+		if (!cache.beginBlock || !cache.beginBlock[0]) {
+			throw Error("No begin block")
+		}
 		// Apply nbMaxPoints and adaptMaxPoints
 		if (cache.adaptMaxPoints == "begin")
 		{
 			if ( Math.ceil((cache.endBlock[0].medianTime-cache.beginBlock[0].medianTime)/(cache.step*unitTime)) > cache.nbMaxPoints  )
 			{
 				let newBeginTime = cache.endBlock[0].medianTime-cache.step*cache.nbMaxPoints*unitTime;
-				cache.beginBlock =  [await dataFinder.getBlockWhereMedianTimeGte(newBeginTime)];
+				cache.beginBlock =  await dataFinder.getBlockWhereMedianTimeGte(newBeginTime);
 			}
 		} else if (cache.adaptMaxPoints == "step") {
-			cache.step = Math.ceil((cache.endBlock[0].medianTime-cache.beginBlock[0].medianTime)/(constants.STEP_COUNT_MAX*unitTime));
+			cache.step = Math.ceil((cache.endBlock[0].medianTime-cache.beginBlock[0].medianTime)/(MonitConstants.STEP_COUNT_MAX*unitTime));
 		} else {
 			let newEndTime = cache.beginBlock[0].medianTime+cache.step*cache.nbMaxPoints*unitTime;
-			cache.endBlock = [await dataFinder.getBlockWhereMedianTimeLte(newEndTime)];
+			cache.endBlock = await dataFinder.getBlockWhereMedianTimeLte(newEndTime);
 		}
     
 		// Calculate stepTime
     cache.stepTime = parseInt(cache.step)*unitTime;
 
     // if new blocks and MIN_CACHE_UPDATE_FREQ pass, update cache
-		if ( parseInt(cache.endBlock[0].number) >= cache.currentBlockNumber && Math.floor(Date.now() / 1000) > (cache.lastUptime + constants.MIN_CACHE_UPDATE_FREQ))
+		if ( parseInt(cache.endBlock[0].number) >= cache.currentBlockNumber && Math.floor(Date.now() / 1000) > (cache.lastUptime + MonitConstants.MIN_CACHE_UPDATE_FREQ))
     {
       // let previousCacheTime = (cache.blockchain.length > 0) ? cache.blockchain[cache.blockchain.length-1].medianTime:0;
       var newBlocks = await dataFinder.getBlockWhereMedianTimeLteAndGtNoLimit(cache.currentBlockTime, cache.endBlock[0].medianTime);
@@ -293,3 +296,7 @@ module.exports = async (req:any, res:any, next:any) => {
   }
 }
 
+interface MonitCache {
+	[k: string]: any
+	beginBlock: null|DBBlock[]
+}
diff --git a/lib/webserver.js b/lib/webserver2.ts
similarity index 63%
rename from lib/webserver.js
rename to lib/webserver2.ts
index 567b5acc4343c27b04e1a06c14f8a3c120aa05a6..de0fd69a8f2b716429dd849df4cd58bce589b7aa 100755
--- a/lib/webserver.js
+++ b/lib/webserver2.ts
@@ -1,5 +1,8 @@
 "use strict";
 
+import {Server} from "duniter/server";
+import {initMonitDB} from "./DataFinder";
+
 const fs = require('fs');
 //const util = require('util');
 const Q = require('q');
@@ -13,13 +16,13 @@ const bodyParser = require('body-parser');
 const routes = require(__dirname + '/../routes');
 const tpl = require(__dirname + '/tplit.js');
 
-module.exports = (host, port, appParente, duniterServer, monitDatasPath, offset, cache) => {
+module.exports = (host: any, port: any, appParente: any, duniterServer: Server, monitDatasPath: any, offset: any, cache: any, resetData: boolean) => {
   
   var app = express();
   
   app.use(morgan('\x1b[90m:remote-addr :remote-user [:date[clf]] :method :url HTTP/:http-version :status :res[content-length] - :response-time ms\x1b[0m', {
     stream: {
-      write: function(message){
+      write: function(message: any){
         message && console.log(message.replace(/\n$/,''));
       }
     }
@@ -44,7 +47,7 @@ module.exports = (host, port, appParente, duniterServer, monitDatasPath, offset,
   /***************************************
   * CSV des membres calculants
   ***************************************/
-  app.get('/csvCalculatorsRank', function(req, res) {
+  app.get('/csvCalculatorsRank', function(req: any, res: any) {
     let files = fs.readdirSync(monitDatasPath + '/calculators_rank/')
     let maxTimestamp = 0
     for (let file of files) {
@@ -61,35 +64,46 @@ module.exports = (host, port, appParente, duniterServer, monitDatasPath, offset,
   if ( appParente == null )
   {
     let httpServer = http.createServer(app);
-    httpServer.on('error', function(err) {
+    httpServer.on('error', function(err: any) {
       httpServer.errorPropagates(err);
     });
     
     return {
-      openConnection: () => co(function *() {
-	try {
-	  yield Q.Promise((resolve, reject) => {
-	    // Weird the need of such a hack to catch an exception...
-	    httpServer.errorPropagates = function(err) {
-	      reject(err);
-	    };
+      openConnection: async () => {
+        try {
+          await Q.Promise((resolve: any, reject: any) => {
+            // Weird the need of such a hack to catch an exception...
+            httpServer.errorPropagates = function(err: any) {
+              reject(err);
+            };
+
+            httpServer.listen(port, host, (err: any) => {
+              if (err) return reject(err);
+              resolve(httpServer);
+            });
+          });
+
+          // Init + first incremental indexation
+          await initMonitDB(duniterServer, resetData)
+
+          console.log('Server listening on http://' + host + ':' + port);
 
-	    httpServer.listen(port, host, (err) => {
-	      if (err) return reject(err);
-	      resolve(httpServer);
-	    });
-	  });
-	  console.log('Server listening on http://' + host + ':' + port);
-	} catch (e) {
-	  console.warn('Could NOT listen to http://' + host + ':' + port);
-	  console.warn(e);
-	}
-      }),
+        } catch (e) {
+          console.warn('Could NOT listen to http://' + host + ':' + port);
+          console.warn(e);
+        }
+      },
     };
   }
   else
   {
     appParente.use("/currency-monit", app);
+
+    return {
+      openConnection: async () => {
+        console.log('No connection to open')
+      }
+    };
   }
   
   
diff --git a/package.json b/package.json
index c958b512f04311ef71e7212d3e9599ebb2ee75ed..fe988f094d57f88d17eceed049a71fbbab0e349b 100755
--- a/package.json
+++ b/package.json
@@ -4,7 +4,8 @@
   "main": "index.js",
   "license": "AGPLv3",
   "scripts": {
-    "prepublish": "tsc"
+    "prepublish": "tsc",
+    "test": "mocha"
   },
   "dependencies": {
     "body-parser": "1.17.1",
@@ -21,10 +22,14 @@
   },
   "devDependencies": {
     "@types/node": "^11.9.3",
-    "duniter": "1.6.x",
-    "typescript": "^3.3.3"
+    "@types/mocha": "^2.2.41",
+    "duniter": "1.7.x",
+    "typescript": "^3.3.3",
+    "mocha": "^3.4.2",
+    "should": "*",
+    "ts-node": "^3.3.0"
   },
   "peerDependencies": {
-    "duniter": "^1.6.21"
+    "duniter": "1.7.x"
   }
 }
diff --git a/routes/blockCount2.ts b/routes/blockCount2.ts
index a92abb500498b939f5c69437f34336eabcdbd4b6..3f8bf888a30150ca9668df1c23320067bf1b187b 100755
--- a/routes/blockCount2.ts
+++ b/routes/blockCount2.ts
@@ -1,12 +1,12 @@
 import {DBBlock} from 'duniter/app/lib/db/DBBlock'
 import {DataFinder} from '../lib/DataFinder'
 import {showExecutionTimes} from '../lib/MonitorExecutionTime'
+import {MonitConstants} from "../lib/constants2";
 
 const fs = require('fs')
 const timestampToDatetime = require(__dirname + '/../lib/timestampToDatetime')
 const colorScale = require(__dirname + '/../lib/colorScale')
 const getLang = require(__dirname + '/../lib/getLang')
-const constants = require(__dirname + '/../lib/constants')
 
 // Garder l'index des blocs en mémoire vive
 var blockchain: DBBlock[] = [];
@@ -15,9 +15,9 @@ var previousBlockchainTime= 0;
 
 module.exports = async (req: any, res: any, next: any) => {
   
-  var { duniterServer, monitDatasPath } = req.app.locals
+  var { monitDatasPath } = req.app.locals
 
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   
   try {
     // get GET parameters
@@ -30,7 +30,7 @@ module.exports = async (req: any, res: any, next: any) => {
     var significantPercent = req.query.significantPercent || 3;
 
     // get lg file
-    const LANG = getLang(`${__dirname}/../lg/blockCount_${req.query.lg||constants.DEFAULT_LANGUAGE}.txt`);
+    const LANG = getLang(`${__dirname}/../lg/blockCount_${req.query.lg||MonitConstants.DEFAULT_LANGUAGE}.txt`);
     
     // detect fork
     if ( blockchain.length > 0 )
diff --git a/routes/gaussianWotQuality2.ts b/routes/gaussianWotQuality2.ts
index 25513f99b348252a2a9a77708caf7db2d9efc738..d2be351ccdc6299e50d287804ebc09f35391229a 100644
--- a/routes/gaussianWotQuality2.ts
+++ b/routes/gaussianWotQuality2.ts
@@ -1,10 +1,6 @@
-"use strict";
-
 import {DataFinder} from "../lib/DataFinder";
+import {MonitConstants} from "../lib/constants2";
 
-const co = require('co')
-
-const constants = require(__dirname + '/../lib/constants')
 const membersQuality = require(__dirname + '/tools/membersQuality')
 const getLang = require(__dirname + '/../lib/getLang')
 
@@ -15,7 +11,7 @@ module.exports = async (req:any, res:any, next:any) => {
   
   var { duniterServer  } = req.app.locals
 
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   
   try {
       // get GET parameters
@@ -25,14 +21,14 @@ module.exports = async (req:any, res:any, next:any) => {
       const nextYn = (req.query.nextYn=="yes") ? "yes":"no";
 
       // get lg file
-      const LANG = getLang(`${__dirname}/../lg/gaussianWotQuality_${req.query.lg||constants.DEFAULT_LANGUAGE}.txt`);
+      const LANG = getLang(`${__dirname}/../lg/gaussianWotQuality_${req.query.lg||MonitConstants.DEFAULT_LANGUAGE}.txt`);
 
       // Définition des contantes
       const conf = duniterServer.conf;
       const qualityMax = (1/conf.xpercent);
 
       // Définition des variables
-      let lastUpgradeTimeDatas = membersQuality(constants.QUALITY_CACHE_ACTION.INIT);
+      let lastUpgradeTimeDatas = membersQuality(MonitConstants.QUALITY_CACHE_ACTION.INIT);
       let tabUidIndex = [];
       let tabMembersQuality= [];
       let tabMembersQualitySorted = [];
@@ -44,7 +40,7 @@ module.exports = async (req:any, res:any, next:any) => {
       let membersList = await dataFinder.getMembers();
 
       // Si les données de qualité n'ont jamais été calculés, le faire
-      if (lastUpgradeTimeDatas == 0 || (lastUpgradeTimeDatas+constants.MIN_WOT_QUALITY_CACHE_UPDATE_FREQ) < (Math.floor(Date.now() / 1000)) || (previousNextYn != nextYn))
+      if (lastUpgradeTimeDatas == 0 || (lastUpgradeTimeDatas+MonitConstants.MIN_WOT_QUALITY_CACHE_UPDATE_FREQ) < (Math.floor(Date.now() / 1000)) || (previousNextYn != nextYn))
       {
         // Calculer dSen
         var dSen = Math.ceil(Math.pow(membersList.length, 1 / conf.stepMax));
@@ -54,14 +50,14 @@ module.exports = async (req:any, res:any, next:any) => {
         const wot = duniterServer.dal.wotb;
 
         // Initialiser le cache des données de qualité
-        membersQuality(constants.QUALITY_CACHE_ACTION.INIT, 0, dSen, conf.stepMax, conf.xpercent, wot.memCopy());
+        membersQuality(MonitConstants.QUALITY_CACHE_ACTION.INIT, 0, dSen, conf.stepMax, conf.xpercent, wot.memCopy());
       }
 
       // Mettre a jour previousNextYn
       previousNextYn = (nextYn=="yes") ? "yes":"no";
 
       // Calculer nbSentries, limit1 and label
-      const nbSentries = (sentries=="no") ? membersList.length:membersQuality(constants.QUALITY_CACHE_ACTION.GET_SENTRIES_COUNT);
+      const nbSentries = (sentries=="no") ? membersList.length:membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_SENTRIES_COUNT);
       let limit1 = 1;
       let label = LANG['QUALITY'];
       switch (unit)
@@ -80,15 +76,15 @@ module.exports = async (req:any, res:any, next:any) => {
 
       // Récupérer le tableau de qualité des membres
       tabMembersQuality= [];
-      for (let i=0;membersQuality(constants.QUALITY_CACHE_ACTION.GET_QUALITY, i) >= 0;i++)
+      for (let i=0; i < membersList.length && membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_QUALITY, i) >= 0;i++)
       {
         if (sentries == "no")
         {
-          tabMembersQuality[i] = membersQuality(constants.QUALITY_CACHE_ACTION.GET_QUALITY, i, -1);
+          tabMembersQuality[i] = membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_QUALITY, i, -1);
         }
         else
         {
-          tabMembersQuality[i] = membersQuality(constants.QUALITY_CACHE_ACTION.GET_QUALITY, i);
+          tabMembersQuality[i] = membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_QUALITY, i);
         }
       }
 
diff --git a/routes/index.js b/routes/index.js
index f7efc130c26adcdd701377df37d8c322d9bb4de7..d49a20df63625d43056cbaeb666c29ff9313eaed 100755
--- a/routes/index.js
+++ b/routes/index.js
@@ -1,7 +1,7 @@
 const fs = require('fs')
 const express = require('express')
 
-const constants = require(__dirname + '/../lib/constants')
+const constants = require(__dirname + '/../lib/constants2').MonitConstants
 const getLang = require(__dirname + '/../lib/getLang')
 const printMenu = require(__dirname + '/../views/printMenu')
 
diff --git a/routes/members2.ts b/routes/members2.ts
index 5e9c7831bfe092b8242bbda992b1f385b65a558c..aa761d5a556d0584001ba5bb87149b754452c3e3 100755
--- a/routes/members2.ts
+++ b/routes/members2.ts
@@ -1,6 +1,6 @@
 import {DataFinder} from '../lib/DataFinder'
+import {MonitConstants} from "../lib/constants2";
 
-const constants = require(__dirname + '/../lib/constants')
 const randomInt = require(__dirname + '/../lib/randomInt')
 const timestampToDatetime = require(__dirname + '/../lib/timestampToDatetime')
 const membersQuality = require(__dirname + '/tools/membersQuality')
@@ -45,7 +45,7 @@ module.exports = async (req: any, res: any, next: any) => {
   
   var { duniterServer  } = req.app.locals
 
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   
   try {
     // Initaliser les constantes
@@ -74,15 +74,15 @@ module.exports = async (req: any, res: any, next: any) => {
 	let numberOfRandomMembers = req.query.randomCounts || 10
 
 	// Vérifier la valeur de nextYn dans le cache
-	let lastUpgradeTimeDatas = membersQuality(constants.QUALITY_CACHE_ACTION.INIT);
-	let dSenCache = membersQuality(constants.QUALITY_CACHE_ACTION.GET_D_SEN);
+	let lastUpgradeTimeDatas = membersQuality(MonitConstants.QUALITY_CACHE_ACTION.INIT);
+	let dSenCache = membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_D_SEN);
 	if (lastUpgradeTimeDatas > 0 && dSenCache > dSen) { previousNextYn == "yes"; }
     
     // Alimenter wotb avec la toile actuelle
 	const wotbInstance = duniterServer.dal.wotb;
 		
 	// Vérifier si le cache doit être Réinitialiser
-	let reinitCache = (Math.floor(Date.now() / 1000) > (membersLastUptime + constants.MIN_MEMBERS_UPDATE_FREQ));
+	let reinitCache = (Math.floor(Date.now() / 1000) > (membersLastUptime + MonitConstants.MIN_MEMBERS_UPDATE_FREQ));
 		
 		// Si changement de conditions, alors forcer le rechargement du cache s'il n'est pas vérouillé, sinon forcer les conditions à celles en mémoire
 		if (previousMode != mode || previousCentrality != centrality || previousNextYn != nextYn || previousRandomList != randomList || numberOfRandomMembers != previousRandomCounts)
@@ -145,7 +145,7 @@ module.exports = async (req: any, res: any, next: any) => {
 			if (nextYn=="yes") { dSen++; }
 
 			// réinitialiser le cache des données de qualité
-			membersQuality(constants.QUALITY_CACHE_ACTION.INIT, 0, dSen, conf.stepMax, conf.xpercent, wotbInstance.memCopy());
+			membersQuality(MonitConstants.QUALITY_CACHE_ACTION.INIT, 0, dSen, conf.stepMax, conf.xpercent, wotbInstance.memCopy());
 			
 			// Réinitialiser le cache des données de centralité
 			if (centrality=='yes')
@@ -181,7 +181,7 @@ module.exports = async (req: any, res: any, next: any) => {
 			for (let m=0;m<membersList.length;m++)
 			{
 				// Récupérer les blockstamp d'écriture et date d'expiration du membership courant du membre m
-				let tmpQueryResult = await dataFinder.membershipWrittenOnExpiresOn(membersList[m].pub);
+				let tmpQueryResult = [await dataFinder.membershipWrittenOnExpiresOn(membersList[m].pub)];
 					membershipsExpireTimeList.push(tmpQueryResult[0].expires_on);
 					
 				// Extraire le numéro de bloc du blockstamp d'écriture du membership courant
@@ -229,14 +229,14 @@ module.exports = async (req: any, res: any, next: any) => {
 				membersNbSentriesUnreached[membersList[m].uid] = parseInt(detailedDistance.nbSentries) - parseInt(detailedDistance.nbSuccess);
 
 				// Calculer la qualité du membre courant
-				if (membersQuality(constants.QUALITY_CACHE_ACTION.GET_QUALITY, membersList[m].wotb_id, (currentMemberIsSentry) ? 1 : 0) >= 1.0) {
+				if (membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_QUALITY, membersList[m].wotb_id, (currentMemberIsSentry) ? 1 : 0) >= 1.0) {
 					proportionMembersWithQualityUpper1++;
 				}
 
 				// Calculer la qualité du membre courant s'il n'y avait pas de référents (autrement di si tout les membres était référents)
 				//let membersQualityIfNoSentries = ((detailedDistanceQualityExt.nbReached/membersList.length)/conf.xpercent).toFixed(2);
 				//console.log("membersQualityIfNoSentries[%s] = %s", membersList[m].uid, membersQualityIfNoSentries);
-				if (membersQuality(constants.QUALITY_CACHE_ACTION.GET_QUALITY, membersList[m].wotb_id, -1) >= 1.0) {
+				if (membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_QUALITY, membersList[m].wotb_id, -1) >= 1.0) {
 					proportionMembersWithQualityUpper1IfNoSentries++;
 				}
 				
@@ -274,11 +274,11 @@ module.exports = async (req: any, res: any, next: any) => {
 					let tmpQueryGetUidProtagonistCert
 					if (mode == 'emitted')
 					{
-						tmpQueryGetUidProtagonistCert = await dataFinder.getProtagonist(tmpQueryCertifsList[i].receiver)
+						tmpQueryGetUidProtagonistCert = [await dataFinder.getProtagonist(tmpQueryCertifsList[i].receiver)]
 					}
 					else
 					{
-						tmpQueryGetUidProtagonistCert = await dataFinder.getProtagonist(tmpQueryCertifsList[i].issuer)
+						tmpQueryGetUidProtagonistCert = [await dataFinder.getProtagonist(tmpQueryCertifsList[i].issuer)]
 					}
 					let tmpBlockWrittenOn = tmpQueryCertifsList[i].written_on.split("-");
 					
@@ -474,7 +474,7 @@ module.exports = async (req: any, res: any, next: any) => {
 		{ 
 			for (const member of membersList)
 			{
-				tabSort.push(membersQuality(constants.QUALITY_CACHE_ACTION.GET_QUALITY, member.wotb_id));
+				tabSort.push(membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_QUALITY, member.wotb_id));
 			}
 		}
     else if (sort_by == "sigCount")
@@ -574,7 +574,7 @@ module.exports = async (req: any, res: any, next: any) => {
     // Sinon, printer le tableau html
     else
     {
-	  let meansMembersQuality = membersQuality(constants.QUALITY_CACHE_ACTION.GET_MEANS);
+	  let meansMembersQuality = membersQuality(MonitConstants.QUALITY_CACHE_ACTION.GET_MEANS);
 
       res.locals = {
 				host: req.headers.host.toString(),
diff --git a/routes/membersCount2.ts b/routes/membersCount2.ts
index 33f6dbd88bad71ddd80ae61ffbc48464fef37b64..936dfb1487de7cddcad5a19c57478c1b704f7e54 100755
--- a/routes/membersCount2.ts
+++ b/routes/membersCount2.ts
@@ -1,19 +1,19 @@
 "use strict";
 
 import {DataFinder} from "../lib/DataFinder";
+import {MonitConstants} from "../lib/constants2";
 
 const co = require('co')
 const timestampToDatetime = require(__dirname + '/../lib/timestampToDatetime')
 const getLang = require(__dirname + '/../lib/getLang')
-const constants = require(__dirname + '/../lib/constants.js')
 
 //const STEP_COUNT_MAX = 150;
 
 module.exports = async (req: any, res: any, next: any) => {
   
-  var { duniterServer, cache } = req.app.locals
+  var { cache } = req.app.locals
 
-	const dataFinder = new DataFinder(duniterServer)
+	const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   
   try {
     // get GET parameters
@@ -21,7 +21,7 @@ module.exports = async (req: any, res: any, next: any) => {
 		var pow = req.query.pow || 'no';
     
     // get lg file
-		const LANG = getLang(`${__dirname}/../lg/membersCount_${req.query.lg||constants.DEFAULT_LANGUAGE}.txt`);
+		const LANG = getLang(`${__dirname}/../lg/membersCount_${req.query.lg||MonitConstants.DEFAULT_LANGUAGE}.txt`);
     
     // get blockchain
     var blockchain = await dataFinder.getBlockWhereMedianTimeLteNoLimit(cache.endBlock[0].medianTime);
diff --git a/routes/monetaryMass2.ts b/routes/monetaryMass2.ts
index 12a111511100dd108b707b433d3a0e20a2d1856e..6438b5166a2e1a5086fff2deaa2f6ca2085e27cb 100755
--- a/routes/monetaryMass2.ts
+++ b/routes/monetaryMass2.ts
@@ -1,18 +1,16 @@
-"use strict";
-
 import {DataFinder} from "../lib/DataFinder";
+import {MonitConstants} from "../lib/constants2";
 
 const co = require('co')
 const timestampToDatetime = require(__dirname + '/../lib/timestampToDatetime')
 const getLang = require(__dirname + '/../lib/getLang')
-const constants = require(__dirname + '/../lib/constants.js')
 
 module.exports = async (req:any, res:any, next:any) => {
-  
+
   var { duniterServer  } = req.app.locals
 
-  const dataFinder = new DataFinder(duniterServer)
-  
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
+
   try {
     // get GET parameters
     var begin = req.query.begin >= 2 && req.query.begin || 2; // Default Value
@@ -21,14 +19,14 @@ module.exports = async (req:any, res:any, next:any) => {
     var format = req.query.format || 'HTML';
 
     // get lg file
-    const LANG = getLang(`${__dirname}/../lg/monetaryMass_${req.query.lg||constants.DEFAULT_LANGUAGE}.txt`);
+    const LANG = getLang(`${__dirname}/../lg/monetaryMass_${req.query.lg||MonitConstants.DEFAULT_LANGUAGE}.txt`);
 
     // calculate meanMonetaryMassAtFullCurrency
     const meanMonetaryMassAtFullCurrency = Math.ceil((1/duniterServer.conf.c)*(duniterServer.conf.dtReeval / duniterServer.conf.dt));
     
     // get beginBlock and endBlock
     var beginBlock = [await dataFinder.getBlock(begin)];
-    var endBlock = null;
+    var endBlock: any = null;
     if (end > 0)
     {
       endBlock = [await dataFinder.getBlock(end)];
@@ -57,7 +55,7 @@ module.exports = async (req:any, res:any, next:any) => {
     const currentBlockNumber = begin+blockchain.length-1;
     const currentBlockchainTimestamp = blockchain[blockchain.length-1].medianTime;
     if (end == -1) { end = begin+blockchain.length-1; }
-    
+
     // create and fill tabMembersCount, tabMonetaryMass, tabCurrency and currentDividend
     var tabCurrency = [];
     var currentDividend = 0;
@@ -97,9 +95,9 @@ module.exports = async (req:any, res:any, next:any) => {
         tabCurrency[i].monetaryMass = parseFloat((((tabCurrency[i].monetaryMassPerMembers / currentDividend) / meanMonetaryMassAtFullCurrency) * 10000).toFixed(2));
         tabCurrency[i].monetaryMassPerMembers = tabCurrency[i].monetaryMass;
       }
-      if (i>0) { tabCurrency[i].derivedChoiceMonetaryMass = parseFloat((((tabCurrency[i].monetaryMass / tabCurrency[i-1].monetaryMass) - 1.0) * 100).toFixed(2)); }
+      if (i>0) { tabCurrency[i].derivedChoiceMonetaryMass = Math.abs(parseFloat((((tabCurrency[i].monetaryMass / tabCurrency[i-1].monetaryMass) - 1.0) * 100).toFixed(2))); }
     }
-    
+
     // Si le client demande la réponse au format JSON, le faire
     if (format == 'JSON')
       res.status(200).jsonp( tabCurrency )
diff --git a/routes/tools/membersQuality.js b/routes/tools/membersQuality.js
index bc65860ee6057ca903b53339dbaec10662517681..fd8fdff36d644d0f8081d390fe8570db3c53add6 100644
--- a/routes/tools/membersQuality.js
+++ b/routes/tools/membersQuality.js
@@ -1,4 +1,4 @@
-const constants = require(__dirname + '/../../lib/constants')
+const constants = require(__dirname + '/../../lib/constants2').MonitConstants
 
 // membersQuality cache
 var lastUpgradeTime = 0;
diff --git a/routes/willMembers2.ts b/routes/willMembers2.ts
index 350c15a29aca8eef98f925d61c430a1573188765..31f92ca59ea0771f7ecda34567052d607efbf915 100755
--- a/routes/willMembers2.ts
+++ b/routes/willMembers2.ts
@@ -3,8 +3,8 @@ import {DBMembership} from 'duniter/app/lib/dal/sqliteDAL/MembershipDAL'
 import {DBIdentity} from 'duniter/app/lib/dal/sqliteDAL/IdentityDAL'
 import {showExecutionTimes} from '../lib/MonitorExecutionTime'
 import {DataFinder} from '../lib/DataFinder'
+import {MonitConstants} from "../lib/constants2";
 
-const constants = require(__dirname + '/../lib/constants')
 const timestampToDatetime = require(__dirname + '/../lib/timestampToDatetime')
 
 // Préserver les résultats en cache
@@ -27,11 +27,11 @@ module.exports = async (req: any, res: any, next: any) => {
   const locals: { duniterServer: Server } = req.app.locals
 
   const duniterServer = locals.duniterServer
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
 
   try {
     // get blockchain timestamp
-    let resultQueryCurrentBlock = await dataFinder.getCurrentBlockOrNull();
+    let resultQueryCurrentBlock: any = await dataFinder.getCurrentBlockOrNull();
     const currentBlockchainTimestamp = resultQueryCurrentBlock.medianTime;
     const currentMembersCount = resultQueryCurrentBlock.membersCount;
     const currentBlockNumber = resultQueryCurrentBlock.number;
@@ -60,7 +60,7 @@ module.exports = async (req: any, res: any, next: any) => {
 
 
     // Vérifier si le cache doit être Réinitialiser
-    let reinitCache = (Math.floor(Date.now() / 1000) > (willMembersLastUptime + constants.MIN_WILLMEMBERS_UPDATE_FREQ));
+    let reinitCache = (Math.floor(Date.now() / 1000) > (willMembersLastUptime + MonitConstants.MIN_WILLMEMBERS_UPDATE_FREQ));
 
     // Si le cache willMembers est dévérouillé, le vérouiller, sinon ne pas réinitialiser le cache
     if (reinitCache && !lockWillMembers) {
@@ -360,7 +360,7 @@ module.exports = async (req: any, res: any, next: any) => {
         {
           // Tester la présence de l'adhésion
           let membership: DBMembership|null = null
-          const pendingMembershipsOfIdty: DBMembership[] = await duniterServer.dal.msDAL.getPendingINOfTarget(identitiesList[idMax].hash);
+          const pendingMembershipsOfIdty: DBMembership[] = await duniterServer.dal.msDAL.getPendingINOfTarget(identitiesList[idMax].hash as string);
           for (const ms of pendingMembershipsOfIdty)
           {
             if (!membership && ms.expires_on > currentBlockchainTimestamp)
diff --git a/routes/wotex2.ts b/routes/wotex2.ts
index 75fbe08e1737e29491bef8b6dbac38cbe0ed2c5f..4242a57114a5a78381e9e03c72de1bcc24f01037 100755
--- a/routes/wotex2.ts
+++ b/routes/wotex2.ts
@@ -1,10 +1,8 @@
-"use strict";
-
 import {DataFinder} from "../lib/DataFinder";
+import {MonitConstants} from "../lib/constants2";
 
 const _ = require('underscore')
 const getLang = require(__dirname + '/../lib/getLang')
-const constants = require(__dirname + '/../lib/constants.js')
 
 const MAX_STEP_LOOK = 7
 
@@ -12,7 +10,7 @@ module.exports = async (req:any, res:any, next:any) => {
   
   var { duniterServer  } = req.app.locals
 
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
 
     try {
       // get GET parameters
@@ -20,7 +18,7 @@ module.exports = async (req:any, res:any, next:any) => {
       var help = req.query.help || 'yes';
       
       // get lg file
-      const LANG = getLang(`${__dirname}/../lg/wotex_${req.query.lg||constants.DEFAULT_LANGUAGE}.txt`);
+      const LANG = getLang(`${__dirname}/../lg/wotex_${req.query.lg||MonitConstants.DEFAULT_LANGUAGE}.txt`);
       
       // Trouve les points de contrôle efficacement grâce au module C (nommé "wotb")
       const wotb = duniterServer.dal.wotb.memCopy();
diff --git a/run.js b/run.js
index 3f097ba8ad0d0c9f57b36af2a7ccddd5318cb351..4cac84304c19bbc1fb56108af468b3ef6df68c78 100644
--- a/run.js
+++ b/run.js
@@ -1 +1 @@
-require('duniter').statics.quickRun('./index.js')
\ No newline at end of file
+require('duniter').Statics.quickRun('./index.js')
diff --git a/test/indexing.ts b/test/indexing.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f5f94bc99c3515e03711f7e6f46863afe991b4f1
--- /dev/null
+++ b/test/indexing.ts
@@ -0,0 +1,97 @@
+// Source file from duniter: Crypto-currency software to manage libre currency such as Ğ1
+// Copyright (C) 2018  Cedric Moreau <cem.moreau@gmail.com>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+
+import {assertEqual, assertTrue, writeBasicTestWithConfAnd2Users} from "duniter/test/integration/tools/test-framework"
+import {CommonConstants} from "duniter/app/lib/common-libs/constants";
+import {DataFinder, initMonitDB} from "../lib/DataFinder";
+import {MonitDBBlock} from "../lib/SqliteBlockchain";
+
+describe('Indexing blockchain', () => writeBasicTestWithConfAnd2Users({
+  sigQty: 1,
+  medianTimeBlocks: 1,
+  forksize: 2,
+}, (test) => {
+
+  const now = 1500000000
+
+  before(() => {
+    CommonConstants.BLOCKS_IN_MEMORY_MAX = 3 // Must be > forkWindowSize
+  })
+
+  test('Duniter blockchain init', async (s1, cat, tac) => {
+    await cat.createIdentity()
+    await tac.createIdentity()
+    await cat.cert(tac)
+    await tac.cert(cat)
+    await cat.join()
+    await tac.join()
+    for (let i = 0; i < 6; i++) {
+      await s1.commit({ time: now })
+    }
+    const head = await s1.commit({ time: now })
+    assertEqual(head.number, 6);
+    (s1.dal.blockchainArchiveDAL as any)._chunkSize = 2 // Archive 2 blocks per file
+  })
+
+  test('first indexing by monit', async (s1) => {
+    // Simulate that archiving was called on Duniter once (during sync)
+    await s1.dal.archiveBlocks()
+    // Now test Monit
+    await initMonitDB(s1._server, true)
+    assertEqual(await DataFinder.getInstance().getHighestBlockNumber(), 6) // Current block in Monit = current in Duniter
+    assertEqual(await DataFinder.getInstance().getHighestArchivedBlockNumber(), 4) // Highest archived = current - forksize
+  })
+
+  test('second indexing by monit after adding some blocks to the blockchain', async (s1) => {
+    for (let i = 0; i < 3; i++) {
+      await s1.commit({ time: now })
+    }
+    // Now test Monit
+    await DataFinder.getInstance().index()
+    assertEqual(await DataFinder.getInstance().getHighestBlockNumber(), 9)
+    assertEqual(await DataFinder.getInstance().getHighestArchivedBlockNumber(), 7)
+  })
+
+  test('third indexing taking care of forks', async (s1) => {
+
+    // Make a #10 block
+    const b10v1Duniter = await s1.commit({ time: now })
+    await DataFinder.getInstance().index()
+    const b10v1Monit = await DataFinder.getInstance().getHighestBlock() as MonitDBBlock
+    assertEqual(await DataFinder.getInstance().getHighestBlockNumber(), 10)
+    assertEqual(b10v1Monit.number, 10)
+    assertEqual(b10v1Monit.hash, b10v1Duniter.hash)
+    assertEqual(await DataFinder.getInstance().getHighestArchivedBlockNumber(), 8) // Archived level = 10 - forksize
+
+    // Revert
+    await s1.revert()
+
+    // Re-commit
+    const b10v2Duniter = await s1.commit({ time: now + 1 })
+    await DataFinder.getInstance().index()
+    const b10v2Monit = await DataFinder.getInstance().getHighestBlock() as MonitDBBlock
+    assertEqual(await DataFinder.getInstance().getHighestBlockNumber(), 10)
+    assertEqual(b10v2Monit.number, 10)
+    assertEqual(b10v2Monit.hash, b10v2Duniter.hash)
+    assertEqual(await DataFinder.getInstance().getHighestArchivedBlockNumber(), 8) // Archived level = 10 - forksize
+
+    // assertions
+    assertTrue(b10v1Duniter.number === b10v2Duniter.number)
+    assertTrue(b10v1Duniter.hash !== b10v2Duniter.hash)
+    assertTrue(b10v1Monit.number === b10v2Monit.number)
+    assertTrue(b10v1Monit.hash !== b10v2Monit.hash)
+    assertTrue(b10v1Monit.hash === b10v1Duniter.hash)
+    assertTrue(b10v2Monit.hash === b10v2Duniter.hash)
+  })
+}))
+
diff --git a/test/mocha.opts b/test/mocha.opts
new file mode 100644
index 0000000000000000000000000000000000000000..21039253bb7916b4aff02e306631ef57795f1f0b
--- /dev/null
+++ b/test/mocha.opts
@@ -0,0 +1,8 @@
+--compilers ts-node/register
+--require source-map-support/register
+--full-trace
+--growl
+--timeout 60000
+--recursive
+-R spec
+test/
diff --git a/tsconfig.json b/tsconfig.json
index 5cad0e30a8a110973b49d9aabd086ad9c086f856..84a98f52f36aa28b8697d453fbb6581326774c2c 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -15,7 +15,8 @@
   },
   "include": [
     "routes",
-    "lib"
+    "lib",
+    "test"
   ],
   "compileOnSave": true
 }
diff --git a/views/printMenu.js b/views/printMenu.js
index 82854f66fd045b972eaf193494820c11f1ef7efb..d0bc7de57519063ac3d4767ae08385ea332bf8dc 100644
--- a/views/printMenu.js
+++ b/views/printMenu.js
@@ -1,4 +1,4 @@
-const constants = require(__dirname + '/../lib/constants')
+const constants = require(__dirname + '/../lib/constants2').MonitConstants
 
 module.exports = function printMenu(lang, help, location) {
   let htmlMenu = '<ul class="menu">';//'<table align="center" width="100%"><tr>';