diff --git a/app/lib/computation/BlockchainContext.ts b/app/lib/computation/BlockchainContext.ts
index a1f3dcbc6cceb61cefde5acc238cad638d394936..84173d9d41140ee0586bc12f346466ad216d4764 100644
--- a/app/lib/computation/BlockchainContext.ts
+++ b/app/lib/computation/BlockchainContext.ts
@@ -42,14 +42,14 @@ export class BlockchainContext {
    */
   private vHEAD_1:any
 
-  private HEADrefreshed: Promise<any> | null = Promise.resolve();
+  private HEADrefreshed: Promise<void> = Promise.resolve();
 
   /**
    * Refresh the virtual HEAD value for determined variables of the next coming block, avoiding to recompute them
    * each time a new block arrives to check if the values are correct. We can know and store them early on, in vHEAD.
    */
   private refreshHead(): Promise<void> {
-    this.HEADrefreshed = (async (): Promise<void> => {
+    this.HEADrefreshed = (async () => {
       this.vHEAD_1 = await this.dal.head(1);
       // We suppose next block will have same version #, and no particular data in the block (empty index)
       let block;
@@ -122,7 +122,7 @@ export class BlockchainContext {
 
   private async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null): Promise<any> {
     const block = await this.blockchain.pushTheBlock(obj, index, HEAD, this.conf, this.dal, this.logger)
-    this.vHEAD_1 = this.vHEAD = this.HEADrefreshed = null
+    this.vHEAD_1 = this.vHEAD = null
     return block
   }
 
@@ -150,6 +150,10 @@ export class BlockchainContext {
     const block = forks[0];
     await this.checkAndAddBlock(BlockDTO.fromJSONObject(block))
     this.logger.debug('Applied block #%s', block.number);
+    if (block.number % 100 === 0) {
+      // Database trimming
+      await this.dal.loki.flushAndTrimData()
+    }
   }
 
   async checkAndAddBlock(block:BlockDTO) {
diff --git a/app/lib/computation/QuickSync.ts b/app/lib/computation/QuickSync.ts
index 4ebe5a28823a08b4bd4c25c78a3f3399a1896859..071e07027bb414d2eb4ae848facb416b0fd1cc3c 100644
--- a/app/lib/computation/QuickSync.ts
+++ b/app/lib/computation/QuickSync.ts
@@ -266,5 +266,7 @@ export class QuickSynchronizer {
         // sync_currConf = {};
       }
     }
+
+    await this.dal.loki.commitData()
   }
 }
diff --git a/app/lib/dal/drivers/LokiFsAdapter.ts b/app/lib/dal/drivers/LokiFsAdapter.ts
index de478b937fab2014c438cb139dc601788a4ee85b..d19e58bf907bbae08a6b64d040f1b22d2cb34b9e 100644
--- a/app/lib/dal/drivers/LokiFsAdapter.ts
+++ b/app/lib/dal/drivers/LokiFsAdapter.ts
@@ -11,10 +11,11 @@
   serialization.
 */
 
-import {RealFS} from "../../system/directory"
+import {FileSystem} from "../../system/directory"
 import {DataErrors} from "../../common-libs/errors"
 import {CFSCore} from "../fileDALs/CFSCore"
 import {getNanosecondsTime} from "../../../ProcessCpuProfiler"
+import {NewLogger} from "../../logger"
 
 const fs = require('fs');
 const readline = require('readline');
@@ -33,6 +34,7 @@ interface IteratorResult<T> {
 
 export interface DBCommit {
   indexFile:string,
+  changes: string[]
   collections: {
     [coll:string]: string
   }
@@ -47,19 +49,45 @@ export class LokiFsAdapter {
   protected dbref = null
   protected dirtyPartitions: string[] = [];
 
-  constructor(dbDir:string) {
-    this.cfs = new CFSCore(dbDir, RealFS())
+  constructor(dbDir:string, fs:FileSystem) {
+    this.cfs = new CFSCore(dbDir, fs)
   }
 
   /**
-   * Main method to manually pilot the DB saving to disk.
+   * Main method to manually pilot the full DB saving to disk.
    * @param loki
    * @returns {Promise}
    */
-  async flush(loki:any) {
+  async dbDump(loki:any) {
     return new Promise(res => loki.saveDatabaseInternal(res))
   }
 
+  /**
+   * Flushes the DB changes to disk.
+   * @param loki
+   * @returns {Promise<number>} The number of changes detected.
+   */
+  async flush(loki:any): Promise<number> {
+    // If the database already has a commit file: incremental changes
+    if (await this.cfs.exists(LokiFsAdapter.COMMIT_FILE)) {
+      const commit = (await this.cfs.readJSON(LokiFsAdapter.COMMIT_FILE)) as DBCommit
+      const changesFilename = 'changes.' + getNanosecondsTime() + ".json"
+      const changes = JSON.parse(loki.serializeChanges())
+      await this.cfs.writeJSON(changesFilename, changes)
+      // Mark the changes as commited
+      commit.changes.push(changesFilename)
+      await this.cfs.writeJSON(LokiFsAdapter.COMMIT_FILE, commit)
+      // Forget about the changes now that we saved them
+      loki.clearChanges()
+      return changes.length
+    } else {
+      // Otherwise we make a full dump
+      await this.dbDump(loki)
+      loki.clearChanges()
+      return 0
+    }
+  }
+
   /**
    *
    * Method indirectly called by `flush`.
@@ -81,6 +109,7 @@ export class LokiFsAdapter {
     // Prepare the commit: inherit from existing commit
     let commit:DBCommit = {
       indexFile: 'index.db.' + getNanosecondsTime() + ".json",
+      changes: [],
       collections: {}
     }
     if (await this.cfs.exists(LokiFsAdapter.COMMIT_FILE)) {
@@ -193,6 +222,7 @@ export class LokiFsAdapter {
 
       for(idx=0; idx < dbcopy.collections.length; idx++) {
         dbcopy.collections[idx].data = [];
+        dbcopy.collections[idx].changes = [];
       }
 
       yield dbcopy.serialize({
@@ -219,7 +249,7 @@ export class LokiFsAdapter {
 
   /**
    *
-   * Automatically called by Loki.js on startup.
+   * Automatically called on startup.
    *
    * Loki persistence adapter interface function which outputs un-prototype db object reference to load from.
    *
@@ -242,7 +272,9 @@ export class LokiFsAdapter {
 
     // make sure file exists
     const dbname = this.cfs.getPath(commitObj.indexFile)
-    return new Promise((res, rej) => {
+
+    // Trimmed data first
+    await new Promise((res, rej) => {
       fs.stat(dbname, function (err:any, stats:any) {
         if (!err && stats.isFile()) {
           instream = fs.createReadStream(dbname);
@@ -274,6 +306,27 @@ export class LokiFsAdapter {
         }
       })
     })
+
+    // Changes data
+    for (const changeFile of commitObj.changes) {
+      const changes = await this.cfs.readJSON(changeFile)
+      let len = changes.length
+      for (let i = 1; i <= len; i++) {
+        const c = changes[i - 1]
+        const coll = loki.getCollection(c.name)
+        if (c.operation === 'I') {
+          c.obj.$loki = undefined
+          await coll.insert(c.obj)
+        }
+        else if (c.operation === 'U') {
+          await coll.update(c.obj)
+        }
+        else if (c.operation === 'R') {
+          await coll.remove(c.obj)
+        }
+        NewLogger().trace('[loki] Processed change %s (%s/%s)', c.name, i, len)
+      }
+    }
   };
 
 
diff --git a/app/lib/dal/drivers/LokiJsDriver.ts b/app/lib/dal/drivers/LokiJsDriver.ts
index 1be76ad45e773a2d62bac5cc2b9cc83d60b426c0..b895cfefcd42d74e9efc1ff111ed8b00167c2403 100644
--- a/app/lib/dal/drivers/LokiJsDriver.ts
+++ b/app/lib/dal/drivers/LokiJsDriver.ts
@@ -1,4 +1,5 @@
 import {LokiFsAdapter} from "./LokiFsAdapter"
+import {MemFS, RealFS} from "../../system/directory"
 
 const loki = require('lokijs')
 
@@ -10,7 +11,7 @@ export class LokiJsDriver {
   constructor(
     private dbFilePath:string = ''
   ) {
-    this.adapter = new LokiFsAdapter(dbFilePath)
+    this.adapter = new LokiFsAdapter(dbFilePath, dbFilePath ? RealFS() : MemFS())
     this.lokiInstance = new loki(dbFilePath + '/loki.db' || 'mem' + Date.now() + '.db', {
       adapter: this.adapter
     })
@@ -30,4 +31,8 @@ export class LokiJsDriver {
   async commitData() {
     return this.adapter.flush(this.lokiInstance)
   }
+
+  async flushAndTrimData() {
+    return this.adapter.dbDump(this.lokiInstance)
+  }
 }
diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts
index 171dfae91ab5e5f9660f4e7370eeb0057f8dbb2b..775fd9e9b04e3d8f948c4b3720b201fce416129b 100644
--- a/app/lib/dal/fileDAL.ts
+++ b/app/lib/dal/fileDAL.ts
@@ -19,7 +19,12 @@ import {BlockDTO} from "../dto/BlockDTO"
 import {DBHead} from "../db/DBHead"
 import {DBIdentity, IdentityDAL} from "./sqliteDAL/IdentityDAL"
 import {
-  CindexEntry, FullCindexEntry, FullMindexEntry, FullSindexEntry, IindexEntry, IndexEntry,
+  CindexEntry,
+  FullCindexEntry,
+  FullMindexEntry,
+  FullSindexEntry,
+  IindexEntry,
+  IndexEntry,
   SindexEntry
 } from "../indexer"
 import {DBPeer, PeerDAL} from "./sqliteDAL/PeerDAL"
@@ -37,7 +42,7 @@ import {MetaDAL} from "./sqliteDAL/MetaDAL"
 import {DataErrors} from "../common-libs/errors"
 import {BasicRevocableIdentity, IdentityDTO} from "../dto/IdentityDTO"
 import {BlockDAL} from "./sqliteDAL/BlockDAL"
-import {Directory, FileSystem} from "../system/directory"
+import {FileSystem} from "../system/directory"
 import {WoTBInstance} from "../wot"
 import {IIndexDAO} from "./indexDAL/abstract/IIndexDAO"
 import {LokiIIndex} from "./indexDAL/loki/LokiIIndex"
@@ -552,6 +557,11 @@ export class FileDAL {
     return idty
   }
 
+  // Duniter-UI dependency
+  async getWrittenIdtyByPubkey(pub:string) {
+    return !!(await this.iindexDAL.getFromPubkey(pub))
+  }
+
   async getWrittenIdtyByPubkeyForExistence(uid:string) {
     return !!(await this.iindexDAL.getFromPubkey(uid))
   }
diff --git a/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts b/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts
index fa992be09000ec490e67873b28b906e8038ae52b..9f99c06e0f2233e9f01b45efd05aac32d4732f99 100644
--- a/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts
@@ -23,6 +23,8 @@ export interface BlockchainDAO extends GenericDAO<DBBlock> {
 
   lastBlockOfIssuer(issuer:string): Promise<DBBlock|null>
 
+  lastBlockWithDividend(): Promise<DBBlock|null>
+
   getCountOfBlocksIssuedBy(issuer:string): Promise<number>
 
   saveBunch(blocks:DBBlock[]): Promise<void>
@@ -30,4 +32,8 @@ export interface BlockchainDAO extends GenericDAO<DBBlock> {
   dropNonForkBlocksAbove(number: number): Promise<void>
 
   setSideBlock(number:number, previousBlock:DBBlock|null): Promise<void>
+
+  removeForkBlock(number:number): Promise<void>
+
+  removeForkBlockAboveOrEqual(number:number): Promise<void>
 }
diff --git a/app/lib/dal/indexDAL/abstract/TxsDAO.ts b/app/lib/dal/indexDAL/abstract/TxsDAO.ts
index 40baff42a7d0724e04378a5413c83a9e0ca81ab1..f4189f5920daa6ebb6d7efba7f9e7cdaef08d4c3 100644
--- a/app/lib/dal/indexDAL/abstract/TxsDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/TxsDAO.ts
@@ -27,5 +27,7 @@ export interface TxsDAO extends GenericDAO<DBTx> {
 
   removeTX(hash:string): Promise<DBTx|null>
 
+  removeAll(): Promise<void>
+
   sandbox:SandBox<{ issuers: string[], output_base:number, output_amount:number }>
 }
diff --git a/app/lib/dal/indexDAL/loki/LokiBlockchain.ts b/app/lib/dal/indexDAL/loki/LokiBlockchain.ts
index 99ca9e0f4fab059e1017d486ea65c24e4606cbc3..4fe1109be0de5d152f7fd0c53dc10de0c7f1cc54 100644
--- a/app/lib/dal/indexDAL/loki/LokiBlockchain.ts
+++ b/app/lib/dal/indexDAL/loki/LokiBlockchain.ts
@@ -55,7 +55,6 @@ export class LokiBlockchain extends LokiIndex<DBBlock> implements BlockchainDAO
   }
 
   async insert(record: DBBlock): Promise<void> {
-    this.current = record
     return super.insert(record);
   }
 
@@ -63,6 +62,26 @@ export class LokiBlockchain extends LokiIndex<DBBlock> implements BlockchainDAO
     // Never remove blocks
   }
 
+  async removeForkBlock(number:number): Promise<void> {
+    await this.collection
+      .chain()
+      .find({
+        fork: true,
+        number
+      })
+      .remove()
+  }
+
+  async removeForkBlockAboveOrEqual(number:number): Promise<void> {
+    await this.collection
+      .chain()
+      .find({
+        fork: true,
+        number: { $gte: number }
+      })
+      .remove()
+  }
+
   async getAbsoluteBlock(number: number, hash: string): Promise<DBBlock | null> {
     return this.collection
       .chain()
@@ -113,7 +132,7 @@ export class LokiBlockchain extends LokiIndex<DBBlock> implements BlockchainDAO
       .find({
         fork: true,
         number: { $between: [numberStart, maxNumber] },
-        medianTime: { $gt: medianTimeStart }
+        medianTime: { $gte: medianTimeStart }
       })
       .simplesort('number')
       .data()
@@ -130,18 +149,46 @@ export class LokiBlockchain extends LokiIndex<DBBlock> implements BlockchainDAO
       .data()[0]
   }
 
+  async lastBlockWithDividend(): Promise<DBBlock | null> {
+    return this.collection
+      .chain()
+      .find({
+        fork: false,
+        dividend: { $gt: 0 }
+      })
+      .simplesort('number', true)
+      .data()[0]
+  }
+
   async saveBlock(block: DBBlock): Promise<DBBlock> {
-    block.fork = false
-    await this.insert(block)
     if (!this.current || this.current.number < block.number) {
-      this.current = block
+      this.current = block;
     }
-    return block
+    return this.insertOrUpdate(block, false)
   }
 
   async saveSideBlock(block: DBBlock): Promise<DBBlock> {
-    block.fork = true
-    await this.insert(block)
+    return this.insertOrUpdate(block, true)
+  }
+
+  async insertOrUpdate(block: DBBlock, isFork:boolean): Promise<DBBlock> {
+    block.fork = isFork
+    const conditions = { number: block.number, hash: block.hash }
+    const existing = (await this.findRaw(conditions))[0]
+    if (existing && existing.fork !== isFork) {
+      // Existing block: only the fork flag and monetary fields (monetaryMass, dividend) may change
+      this.collection
+        .chain()
+        .find(conditions)
+        .update(b => {
+          b.fork = isFork
+          b.monetaryMass = block.monetaryMass
+          b.dividend = block.dividend
+        })
+    }
+    else if (!existing) {
+      await this.insert(block)
+    }
     return block
   }
 
@@ -164,6 +211,14 @@ export class LokiBlockchain extends LokiIndex<DBBlock> implements BlockchainDAO
       .update((b:DBBlock) => {
         b.fork = true
       })
+    // Also update the cache if concerned
+    if (this.current && this.current.number === number) {
+      if (previousBlock && this.current.previousHash === previousBlock.hash) {
+        this.current = previousBlock
+      } else {
+        this.current = null
+      }
+    }
   }
 
 }
diff --git a/app/lib/dal/indexDAL/loki/LokiIndex.ts b/app/lib/dal/indexDAL/loki/LokiIndex.ts
index 2c6d45cf002f3c6d1e6479d09bdaa4ebe6376e13..8e978153a1f6d1e8ddad18f0cb71d418c0473c2e 100644
--- a/app/lib/dal/indexDAL/loki/LokiIndex.ts
+++ b/app/lib/dal/indexDAL/loki/LokiIndex.ts
@@ -1,8 +1,4 @@
 import {LokiCollection} from "./LokiTypes"
-import {IindexEntry} from "../../../indexer"
-import {CIndexDAO} from "../abstract/CIndexDAO"
-import {ReduceableDAO} from "../abstract/ReduceableDAO"
-import {Initiable} from "../../sqliteDAL/Initiable"
 import {GenericDAO} from "../abstract/GenericDAO"
 import {NewLogger} from "../../../logger"
 import {LokiProxyCollection} from "./LokiCollection"
@@ -29,7 +25,10 @@ export abstract class LokiIndex<T extends IndexData> implements GenericDAO<T> {
   }
 
   public triggerInit() {
-    const coll = this.loki.addCollection(this.collectionName, { indices: this.indices })
+    const coll = this.loki.addCollection(this.collectionName, {
+      indices: this.indices,
+      disableChangesApi: false
+    })
     this.collection = new LokiProxyCollection(coll, this.collectionName)
     this.resolveCollection()
   }
diff --git a/app/lib/dal/indexDAL/loki/LokiTransactions.ts b/app/lib/dal/indexDAL/loki/LokiTransactions.ts
index dc8997aeff6d7e5fee26a2692dad6777be8b0e45..1987b4a7102ae109cb4368c358e1e3d1c94b5e32 100644
--- a/app/lib/dal/indexDAL/loki/LokiTransactions.ts
+++ b/app/lib/dal/indexDAL/loki/LokiTransactions.ts
@@ -1,7 +1,4 @@
 import {LokiIndex} from "./LokiIndex"
-import {NewLogger} from "../../../logger"
-import {BlockchainDAO} from "../abstract/BlockchainDAO"
-import {DBBlock} from "../../../db/DBBlock"
 import {DBTx} from "../../sqliteDAL/TxsDAL"
 import {TxsDAO} from "../abstract/TxsDAO"
 import {SandBox} from "../../sqliteDAL/SandBox"
@@ -53,7 +50,7 @@ export class LokiTransactions extends LokiIndex<DBTx> implements TxsDAO {
     dbTx.written = true
     dbTx.removed = false
     dbTx.hash = tx.getHash()
-    await this.insert(dbTx)
+    await this.insertOrUpdate(dbTx)
     return dbTx
   }
 
@@ -61,7 +58,30 @@ export class LokiTransactions extends LokiIndex<DBTx> implements TxsDAO {
     dbTx.received = moment().unix()
     dbTx.written = false
     dbTx.removed = false
-    await this.insert(dbTx)
+    await this.insertOrUpdate(dbTx)
+    return dbTx
+  }
+
+  async insertOrUpdate(dbTx: DBTx): Promise<DBTx> {
+    const conditions = { hash: dbTx.hash }
+    const existing = (await this.findRaw(conditions))[0]
+    if (existing) {
+      // Existing transaction: refresh its status/metadata fields in place
+      this.collection
+        .chain()
+        .find(conditions)
+        .update(tx => {
+          tx.block_number = dbTx.block_number
+          tx.time = dbTx.time
+          tx.received = dbTx.received
+          tx.written = dbTx.written
+          tx.removed = dbTx.removed
+          tx.hash = dbTx.hash
+        })
+    }
+    else if (!existing) {
+      await this.insert(dbTx)
+    }
     return dbTx
   }
 
@@ -112,14 +132,24 @@ export class LokiTransactions extends LokiIndex<DBTx> implements TxsDAO {
   }
 
   async removeTX(hash: string): Promise<DBTx | null> {
-    const tx = (await this.findRaw({
-      hash: hash
-    }))[0]
-    if (tx) {
-      tx.removed = true;
-      await this.insert(tx)
-    }
-    return tx
+    let txRemoved = null
+    await this.collection
+      .chain()
+      .find({
+        hash: hash
+      })
+      .update(tx => {
+        tx.removed = true
+        txRemoved = tx
+      })
+    return txRemoved
+  }
+
+  async removeAll(): Promise<void> {
+    await this.collection
+      .chain()
+      .find({})
+      .remove()
   }
 
   async trimExpiredNonWrittenTxs(limitTime: number): Promise<void> {
diff --git a/app/lib/db/DBBlock.ts b/app/lib/db/DBBlock.ts
index abcd5542166188e7f5191d363456dadf7d024c04..05e54ba8fca03f753d2153772d1b9f01666a45b9 100644
--- a/app/lib/db/DBBlock.ts
+++ b/app/lib/db/DBBlock.ts
@@ -68,7 +68,7 @@ export class DBBlock {
     dbb.previousHash = b.previousHash
     dbb.issuer = b.issuer
     dbb.previousIssuer = b.previousIssuer
-    dbb.dividend = b.dividend
+    dbb.dividend = (b.dividend === null || b.dividend === undefined ? b.dividend : parseInt(String(b.dividend)))
     dbb.time = b.time
     dbb.powMin = b.powMin
     dbb.unitbase = b.unitbase
diff --git a/app/lib/other_constants.ts b/app/lib/other_constants.ts
index 6fc9b81f4f91aef227edf313f2cbd1530b331053..d10528e99d6325e1c5fe5ced474a1164f9795cfc 100644
--- a/app/lib/other_constants.ts
+++ b/app/lib/other_constants.ts
@@ -14,7 +14,7 @@
 export const OtherConstants = {
 
   MUTE_LOGS_DURING_UNIT_TESTS: false,
-  SQL_TRACES: true,
+  SQL_TRACES: false,
 
   BC_EVENT: {
     SWITCHED: 'switched',
diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts
index 0031143d0cc3f38815b83a4f0cdeb622a394005b..931d98a9b7bf1bdb2b55fe2809752aa11f8d2dee 100644
--- a/app/lib/system/directory.ts
+++ b/app/lib/system/directory.ts
@@ -80,7 +80,7 @@ export const RealFS = (): FileSystem => {
   return new QioFileSystem(qfs)
 }
 
-export const MockFS = (initialTree:{ [folder:string]: { [file:string]: string }} = {}): FileSystem => {
+export const MemFS = (initialTree:{ [folder:string]: { [file:string]: string }} = {}): FileSystem => {
   return new QioFileSystem(require('q-io/fs-mock')(initialTree))
 }
 
@@ -99,7 +99,7 @@ export const Directory = {
     const home = theHome || Directory.getHome()
     const params = {
       home: home,
-      fs: isMemory ? MockFS() : RealFS()
+      fs: isMemory ? MemFS() : RealFS()
     }
     if (makeTree) {
       await params.fs.fsMakeDirectory(home)
diff --git a/app/modules/config.ts b/app/modules/config.ts
index e37bd9258cac3b72fc67dcc3b47566a3abc827a5..88556edb73898cc0a7ff3de74ee75f94dca312c3 100644
--- a/app/modules/config.ts
+++ b/app/modules/config.ts
@@ -15,6 +15,9 @@
 import {ConfDTO} from "../lib/dto/ConfDTO"
 import {Server} from "../../server"
 import {CommonConstants} from "../lib/common-libs/constants"
+import {Directory} from "../lib/system/directory"
+
+const _ = require('underscore')
 
 module.exports = {
   duniter: {
@@ -35,6 +38,98 @@ module.exports = {
       desc: 'Register configuration in database',
       // The command does nothing particular, it just stops the process right after configuration phase is over
       onConfiguredExecute: (server:Server, conf:ConfDTO) => Promise.resolve(conf)
+    }, {
+      name: 'parse-logs',
+      desc: 'Extract data from logs.',
+      logs: true,
+      onConfiguredExecute: async (server:Server, conf:ConfDTO) => {
+        const fs = await Directory.getHomeFS(false, Directory.INSTANCE_HOME, false)
+        const lines = (await fs.fs.fsReadFile(Directory.INSTANCE_HOMELOG_FILE)).split('\n')
+        const aggregates = _.uniq(
+          lines
+          .map(l => l.match(/: (\[\w+\](\[\w+\])*)/))
+          .filter(l => l)
+          .map((l:string[]) => l[1])
+        )
+        console.log(aggregates)
+        const results = aggregates.map((a:string) => {
+          return {
+            name: a,
+            time: lines
+              .filter(l => l.match(new RegExp(a
+                .replace(/\[/g, '\\[')
+                .replace(/\]/g, '\\]')
+              )))
+              .map(l => {
+                const m = l.match(/ (\d+)(\.\d+)?(ms|µs)( \d+)?$/)
+                if (!m) {
+                  throw Error('Wrong match')
+                }
+                return m
+              })
+              .map(match => {
+                return {
+                  qty: parseInt(match[1]),
+                  unit: match[3],
+                }
+              })
+              .reduce((sumMicroSeconds, entry) => {
+                return sumMicroSeconds + (entry.qty * (entry.unit === 'ms' ? 1000 : 1))
+              }, 0) / 1000000
+          }
+        })
+        const root:Tree = {
+          name: 'root',
+          leaves: {}
+        }
+        for (const r of results) {
+          recursiveReduce(root, r.name, r.time)
+        }
+        recursiveDump(root)
+      }
     }]
   }
 }
+
+interface Leaf {
+  name:string
+  value:number
+}
+
+interface Tree {
+  name:string
+  leaves: { [k:string]: Tree|Leaf }
+}
+
+function recursiveReduce(tree:Tree, path:string, duration:number) {
+  if (path.match(/\]\[/)) {
+    const m = (path.match(/^(\[\w+\])(\[.+)/) as string[])
+    const key = m[1]
+    if (!tree.leaves[key]) {
+      tree.leaves[key] = {
+        name: key,
+        leaves: {}
+      }
+    }
+    recursiveReduce(tree.leaves[key] as Tree, m[2], duration)
+  } else {
+    tree.leaves[path] = {
+      name: path,
+      value: duration
+    }
+  }
+}
+
+function recursiveDump(tree:Tree, level = -1) {
+  if (level >= 0) {
+    console.log("  ".repeat(level), tree.name)
+  }
+  for (const k of Object.keys(tree.leaves)) {
+    const element = tree.leaves[k]
+    if ((<Tree>element).leaves) {
+      recursiveDump(<Tree>element, level + 1)
+    } else {
+      console.log("  ".repeat(level + 1), (<Leaf>element).name, (<Leaf>element).value + 's')
+    }
+  }
+}
\ No newline at end of file
diff --git a/app/service/BlockchainService.ts b/app/service/BlockchainService.ts
index 4a4f4726475e13b05e408982585925b1f3610462..c9d5a7219f153030a6d28494ade4ced5882604b5 100644
--- a/app/service/BlockchainService.ts
+++ b/app/service/BlockchainService.ts
@@ -162,6 +162,10 @@ export class BlockchainService extends FIFOService {
     return this.mainContext.checkBlock(dto, withPoWAndSignature)
   }
 
+  /**
+   * Return the potential HEADs we could fork to (necessarily above us, since we don't fork on older branches).
+   * @returns {Promise<any>}
+   */
   async branches() {
     const current = await this.current()
     if (!current) {
@@ -193,7 +197,7 @@ export class BlockchainService extends FIFOService {
           throw CommonConstants.ERRORS.OUT_OF_FORK_WINDOW
         }
       }
-      const absolute = await this.dal.getAbsoluteBlockByNumberAndHash(obj.number, obj.hash)
+      const absolute = await this.dal.getAbsoluteBlockByNumberAndHash(parseInt(obj.number), obj.hash)
       if (!absolute) {
         // Save the block in the sandbox
         await this.mainContext.addSideBlock(dto);
diff --git a/bin/duniter b/bin/duniter
index 60946d8e66b60339c941fc107373764248fe9a38..806c1602e0fae6c5ce75d29f96d3fb60ed520ca9 100755
--- a/bin/duniter
+++ b/bin/duniter
@@ -13,7 +13,7 @@ process.on('uncaughtException', (err) => {
   }
 });
 
-return co(function*() {
+return co(function* () {
 
   try {
     const duniter = require('../index');
diff --git a/server.ts b/server.ts
index cd32a0172c62a6110e932f65720302ffcb6c7865..04137cba28e5a1720b0dc91a5ff7282cf6dcb681 100644
--- a/server.ts
+++ b/server.ts
@@ -337,6 +337,8 @@ export class Server extends stream.Duplex implements HookableServer {
         await this.revert();
       }
     }
+    // Database trimming
+    await this.dal.loki.flushAndTrimData()
     // Eventual block resolution
     await this.BlockchainService.blockResolution()
     // Eventual fork resolution
diff --git a/test/dal/loki.ts b/test/dal/loki.ts
index 58a857de8e0dbd7285dda4dcbbbd1761145b124a..ebe6b3ccca71ce2cbacb6431ae92f03bbe441386 100644
--- a/test/dal/loki.ts
+++ b/test/dal/loki.ts
@@ -31,10 +31,10 @@ describe("Loki data layer", () => {
   })
 
   it('should be able to commit data', async () => {
-    const coll = driver.getLokiInstance().addCollection('block')
+    const coll = driver.getLokiInstance().addCollection('block', { disableChangesApi: false })
     coll.insert({ a: 1 })
     coll.insert({ b: 2 })
-    await driver.commitData()
+    await driver.flushAndTrimData()
   })
 
   it('should be able restart the DB and read the data', async () => {
@@ -45,6 +45,42 @@ describe("Loki data layer", () => {
     assert.equal(coll.find().length, 2)
   })
 
+  it('should be able to add few changes data', async () => {
+    const driver2 = new LokiJsDriver(dbPath)
+    await driver2.loadDatabase()
+    const coll = driver2.getLokiInstance().getCollection('block')
+    coll.insert({ c: 3 })
+    coll.chain().find({ c: 3 }).update((o:any) => o.c = 4)
+    coll.chain().find({ a: 1 }).remove()
+    const changesCount1 = await driver2.commitData()
+    assert.equal(changesCount1, 3)
+    const changesCount2 = await driver2.commitData()
+    assert.equal(changesCount2, 0)
+  })
+
+  it('should be able restart the DB and read the commited data', async () => {
+    const driver2 = new LokiJsDriver(dbPath)
+    await driver2.loadDatabase()
+    const coll = driver2.getLokiInstance().getCollection('block')
+    assert.equal(coll.find().length, 2)
+    assert.equal(coll.find({ a: 1 }).length, 0)
+    assert.equal(coll.find({ b: 2 }).length, 1)
+    assert.equal(coll.find({ c: 4 }).length, 1)
+  })
+
+  it('should be able to trim then restart the DB and read the commited data', async () => {
+    const driverTrim = new LokiJsDriver(dbPath)
+    await driverTrim.loadDatabase()
+    await driverTrim.flushAndTrimData()
+    const driver2 = new LokiJsDriver(dbPath)
+    await driver2.loadDatabase()
+    const coll = driver2.getLokiInstance().getCollection('block')
+    assert.equal(coll.find().length, 2)
+    assert.equal(coll.find({ a: 1 }).length, 0)
+    assert.equal(coll.find({ b: 2 }).length, 1)
+    assert.equal(coll.find({ c: 4 }).length, 1)
+  })
+
   it('should not see any data if commit file is absent', async () => {
     const rfs = RealFS()
     await rfs.fsUnlink(path.join(dbPath, 'commit.json'))
@@ -70,7 +106,7 @@ describe("Loki data layer", () => {
     const coll = driver4.getLokiInstance().addCollection('block')
     coll.insert({ a: 1 })
     coll.insert({ b: 2 })
-    await driver.commitData()
+    await driver.flushAndTrimData()
     const oldCommit:DBCommit = JSON.parse(await rfs.fsReadFile(path.join(dbPath, 'commit.json')))
     oldCommit.collections['block'] = 'wrong-file.json'
     const driver5 = new LokiJsDriver(dbPath)
diff --git a/test/dal/source_dal.js b/test/dal/source_dal.js
index 70ad9b194b9d0bbd8e552c843d303007457973c7..0e618485a1d20b97dd4bafdabc39fe837ba09b6d 100644
--- a/test/dal/source_dal.js
+++ b/test/dal/source_dal.js
@@ -45,7 +45,7 @@ describe("Source DAL", function(){
     source1.should.have.property('consumed').equal(true);
     const udSources = yield dal.sindexDAL.getUDSources('ABC');
     udSources.should.have.length(2);
-    udSources[0].should.have.property('consumed').equal(true);
-    udSources[1].should.have.property('consumed').equal(false);
+    udSources[0].should.have.property('consumed').equal(false);
+    udSources[1].should.have.property('consumed').equal(true);
   }));
 });
diff --git a/test/fast/cfs.js b/test/fast/cfs.js
index 8dbdfa9bb189b7e3abfc6ed841b5bb9966a5789b..518c4a11ac76c034dbdfd644ba7f7dfdff6df1e4 100644
--- a/test/fast/cfs.js
+++ b/test/fast/cfs.js
@@ -17,7 +17,7 @@ var assert = require('assert');
 var should = require('should');
 var co = require('co');
 var CFSCore = require('../../app/lib/dal/fileDALs/CFSCore').CFSCore;
-const mockFS = require('../../app/lib/system/directory').MockFS({
+const mockFS = require('../../app/lib/system/directory').MemFS({
   'B5_a': {
     "A.json": '{ "text": "Content of A from B5_a" }'
   },
diff --git a/test/fast/dal/basic-loki.ts b/test/fast/dal/basic-loki.ts
index 06b95f07ca39f9d7460e7376846294ad9298916c..d95f0b1a333d75bc473a46da97b1cd51176b9a2c 100644
--- a/test/fast/dal/basic-loki.ts
+++ b/test/fast/dal/basic-loki.ts
@@ -29,8 +29,10 @@ class TheIndex extends LokiIndex<TestEntity> {
 
 describe("Basic LokiJS database", () => {
 
-  before(() => {
+  before(async () => {
     lokiIndex = new TheIndex(new loki('index.db'), 'iindex', [])
+    await lokiIndex.triggerInit()
+    await lokiIndex.init()
   })
 
   it('should be able instanciate the index', async () => {
diff --git a/test/fast/dal/iindex-loki.ts b/test/fast/dal/iindex-loki.ts
index ea37b835b8bbd3605f2bbc1ea2dc9b7a98edc1c1..107f321933d5f660e23566d873e6ceaef267f9b3 100644
--- a/test/fast/dal/iindex-loki.ts
+++ b/test/fast/dal/iindex-loki.ts
@@ -12,7 +12,6 @@
 // GNU Affero General Public License for more details.
 
 import * as assert from "assert"
-import {LokiIndex} from "../../../app/lib/dal/indexDAL/loki/LokiIndex"
 import {LokiIIndex} from "../../../app/lib/dal/indexDAL/loki/LokiIIndex"
 
 const loki = require('lokijs')
@@ -21,8 +20,10 @@ let lokiIndex:LokiIIndex
 
 describe("IIndex LokiJS", () => {
 
-  before(() => {
+  before(async () => {
     lokiIndex = new LokiIIndex(new loki('index.db'))
+    await lokiIndex.triggerInit()
+    await lokiIndex.init()
   })
 
   it('should be able instanciate the index', async () => {
diff --git a/test/integration/branches2.ts b/test/integration/branches2.ts
index 2000dacc69cb11b3b7ea9404af3d27137a1379fb..5ca0a684cc602bf7a790571614fc59d52673881f 100644
--- a/test/integration/branches2.ts
+++ b/test/integration/branches2.ts
@@ -115,7 +115,7 @@ describe("SelfFork", function() {
     yield waitToHaveBlock(s2, 2)
     let s2p = yield s2.PeeringService.peer();
 
-    yield commitS2();
+    yield commitS2(); // <-- block#3 is a fork block, S2 is committing another one than S1 issued
     yield commitS2();
     yield commitS2();
     yield commitS2();
@@ -223,7 +223,7 @@ describe("SelfFork", function() {
     });
 
     it('should have 2 branch', async () => {
-      const branches:any[] = await s1.BlockchainService.branches()
+      const branches = await s1.BlockchainService.branches()
       branches.should.have.length(1)
     })
   });
diff --git a/test/integration/branches_revert2.js b/test/integration/branches_revert2.js
index 93295937b62184b760c658afbaf20e5a555e76af..cb268cc9113920acc5744c0ed7bf31fae002f1f5 100644
--- a/test/integration/branches_revert2.js
+++ b/test/integration/branches_revert2.js
@@ -196,9 +196,9 @@ describe("Revert two blocks", function() {
   describe("commit again (but send less, to check that the account is not cleaned this time)", () => {
 
     before(() => co(function*() {
-      yield s1.dal.txsDAL.sqlDeleteAll()
+      yield s1.dal.txsDAL.removeAll()
       yield cat.sendP(19, toc);
-      yield s1.dal.blockDAL.exec('DELETE FROM block WHERE fork AND number = 3')
+      yield s1.dal.blockDAL.removeBlock('DELETE FROM block WHERE fork AND number = 3')
       yield commit(s1)({ time: now + 1 });
     }))
 
diff --git a/test/integration/branches_revert_balance.js b/test/integration/branches_revert_balance.js
index b586b053ecb079cd2a5b71adec5227f766e5841e..b32d329a335c8b55393372cfa992b17cd9d89eba 100644
--- a/test/integration/branches_revert_balance.js
+++ b/test/integration/branches_revert_balance.js
@@ -75,7 +75,7 @@ describe("Revert balance", () => {
 
   it('cat should be able to RE-send 60 units to tac', () => co(function*() {
     const txsPending = yield s1.dal.txsDAL.getAllPending(1)
-    yield s1.dal.blockDAL.exec('DELETE FROM block WHERE fork AND number = 3')
+    yield s1.dal.blockDAL.removeForkBlock(3)
     txsPending.should.have.length(1)
     yield s1.commit({ time: now + 1 })
     yield s1.expect('/tx/sources/' + cat.pub, (res) => {
diff --git a/test/integration/register-fork-blocks.js b/test/integration/register-fork-blocks.js
index b31bbe2d75b2e8f39c60a44700e0af80d1002d9f..7e1c803ddf11bea83fb5301ca37e0edcd5364d52 100644
--- a/test/integration/register-fork-blocks.js
+++ b/test/integration/register-fork-blocks.js
@@ -196,8 +196,10 @@ describe("Fork blocks", function() {
     yield s2.writeBlock(b6a)
     yield s2.writeBlock(b7a)
     yield s2.writeBlock(b8a)
-    yield s2.waitToHaveBlock(8)
-    yield s2.waitForkResolution(8)
+    yield Promise.all([
+      s2.waitToHaveBlock(8),
+      s2.waitForkResolution(8)
+    ])
   }))
 
   it('should exist a same current block on each node', () => co(function*() {
diff --git a/test/integration/revocation-test.js b/test/integration/revocation-test.js
index 0dad545100d4a80187b62c296c5a17d21910b143..c0a62ae78262f77cabc2ad10da6520267125a316 100644
--- a/test/integration/revocation-test.js
+++ b/test/integration/revocation-test.js
@@ -198,7 +198,7 @@ describe("Revocation", function() {
   it('if we revert the commit, cat should not be revoked', () => co(function *() {
     yield s1.revert();
     yield s1.revert();
-    yield s1.dal.blockDAL.exec('DELETE FROM block WHERE fork AND number >= 2')
+    yield s1.dal.blockDAL.removeForkBlockAboveOrEqual(2)
     return expectAnswer(rp('http://127.0.0.1:9964/wot/lookup/cat', { json: true }), function(res) {
       res.should.have.property('results').length(1);
       res.results[0].should.have.property('uids').length(1);
diff --git a/test/integration/v1.1-dividend.js b/test/integration/v1.1-dividend.js
index c3579929c31e51201e11ac1776fa7004fef95877..475593909345ea6809697947e79c3a14b66ba977 100644
--- a/test/integration/v1.1-dividend.js
+++ b/test/integration/v1.1-dividend.js
@@ -95,14 +95,14 @@ describe("Protocol 1.1 Dividend", function() {
       res.sources[1].should.have.property('amount').equal(100);
       res.sources[2].should.have.property('amount').equal(101);
       res.sources[3].should.have.property('amount').equal(103);
-      res.sources[4].should.have.property('amount').equal(105);
-      res.sources[5].should.have.property('amount').equal(106);
+      res.sources[4].should.have.property('amount').equal(106);
+      res.sources[5].should.have.property('amount').equal(105);
       res.sources[0].should.have.property('type').equal('D');
       res.sources[1].should.have.property('type').equal('D');
       res.sources[2].should.have.property('type').equal('D');
       res.sources[3].should.have.property('type').equal('D');
-      res.sources[4].should.have.property('type').equal('T');
-      res.sources[5].should.have.property('type').equal('D');
+      res.sources[4].should.have.property('type').equal('D');
+      res.sources[5].should.have.property('type').equal('T');
     })
   }));