diff --git a/.eslintignore b/.eslintignore
index d25a2f246707fc5599e607933c70bf26171f3e2e..052737334621d89f18f1b6e66abf9c0fa60712a4 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -43,5 +43,7 @@ app/modules/bma/lib/entity/*.js
 app/modules/bma/lib/controllers/*.js
 app/modules/crawler/*.js
 app/modules/crawler/lib/*.js
+app/ProcessCpuProfiler.js
+app/lib/common/package.js
 test/*.js
 test/**/*.js
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 0fdf9f4d60fe86d44c161a39da1f7b8d1484dd74..9b656c46522606b19e13f3c4310831863ca3426b 100644
--- a/.gitignore
+++ b/.gitignore
@@ -56,6 +56,9 @@ test/integration/tools/TestUser.js*
 test/integration/tools/TestUser.d.ts
 test/integration/documents-currency.js*
 test/integration/documents-currency.d.ts
+test/integration/forwarding.js
+test/integration/branches_switch.js
+test/integration/branches2.js
 test/fast/modules/crawler/block_pulling.js*
 test/fast/modules/crawler/block_pulling.d.ts
 test/fast/fork*.js*
@@ -66,3 +69,6 @@ test/fast/modules/ws2p/*.js*
 test/fast/modules/ws2p/*.d.ts
 test/fast/modules/common/grammar.js*
 test/fast/modules/common/grammar.d.ts
+test/fast/prover/pow-1-cluster.d.ts
+test/fast/prover/pow-1-cluster.js
+test/fast/prover/pow-1-cluster.js.map
diff --git a/.travis.yml b/.travis.yml
index 32368217c48147b7d3adc318b5d47af12bdc8662..a3f3862e0e3586fec203e2daa1561f787c4051a5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@
 language: node_js
 
 node_js:
-  - 6.11.1
+  - 8.9.2
 env:
   - CXX=g++-4.8
 addons:
diff --git a/app/modules/prover/lib/PowWorker.ts b/app/modules/prover/lib/PowWorker.ts
new file mode 100644
index 0000000000000000000000000000000000000000..fd225941a7b8897a0c6f5046ec4dd25fea603ba7
--- /dev/null
+++ b/app/modules/prover/lib/PowWorker.ts
@@ -0,0 +1,95 @@
+import {Querable} from "./permanentProver"
+
+const querablep = require('querablep')
+
+/*********
+ *
+ * PoW worker
+ * ----------
+ *
+ * Its model is super simple: we ask it to find a proof, and we can wait for it.
+ * Eventually, we can tell it to cancel the proof, which makes it answer `null` as the proof value.
+ *
+ * The worker also provides two properties:
+ *
+ * - `worker.online`: a promise which is resolved when the worker gets « online » for the first time
+ * - `worker.exit`: a promise which is resolved when the worker exits (which occurs when the worker is being closed or killed)
+ *
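+ * An illustrative usage sketch (the `uuid`/`value` variables and the three handlers are
+ * placeholders, not part of this module):
+ *
+ *   const worker = new PowWorker(cluster.fork(), onPowMessage, onOnline, onExit)
+ *   await worker.online
+ *   const res = await worker.askProof({ uuid, command: 'newPoW', value })
+ *   worker.sendCancel() // a pending proof then answers with a `null` value
+ *   worker.kill()
+ *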
+ ********/
+
+export class PowWorker {
+
+  private onlinePromise:Promise<void>
+  private onlineResolver:()=>void
+
+  private exitPromise:Promise<void>
+  private exitResolver:()=>void
+
+  private proofPromise:Querable<{ message: { answer:any }}|null>
+  private proofResolver:(proof:{ message: { answer:any }}|null)=>void
+
+  private messageHandler:((worker:any, msg:any)=>void)
+
+  constructor(
+    private nodejsWorker:any,
+    private onPowMessage:(message:any)=>void,
+    private onlineHandler:()=>void,
+    private exitHandler:(code:any, signal:any)=>void) {
+
+    // Handle "online" promise
+    this.onlinePromise = new Promise(res => this.onlineResolver = res)
+    nodejsWorker.on('online', () => {
+      this.onlineHandler()
+      this.onlineResolver()
+    })
+
+    // Handle "exit" promise
+    this.exitPromise = new Promise(res => this.exitResolver = res)
+    nodejsWorker.on('exit', (code:any, signal:any) => {
+      this.exitHandler(code, signal)
+      this.exitResolver()
+    })
+
+    nodejsWorker.on('message', (message:any) => {
+      if (message) {
+        this.onPowMessage(message)
+      }
+      if (this.proofPromise && message.uuid && !this.proofPromise.isResolved() && this.proofResolver) {
+        const result:{ message: { answer:any }}|null = message ? { message } : null
+        this.proofResolver(result)
+      }
+    })
+  }
+
+  get online() {
+    return this.onlinePromise
+  }
+
+  get exited() {
+    return this.exitPromise
+  }
+
+  get pid() {
+    return this.nodejsWorker.process.pid
+  }
+
+  askProof(commandMessage:{ uuid:string, command:string, value:any }) {
+    this.proofPromise = querablep(new Promise<{ message: { answer:any }}|null>(res => this.proofResolver = res))
+    this.nodejsWorker.send(commandMessage)
+    return this.proofPromise
+  }
+
+  sendConf(confMessage:{ command:string, value:any }) {
+    this.nodejsWorker.send(confMessage)
+  }
+
+  sendCancel() {
+    this.nodejsWorker.send({
+      command: 'cancel'
+    })
+  }
+
+  kill() {
+    this.nodejsWorker.kill()
+  }
+}
\ No newline at end of file
diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts
index a941bbaeb7fd0325e07f772a48b0eed2f2df1ae8..c41dfb24283028cae37c2d7cd09d4fad815a6fdc 100644
--- a/app/modules/prover/lib/blockGenerator.ts
+++ b/app/modules/prover/lib/blockGenerator.ts
@@ -279,7 +279,9 @@ export class BlockGenerator {
         const currentMembership = await this.dal.mindexDAL.getReducedMS(ms.issuer);
         const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
         if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) {
-          preJoinData[join.identity.pubkey] = join;
+          if (!preJoinData[join.identity.pubkey] || preJoinData[join.identity.pubkey].certs.length < join.certs.length) {
+            preJoinData[join.identity.pubkey] = join;
+          }
         }
       } catch (err) {
         if (err && !err.uerr) {
diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts
index 9cd215d98d478a456857b52f9f5c5388e1afa153..32d5699ecc014e8ec690c6b5b98722d43b864a6a 100644
--- a/app/modules/prover/lib/blockProver.ts
+++ b/app/modules/prover/lib/blockProver.ts
@@ -44,6 +44,9 @@ export class WorkerFarm {
     })
   }
 
+  get nbWorkers() {
+    return this.theEngine.getNbWorkers()
+  }
 
   changeCPU(cpu:any) {
     return this.theEngine.setConf({ cpu })
@@ -70,7 +73,7 @@ export class WorkerFarm {
   }
 
   shutDownEngine() {
-    this.theEngine.shutDown()
+    return this.theEngine.shutDown()
   }
 
   /**
@@ -175,7 +178,6 @@ export class BlockProver {
       const start = Date.now();
       let result = await powFarm.askNewProof({
         newPoW: {
-          turnDuration: os.arch().match(/arm/) ? CommonConstants.POW_TURN_DURATION_ARM : CommonConstants.POW_TURN_DURATION_PC,
           conf: {
             cpu: this.conf.cpu,
             prefix: this.conf.prefix,
@@ -194,10 +196,10 @@ export class BlockProver {
         throw 'Proof-of-work computation canceled because block received';
       } else {
         const proof = result.block;
-        const testsCount = result.testsCount;
+        const testsCount = result.testsCount * powFarm.nbWorkers
         const duration = (Date.now() - start);
-        const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2);
-        this.logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond);
+        const testsPerSecond = testsCount / (duration / 1000)
+        this.logger.info('Done: #%s, %s in %ss (~%s tests, ~%s tests/s, using %s cores, CPU %s%)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond.toFixed(2), powFarm.nbWorkers, Math.floor(100*this.conf.cpu))
         this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros);
         return BlockDTO.fromJSONObject(proof)
       }
diff --git a/app/modules/prover/lib/constants.ts b/app/modules/prover/lib/constants.ts
index 0a454d38fd9c85e58115c2f971bbb6cb1cea812e..bb0cfcf31821b76e4a86b066049431d345317613 100644
--- a/app/modules/prover/lib/constants.ts
+++ b/app/modules/prover/lib/constants.ts
@@ -13,6 +13,7 @@ export const ProverConstants = {
   NONCE_RANGE: 1000 * 1000 * 1000 * 100,
 
   POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64,
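+  // Number of pauses (yields to the event loop) per proving round: pausePeriod = testsPerRound / POW_NB_PAUSES_PER_ROUND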
+  POW_NB_PAUSES_PER_ROUND: 10,
 
   // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds.
   POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index cc83f46822764786bf91c1c02156c597966cc299..6ab1ca6458f52047e3e2dfa2548b1fd73f316ed9 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -1,4 +1,3 @@
-import {ProverConstants} from "./constants"
 import {Master as PowCluster} from "./powCluster"
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 
@@ -25,21 +24,16 @@ export class PowEngine {
     this.id = this.cluster.clusterId
   }
 
+  getNbWorkers() {
+    return this.cluster.nbWorkers
+  }
+
   forceInit() {
     return this.cluster.initCluster()
   }
 
   async prove(stuff:any) {
-
-    if (this.cluster.hasProofPending) {
-      await this.cluster.cancelWork()
-    }
-
-    const cpus = os.cpus()
-
-    if (os.arch().match(/arm/) || cpus[0].model.match(/Atom/)) {
-      stuff.newPoW.conf.nbCores /= 2; // Make sure that only once each physical core is used (for Hyperthreading).
-    }
+    await this.cluster.cancelWork()
     return await this.cluster.proveByWorkers(stuff)
   }
 
@@ -48,9 +42,6 @@ export class PowEngine {
   }
 
   setConf(value:any) {
-    if (os.arch().match(/arm/) && value.cpu !== undefined) {
-      value.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2
-    }
     return this.cluster.changeConf(value)
   }
 
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index 636a68b986b358b506e00b40714257b7180b82e3..d5c4fefedeceb4550d8a4972fddfc4b4a3a09e49 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -9,7 +9,7 @@ import {Server} from "../../../../server"
 
 const querablep = require('querablep');
 
-interface Querable<T> extends Promise<T> {
+export interface Querable<T> extends Promise<T> {
   isFulfilled(): boolean
   isResolved(): boolean
   isRejected(): boolean
@@ -213,6 +213,8 @@ export class PermanentProver {
     await this.prover.cancel();
     // If we were waiting, stop it and process the continuous generation
     this.blockchainChangedResolver && this.blockchainChangedResolver();
+    const farm = await this.prover.getWorker()
+    await farm.shutDownEngine()
   }
 
   private checkTrialIsNotTooHigh(trial:number, current:DBBlock, selfPubkey:string) {
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 4d4820777cb9b279aebea2d09680d1be0b3aab1f..d0c64c4e422ff0d95111d2d2f491e97d38a06967 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -1,24 +1,36 @@
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 import {ProverConstants} from "./constants"
+import {createPowWorker} from "./proof"
+import {PowWorker} from "./PowWorker"
 
 const _ = require('underscore')
 const nuuid = require('node-uuid');
-const moment = require('moment');
 const cluster = require('cluster')
 const querablep = require('querablep')
-const logger = require('../../../lib/logger').NewLogger()
 
 let clusterId = 0
+cluster.setMaxListeners(3)
+
+export interface SlaveWorker {
+  worker:PowWorker,
+  index:number,
+  online:Promise<void>,
+  nonceBeginning:number
+}
 
 /**
  * Cluster controller, handles the messages between the main program and the PoW cluster.
  */
 export class Master {
 
+  nbCancels = 0 // number of cancel acknowledgements received from the workers
+
   clusterId:number
   currentPromise:any|null = null
-  slaves:any[] = []
-  slavesMap:any = {}
+  slaves:SlaveWorker[] = []
+  slavesMap:{
+    [k:number]: SlaveWorker|null
+  } = {}
   conf:any = {}
   logger:any
   onInfoCallback:any
@@ -36,38 +48,50 @@ export class Master {
     return this.slaves.length
   }
 
-  get hasProofPending() {
-    return !!this.currentPromise
-  }
-
   set onInfoMessage(callback:any) {
     this.onInfoCallback = callback
   }
 
-  onWorkerMessage(worker:any, message:any) {
+  onWorkerMessage(workerIndex:number, message:any) {
     // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`)
-    if (message.pow && message.pow.pow) {
+    if (message && message.pow) {
       this.onInfoCallback && this.onInfoCallback(message)
     }
-    if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) {
-      this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`)
-      this.currentPromise.extras.resolve(message.answer)
-      // Stop the slaves' current work
-      this.cancelWork()
+    if (this.currentPromise && message.uuid && !this.currentPromise.isResolved() && message.answer) {
+      this.logger.info(`ENGINE c#${this.clusterId}#${workerIndex} HAS FOUND A PROOF #${message.answer.pow.pow}`)
+    } else if (message.canceled) {
+      this.nbCancels++
     }
     // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message)
   }
 
+  /*****************
+   * CLUSTER METHODS
+   ****************/
+
   initCluster() {
     // Setup master
     cluster.setupMaster({
-      exec: __filename
+      exec: __filename,
+      execArgv: [] // Do not try to debug forks
     })
 
     this.slaves = Array.from({ length: this.nbCores }).map((value, index) => {
-      const worker = cluster.fork()
-      this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`)
-      this.slavesMap[worker.id] = {
+      const nodejsWorker = cluster.fork()
+      const worker = new PowWorker(nodejsWorker, message => {
+        this.onWorkerMessage(index, message)
+      }, () => {
+        this.logger.info(`[online] worker c#${this.clusterId}#w#${index}`)
+        worker.sendConf({
+          command: 'conf',
+          value: this.conf
+        })
+      }, (code:any, signal:any) => {
+        this.logger.info(`worker ${worker.pid} died with code ${code} and signal ${signal}`)
+      })
+
+      this.logger.info(`Creating worker c#${this.clusterId}#w#${nodejsWorker.id}`)
+      const slave = {
 
         // The Node.js worker
         worker,
@@ -76,43 +100,16 @@ export class Master {
         index,
 
         // Worker ready
-        online: (function onlinePromise() {
-          let resolve
-          const p = querablep(new Promise(res => resolve = res))
-          p.extras = { resolve }
-          return p
-        })(),
+        online: worker.online,
 
         // Each worker has his own chunk of possible nonces
         nonceBeginning: this.nbCores === 1 ? 0 : (index + 1) * ProverConstants.NONCE_RANGE
       }
-      return this.slavesMap[worker.id]
-    })
-
-    cluster.on('exit', (worker:any, code:any, signal:any) => {
-      this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`)
-    })
-
-    cluster.on('online', (worker:any) => {
-      // We just listen to the workers of this Master
-      if (this.slavesMap[worker.id]) {
-        this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`)
-        this.slavesMap[worker.id].online.extras.resolve()
-        worker.send({
-          command: 'conf',
-          value: this.conf
-        })
-      }
-    })
-
-    cluster.on('message', (worker:any, msg:any) => {
-      // Message for this cluster
-      if (this.slavesMap[worker.id]) {
-        this.onWorkerMessage(worker, msg)
-      }
+      this.slavesMap[nodejsWorker.id] = slave
+      return slave
     })
 
-    this.workersOnline = this.slaves.map((s:any) => s.online)
+    this.workersOnline = this.slaves.map((s) => s.online)
     return Promise.all(this.workersOnline)
   }
 
@@ -121,7 +118,7 @@ export class Master {
     this.conf.cpu = conf.cpu || this.conf.cpu
     this.conf.prefix = this.conf.prefix || conf.prefix
     this.slaves.forEach(s => {
-      s.worker.send({
+      s.worker.sendConf({
         command: 'conf',
         value: this.conf
       })
@@ -129,39 +126,28 @@ export class Master {
     return Promise.resolve(_.clone(conf))
   }
 
-  cancelWork() {
-    this.logger.info(`Cancelling the work on PoW cluster`)
+  private cancelWorkersWork() {
     this.slaves.forEach(s => {
-      s.worker.send({
-        command: 'cancel'
-      })
+      s.worker.sendCancel()
     })
+  }
 
-    // Eventually force the end of current promise
-    if (this.currentPromise && !this.currentPromise.isFulfilled()) {
-      this.currentPromise.extras.resolve(null)
-    }
-
+  async cancelWork() {
+    this.cancelWorkersWork()
+    const workEnded = this.currentPromise
     // Current promise is done
     this.currentPromise = null
-
-    return Promise.resolve()
-  }
-
-  newPromise(uuid:string) {
-    let resolve
-    const p = querablep(new Promise(res => resolve = res))
-    p.extras = { resolve, uuid }
-    return p
+    return await workEnded
   }
 
   async shutDownWorkers() {
     if (this.workersOnline) {
       await Promise.all(this.workersOnline)
-      await Promise.all(this.slaves.map(async (s:any) => {
+      await Promise.all(this.slaves.map(async (s) => {
         s.worker.kill()
       }))
     }
+    this.slaves = []
   }
 
   proveByWorkers(stuff:any) {
@@ -173,9 +159,7 @@ export class Master {
 
     // Register the new proof uuid
     const uuid = nuuid.v4()
-    this.currentPromise = this.newPromise(uuid)
-
-    return (async () => {
+    this.currentPromise = querablep((async () => {
       await Promise.all(this.workersOnline)
 
       if (!this.currentPromise) {
@@ -184,18 +168,18 @@ export class Master {
       }
 
       // Start the salves' job
-      this.slaves.forEach((s:any, index) => {
-        s.worker.send({
+      const asks = this.slaves.map(async (s, index) => {
+        const proof = await s.worker.askProof({
           uuid,
           command: 'newPoW',
           value: {
-            block: stuff.newPoW.block,
+            initialTestsPerRound: stuff.initialTestsPerRound,
+            maxDuration: stuff.maxDuration,
+            block: stuff.newPoW.block,
             nonceBeginning: s.nonceBeginning,
             zeros: stuff.newPoW.zeros,
             highMark: stuff.newPoW.highMark,
             pair: _.clone(stuff.newPoW.pair),
             forcedTime: stuff.newPoW.forcedTime,
-            turnDuration: stuff.newPoW.turnDuration,
             conf: {
               medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks,
               avgGenTime: stuff.newPoW.conf.avgGenTime,
@@ -204,10 +188,29 @@ export class Master {
             }
           }
         })
+        this.logger.info(`[done] worker c#${this.clusterId}#w#${index}`)
+        return {
+          workerID: index,
+          proof
+        }
       })
 
-      return await this.currentPromise
-    })()
+      // Find a proof
+      const result = await Promise.race(asks)
+      this.cancelWorkersWork()
+      // Wait for all workers to have stopped looking for a proof
+      await Promise.all(asks)
+
+      if (!result.proof || !result.proof.message.answer) {
+        this.logger.info('No engine found the proof. It was probably cancelled.')
+        return null
+      } else {
+        this.logger.info(`ENGINE c#${this.clusterId}#${result.workerID} HAS FOUND A PROOF #${result.proof.message.answer.pow.pow}`)
+        return result.proof.message.answer
+      }
+    })())
+
+    return this.currentPromise
   }
 
   static defaultLogger() {
@@ -229,9 +232,8 @@ if (cluster.isMaster) {
 } else {
 
   process.on("SIGTERM", function() {
-    logger.info(`SIGTERM received, closing worker ${process.pid}`);
     process.exit(0)
   });
 
-  require('./proof')
+  createPowWorker()
 }
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 9b15c0be5aaff08b078c5dd495dbb0432be983ea..c08fde11ed3dc6132a2773d73a9994c075b58a2d 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -6,296 +6,303 @@ import {ProverConstants} from "./constants"
 import {KeyGen} from "../../../lib/common-libs/crypto/keyring"
 import {dos2unix} from "../../../lib/common-libs/dos2unix"
 import {rawer} from "../../../lib/common-libs/index"
+import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler"
 
 const moment = require('moment');
 const querablep = require('querablep');
 
-const PAUSES_PER_TURN = 5;
+export function createPowWorker() {
 
-// This value can be changed
-let TURN_DURATION_IN_MILLISEC = 100;
-
-let computing = querablep(Promise.resolve(null));
-let askedStop = false;
+  let computing = querablep(Promise.resolve(null));
+  let askedStop = false;
 
 // By default, we do not prefix the PoW by any number
-let prefix = 0;
+  let prefix = 0;
 
-let signatureFunc:any, lastSecret:any, currentCPU = 1;
+  let signatureFunc:any, lastSecret:any, currentCPU = 1;
 
-process.on('uncaughtException', (err:any) => {
-  console.error(err.stack || Error(err))
-  if (process.send) {
-    process.send({error: err});
-  } else {
-    throw Error('process.send() is not defined')
-  }
-});
+  process.on('uncaughtException', (err:any) => {
+    console.error(err.stack || Error(err))
+    if (process.send) {
+      process.send({error: err});
+    } else {
+      throw Error('process.send() is not defined')
+    }
+  });
 
-process.on('message', async (message) => {
+  process.on('unhandledRejection', () => {
+    process.exit()
+  })
 
-  switch (message.command) {
+  process.on('message', async (message) => {
 
-    case 'newPoW':
-      (async () => {
-        askedStop = true
+    switch (message.command) {
 
-        // Very important: do not await if the computation is already done, to keep the lock on JS engine
-        if (!computing.isFulfilled()) {
-          await computing;
-        }
+      case 'newPoW':
+        (async () => {
+          askedStop = true
 
-        const res = await beginNewProofOfWork(message.value);
-        answer(message, res);
-      })()
-      break;
+          // Very important: do not await if the computation is already done, to keep the lock on JS engine
+          if (!computing.isFulfilled()) {
+            await computing;
+          }
 
-    case 'cancel':
-      if (!computing.isFulfilled()) {
-        askedStop = true;
-      }
-      break;
+          const res = await beginNewProofOfWork(message.value);
+          answer(message, res);
+        })()
+        break;
 
-    case 'conf':
-      if (message.value.cpu !== undefined) {
-        currentCPU = message.value.cpu
-      }
-      if (message.value.prefix !== undefined) {
-        prefix = message.value.prefix
-      }
-      answer(message, { currentCPU, prefix });
-      break;
-  }
+      case 'cancel':
+        if (!computing.isFulfilled()) {
+          askedStop = true;
+        }
+        break;
 
-})
-
-function beginNewProofOfWork(stuff:any) {
-  askedStop = false;
-  computing = querablep((async () => {
-
-    /*****************
-     * PREPARE POW STUFF
-     ****************/
-
-    let nonce = 0;
-    const conf = stuff.conf;
-    const block = stuff.block;
-    const nonceBeginning = stuff.nonceBeginning;
-    const nbZeros = stuff.zeros;
-    const pair = stuff.pair;
-    const forcedTime = stuff.forcedTime;
-    currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
-    prefix = parseInt(conf.prefix || prefix)
-    if (prefix && prefix < ProverConstants.NONCE_RANGE) {
-      prefix *= 100 * ProverConstants.NONCE_RANGE
-    }
-    const highMark = stuff.highMark;
-    const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC
-    let sigFunc = null;
-    if (signatureFunc && lastSecret === pair.sec) {
-      sigFunc = signatureFunc;
-    }
-    else {
-      lastSecret = pair.sec;
-      sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      case 'conf':
+        if (message.value.cpu !== undefined) {
+          currentCPU = message.value.cpu
+        }
+        if (message.value.prefix !== undefined) {
+          prefix = message.value.prefix
+        }
+        answer(message, { currentCPU, prefix });
+        break;
     }
-    signatureFunc = sigFunc;
-    let pow = "", sig = "", raw = "";
 
-    /*****************
-     * GO!
-     ****************/
+  })
 
-    let testsCount = 0;
-    let found = false;
-    let score = 0;
-    let turn = 0;
+  function beginNewProofOfWork(stuff:any) {
+    askedStop = false;
+    computing = querablep((async () => {
+
+      /*****************
+       * PREPARE POW STUFF
+       ****************/
 
-    while (!found && !askedStop) {
+      let nonce = 0;
+      const maxDuration = stuff.maxDuration || 1000
+      const conf = stuff.conf;
+      const block = stuff.block;
+      const nonceBeginning = stuff.nonceBeginning;
+      const nbZeros = stuff.zeros;
+      const pair = stuff.pair;
+      const forcedTime = stuff.forcedTime;
+      currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
+      prefix = parseInt(conf.prefix || prefix)
+      if (prefix && prefix < ProverConstants.NONCE_RANGE) {
+        prefix *= 100 * ProverConstants.NONCE_RANGE
+      }
+      const highMark = stuff.highMark;
+      let sigFunc = null;
+      if (signatureFunc && lastSecret === pair.sec) {
+        sigFunc = signatureFunc;
+      }
+      else {
+        lastSecret = pair.sec;
+        sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      }
+      signatureFunc = sigFunc;
+      let pow = "", sig = "", raw = "";
 
       /*****************
-       * A TURN
+       * GO!
        ****************/
 
-      await Promise.race([
+      let pausePeriod = 1;
+      let testsCount = 0;
+      let found = false;
+      let turn = 0;
+      const profiler = new ProcessCpuProfiler(100)
+      let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
+      // We limit the number of tests according to CPU usage
+      let testsPerRound = stuff.initialTestsPerRound || 1
+      let turnDuration = 20 // We initially go quickly to the max speed = 50 reevaluations per second (1000 / 20)
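+      // After each turn, testsPerRound is nudged by ±1% (and pausePeriod recomputed) so that
+      // the measured CPU usage converges towards `currentCPU` (see the end-of-turn adjustment below)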
 
-        // I. Stop the turn if it exceeds `turnDuration` ms
-        countDown(turnDuration),
+      while (!found && !askedStop) {
 
-        // II. Process the turn's PoW
-        (async () => {
+        /*****************
+         * A TURN (adaptive duration: starts at 20ms and grows up to `maxDuration`)
+         ****************/
 
-          /*****************
-           * A TURN OF POW ~= 100ms by default
-           * --------------------
-           *
-           * The concept of "turn" is required to limit the CPU usage.
-           * We need a time reference to have the speed = nb tests / period of time.
-           * Here we have:
-           *
-           *   - speed = testsCount / turn
-           *
-           * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the
-           * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set.
-           ****************/
+        await Promise.race([
 
-            // Prove
-          let i = 0;
-          const thisTurn = turn;
-          const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn
-          // We limit the number of tests according to CPU usage
-          const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000
-
-          // Time is updated regularly during the proof
-          block.time = getBlockTime(block, conf, forcedTime)
-          if (block.number === 0) {
-            block.medianTime = block.time
-          }
-          block.inner_hash = getBlockInnerHash(block);
+          // I. Stop the turn if it exceeds `turnDuration` ms
+          countDown(turnDuration),
 
-          /*****************
-           * Iterations of a turn
-           ****************/
+          // II. Process the turn's PoW
+          (async () => {
 
-          while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
+            // Prove
+            let i = 0;
+            const thisTurn = turn;
 
-            // Nonce change (what makes the PoW change if the time field remains the same)
-            nonce++
+            // Time is updated regularly during the proof
+            block.time = getBlockTime(block, conf, forcedTime)
+            if (block.number === 0) {
+              block.medianTime = block.time
+            }
+            block.inner_hash = getBlockInnerHash(block);
 
             /*****************
-             * A PROOF OF WORK
+             * Iterations of a turn
              ****************/
 
-            // The final nonce is composed of 3 parts
-            block.nonce = prefix + nonceBeginning + nonce
-            raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
-            sig = dos2unix(sigFunc(raw))
-            pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
+            while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
 
-            /*****************
-             * Check the POW result
-             ****************/
+              // Nonce change (what makes the PoW change if the time field remains the same)
+              nonce++
 
-            let j = 0, charOK = true;
-            while (j < nbZeros && charOK) {
-              charOK = pow[j] === '0';
-              j++;
-            }
-            if (charOK) {
-              found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
-            }
-            if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
-              pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
-            }
+              /*****************
+               * A PROOF OF WORK
+               ****************/
 
-            /*****************
-             * - Update local vars
-             * - Allow to receive stop signal
-             ****************/
+              // The final nonce is composed of 3 parts
+              block.nonce = prefix + nonceBeginning + nonce
+              raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
+              sig = dos2unix(sigFunc(raw))
+              pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
 
-            if (!found && !askedStop) {
-              i++;
-              testsCount++;
-              if (i % pausePeriod === 0) {
-                await countDown(0); // Very low pause, just the time to process eventual end of the turn
-              }
-            }
-          }
+              /*****************
+               * Check the POW result
+               ****************/
 
-          /*****************
-           * Check the POW result
-           ****************/
-          if (!found) {
+              let j = 0, charOK = true;
+              while (j < nbZeros && charOK) {
+                charOK = pow[j] === '0';
+                j++;
+              }
+              if (charOK) {
+                found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
+              }
+              if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
+                pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
+              }
 
-            // CPU speed recording
-            if (turn > 0 && !score) {
-              score = testsCount;
+              /*****************
+               * - Update local vars
+               * - Allow to receive stop signal
+               ****************/
+
+              if (!found && !askedStop) {
+                i++;
+                testsCount++;
+                if (i % pausePeriod === 0) {
+                  await countDown(1); // Very low pause, just the time to process eventual end of the turn
+                }
+              }
             }
 
             /*****************
-             * UNLOAD CPU CHARGE
+             * Check the POW result
              ****************/
-            // We wait for a maximum time of `turnDuration`.
-            // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
-            // just does nothing: this gives of a bit of breath to the CPU. Tthe amount of "breath" depends on the "cpu"
-            // parameter.
-            await countDown(turnDuration);
-          }
-        })()
-      ]);
+            if (!found) {
+
+              // CPU speed recording
+              if (turn > 0) {
+                cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
+                if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
+                  let powVariationFactor
+                  // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses
+                  if (currentCPU > cpuUsage) {
+                    powVariationFactor = 1.01
+                    testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor))
+                  } else {
+                    powVariationFactor = 0.99
+                    testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
+                  }
+                  pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND)
+                }
+              }
 
-      // Next turn
-      turn++
-    }
+              /*****************
+               * UNLOAD CPU CHARGE FOR THIS TURN
+               ****************/
+              // We wait for a maximum time of `turnDuration`.
+              // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
+              // just does nothing: this gives a bit of breath to the CPU. The amount of "breath" depends on the "cpu"
+              // parameter.
+              await countDown(turnDuration);
+            }
+          })()
+        ]);
 
-    /*****************
-     * POW IS OVER
-     * -----------
-     *
-     * We either have found a valid POW or a stop event has been detected.
-     ****************/
+        // Next turn
+        turn++
 
-    if (askedStop) {
+        turnDuration += 1
+        turnDuration = Math.min(turnDuration, maxDuration) // Capped at maxDuration (1 second by default)
+      }
 
-      // PoW stopped
-      askedStop = false;
-      return null
+      /*****************
+       * POW IS OVER
+       * -----------
+       *
+       * We either have found a valid POW or a stop event has been detected.
+       ****************/
 
-    } else {
+      if (askedStop) {
+
+        // PoW stopped
+        askedStop = false;
+        pSend({ canceled: true })
+        return null
 
-      // PoW success
-      block.hash = pow
-      block.signature = sig
-      return {
-        pow: {
-          block: block,
-          testsCount: testsCount,
-          pow: pow
+      } else {
+
+        // PoW success
+        block.hash = pow
+        block.signature = sig
+        return {
+          pow: {
+            block: block,
+            testsCount: testsCount,
+            pow: pow
+          }
         }
       }
-    }
-  })())
+    })())
 
-  return computing;
-}
+    return computing;
+  }
 
-function countDown(duration:number) {
-  return new Promise((resolve) => setTimeout(resolve, duration));
-}
+  function countDown(duration:number) {
+    return new Promise((resolve) => setTimeout(resolve, duration));
+  }
 
-function getBlockInnerHash(block:DBBlock) {
-  const raw = rawer.getBlockInnerPart(block);
-  return hashf(raw)
-}
+  function getBlockInnerHash(block:DBBlock) {
+    const raw = rawer.getBlockInnerPart(block);
+    return hashf(raw)
+  }
 
-function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
-  if (forcedTime) {
-    return forcedTime;
+  function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
+    if (forcedTime) {
+      return forcedTime;
+    }
+    const now = moment.utc().unix();
+    const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
+    const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
+    const medianTime = block.medianTime;
+    const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
+    return Math.max(medianTime, upperBound);
   }
-  const now = moment.utc().unix();
-  const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
-  const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
-  const medianTime = block.medianTime;
-  const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
-  return Math.max(medianTime, upperBound);
-}
 
-function answer(message:any, theAnswer:any) {
-  return pSend({
-    uuid: message.uuid,
-    answer: theAnswer
-  })
-}
+  function answer(message:any, theAnswer:any) {
+    return pSend({
+      uuid: message.uuid,
+      answer: theAnswer
+    })
+  }
 
-function pSend(stuff:any) {
-  return new Promise(function (resolve, reject) {
-    if (process.send) {
-      process.send(stuff, function (error:any) {
-        !error && resolve();
-        error && reject();
-      })
-    } else {
-      reject('process.send() is not defined')
-    }
-  });
+  function pSend(stuff:any) {
+    return new Promise(function (resolve, reject) {
+      if (process.send) {
+        process.send(stuff, function (error:any) {
+          !error && resolve();
+          error && reject();
+        })
+      } else {
+        reject('process.send() is not defined')
+      }
+    });
+  }
 }
diff --git a/app/modules/router.ts b/app/modules/router.ts
index d6484f05ba8d3ef1a74e8440c44c25aae80af128..9c7f2b14d4eded2c706118fdb1a6d3ee0c0b621a 100644
--- a/app/modules/router.ts
+++ b/app/modules/router.ts
@@ -5,9 +5,7 @@ import * as stream from "stream"
 import {Multicaster} from "../lib/streams/multicaster"
 import {RouterStream} from "../lib/streams/router"
 
-const constants = require('../lib/constants');
-
-module.exports = {
+export const RouterDependency = {
   duniter: {
     service: {
       output: (server:Server, conf:ConfDTO, logger:any) => new Router(server)
@@ -26,7 +24,7 @@ module.exports = {
  * Service which triggers the server's peering generation (actualization of the Peer document).
  * @constructor
  */
-class Router extends stream.Transform {
+export class Router extends stream.Transform {
 
   theRouter:any
   theMulticaster:Multicaster = new Multicaster()
diff --git a/app/modules/ws2p/lib/WS2PCluster.ts b/app/modules/ws2p/lib/WS2PCluster.ts
index bf5ac43a5574cb698c86c760e0b9c6d67c09ce8b..3502138a3b4655bc2218819b86e4809922d87f81 100644
--- a/app/modules/ws2p/lib/WS2PCluster.ts
+++ b/app/modules/ws2p/lib/WS2PCluster.ts
@@ -1,4 +1,4 @@
-import { DEFAULT_ENCODING } from 'crypto';
+import {DEFAULT_ENCODING} from 'crypto';
 import {WS2PServer} from "./WS2PServer"
 import {Server} from "../../../../server"
 import {WS2PClient} from "./WS2PClient"
@@ -8,7 +8,7 @@ import {CrawlerConstants} from "../../crawler/lib/constants"
 import {WS2PBlockPuller} from "./WS2PBlockPuller"
 import {WS2PDocpoolPuller} from "./WS2PDocpoolPuller"
 import {WS2PConstants} from "./constants"
-import { PeerDTO, WS2PEndpoint } from '../../../lib/dto/PeerDTO';
+import {PeerDTO, WS2PEndpoint} from '../../../lib/dto/PeerDTO';
 import {GlobalFifoPromise} from "../../../service/GlobalFifoPromise"
 import {OtherConstants} from "../../../lib/other_constants"
 import {Key, verify} from "../../../lib/common-libs/crypto/keyring"
@@ -681,11 +681,11 @@ export class WS2PCluster {
       let uuids = Object.keys(this.ws2pClients)
       uuids = _.shuffle(uuids)
       let lowPriorityConnectionUUID:string = uuids[0]
-      let minPriorityLevel = this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
+      let minPriorityLevel = await this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
       for (const uuid of uuids) {
         const client = this.ws2pClients[uuid]
           if (uuid !== lowPriorityConnectionUUID) {
-            let uuidPriorityLevel = this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
+            let uuidPriorityLevel = await this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
             if (uuidPriorityLevel < minPriorityLevel) {
               lowPriorityConnectionUUID = uuid
               minPriorityLevel = uuidPriorityLevel
diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts
index 002d0cf82c52c82c136b950e92aa6b808bd0c010..ce08493018528ec69b57f2e2c84efc1d1a838685 100644
--- a/app/modules/ws2p/lib/WS2PServer.ts
+++ b/app/modules/ws2p/lib/WS2PServer.ts
@@ -7,7 +7,6 @@ import {WS2PConstants} from "./constants"
 import {WS2PMessageHandler} from "./impl/WS2PMessageHandler"
 import {WS2PStreamer} from "./WS2PStreamer"
 import {WS2PSingleWriteStream} from "./WS2PSingleWriteStream"
-import { WS2PCluster } from './WS2PCluster';
 
 const WebSocketServer = require('ws').Server
 
@@ -159,10 +158,10 @@ export class WS2PServer extends events.EventEmitter {
 
   async removeLowPriorityConnection(privilegedKeys:string[]) {
     let lowPriorityConnection:WS2PConnection = this.connections[0]
-    let minPriorityLevel = this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
+    let minPriorityLevel = await this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
     for (const c of this.connections) {
       if (c !== lowPriorityConnection) {
-        let cPriorityLevel = this.keyPriorityLevel(c.pubkey, privilegedKeys)
+        let cPriorityLevel = await this.keyPriorityLevel(c.pubkey, privilegedKeys)
         if (cPriorityLevel < minPriorityLevel) {
           lowPriorityConnection = c
           minPriorityLevel = cPriorityLevel
diff --git a/appveyor.yml b/appveyor.yml
index 8fbf7baf69c2f8d27ec6c67b7cdfeff7b293b55e..1a271d5e4c01d8f303faad4f390484c16c87b2eb 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,6 @@
 environment:
   matrix:
-    - nodejs_version: "6.9.2"
+    - nodejs_version: "8.9.2"
       ADDON_VERSION: "48"
 
 platform:
diff --git a/index.ts b/index.ts
index ecf7be9d67231df6e4e63ef96c903457272b675d..e7e6a32e0194faf11b4035e741684784b0921f66 100644
--- a/index.ts
+++ b/index.ts
@@ -9,6 +9,7 @@ import {BmaDependency} from "./app/modules/bma/index"
 import {WS2PDependency} from "./app/modules/ws2p/index"
 import {ProverConstants} from "./app/modules/prover/lib/constants"
 import { ProxiesConf } from './app/lib/proxy';
+import {RouterDependency} from "./app/modules/router"
 
 const path = require('path');
 const _ = require('underscore');
@@ -25,9 +26,16 @@ const reapplyDependency   = require('./app/modules/reapply');
 const revertDependency    = require('./app/modules/revert');
 const daemonDependency    = require('./app/modules/daemon');
 const pSignalDependency   = require('./app/modules/peersignal');
-const routerDependency    = require('./app/modules/router');
 const pluginDependency    = require('./app/modules/plugin');
 
+let sigintListening = false
+
+// Trace errors
+process.on('unhandledRejection', (reason) => {
+  logger.error('Unhandled rejection: ' + reason);
+  logger.error(reason);
+});
+
 class Stacks {
 
   static todoOnRunDone:() => any = () => process.exit()
@@ -102,7 +110,7 @@ const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([
   { name: 'duniter-revert',    required: revertDependency },
   { name: 'duniter-daemon',    required: daemonDependency },
   { name: 'duniter-psignal',   required: pSignalDependency },
-  { name: 'duniter-router',    required: routerDependency },
+  { name: 'duniter-router',    required: RouterDependency },
   { name: 'duniter-plugin',    required: pluginDependency },
   { name: 'duniter-prover',    required: ProverDependency },
   { name: 'duniter-keypair',   required: KeypairDependency },
@@ -157,6 +165,8 @@ export interface TransformableDuniterService extends DuniterService, stream.Tran
 
 class Stack {
 
+  private injectedServices = false
+
   private cli:any
   private configLoadingCallbacks:any[]
   private configBeforeSaveCallbacks:any[]
@@ -279,10 +289,12 @@ class Stack {
     }
 
     const server = new Server(home, program.memory === true, commandLineConf(program));
+    let piped = false
 
     // If ever the process gets interrupted
     let isSaving = false;
-    process.on('SIGINT', async () => {
+    if (!sigintListening) {
+      process.on('SIGINT', async () => {
         if (!isSaving) {
           isSaving = true;
           // Save DB
@@ -294,7 +306,9 @@ class Stack {
             process.exit(3);
           }
         }
-    });
+      })
+      sigintListening = true
+    }
 
     // Config or Data reset hooks
     server.resetDataHook = async () => {
@@ -366,26 +380,30 @@ class Stack {
        * Service injection
        * -----------------
        */
-      for (const def of this.definitions) {
-        if (def.service) {
-          // To feed data coming from some I/O (network, disk, other module, ...)
-          if (def.service.input) {
-            this.streams.input.push(def.service.input(server, conf, logger));
-          }
-          // To handle data this has been submitted by INPUT stream
-          if (def.service.process) {
-            this.streams.process.push(def.service.process(server, conf, logger));
-          }
-          // To handle data this has been validated by PROCESS stream
-          if (def.service.output) {
-            this.streams.output.push(def.service.output(server, conf, logger));
-          }
-          // Special service which does not stream anything particular (ex.: piloting the `server` object)
-          if (def.service.neutral) {
-            this.streams.neutral.push(def.service.neutral(server, conf, logger));
+      if (!this.injectedServices) {
+        this.injectedServices = true
+        for (const def of this.definitions) {
+          if (def.service) {
+            // To feed data coming from some I/O (network, disk, other module, ...)
+            if (def.service.input) {
+              this.streams.input.push(def.service.input(server, conf, logger));
+            }
+            // To handle data that has been submitted by the INPUT stream
+            if (def.service.process) {
+              this.streams.process.push(def.service.process(server, conf, logger));
+            }
+            // To handle data that has been validated by the PROCESS stream
+            if (def.service.output) {
+              this.streams.output.push(def.service.output(server, conf, logger));
+            }
+            // Special service which does not stream anything particular (ex.: piloting the `server` object)
+            if (def.service.neutral) {
+              this.streams.neutral.push(def.service.neutral(server, conf, logger));
+            }
           }
         }
       }
+      piped = true
       // All inputs write to global INPUT stream
       for (const module of this.streams.input) module.pipe(this.INPUT);
       // All processes read from global INPUT stream
@@ -408,13 +426,6 @@ class Stack {
           const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral);
           // Any streaming module must implement a `stopService` method
           await Promise.all(modules.map((module:DuniterService) => module.stopService()))
-          // // Stop reading inputs
-          // for (const module of streams.input) module.unpipe();
-          // Stop reading from global INPUT
-          // INPUT.unpipe();
-          // for (const module of streams.process) module.unpipe();
-          // // Stop reading from global PROCESS
-          // PROCESS.unpipe();
         },
 
         this);
@@ -422,17 +433,20 @@ class Stack {
     } catch (e) {
       server.disconnect();
       throw e;
+    } finally {
+      if (piped) {
+        // Unpipe everything, as the command is done
+        for (const module of this.streams.input) module.unpipe()
+        for (const module of this.streams.process) module.unpipe()
+        for (const module of this.streams.output) module.unpipe()
+        this.INPUT.unpipe()
+        this.PROCESS.unpipe()
+      }
     }
   }
 
   executeStack(argv:string[]) {
 
-    // Trace these errors
-    process.on('unhandledRejection', (reason) => {
-      logger.error('Unhandled rejection: ' + reason);
-      logger.error(reason);
-    });
-
     // Executes the command
     return this.cli.execute(argv);
   }
diff --git a/test/fast/prover/pow-1-cluster.js b/test/fast/prover/pow-1-cluster.ts
similarity index 63%
rename from test/fast/prover/pow-1-cluster.js
rename to test/fast/prover/pow-1-cluster.ts
index 96d58c12b9f36dd1391c91d3b649bb4f12f25bbc..a2e76947e030785b11c2396118d011fb32f433ee 100644
--- a/test/fast/prover/pow-1-cluster.js
+++ b/test/fast/prover/pow-1-cluster.ts
@@ -1,16 +1,19 @@
-"use strict";
+import {Master} from "../../../app/modules/prover/lib/powCluster"
 
 const co = require('co')
-const should = require('should')
-const PowCluster = require('../../../app/modules/prover/lib/powCluster').Master
+require('should')
 const logger = require('../../../app/lib/logger').NewLogger()
 
-let master
+let master:Master
 
 describe('PoW Cluster', () => {
 
   before(() => {
-    master = new PowCluster(1, logger)
+    master = new Master(1, logger)
+  })
+
+  after(() => {
+    return master.shutDownWorkers()
   })
 
   it('should have an empty cluster if no PoW was asked', () => {
@@ -73,4 +76,35 @@ describe('PoW Cluster', () => {
     delay.should.be.below(50)
   }))
 
+  it('should be able to stop all the cores on cancel', async () => {
+    master.proveByWorkers({
+      initialTestsPerRound: 100,
+      maxDuration: 1000,
+      newPoW: {
+        block: {
+          number: 0
+        },
+        zeros: 10,
+        highMark: 'F',
+        conf: {
+          medianTimeBlocks: 1,
+          avgGenTime: 100,
+          cpu: 0.8,
+          prefix: '8',
+          nbCores: 1
+        },
+        pair: {
+          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+        }
+      }
+    })
+    await new Promise(res => {
+      master.onInfoMessage = () => res()
+    })
+    await master.cancelWork()
+    await new Promise(res => setTimeout(res, 100))
+    master.nbCancels.should.equal(1)
+  })
+
 });
diff --git a/test/fast/prover/pow-2-engine.js b/test/fast/prover/pow-2-engine.js
index 8238438d02c1866d4bf7acc0d26625f2f3549a32..743744ba550dd7db189020a2949321393c844e52 100644
--- a/test/fast/prover/pow-2-engine.js
+++ b/test/fast/prover/pow-2-engine.js
@@ -10,6 +10,7 @@ describe('PoW Engine', () => {
   it('should be configurable', () => co(function*(){
     const e1 = new PowEngine({ nbCores: 1 }, logger);
     (yield e1.setConf({ cpu: 0.2, prefix: '34' })).should.deepEqual({ cpu: 0.2, prefix: '34' });
+    yield e1.shutDown()
   }));
 
   it('should be able to make a proof', () => co(function*(){
@@ -52,6 +53,7 @@ describe('PoW Engine', () => {
         pow: '009A52E6E2E4EA7DE950A2DA673114FA55B070EBE350D75FF0C62C6AAE9A37E5'
       }
     });
+    yield e1.shutDown()
   }));
 
   it('should be able to stop a proof', () => co(function*(){
@@ -85,5 +87,6 @@ describe('PoW Engine', () => {
     yield e1.cancel()
     // const proof = yield proofPromise;
     // should.not.exist(proof);
+    yield e1.shutDown()
   }));
 });
diff --git a/test/integration/branches.js b/test/integration/branches.js
deleted file mode 100644
index dadf77a17461865a6b6a4b9adff1026220653e81..0000000000000000000000000000000000000000
--- a/test/integration/branches.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-
-const _         = require('underscore');
-const co        = require('co');
-const should    = require('should');
-const duniter   = require('../../index');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const rp        = require('request-promise');
-const httpTest  = require('./tools/http');
-const shutDownEngine  = require('./tools/shutDownEngine');
-
-const expectAnswer   = httpTest.expectAnswer;
-
-const MEMORY_MODE = true;
-const commonConf = {
-  ipv4: '127.0.0.1',
-  currency: 'bb',
-  httpLogs: true,
-  forksize: 3,
-  sigQty: 1
-};
-
-let s1
-
-describe("Branches", () => co(function*() {
-
-  before(() => co(function*() {
-
-    s1 = duniter(
-      '/bb1',
-      MEMORY_MODE,
-      _.extend({
-        port: '7778',
-        pair: {
-          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-        }
-      }, commonConf));
-
-    const server = yield s1.initWithDAL();
-    const bmapi = yield bma(server);
-    yield bmapi.openConnections();
-  }));
-
-  after(() => {
-    return shutDownEngine(s1)
-  })
-
-  describe("Server 1 /blockchain", function() {
-
-    it('should have a 3 blocks fork window size', function() {
-      return expectAnswer(rp('http://127.0.0.1:7778/node/summary', { json: true }), function(res) {
-        res.should.have.property('duniter').property('software').equal('duniter');
-        res.should.have.property('duniter').property('version').equal('1.6.14');
-        res.should.have.property('duniter').property('forkWindowSize').equal(3);
-      });
-    });
-  });
-}));
diff --git a/test/integration/branches2.js b/test/integration/branches2.js
deleted file mode 100644
index 18d0555795268fae555bad1a150e1a087db3d873..0000000000000000000000000000000000000000
--- a/test/integration/branches2.js
+++ /dev/null
@@ -1,214 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const other_constants_1 = require("../../app/lib/other_constants");
-const logger_1 = require("../../app/lib/logger");
-const index_1 = require("../../app/modules/bma/index");
-const index_2 = require("../../app/modules/crawler/index");
-const toolbox_1 = require("./tools/toolbox");
-const TestUser_1 = require("./tools/TestUser");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = index_1.BmaDependency.duniter.methods.bma;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const expectHttpCode = httpTest.expectHttpCode;
-if (other_constants_1.OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS) {
-    logger_1.NewLogger().mute();
-}
-// Trace these errors
-process.on('unhandledRejection', (reason) => {
-    console.error('Unhandled rejection: ' + reason);
-    console.error(reason);
-});
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 10,
-    switchOnHeadAdvance: 6,
-    avgGenTime: 30 * 60,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-const now = Math.round(new Date().getTime() / 1000);
-describe("SelfFork", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb4', MEMORY_MODE, _.extend({
-            port: '7781',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            }
-        }, commonConf));
-        s2 = duniter('/bb5', MEMORY_MODE, _.extend({
-            port: '7782',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser_1.TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser_1.TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        const commitS1 = commit(s1);
-        const commitS2 = commit(s2, {
-            time: now + 37180
-        });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        // Server 1
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commitS1({
-            time: now
-        });
-        yield commitS1();
-        yield commitS1();
-        yield commitS1();
-        // Server 2
-        yield sync(0, 2, s1, s2);
-        yield toolbox_1.waitToHaveBlock(s2, 2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield Promise.all([
-            toolbox_1.waitForkResolution(s1, 9),
-            index_2.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey)
-        ]);
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), {
-                number: 0,
-                issuersCount: 0,
-                issuersFrame: 1,
-                issuersFrameVar: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), {
-                number: 1,
-                issuersCount: 1,
-                issuersFrame: 1,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/2 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), {
-                number: 2,
-                issuersCount: 1,
-                issuersFrame: 2,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/3 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), {
-                number: 3,
-                issuersCount: 1,
-                issuersFrame: 3,
-                issuersFrameVar: 3
-            });
-        });
-        it('/block/4 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), {
-                number: 4,
-                issuersCount: 2,
-                issuersFrame: 4,
-                issuersFrameVar: 7
-            });
-        });
-        it('/block/5 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), {
-                number: 5,
-                issuersCount: 2,
-                issuersFrame: 5,
-                issuersFrameVar: 6
-            });
-        });
-        it('/block/6 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), {
-                number: 6,
-                issuersCount: 2,
-                issuersFrame: 6,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/7 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), {
-                number: 7,
-                issuersCount: 2,
-                issuersFrame: 7,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 2 branch', () => __awaiter(this, void 0, void 0, function* () {
-            const branches = yield s1.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-    describe("Server 2 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), {
-                number: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), {
-                number: 1
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 1 branch', () => co(function* () {
-            const branches = yield s2.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-});
-//# sourceMappingURL=branches2.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
deleted file mode 100644
index 353ef8113098859a1f9e67a0b9592211759b1194..0000000000000000000000000000000000000000
--- a/test/integration/branches_switch.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const index_1 = require("../../app/modules/crawler/index");
-const index_2 = require("../../app/modules/bma/index");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const TestUser = require('./tools/TestUser').TestUser;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 30,
-    avgGenTime: 1,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-describe("Switch", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb11', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7788',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            },
-            rootoffset: 10,
-            sigQty: 1, dt: 1, ud0: 120
-        }, commonConf));
-        s2 = duniter('/bb12', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7789',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield sync(0, 2, s1, s2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        // So we now have:
-        // S1 01234
-        // S2   `3456789
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield index_1.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey);
-        // S1 should have switched to the other branch
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/8 should exist on S1', function () {
-            return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-        it('/block/8 should exist on S2', function () {
-            return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-    });
-});
-//# sourceMappingURL=branches_switch.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.ts b/test/integration/branches_switch.ts
index 2eb7fae68263581106b049d653402e133aa6508a..ecce9de72ba20fb22fbaeb610ea56ca95f02f8a2 100644
--- a/test/integration/branches_switch.ts
+++ b/test/integration/branches_switch.ts
@@ -11,6 +11,7 @@ const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
 const sync      = require('./tools/sync');
+const cluster   = require('cluster')
 const shutDownEngine  = require('./tools/shutDownEngine');
 
 const expectJSON     = httpTest.expectJSON;
@@ -31,6 +32,8 @@ describe("Switch", function() {
 
   before(() => co(function *() {
 
+    cluster.setMaxListeners(6)
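+    // cap raised while both test servers' PoW clusters attach listeners to the shared "cluster" module (avoids a listener-leak warning); reset in after()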
+
     s1 = duniter(
       '/bb11',
       MEMORY_MODE,
@@ -97,6 +100,7 @@ describe("Switch", function() {
   }));
 
   after(() => {
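+    // lower the "cluster" listener cap again once both servers are shut down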
+    cluster.setMaxListeners(3)
     return Promise.all([
       shutDownEngine(s1),
       shutDownEngine(s2)
diff --git a/test/integration/continuous-proof.js b/test/integration/continuous-proof.js
index 289d5b694b3f5ceeef79fb2f479af64c6080548f..157477a80f3dca199030a6ba498ea6d0cf218e44 100644
--- a/test/integration/continuous-proof.js
+++ b/test/integration/continuous-proof.js
@@ -37,6 +37,7 @@ describe("Continous proof-of-work", function() {
     yield i1.join();
     yield i2.join();
     yield s1.commit();
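+    // release the PoW cluster workers spawned for this initial commit; the tests below start and stop block computation explicitly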
+    yield s1.closeCluster();
   }));
 
   it('should automatically stop waiting if nothing happens', () => co(function*() {
@@ -104,7 +105,7 @@ describe("Continous proof-of-work", function() {
     s2.conf.cpu = 1.0;
     s2.startBlockComputation();
     yield s2.until('block', 15);
-    s2.stopBlockComputation();
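+    // wait for the prover to actually stop (stopBlockComputation resolves asynchronously) before pulling blocks into s3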
+    yield s2.stopBlockComputation();
     yield [
       require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s3),
       new Promise(res => {
@@ -121,11 +122,6 @@ describe("Continous proof-of-work", function() {
     const current = yield s3.get('/blockchain/current')
     yield s3.stopBlockComputation();
     current.number.should.be.aboveOrEqual(14)
+    yield s1.closeCluster()
   }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
 });
diff --git a/test/integration/forwarding.js b/test/integration/forwarding.js
deleted file mode 100644
index 993247afcae309b74a8001914f99576823085ed0..0000000000000000000000000000000000000000
--- a/test/integration/forwarding.js
+++ /dev/null
@@ -1,184 +0,0 @@
-"use strict";
-const should = require('should');
-const assert = require('assert');
-const async  = require('async');
-const _      = require('underscore');
-const co     = require('co');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const jspckg = require('../../package');
-const constants = require('../../app/lib/constants');
-
-require('../../app/modules/bma').BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
-
-if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
-  require('../../app/lib/logger').NewLogger().mute();
-}
-
-describe("Forwarding", function() {
-
-  describe("Nodes", function() {
-
-    const common = { currency: 'bb', nobma: false, bmaWithCrawler:true, ws2p: { upnp: false }, ipv4: '127.0.0.1', remoteipv4: '127.0.0.1', rootoffset: 0, sigQty: 1 };
-
-    const node1 = node('db_1', _({ upnp: false, httplogs: false, port: 9600, remoteport: 9600, pair: { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'} }).extend(common));
-    const node2 = node('db_2', _({ upnp: false, httplogs: false, port: 9601, remoteport: 9601, pair: { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'} }).extend(common));
-
-    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
-    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
-    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
-    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
-
-    before(() => co(function*(){
-      yield [node1, node2].map((theNode) => theNode.startTesting());
-      yield new Promise(function(resolve, reject){
-        async.waterfall([
-          function(next) {
-            node2.peering(next);
-          },
-          function(peer, next) {
-            node1.submitPeer(peer, function(err) {
-              next(err);
-            });
-          },
-          function(next) {
-            node1.peering(next);
-          },
-          function(peer, next) {
-            node2.submitPeer(peer, next);
-          }
-        ], function(err) {
-          err ? reject(err) : resolve();
-        });
-      });
-      yield [
-        node2.until('identity', 4),
-        node2.until('certification', 2),
-        node2.until('block', 1),
-        co(function *() {
-
-          // Self certifications
-          yield cat.createIdentity();
-          yield tac.createIdentity();
-          yield tic.createIdentity();
-          yield toc.createIdentity();
-          // Certifications
-          yield cat.cert(tac);
-          yield tac.cert(cat);
-          yield cat.join();
-          yield tac.join();
-          yield node1.commitP();
-        })
-      ];
-      yield [
-        node2.until('revocation', 1),
-        co(function *() {
-          yield cat.revoke();
-        })
-      ];
-    }));
-
-    describe("Testing technical API", function(){
-
-      it('Node1 should be up and running', node1.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-
-      it('Node2 should be up and running', node2.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-    });
-
-    describe('Node 1', doTests(node1));
-    describe('Node 2', doTests(node2));
-
-  });
-});
-
-function doTests(theNode) {
-
-  return function(){
-
-    describe("user cat", function(){
-
-      it('should give only 1 result', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          should.exists(res.results[0].signed[0].isMember);
-          should.exists(res.results[0].signed[0].wasMember);
-          assert.equal(res.results[0].signed[0].isMember, true);
-          assert.equal(res.results[0].signed[0].wasMember, true);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    describe("user tac", function(){
-
-      it('should give only 1 result', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].uids[0].others.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    it('toc should give only 1 result', theNode.lookup('toc', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-
-    it('tic should give only 1 results', theNode.lookup('tic', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-  };
-}
diff --git a/test/integration/forwarding.ts b/test/integration/forwarding.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8adbba5eecbbb90794e343be1a6b6dab41e603a5
--- /dev/null
+++ b/test/integration/forwarding.ts
@@ -0,0 +1,136 @@
+import {NewLogger} from "../../app/lib/logger"
+import {BmaDependency} from "../../app/modules/bma/index"
+import {TestUser} from "./tools/TestUser"
+import {simpleTestingConf, simpleTestingServer, TestingServer} from "./tools/toolbox"
+import {RouterDependency} from "../../app/modules/router"
+
+const should = require('should');
+const assert = require('assert');
+const jspckg = require('../../package');
+const constants = require('../../app/lib/constants');
+
+BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
+
+if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
+  NewLogger().mute()
+}
+
+describe("Forwarding", function() {
+
+  describe("Nodes", function() {
+
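+    // fixed reference time: keeps the generated test blockchain reproducible across runs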
+    const now = 1500000000
+    const conf1 = simpleTestingConf(now, { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'})
+    const conf2 = simpleTestingConf(now, { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'})
+
+    const node1 = simpleTestingServer(conf1)
+    const node2 = simpleTestingServer(conf2)
+
+    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
+    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
+    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
+    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
+
+    before(async () => {
+      await node1.initDalBmaConnections()
+      await node2.initDalBmaConnections()
+      await node1.sharePeeringWith(node2)
+      await node2.sharePeeringWith(node1)
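+      // plug each server's document stream into its network layer so identities, certifications and blocks are forwarded to the other node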
+      RouterDependency.duniter.methods.routeToNetwork(node1._server)
+      RouterDependency.duniter.methods.routeToNetwork(node2._server)
+      await Promise.all([
+        node2.until('identity', 4),
+        node2.until('certification', 2),
+        node2.until('block', 1),
+        (async () => {
+
+          // Self certifications
+          await cat.createIdentity();
+          await tac.createIdentity();
+          await tic.createIdentity();
+          await toc.createIdentity();
+          // Certifications
+          await cat.cert(tac);
+          await tac.cert(cat);
+          await cat.join();
+          await tac.join();
+          await node1.commit({ time: now })
+        })()
+      ])
+      await Promise.all([
+        node2.until('revocation', 1),
+        cat.revoke()
+      ])
+    })
+
+    describe("Testing technical API", function(){
+
+      it('Node1 should be up and running', () => node1.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+
+      it('Node2 should be up and running', () => node2.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+    });
+
+    describe('Node 1', doTests(node1));
+    describe('Node 2', doTests(node2));
+
+  });
+});
+
+function doTests(theNode:TestingServer) {
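+  // The same lookup expectations run against both nodes: node2 can only satisfy them if node1's documents were forwarded to it.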
+
+  return () => {
+
+    describe("user cat", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }));
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+        should.exists(res.results[0].signed[0].isMember);
+        should.exists(res.results[0].signed[0].wasMember);
+        assert.equal(res.results[0].signed[0].isMember, true);
+        assert.equal(res.results[0].signed[0].wasMember, true);
+      }));
+    });
+
+    describe("user tac", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }))
+
+      it('should have 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].uids[0].others.length, 1);
+      }))
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+      }))
+    })
+
+    it('toc should give no result', () => theNode.expectError('/wot/lookup/toc', 404, 'No matching identity'))
+
+    it('tic should give no results', () => theNode.expectError('/wot/lookup/tic', 404, 'No matching identity'))
+  }
+}
diff --git a/test/integration/http_api.js b/test/integration/http_api.js
index 83cd3ccb202b102879c48a2bbed2170436992c67..9454b78d613b86f0b88ea912648efb70e36ce5fe 100644
--- a/test/integration/http_api.js
+++ b/test/integration/http_api.js
@@ -336,7 +336,6 @@ function expectJSON(promise, json) {
 
 function postBlock(server2) {
   return function(block) {
-    console.log(typeof block == 'string' ? block : block.getRawSigned())
     return post(server2, '/blockchain/block')({
       block: typeof block == 'string' ? block : block.getRawSigned()
     })
diff --git a/test/integration/network-update.js b/test/integration/network-update.js
index 44c7ce9293b128fd57563f2c44527f610e265adb..7da00b7cf58913dda10b67898e0ccbbbe001a299 100644
--- a/test/integration/network-update.js
+++ b/test/integration/network-update.js
@@ -60,7 +60,7 @@ describe("Network updating", function() {
       yield [s1, s2].reduce((p, server) => co(function*() {
         yield p;
         yield server.initDalBmaConnections()
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
       }), Promise.resolve());
 
       // Server 1
diff --git a/test/integration/peer-outdated.js b/test/integration/peer-outdated.js
index 3f2cb0e1f4b37553114a501b8b8ceb66e34c4ab5..1855c043183697cb8a64986b9368e26dce22ad8c 100644
--- a/test/integration/peer-outdated.js
+++ b/test/integration/peer-outdated.js
@@ -42,7 +42,7 @@ describe("Peer document expiry", function() {
     yield [s1, s2].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/peerings.js b/test/integration/peerings.js
index 4b227f6317b951c2776dbd8b32a672bf31ca4237..5fc5d49bfa55b94f4614a3280c9c8aa1e95652b6 100644
--- a/test/integration/peerings.js
+++ b/test/integration/peerings.js
@@ -93,7 +93,7 @@ describe("Network", function() {
               return bmaAPI.openConnections()
                 .then(() => {
                   server.bma = bmaAPI;
-                  require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+                  require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
                 });
             });
         });
diff --git a/test/integration/peers-same-pubkey.js b/test/integration/peers-same-pubkey.js
index 41c4b9c19edb5c6362a53784f1120d73fae53cb4..6375127aa429522f1f265815dcacccb9395fde6f 100644
--- a/test/integration/peers-same-pubkey.js
+++ b/test/integration/peers-same-pubkey.js
@@ -36,7 +36,7 @@ describe("Peer document", function() {
     yield [s1, s2, s3].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js
index 3c79c48341eadb7b7948f71467ac79fd44259b78..a8d813cb9e8fecda634057b5edb7094563ac569f 100644
--- a/test/integration/start_generate_blocks.js
+++ b/test/integration/start_generate_blocks.js
@@ -76,7 +76,7 @@ describe("Generation", function() {
         yield server.initWithDAL();
         server.bma = yield bma(server);
         yield server.bma.openConnections();
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
         yield server.PeeringService.generateSelfPeer(server.conf);
         const prover = require('../../app/modules/prover').ProverDependency.duniter.methods.prover(server);
         server.startBlockComputation = () => prover.startService();
diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts
index ea81eb794f4235b5456151abe0b7f4f6c8422c61..af71bd3ae15e46c3b4a0fc1168a2d61014fc80a3 100644
--- a/test/integration/tools/toolbox.ts
+++ b/test/integration/tools/toolbox.ts
@@ -23,6 +23,7 @@ import {WS2PCluster} from "../../../app/modules/ws2p/lib/WS2PCluster"
 import {WS2PServer} from "../../../app/modules/ws2p/lib/WS2PServer"
 import {WS2PServerMessageHandler} from "../../../app/modules/ws2p/lib/interface/WS2PServerMessageHandler"
 import {TestUser} from "./TestUser"
+import {RouterDependency} from "../../../app/modules/router"
 
 const assert      = require('assert');
 const _           = require('underscore');
@@ -100,8 +101,8 @@ export const simpleNetworkOf2NodesAnd2Users = async (options:any) => {
   await tac.join();
 
   // Each server forwards to each other
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s1);
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s2);
+  RouterDependency.duniter.methods.routeToNetwork(s1._server)
+  RouterDependency.duniter.methods.routeToNetwork(s2._server)
 
   return { s1, s2, cat, tac };
 }
@@ -601,7 +602,7 @@ export class TestingServer {
     const bmaAPI = await bma(this.server);
     await bmaAPI.openConnections();
     this.bma = bmaAPI;
-    require('../../../app/modules/router').duniter.methods.routeToNetwork(this.server);
+    RouterDependency.duniter.methods.routeToNetwork(this.server)
     // Extra: for /wot/requirements URL
     require('../../../app/modules/prover').ProverDependency.duniter.methods.hookServer(this.server);
   }