diff --git a/.eslintignore b/.eslintignore
index d25a2f246707fc5599e607933c70bf26171f3e2e..052737334621d89f18f1b6e66abf9c0fa60712a4 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -43,5 +43,7 @@ app/modules/bma/lib/entity/*.js
 app/modules/bma/lib/controllers/*.js
 app/modules/crawler/*.js
 app/modules/crawler/lib/*.js
+app/ProcessCpuProfiler.js
+app/lib/common/package.js
 test/*.js
 test/**/*.js
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 0fdf9f4d60fe86d44c161a39da1f7b8d1484dd74..dcf30dda1096f9c692e98b34df94ad7a442c65eb 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,6 +19,7 @@ vagrant/*.log
 vagrant/duniter
 
 # Releases
+/work
 *.deb
 *.tar.gz
 *.log
@@ -56,6 +57,10 @@ test/integration/tools/TestUser.js*
 test/integration/tools/TestUser.d.ts
 test/integration/documents-currency.js*
 test/integration/documents-currency.d.ts
+test/integration/forwarding.js
+test/integration/branches_switch.js
+test/integration/branches2.js
+test/integration/transactions-chaining.js
 test/fast/modules/crawler/block_pulling.js*
 test/fast/modules/crawler/block_pulling.d.ts
 test/fast/fork*.js*
@@ -66,3 +71,9 @@ test/fast/modules/ws2p/*.js*
 test/fast/modules/ws2p/*.d.ts
 test/fast/modules/common/grammar.js*
 test/fast/modules/common/grammar.d.ts
+test/fast/prover/pow-1-cluster.d.ts
+test/fast/prover/pow-1-cluster.js
+test/fast/prover/pow-1-cluster.js.map
+test/fast/protocol-local-rule-chained-tx-depth.js
+test/fast/protocol-local-rule-chained-tx-depth.js.map
+test/fast/protocol-local-rule-chained-tx-depth.d.ts
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ddd9044a720e14011a188719c8798117bf76649a..c53f5387130c38241f0a14b167f82db0f62a0dad 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,19 +1,15 @@
 stages:
-    - github-sync
-    - test
-
-before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
-
+  - github-sync
+  - build
+  - test
+  - releases
+  - releases-page
 push_to_github:
     stage: github-sync
     variables:
         GIT_STRATEGY: none
     tags:
-        - github
-    before_script:
-        - ''
+        - redshift
     script:
         - rm -rf ./*
         - rm -rf .git
@@ -25,32 +21,77 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
-
-enforce_readme:
-    stage: github-sync
-    variables:
-        GIT_STRATEGY: none
-    tags:
-        - github
-    before_script:
-        - ''
-    script:
-      - rm -rf ./*
-      - rm -rf .git
-      - git clone $GITHUB_URL_AND_KEY .
-      - git config --global user.email "contact@duniter.org"
-      - git config --global user.name "Duniter"
-      - git checkout master
-      - cat .github/github_disclaimer.md > README.md.new
-      - cat README.md >> README.md.new
-      - mv README.md.new README.md
-      - git commit -am "Enforce github readme"
-      - git push origin master
+
+build:
+  stage: build
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
 
 test:
   stage: test
   tags:
     - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
   script:
     - yarn
     - yarn test
+
+releases:test:
+  stage: releases
+  image: duniter/release-builder:v1.0.1
+  tags:
+    - redshift-duniter-builder
+  variables:
+    DAY: $(date +%Y%m%d)
+    HOUR: $(date +%H%M)
+    SEC: $(date +%S)
+  script:
+    - bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
+  artifacts:
+    paths:
+      - work/bin/
+    expire_in: 8h
+  when: manual
+  except:
+    - tags
+
+
+releases:
+  stage: releases
+  image: duniter/release-builder:v1.0.1
+  tags:
+    - redshift-duniter-builder
+  script:
+    - bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
+  artifacts:
+    paths:
+      - work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb
+      - work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz
+      - work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb
+    expire_in: 8h
+  when: manual
+  only:
+  - tags
+  - master
+
+releases-message:
+  stage: releases-page
+  image: tensorflow/tensorflow:latest-py3
+  tags:
+    - redshift-duniter-builder
+  variables:
+    JOB_ARTIFACTS: 'releases'
+    EXPECTED_ARTIFACTS: '["work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
+  script:
+    - python3 .gitlab/releaser.py
+  when: manual
+  only:
+  - tags
+  - master
diff --git a/.gitlab/release_template.md b/.gitlab/release_template.md
new file mode 100644
index 0000000000000000000000000000000000000000..99b23ff463aae901591987721b0db1f310710db9
--- /dev/null
+++ b/.gitlab/release_template.md
@@ -0,0 +1,9 @@
+{{current_message}}
+
+# Downloads
+{% for artifact in artifacts %}
+***
+[{{artifact.icon}} {{artifact.name}}]({{artifact.url}})  
+_{{artifact.size}}_
+***
+{% endfor %}
diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
new file mode 100644
index 0000000000000000000000000000000000000000..833027fd0e589815d62f1d55308639c93eaa78d8
--- /dev/null
+++ b/.gitlab/releaser.py
@@ -0,0 +1,143 @@
+#!/usr/bin/python3
+'''
+This module overwrites the GitLab release note of the current project's tag.
+It expects to find the following environment variables:
+  - CI_PROJECT_URL - Automatically set by gitlab-ci
+  - CI_PROJECT_ID - Automatically set by gitlab-ci
+  - CI_COMMIT_TAG - Automatically set by gitlab-ci
+  - RELEASER_TOKEN - Token of the technical user allowed to edit the release, to set manually
+  - JOB_ARTIFACTS - Name of the job that holds the artifacts, to set manually
+  - EXPECTED_ARTIFACTS - JSON list of the generated artifact paths, to set manually
+'''
+
+import math
+import urllib.request
+import urllib.error
+import json
+import os
+import jinja2
+
+def convert_size(size_bytes):
+    '''Convert a size in bytes into a human-readable string'''
+    if size_bytes == 0:
+        return '0B'
+    size_name = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
+    i = int(math.floor(math.log(size_bytes, 1024)))
+    power = math.pow(1024, i)
+    size = round(size_bytes / power, 2)
+    return '%s %s' % (size, size_name[i])
+
+def get_current_message():
+    '''Get current release message'''
+    ci_project_id = os.environ['CI_PROJECT_ID']
+    ci_commit_tag = os.environ['CI_COMMIT_TAG']
+    tag_url = 'https://git.duniter.org/api/v4/projects/'
+    tag_url += ci_project_id
+    tag_url += '/repository/tags/'
+    tag_url += ci_commit_tag
+    request = urllib.request.Request(tag_url)
+    response = urllib.request.urlopen(request)
+    response_data = response.read().decode()
+    data = json.loads(response_data)
+    if data['release'] is None:
+        return False, ''
+    else:
+        return True, data['release']['description'].split('# Downloads')[0]
+
+def build_artifact_url(artifact, source):
+    '''Given an artifact name, builds the url to download it'''
+    job_artifacts = os.environ['JOB_ARTIFACTS']
+    ci_project_url = os.environ['CI_PROJECT_URL']
+    ci_commit_tag = os.environ['CI_COMMIT_TAG']
+    if source:
+        source_url = ci_project_url
+        source_url += '/repository/'
+        source_url += ci_commit_tag
+        source_url += '/archive.'
+        source_url += artifact
+        return source_url
+    else:
+        artifact_url = ci_project_url
+        artifact_url += '/-/jobs/artifacts/'
+        artifact_url += ci_commit_tag
+        artifact_url += '/raw/'
+        artifact_url += artifact
+        artifact_url += '?job='
+        artifact_url += job_artifacts
+        return artifact_url
+
+def get_artifact_weight(location):
+    '''Return the formatted size of an artifact file'''
+    size = os.path.getsize(location)
+    return convert_size(int(size))
+
+
+def build_compiled_message(current_message):
+    '''Create a new release message using the release template'''
+
+    expected_artifacts = os.environ['EXPECTED_ARTIFACTS']
+    try:
+        expected_artifacts = json.loads(expected_artifacts)
+    except json.decoder.JSONDecodeError:
+        print('CRITICAL EXPECTED_ARTIFACTS environment variable JSON probably malformed')
+        print('CRITICAL Correct : \'["test_linux.txt","test_windows.txt"]\' ')
+        print('CRITICAL Not Correct: "[\'test_linux.txt\',\'test_windows.txt\']" ')
+        exit(1)
+    artifacts_list = []
+    for artifact in expected_artifacts:
+        artifact_dict = {
+            'name': artifact.split('/')[-1],
+            'url': build_artifact_url(artifact, False),
+            'size': get_artifact_weight(artifact),
+            'icon': ':package:'
+        }
+        artifacts_list.append(artifact_dict)
+
+    j2_env = jinja2.Environment(
+        loader=jinja2.FileSystemLoader(
+            os.path.dirname(os.path.abspath(__file__))
+            ),
+        trim_blocks=True
+        )
+    # pylint: disable=maybe-no-member
+    template = j2_env.get_template('release_template.md')
+    return template.render(
+        current_message=current_message,
+        artifacts=artifacts_list
+    )
+
+
+def send_compiled_message(exists_release, compiled_message):
+    '''Send the new release message to GitLab'''
+    releaser_token = os.environ['RELEASER_TOKEN']
+    ci_project_id = os.environ['CI_PROJECT_ID']
+    ci_commit_tag = os.environ['CI_COMMIT_TAG']
+    release_url = 'https://git.duniter.org/api/v4/projects/'
+    release_url += ci_project_id
+    release_url += '/repository/tags/'
+    release_url += ci_commit_tag
+    release_url += '/release'
+    if exists_release:
+        # We need to send a PUT request
+        method = 'PUT'
+    else:
+        # We need to send a POST request
+        method = 'POST'
+    send_data = {
+        'tag_name':ci_commit_tag,
+        'description':compiled_message
+        }
+    send_data_serialized = json.dumps(send_data).encode('utf-8')
+    request = urllib.request.Request(release_url, data=send_data_serialized, method=method)
+    request.add_header('Private-Token', releaser_token)
+    request.add_header('Content-Type', 'application/json')
+    urllib.request.urlopen(request)
+
+def main():
+    '''Execute the main scenario'''
+    exists_release, current_message = get_current_message()
+    compiled_message = build_compiled_message(current_message)
+    send_compiled_message(exists_release, compiled_message)
+    print('Release message updated successfully')
+
+if __name__ == '__main__':
+    main()
diff --git a/.travis.yml b/.travis.yml
index 32368217c48147b7d3adc318b5d47af12bdc8662..a3f3862e0e3586fec203e2daa1561f787c4051a5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@
 language: node_js
 
 node_js:
-  - 6.11.1
+  - 8.9.2
 env:
   - CXX=g++-4.8
 addons:
diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts
index b34cb6278407c8679b19c508cc6b86ce7f5473f2..e1be3f6661f221523d06aad8460cb82d3c36f6a4 100644
--- a/app/lib/common-libs/constants.ts
+++ b/app/lib/common-libs/constants.ts
@@ -285,6 +285,8 @@ export const CommonConstants = {
     BLOCK: find("Block: (" + INTEGER + "-" + FINGERPRINT + ")"),
     SPECIAL_BLOCK
   },
+
+  BLOCK_MAX_TX_CHAINING_DEPTH: 5
 }
 
 function exact (regexpContent:string) {
diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts
index ddb1cfa9988822c7b50fcb99465c4adf2d95aec3..b5f1c47be1b391cf00f6e99e7fa3a77d46db7375 100644
--- a/app/lib/indexer.ts
+++ b/app/lib/indexer.ts
@@ -865,21 +865,12 @@ export class Indexer {
       }
     }))
 
-    // BR_G46
-    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      const reducable = await dal.sindexDAL.sqlFind({
-        identifier: ENTRY.identifier,
-        pos: ENTRY.pos,
-        amount: ENTRY.amount,
-        base: ENTRY.base
-      });
-      ENTRY.conditions = reduce(reducable).conditions; // We valuate the input conditions, so we can map these records to a same account
-      ENTRY.available = reduce(reducable).consumed === false;
-    }))
-
-    // BR_G47
-    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      let source = _.filter(sindex, (src:SindexEntry) => src.identifier == ENTRY.identifier && src.pos == ENTRY.pos && src.conditions && src.op === constants.IDX_CREATE)[0];
+    const getInputLocalFirstOrFallbackGlobally = async (sindex:SindexEntry[], ENTRY:SindexEntry) => {
+      let source = _.filter(sindex, (src:SindexEntry) =>
+        src.identifier == ENTRY.identifier
+        && src.pos == ENTRY.pos
+        && src.conditions
+        && src.op === constants.IDX_CREATE)[0];
       if (!source) {
         const reducable = await dal.sindexDAL.sqlFind({
           identifier: ENTRY.identifier,
@@ -887,20 +878,29 @@ export class Indexer {
           amount: ENTRY.amount,
           base: ENTRY.base
         });
-        source = reduce(reducable);
+        source = reduce(reducable)
       }
+      return source
+    }
+
+    // BR_G46
+    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
+      ENTRY.conditions = source.conditions; // We evaluate the input conditions, so we can map these records to the same account
+      ENTRY.available = source.consumed === false;
+    }))
+
+    // BR_G47
+    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
       ENTRY.conditions = source.conditions;
       ENTRY.isLocked = !txSourceUnlock(ENTRY, source, HEAD);
     }))
 
     // BR_G48
     await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      ENTRY.isTimeLocked = ENTRY.written_time - reduce(await dal.sindexDAL.sqlFind({
-          identifier: ENTRY.identifier,
-          pos: ENTRY.pos,
-          amount: ENTRY.amount,
-          base: ENTRY.base
-        })).written_time < ENTRY.locktime;
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
+      ENTRY.isTimeLocked = ENTRY.written_time - source.written_time < ENTRY.locktime;
     }))
 
     return HEAD;
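
For context, a minimal sketch (hypothetical types and helper names, not part of this patch) of the local-first lookup that the new getInputLocalFirstOrFallbackGlobally helper factors out of BR_G46, BR_G47 and BR_G48: the input's source is searched among the block's own CREATE entries first, and only on a miss is the reduced global SINDEX queried.

    // Sketch: resolve a consumed input's source locally first, then fall back to the global index.
    // `LocalEntry` and `findGlobalReduced` are illustrative stand-ins for SindexEntry and
    // dal.sindexDAL.sqlFind(...) + reduce(...).
    interface LocalEntry { op: 'CREATE'|'UPDATE'; identifier: string; pos: number; conditions?: string }

    async function resolveSource(
      localSindex: LocalEntry[],
      input: { identifier: string; pos: number },
      findGlobalReduced: (identifier: string, pos: number) => Promise<LocalEntry>
    ): Promise<LocalEntry> {
      // 1. Local hit: the source was CREATEd by another transaction of the same block (chained tx)
      const local = localSindex.find(s =>
        s.op === 'CREATE' && s.identifier === input.identifier && s.pos === input.pos && !!s.conditions)
      if (local) {
        return local
      }
      // 2. Miss: the source must already exist in the blockchain's reduced SINDEX
      return findGlobalReduced(input.identifier, input.pos)
    }
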
diff --git a/app/lib/rules/index.ts b/app/lib/rules/index.ts
index 44ca3d2535efb9cd8d8733317a255ad80f74cf5e..28d13899b467bb69fe4a8c9be2e9c0bcd021bcac 100644
--- a/app/lib/rules/index.ts
+++ b/app/lib/rules/index.ts
@@ -33,6 +33,7 @@ export const ALIAS = {
     await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block);
     await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block);
     await LOCAL_RULES_FUNCTIONS.checkTxSignature(block);
+    await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index);
   },
 
   ALL_LOCAL_BUT_POW_AND_SIGNATURE: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => {
@@ -60,6 +61,7 @@ export const ALIAS = {
     await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block);
     await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block);
     await LOCAL_RULES_FUNCTIONS.checkTxSignature(block);
+    await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index);
   }
 }
 
diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts
index 41d19f163d8b3a377fb2e6832c7be37dae36d2d3..1ce1ebd171119d13f8847bbf22afa40f6742843e 100644
--- a/app/lib/rules/local_rules.ts
+++ b/app/lib/rules/local_rules.ts
@@ -379,28 +379,70 @@ export const LOCAL_RULES_FUNCTIONS = {
       }
     }
     return true;
+  },
+
+  checkMaxTransactionChainingDepth: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => {
+    const sindex = Indexer.sindex(index)
+    const max = getMaxTransactionDepth(sindex)
+    // Before 2018-03-01 00:00 UTC (medianTime 1519862400) the allowed chaining depth was 1; it is now BLOCK_MAX_TX_CHAINING_DEPTH
+    const allowedMax = block.medianTime > 1519862400 ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 1
+    if (max > allowedMax) {
+      throw "The maximum transaction chaining length per block is " + allowedMax
+    }
+    return true
+  }
+}
+
+export interface SindexShortEntry {
+  op:string,
+  identifier:string,
+  pos:number,
+  tx:string|null
+}
+
+function getMaxTransactionDepth(sindex:SindexShortEntry[]) {
+  const ids = _.uniq(_.pluck(sindex, 'tx'))
+  let maxTxChainingDepth = 0
+  for (let id of ids) {
+    maxTxChainingDepth = Math.max(maxTxChainingDepth, getTransactionDepth(id, sindex, 0))
   }
+  return maxTxChainingDepth
+}
+
+function getTransactionDepth(txHash:string, sindex:SindexShortEntry[], localDepth = 0) {
+  const inputs = _.filter(sindex, (s:SindexShortEntry) => s.op === 'UPDATE' && s.tx === txHash)
+  let depth = localDepth
+  for (let input of inputs) {
+    const consumedOutput = _.findWhere(sindex, { op: 'CREATE', identifier: input.identifier, pos: input.pos })
+    if (consumedOutput) {
+      if (localDepth < CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH) {
+        const subTxDepth = getTransactionDepth(consumedOutput.tx, sindex, localDepth + 1)
+        depth = Math.max(depth, subTxDepth)
+      } else {
+        depth++
+      }
+    }
+  }
+  return depth
 }
 
 function checkSingleMembershipSignature(ms:any) {
   return verify(ms.getRaw(), ms.signature, ms.issuer);
 }
 
-function checkBunchOfTransactions(transactions:TransactionDTO[], done:any = undefined){
-  const block:any = { transactions };
+function checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, options?:{ dontCareAboutChaining?:boolean }){
+  const block:any = { transactions, identities: [], joiners: [], actives: [], leavers: [], revoked: [], excluded: [], certifications: [] };
+  const index = Indexer.localIndex(block, conf)
   return (async () => {
-    try {
-      let local_rule = LOCAL_RULES_FUNCTIONS;
-      await local_rule.checkTxLen(block);
-      await local_rule.checkTxIssuers(block);
-      await local_rule.checkTxSources(block);
-      await local_rule.checkTxRecipients(block);
-      await local_rule.checkTxAmounts(block);
-      await local_rule.checkTxSignature(block);
-      done && done();
-    } catch (err) {
-      if (done) return done(err);
-      throw err;
+    let local_rule = LOCAL_RULES_FUNCTIONS;
+    await local_rule.checkTxLen(block);
+    await local_rule.checkTxIssuers(block);
+    await local_rule.checkTxSources(block);
+    await local_rule.checkTxRecipients(block);
+    await local_rule.checkTxAmounts(block);
+    await local_rule.checkTxSignature(block);
+    if (!options || !options.dontCareAboutChaining) {
+      await local_rule.checkMaxTransactionChainingDepth(block, conf, index);
     }
   })()
 }
@@ -411,9 +453,13 @@ export const LOCAL_RULES_HELPERS = {
 
   checkSingleMembershipSignature: checkSingleMembershipSignature,
 
-  checkBunchOfTransactions: checkBunchOfTransactions,
+  checkBunchOfTransactions,
+
+  getTransactionDepth,
+
+  getMaxTransactionDepth,
 
-  checkSingleTransactionLocally: (tx:any, done:any = undefined) => checkBunchOfTransactions([tx], done),
+  checkSingleTransactionLocally: (tx:any, conf:ConfDTO) => checkBunchOfTransactions([tx], conf),
 
   checkTxAmountsValidity: (tx:TransactionDTO) => {
     const inputs = tx.inputsAsObjects()
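
A worked example of the new depth computation (toy data, made-up tx hashes): a transaction consuming an output created in the same block has depth 1, a third transaction chained on it has depth 2, and checkMaxTransactionChainingDepth rejects the block once the maximum depth exceeds the allowed bound.

    // Toy SINDEX with chained transactions A -> B -> C within one block
    const toySindex: SindexShortEntry[] = [
      { op: 'CREATE', identifier: 'S0', pos: 0, tx: 'A' }, // A creates an output
      { op: 'UPDATE', identifier: 'S0', pos: 0, tx: 'B' }, // B consumes A's output
      { op: 'CREATE', identifier: 'S1', pos: 0, tx: 'B' }, // B creates an output
      { op: 'UPDATE', identifier: 'S1', pos: 0, tx: 'C' }, // C consumes B's output
    ]
    // getTransactionDepth('A', toySindex) === 0 (A consumes nothing from this block)
    // getTransactionDepth('B', toySindex) === 1 (B -> A)
    // getTransactionDepth('C', toySindex) === 2 (C -> B -> A)
    // getMaxTransactionDepth(toySindex)  === 2 -> accepted while 2 <= allowedMax
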
diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts
index 1ea72f5a647af1c5df1ab99bbb47556ba2f78012..584920f3c4444c8d83859d85abbce5fce3d8a8f2 100644
--- a/app/modules/daemon.ts
+++ b/app/modules/daemon.ts
@@ -95,6 +95,9 @@ module.exports = {
 
         logger.info(">> Server starting...");
 
+        // Log NodeJS version
+        logger.info('NodeJS version: ' + process.version);
+
         await server.checkConfig();
         // Add signing & public key functions to PeeringService
         logger.info('Node version: ' + server.version);
diff --git a/app/modules/prover/lib/PowWorker.ts b/app/modules/prover/lib/PowWorker.ts
new file mode 100644
index 0000000000000000000000000000000000000000..fd225941a7b8897a0c6f5046ec4dd25fea603ba7
--- /dev/null
+++ b/app/modules/prover/lib/PowWorker.ts
@@ -0,0 +1,95 @@
+import {Querable} from "./permanentProver"
+
+const querablep = require('querablep')
+
+/*********
+ *
+ * PoW worker
+ * ----------
+ *
+ * Its model is very simple: we ask it to find a proof, and we wait for the result.
+ * We can also tell it to cancel the current proof, which makes it answer `null` as the proof value.
+ *
+ * The worker also provides two properties:
+ *
+ * - `worker.online`: a promise which is resolved when the worker gets "online" for the first time
+ * - `worker.exited`: a promise which is resolved when the worker exits (i.e. when it is closed or killed)
+ *
+ ********/
+
+export class PowWorker {
+
+  private onlinePromise:Promise<void>
+  private onlineResolver:()=>void
+
+  private exitPromise:Promise<void>
+  private exitResolver:()=>void
+
+  private proofPromise:Querable<{ message: { answer:any }}|null>
+  private proofResolver:(proof:{ message: { answer:any }}|null)=>void
+
+  private messageHandler:((worker:any, msg:any)=>void)
+
+  constructor(
+    private nodejsWorker:any,
+    private onPowMessage:(message:any)=>void,
+    private onlineHandler:()=>void,
+    private exitHandler:(code:any, signal:any)=>void) {
+
+    // Handle "online" promise
+    this.onlinePromise = new Promise(res => this.onlineResolver = res)
+    nodejsWorker.on('online', () => {
+      this.onlineHandler()
+      this.onlineResolver()
+    })
+
+    // Handle "exit" promise
+    this.exitPromise = new Promise(res => this.exitResolver = res)
+    nodejsWorker.on('exit', (code:any, signal:any) => {
+      this.exitHandler(code, signal)
+      this.exitResolver()
+    })
+
+    nodejsWorker.on('message', (message:any) => {
+      if (message) {
+        this.onPowMessage(message)
+      }
+      if (this.proofPromise && message && message.uuid && !this.proofPromise.isResolved() && this.proofResolver) {
+        const result:{ message: { answer:any }}|null = message ? { message } : null
+        this.proofResolver(result)
+      }
+    })
+  }
+
+  get online() {
+    return this.onlinePromise
+  }
+
+  get exited() {
+    return this.exitPromise
+  }
+
+  get pid() {
+    return this.nodejsWorker.process.pid
+  }
+
+  askProof(commandMessage:{ uuid:string, command:string, value:any }) {
+    this.proofPromise = querablep(new Promise<{ message: { answer:any }}|null>(res => this.proofResolver = res))
+    this.nodejsWorker.send(commandMessage)
+    return this.proofPromise
+  }
+
+  sendConf(confMessage:{ command:string, value:any }) {
+    this.nodejsWorker.send(confMessage)
+  }
+
+  sendCancel() {
+    this.nodejsWorker.send({
+      command: 'cancel'
+    })
+  }
+
+  kill() {
+    this.nodejsWorker.kill()
+  }
+}
\ No newline at end of file
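
A minimal usage sketch of the new wrapper (abridged payload and hypothetical uuid; the real call sites are in powCluster.ts below):

    import * as cluster from "cluster"
    import {PowWorker} from "./PowWorker"

    // Sketch: fork a worker, wait for it to come online, ask for one proof, then shut it down.
    const worker = new PowWorker(
      cluster.fork(),
      msg => console.log('pow message:', msg),                      // onPowMessage
      () => console.log('worker is online'),                        // onlineHandler
      (code, signal) => console.log('worker exited', code, signal)) // exitHandler

    async function proveOnce() {
      await worker.online
      const res = await worker.askProof({ uuid: 'some-uuid', command: 'newPoW', value: { /* abridged */ } })
      if (!res || !res.message.answer) {
        console.log('proof was canceled') // a cancel makes the worker answer with a null `answer`
      }
      worker.kill()
      await worker.exited
    }
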
diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts
index a941bbaeb7fd0325e07f772a48b0eed2f2df1ae8..39d9cb45aee91ab8d19df5bdd52f9ccee77944d3 100644
--- a/app/modules/prover/lib/blockGenerator.ts
+++ b/app/modules/prover/lib/blockGenerator.ts
@@ -65,7 +65,7 @@ export class BlockGenerator {
     const wereExcludeds = await this.dal.getRevokedPubkeys();
     const newCertsFromWoT = await generator.findNewCertsFromWoT(current);
     const newcomersLeavers = await this.findNewcomersAndLeavers(current, (joinersData:any) => generator.filterJoiners(joinersData));
-    const transactions = await this.findTransactions(current);
+    const transactions = await this.findTransactions(current, manualValues);
     const joinData = newcomersLeavers[2];
     const leaveData = newcomersLeavers[3];
     const newCertsFromNewcomers = newcomersLeavers[4];
@@ -104,7 +104,8 @@ export class BlockGenerator {
     return [cur, newWoTMembers, finalJoinData, leavers, updates];
   }
 
-  private async findTransactions(current:DBBlock) {
+  private async findTransactions(current:DBBlock, options:{ dontCareAboutChaining?:boolean }) {
+    const ALSO_CHECK_PENDING_TXS = true
     const versionMin = current ? Math.min(CommonConstants.LAST_VERSION_FOR_TX, current.version) : CommonConstants.DOCUMENTS_VERSION;
     const txs = await this.dal.getTransactionsPending(versionMin);
     const transactions = [];
@@ -113,14 +114,9 @@ export class BlockGenerator {
       obj.currency = this.conf.currency
       const tx = TransactionDTO.fromJSONObject(obj);
       try {
-        await new Promise((resolve, reject) => {
-          LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), (err:any, res:any) => {
-            if (err) return reject(err)
-            return resolve(res)
-          })
-        })
+        await LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), this.conf, options)
         const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
-        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal);
+        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal, ALSO_CHECK_PENDING_TXS);
         await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
         transactions.push(tx);
         passingTxs.push(tx);
@@ -279,7 +275,9 @@ export class BlockGenerator {
         const currentMembership = await this.dal.mindexDAL.getReducedMS(ms.issuer);
         const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
         if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) {
-          preJoinData[join.identity.pubkey] = join;
+          if (!preJoinData[join.identity.pubkey] || preJoinData[join.identity.pubkey].certs.length < join.certs.length) {
+            preJoinData[join.identity.pubkey] = join;
+          }
         }
       } catch (err) {
         if (err && !err.uerr) {
diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts
index 9cd215d98d478a456857b52f9f5c5388e1afa153..32d5699ecc014e8ec690c6b5b98722d43b864a6a 100644
--- a/app/modules/prover/lib/blockProver.ts
+++ b/app/modules/prover/lib/blockProver.ts
@@ -44,6 +44,9 @@ export class WorkerFarm {
     })
   }
 
+  get nbWorkers() {
+    return this.theEngine.getNbWorkers()
+  }
 
   changeCPU(cpu:any) {
     return this.theEngine.setConf({ cpu })
@@ -70,7 +73,7 @@ export class WorkerFarm {
   }
 
   shutDownEngine() {
-    this.theEngine.shutDown()
+    return this.theEngine.shutDown()
   }
 
   /**
@@ -175,7 +178,6 @@ export class BlockProver {
       const start = Date.now();
       let result = await powFarm.askNewProof({
         newPoW: {
-          turnDuration: os.arch().match(/arm/) ? CommonConstants.POW_TURN_DURATION_ARM : CommonConstants.POW_TURN_DURATION_PC,
           conf: {
             cpu: this.conf.cpu,
             prefix: this.conf.prefix,
@@ -194,10 +196,10 @@ export class BlockProver {
         throw 'Proof-of-work computation canceled because block received';
       } else {
         const proof = result.block;
-        const testsCount = result.testsCount;
+        const testsCount = result.testsCount * powFarm.nbWorkers
         const duration = (Date.now() - start);
-        const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2);
-        this.logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond);
+        const testsPerSecond = testsCount / (duration / 1000)
+        this.logger.info('Done: #%s, %s in %ss (~%s tests, ~%s tests/s, using %s cores, CPU %s%)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond.toFixed(2), powFarm.nbWorkers, Math.floor(100*this.conf.cpu))
         this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros);
         return BlockDTO.fromJSONObject(proof)
       }
diff --git a/app/modules/prover/lib/constants.ts b/app/modules/prover/lib/constants.ts
index 0a454d38fd9c85e58115c2f971bbb6cb1cea812e..bb0cfcf31821b76e4a86b066049431d345317613 100644
--- a/app/modules/prover/lib/constants.ts
+++ b/app/modules/prover/lib/constants.ts
@@ -13,6 +13,7 @@ export const ProverConstants = {
   NONCE_RANGE: 1000 * 1000 * 1000 * 100,
 
   POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64,
+  POW_NB_PAUSES_PER_ROUND: 10,
 
   // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds.
   POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index cc83f46822764786bf91c1c02156c597966cc299..6ab1ca6458f52047e3e2dfa2548b1fd73f316ed9 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -1,4 +1,3 @@
-import {ProverConstants} from "./constants"
 import {Master as PowCluster} from "./powCluster"
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 
@@ -25,21 +24,16 @@ export class PowEngine {
     this.id = this.cluster.clusterId
   }
 
+  getNbWorkers() {
+    return this.cluster.nbWorkers
+  }
+
   forceInit() {
     return this.cluster.initCluster()
   }
 
   async prove(stuff:any) {
-
-    if (this.cluster.hasProofPending) {
-      await this.cluster.cancelWork()
-    }
-
-    const cpus = os.cpus()
-
-    if (os.arch().match(/arm/) || cpus[0].model.match(/Atom/)) {
-      stuff.newPoW.conf.nbCores /= 2; // Make sure that only once each physical core is used (for Hyperthreading).
-    }
+    await this.cluster.cancelWork()
     return await this.cluster.proveByWorkers(stuff)
   }
 
@@ -48,9 +42,6 @@ export class PowEngine {
   }
 
   setConf(value:any) {
-    if (os.arch().match(/arm/) && value.cpu !== undefined) {
-      value.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2
-    }
     return this.cluster.changeConf(value)
   }
 
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index 636a68b986b358b506e00b40714257b7180b82e3..d5c4fefedeceb4550d8a4972fddfc4b4a3a09e49 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -9,7 +9,7 @@ import {Server} from "../../../../server"
 
 const querablep = require('querablep');
 
-interface Querable<T> extends Promise<T> {
+export interface Querable<T> extends Promise<T> {
   isFulfilled(): boolean
   isResolved(): boolean
   isRejected(): boolean
@@ -213,6 +213,8 @@ export class PermanentProver {
     await this.prover.cancel();
     // If we were waiting, stop it and process the continuous generation
     this.blockchainChangedResolver && this.blockchainChangedResolver();
+    const farm = await this.prover.getWorker()
+    await farm.shutDownEngine()
   }
 
   private checkTrialIsNotTooHigh(trial:number, current:DBBlock, selfPubkey:string) {
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 4d4820777cb9b279aebea2d09680d1be0b3aab1f..d0c64c4e422ff0d95111d2d2f491e97d38a06967 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -1,24 +1,36 @@
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 import {ProverConstants} from "./constants"
+import {createPowWorker} from "./proof"
+import {PowWorker} from "./PowWorker"
 
 const _ = require('underscore')
 const nuuid = require('node-uuid');
-const moment = require('moment');
 const cluster = require('cluster')
 const querablep = require('querablep')
-const logger = require('../../../lib/logger').NewLogger()
 
 let clusterId = 0
+cluster.setMaxListeners(3)
+
+export interface SlaveWorker {
+  worker:PowWorker,
+  index:number,
+  online:Promise<void>,
+  nonceBeginning:number
+}
 
 /**
  * Cluster controller, handles the messages between the main program and the PoW cluster.
  */
 export class Master {
 
+  nbCancels = 0
+
   clusterId:number
   currentPromise:any|null = null
-  slaves:any[] = []
-  slavesMap:any = {}
+  slaves:SlaveWorker[] = []
+  slavesMap:{
+    [k:number]: SlaveWorker|null
+  } = {}
   conf:any = {}
   logger:any
   onInfoCallback:any
@@ -36,38 +48,50 @@ export class Master {
     return this.slaves.length
   }
 
-  get hasProofPending() {
-    return !!this.currentPromise
-  }
-
   set onInfoMessage(callback:any) {
     this.onInfoCallback = callback
   }
 
-  onWorkerMessage(worker:any, message:any) {
+  onWorkerMessage(workerIndex:number, message:any) {
     // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`)
-    if (message.pow && message.pow.pow) {
+    if (message && message.pow) {
       this.onInfoCallback && this.onInfoCallback(message)
     }
-    if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) {
-      this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`)
-      this.currentPromise.extras.resolve(message.answer)
-      // Stop the slaves' current work
-      this.cancelWork()
+    if (this.currentPromise && message.uuid && !this.currentPromise.isResolved() && message.answer) {
+      this.logger.info(`ENGINE c#${this.clusterId}#${workerIndex} HAS FOUND A PROOF #${message.answer.pow.pow}`)
+    } else if (message.canceled) {
+      this.nbCancels++
     }
     // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message)
   }
 
+  /*****************
+   * CLUSTER METHODS
+   ****************/
+
   initCluster() {
     // Setup master
     cluster.setupMaster({
-      exec: __filename
+      exec: __filename,
+      execArgv: [] // Do not try to debug forks
     })
 
     this.slaves = Array.from({ length: this.nbCores }).map((value, index) => {
-      const worker = cluster.fork()
-      this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`)
-      this.slavesMap[worker.id] = {
+      const nodejsWorker = cluster.fork()
+      const worker = new PowWorker(nodejsWorker, message => {
+        this.onWorkerMessage(index, message)
+      }, () => {
+        this.logger.info(`[online] worker c#${this.clusterId}#w#${index}`)
+        worker.sendConf({
+          command: 'conf',
+          value: this.conf
+        })
+      }, (code:any, signal:any) => {
+        this.logger.info(`worker ${worker.pid} died with code ${code} and signal ${signal}`)
+      })
+
+      this.logger.info(`Creating worker c#${this.clusterId}#w#${nodejsWorker.id}`)
+      const slave = {
 
         // The Node.js worker
         worker,
@@ -76,43 +100,16 @@ export class Master {
         index,
 
         // Worker ready
-        online: (function onlinePromise() {
-          let resolve
-          const p = querablep(new Promise(res => resolve = res))
-          p.extras = { resolve }
-          return p
-        })(),
+        online: worker.online,
 
         // Each worker has his own chunk of possible nonces
         nonceBeginning: this.nbCores === 1 ? 0 : (index + 1) * ProverConstants.NONCE_RANGE
       }
-      return this.slavesMap[worker.id]
-    })
-
-    cluster.on('exit', (worker:any, code:any, signal:any) => {
-      this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`)
-    })
-
-    cluster.on('online', (worker:any) => {
-      // We just listen to the workers of this Master
-      if (this.slavesMap[worker.id]) {
-        this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`)
-        this.slavesMap[worker.id].online.extras.resolve()
-        worker.send({
-          command: 'conf',
-          value: this.conf
-        })
-      }
-    })
-
-    cluster.on('message', (worker:any, msg:any) => {
-      // Message for this cluster
-      if (this.slavesMap[worker.id]) {
-        this.onWorkerMessage(worker, msg)
-      }
+      this.slavesMap[nodejsWorker.id] = slave
+      return slave
     })
 
-    this.workersOnline = this.slaves.map((s:any) => s.online)
+    this.workersOnline = this.slaves.map((s) => s.online)
     return Promise.all(this.workersOnline)
   }
 
@@ -121,7 +118,7 @@ export class Master {
     this.conf.cpu = conf.cpu || this.conf.cpu
     this.conf.prefix = this.conf.prefix || conf.prefix
     this.slaves.forEach(s => {
-      s.worker.send({
+      s.worker.sendConf({
         command: 'conf',
         value: this.conf
       })
@@ -129,39 +126,28 @@ export class Master {
     return Promise.resolve(_.clone(conf))
   }
 
-  cancelWork() {
-    this.logger.info(`Cancelling the work on PoW cluster`)
+  private cancelWorkersWork() {
     this.slaves.forEach(s => {
-      s.worker.send({
-        command: 'cancel'
-      })
+      s.worker.sendCancel()
     })
+  }
 
-    // Eventually force the end of current promise
-    if (this.currentPromise && !this.currentPromise.isFulfilled()) {
-      this.currentPromise.extras.resolve(null)
-    }
-
+  async cancelWork() {
+    this.cancelWorkersWork()
+    const workEnded = this.currentPromise
     // Current promise is done
     this.currentPromise = null
-
-    return Promise.resolve()
-  }
-
-  newPromise(uuid:string) {
-    let resolve
-    const p = querablep(new Promise(res => resolve = res))
-    p.extras = { resolve, uuid }
-    return p
+    return await workEnded
   }
 
   async shutDownWorkers() {
     if (this.workersOnline) {
       await Promise.all(this.workersOnline)
-      await Promise.all(this.slaves.map(async (s:any) => {
+      await Promise.all(this.slaves.map(async (s) => {
         s.worker.kill()
       }))
     }
+    this.slaves = []
   }
 
   proveByWorkers(stuff:any) {
@@ -173,9 +159,7 @@ export class Master {
 
     // Register the new proof uuid
     const uuid = nuuid.v4()
-    this.currentPromise = this.newPromise(uuid)
-
-    return (async () => {
+    this.currentPromise = querablep((async () => {
       await Promise.all(this.workersOnline)
 
       if (!this.currentPromise) {
@@ -184,18 +168,18 @@ export class Master {
       }
 
       // Start the salves' job
-      this.slaves.forEach((s:any, index) => {
-        s.worker.send({
+      const asks = this.slaves.map(async (s, index) => {
+        const proof = await s.worker.askProof({
           uuid,
           command: 'newPoW',
           value: {
-            block: stuff.newPoW.block,
+            initialTestsPerRound: stuff.initialTestsPerRound,
+            maxDuration: stuff.maxDuration,
+            block: stuff.newPoW.block,
             nonceBeginning: s.nonceBeginning,
             zeros: stuff.newPoW.zeros,
             highMark: stuff.newPoW.highMark,
             pair: _.clone(stuff.newPoW.pair),
             forcedTime: stuff.newPoW.forcedTime,
-            turnDuration: stuff.newPoW.turnDuration,
             conf: {
               medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks,
               avgGenTime: stuff.newPoW.conf.avgGenTime,
@@ -204,10 +188,29 @@ export class Master {
             }
           }
         })
+        this.logger.info(`[done] worker c#${this.clusterId}#w#${index}`)
+        return {
+          workerID: index,
+          proof
+        }
       })
 
-      return await this.currentPromise
-    })()
+      // Find a proof
+      const result = await Promise.race(asks)
+      this.cancelWorkersWork()
+      // Wait for all workers to have stopped looking for a proof
+      await Promise.all(asks)
+
+      if (!result.proof || !result.proof.message.answer) {
+        this.logger.info('No engine found the proof. It was probably cancelled.')
+        return null
+      } else {
+        this.logger.info(`ENGINE c#${this.clusterId}#${result.workerID} HAS FOUND A PROOF #${result.proof.message.answer.pow.pow}`)
+        return result.proof.message.answer
+      }
+    })())
+
+    return this.currentPromise
   }
 
   static defaultLogger() {
@@ -229,9 +232,8 @@ if (cluster.isMaster) {
 } else {
 
   process.on("SIGTERM", function() {
-    logger.info(`SIGTERM received, closing worker ${process.pid}`);
     process.exit(0)
   });
 
-  require('./proof')
+  createPowWorker()
 }
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 9b15c0be5aaff08b078c5dd495dbb0432be983ea..c08fde11ed3dc6132a2773d73a9994c075b58a2d 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -6,296 +6,303 @@ import {ProverConstants} from "./constants"
 import {KeyGen} from "../../../lib/common-libs/crypto/keyring"
 import {dos2unix} from "../../../lib/common-libs/dos2unix"
 import {rawer} from "../../../lib/common-libs/index"
+import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler"
 
 const moment = require('moment');
 const querablep = require('querablep');
 
-const PAUSES_PER_TURN = 5;
+export function createPowWorker() {
 
-// This value can be changed
-let TURN_DURATION_IN_MILLISEC = 100;
-
-let computing = querablep(Promise.resolve(null));
-let askedStop = false;
+  let computing = querablep(Promise.resolve(null));
+  let askedStop = false;
 
 // By default, we do not prefix the PoW by any number
-let prefix = 0;
+  let prefix = 0;
 
-let signatureFunc:any, lastSecret:any, currentCPU = 1;
+  let signatureFunc:any, lastSecret:any, currentCPU = 1;
 
-process.on('uncaughtException', (err:any) => {
-  console.error(err.stack || Error(err))
-  if (process.send) {
-    process.send({error: err});
-  } else {
-    throw Error('process.send() is not defined')
-  }
-});
+  process.on('uncaughtException', (err:any) => {
+    console.error(err.stack || Error(err))
+    if (process.send) {
+      process.send({error: err});
+    } else {
+      throw Error('process.send() is not defined')
+    }
+  });
 
-process.on('message', async (message) => {
+  process.on('unhandledRejection', () => {
+    process.exit()
+  })
 
-  switch (message.command) {
+  process.on('message', async (message) => {
 
-    case 'newPoW':
-      (async () => {
-        askedStop = true
+    switch (message.command) {
 
-        // Very important: do not await if the computation is already done, to keep the lock on JS engine
-        if (!computing.isFulfilled()) {
-          await computing;
-        }
+      case 'newPoW':
+        (async () => {
+          askedStop = true
 
-        const res = await beginNewProofOfWork(message.value);
-        answer(message, res);
-      })()
-      break;
+          // Very important: do not await if the computation is already done, to keep the lock on JS engine
+          if (!computing.isFulfilled()) {
+            await computing;
+          }
 
-    case 'cancel':
-      if (!computing.isFulfilled()) {
-        askedStop = true;
-      }
-      break;
+          const res = await beginNewProofOfWork(message.value);
+          answer(message, res);
+        })()
+        break;
 
-    case 'conf':
-      if (message.value.cpu !== undefined) {
-        currentCPU = message.value.cpu
-      }
-      if (message.value.prefix !== undefined) {
-        prefix = message.value.prefix
-      }
-      answer(message, { currentCPU, prefix });
-      break;
-  }
+      case 'cancel':
+        if (!computing.isFulfilled()) {
+          askedStop = true;
+        }
+        break;
 
-})
-
-function beginNewProofOfWork(stuff:any) {
-  askedStop = false;
-  computing = querablep((async () => {
-
-    /*****************
-     * PREPARE POW STUFF
-     ****************/
-
-    let nonce = 0;
-    const conf = stuff.conf;
-    const block = stuff.block;
-    const nonceBeginning = stuff.nonceBeginning;
-    const nbZeros = stuff.zeros;
-    const pair = stuff.pair;
-    const forcedTime = stuff.forcedTime;
-    currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
-    prefix = parseInt(conf.prefix || prefix)
-    if (prefix && prefix < ProverConstants.NONCE_RANGE) {
-      prefix *= 100 * ProverConstants.NONCE_RANGE
-    }
-    const highMark = stuff.highMark;
-    const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC
-    let sigFunc = null;
-    if (signatureFunc && lastSecret === pair.sec) {
-      sigFunc = signatureFunc;
-    }
-    else {
-      lastSecret = pair.sec;
-      sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      case 'conf':
+        if (message.value.cpu !== undefined) {
+          currentCPU = message.value.cpu
+        }
+        if (message.value.prefix !== undefined) {
+          prefix = message.value.prefix
+        }
+        answer(message, { currentCPU, prefix });
+        break;
     }
-    signatureFunc = sigFunc;
-    let pow = "", sig = "", raw = "";
 
-    /*****************
-     * GO!
-     ****************/
+  })
 
-    let testsCount = 0;
-    let found = false;
-    let score = 0;
-    let turn = 0;
+  function beginNewProofOfWork(stuff:any) {
+    askedStop = false;
+    computing = querablep((async () => {
+
+      /*****************
+       * PREPARE POW STUFF
+       ****************/
 
-    while (!found && !askedStop) {
+      let nonce = 0;
+      const maxDuration = stuff.maxDuration || 1000
+      const conf = stuff.conf;
+      const block = stuff.block;
+      const nonceBeginning = stuff.nonceBeginning;
+      const nbZeros = stuff.zeros;
+      const pair = stuff.pair;
+      const forcedTime = stuff.forcedTime;
+      currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
+      prefix = parseInt(conf.prefix || prefix)
+      if (prefix && prefix < ProverConstants.NONCE_RANGE) {
+        prefix *= 100 * ProverConstants.NONCE_RANGE
+      }
+      const highMark = stuff.highMark;
+      let sigFunc = null;
+      if (signatureFunc && lastSecret === pair.sec) {
+        sigFunc = signatureFunc;
+      }
+      else {
+        lastSecret = pair.sec;
+        sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      }
+      signatureFunc = sigFunc;
+      let pow = "", sig = "", raw = "";
 
       /*****************
-       * A TURN
+       * GO!
        ****************/
 
-      await Promise.race([
+      let pausePeriod = 1;
+      let testsCount = 0;
+      let found = false;
+      let turn = 0;
+      const profiler = new ProcessCpuProfiler(100)
+      let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
+      // We limit the number of tests according to CPU usage
+      let testsPerRound = stuff.initialTestsPerRound || 1
+      let turnDuration = 20 // We initially go quickly to the max speed = 50 re-evaluations per second (1000 / 20)
 
-        // I. Stop the turn if it exceeds `turnDuration` ms
-        countDown(turnDuration),
+      while (!found && !askedStop) {
 
-        // II. Process the turn's PoW
-        (async () => {
+        /*****************
+         * A TURN (20ms at first, growing up to maxDuration)
+         ****************/
 
-          /*****************
-           * A TURN OF POW ~= 100ms by default
-           * --------------------
-           *
-           * The concept of "turn" is required to limit the CPU usage.
-           * We need a time reference to have the speed = nb tests / period of time.
-           * Here we have:
-           *
-           *   - speed = testsCount / turn
-           *
-           * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the
-           * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set.
-           ****************/
+        await Promise.race([
 
-            // Prove
-          let i = 0;
-          const thisTurn = turn;
-          const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn
-          // We limit the number of tests according to CPU usage
-          const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000
-
-          // Time is updated regularly during the proof
-          block.time = getBlockTime(block, conf, forcedTime)
-          if (block.number === 0) {
-            block.medianTime = block.time
-          }
-          block.inner_hash = getBlockInnerHash(block);
+          // I. Stop the turn if it exceeds `turnDuration` ms
+          countDown(turnDuration),
 
-          /*****************
-           * Iterations of a turn
-           ****************/
+          // II. Process the turn's PoW
+          (async () => {
 
-          while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
+            // Prove
+            let i = 0;
+            const thisTurn = turn;
 
-            // Nonce change (what makes the PoW change if the time field remains the same)
-            nonce++
+            // Time is updated regularly during the proof
+            block.time = getBlockTime(block, conf, forcedTime)
+            if (block.number === 0) {
+              block.medianTime = block.time
+            }
+            block.inner_hash = getBlockInnerHash(block);
 
             /*****************
-             * A PROOF OF WORK
+             * Iterations of a turn
              ****************/
 
-            // The final nonce is composed of 3 parts
-            block.nonce = prefix + nonceBeginning + nonce
-            raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
-            sig = dos2unix(sigFunc(raw))
-            pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
+            while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
 
-            /*****************
-             * Check the POW result
-             ****************/
+              // Nonce change (what makes the PoW change if the time field remains the same)
+              nonce++
 
-            let j = 0, charOK = true;
-            while (j < nbZeros && charOK) {
-              charOK = pow[j] === '0';
-              j++;
-            }
-            if (charOK) {
-              found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
-            }
-            if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
-              pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
-            }
+              /*****************
+               * A PROOF OF WORK
+               ****************/
 
-            /*****************
-             * - Update local vars
-             * - Allow to receive stop signal
-             ****************/
+              // The final nonce is composed of 3 parts
+              block.nonce = prefix + nonceBeginning + nonce
+              raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
+              sig = dos2unix(sigFunc(raw))
+              pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
 
-            if (!found && !askedStop) {
-              i++;
-              testsCount++;
-              if (i % pausePeriod === 0) {
-                await countDown(0); // Very low pause, just the time to process eventual end of the turn
-              }
-            }
-          }
+              /*****************
+               * Check the POW result
+               ****************/
 
-          /*****************
-           * Check the POW result
-           ****************/
-          if (!found) {
+              let j = 0, charOK = true;
+              while (j < nbZeros && charOK) {
+                charOK = pow[j] === '0';
+                j++;
+              }
+              if (charOK) {
+                found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
+              }
+              if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
+                pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
+              }
 
-            // CPU speed recording
-            if (turn > 0 && !score) {
-              score = testsCount;
+              /*****************
+               * - Update local vars
+               * - Allow to receive stop signal
+               ****************/
+
+              if (!found && !askedStop) {
+                i++;
+                testsCount++;
+                if (i % pausePeriod === 0) {
+                  await countDown(1); // Very short pause, just enough time to process a possible end of the turn
+                }
+              }
             }
 
             /*****************
-             * UNLOAD CPU CHARGE
+             * Check the POW result
              ****************/
-            // We wait for a maximum time of `turnDuration`.
-            // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
-            // just does nothing: this gives of a bit of breath to the CPU. Tthe amount of "breath" depends on the "cpu"
-            // parameter.
-            await countDown(turnDuration);
-          }
-        })()
-      ]);
+            if (!found) {
+
+              // CPU speed recording
+              if (turn > 0) {
+                cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
+                if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
+                  let powVariationFactor
+                  // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses
+                  if (currentCPU > cpuUsage) {
+                    powVariationFactor = 1.01
+                    testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor))
+                  } else {
+                    powVariationFactor = 0.99
+                    testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
+                  }
+                  pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND)
+                }
+              }
 
-      // Next turn
-      turn++
-    }
+              /*****************
+               * UNLOAD CPU CHARGE FOR THIS TURN
+               ****************/
+              // We wait for a maximum time of `turnDuration`.
+              // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
+              // just does nothing: this gives a bit of breath to the CPU. The amount of "breath" depends on the "cpu"
+              // parameter.
+              await countDown(turnDuration);
+            }
+          })()
+        ]);
 
-    /*****************
-     * POW IS OVER
-     * -----------
-     *
-     * We either have found a valid POW or a stop event has been detected.
-     ****************/
+        // Next turn
+        turn++
 
-    if (askedStop) {
+        turnDuration += 1
+        turnDuration = Math.min(turnDuration, maxDuration) // Cap the turn at maxDuration (1 second by default)
+      }
 
-      // PoW stopped
-      askedStop = false;
-      return null
+      /*****************
+       * POW IS OVER
+       * -----------
+       *
+       * We either have found a valid POW or a stop event has been detected.
+       ****************/
 
-    } else {
+      if (askedStop) {
+
+        // PoW stopped
+        askedStop = false;
+        pSend({ canceled: true })
+        return null
 
-      // PoW success
-      block.hash = pow
-      block.signature = sig
-      return {
-        pow: {
-          block: block,
-          testsCount: testsCount,
-          pow: pow
+      } else {
+
+        // PoW success
+        block.hash = pow
+        block.signature = sig
+        return {
+          pow: {
+            block: block,
+            testsCount: testsCount,
+            pow: pow
+          }
         }
       }
-    }
-  })())
+    })())
 
-  return computing;
-}
+    return computing;
+  }
 
-function countDown(duration:number) {
-  return new Promise((resolve) => setTimeout(resolve, duration));
-}
+  function countDown(duration:number) {
+    return new Promise((resolve) => setTimeout(resolve, duration));
+  }
 
-function getBlockInnerHash(block:DBBlock) {
-  const raw = rawer.getBlockInnerPart(block);
-  return hashf(raw)
-}
+  function getBlockInnerHash(block:DBBlock) {
+    const raw = rawer.getBlockInnerPart(block);
+    return hashf(raw)
+  }
 
-function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
-  if (forcedTime) {
-    return forcedTime;
+  function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
+    if (forcedTime) {
+      return forcedTime;
+    }
+    const now = moment.utc().unix();
+    const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
+    const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
+    const medianTime = block.medianTime;
+    const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
+    return Math.max(medianTime, upperBound);
   }
-  const now = moment.utc().unix();
-  const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
-  const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
-  const medianTime = block.medianTime;
-  const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
-  return Math.max(medianTime, upperBound);
-}
 
-function answer(message:any, theAnswer:any) {
-  return pSend({
-    uuid: message.uuid,
-    answer: theAnswer
-  })
-}
+  function answer(message:any, theAnswer:any) {
+    return pSend({
+      uuid: message.uuid,
+      answer: theAnswer
+    })
+  }
 
-function pSend(stuff:any) {
-  return new Promise(function (resolve, reject) {
-    if (process.send) {
-      process.send(stuff, function (error:any) {
-        !error && resolve();
-        error && reject();
-      })
-    } else {
-      reject('process.send() is not defined')
-    }
-  });
+  function pSend(stuff:any) {
+    return new Promise(function (resolve, reject) {
+      if (process.send) {
+        process.send(stuff, function (error:any) {
+          !error && resolve();
+          error && reject();
+        })
+      } else {
+        reject('process.send() is not defined')
+      }
+    });
+  }
 }
diff --git a/app/modules/router.ts b/app/modules/router.ts
index d6484f05ba8d3ef1a74e8440c44c25aae80af128..9c7f2b14d4eded2c706118fdb1a6d3ee0c0b621a 100644
--- a/app/modules/router.ts
+++ b/app/modules/router.ts
@@ -5,9 +5,7 @@ import * as stream from "stream"
 import {Multicaster} from "../lib/streams/multicaster"
 import {RouterStream} from "../lib/streams/router"
 
-const constants = require('../lib/constants');
-
-module.exports = {
+export const RouterDependency = {
   duniter: {
     service: {
       output: (server:Server, conf:ConfDTO, logger:any) => new Router(server)
@@ -26,7 +24,7 @@ module.exports = {
  * Service which triggers the server's peering generation (actualization of the Peer document).
  * @constructor
  */
-class Router extends stream.Transform {
+export class Router extends stream.Transform {
 
   theRouter:any
   theMulticaster:Multicaster = new Multicaster()
diff --git a/app/modules/ws2p/lib/WS2PCluster.ts b/app/modules/ws2p/lib/WS2PCluster.ts
index bf5ac43a5574cb698c86c760e0b9c6d67c09ce8b..3502138a3b4655bc2218819b86e4809922d87f81 100644
--- a/app/modules/ws2p/lib/WS2PCluster.ts
+++ b/app/modules/ws2p/lib/WS2PCluster.ts
@@ -1,4 +1,4 @@
-import { DEFAULT_ENCODING } from 'crypto';
+import {DEFAULT_ENCODING} from 'crypto';
 import {WS2PServer} from "./WS2PServer"
 import {Server} from "../../../../server"
 import {WS2PClient} from "./WS2PClient"
@@ -8,7 +8,7 @@ import {CrawlerConstants} from "../../crawler/lib/constants"
 import {WS2PBlockPuller} from "./WS2PBlockPuller"
 import {WS2PDocpoolPuller} from "./WS2PDocpoolPuller"
 import {WS2PConstants} from "./constants"
-import { PeerDTO, WS2PEndpoint } from '../../../lib/dto/PeerDTO';
+import {PeerDTO, WS2PEndpoint} from '../../../lib/dto/PeerDTO';
 import {GlobalFifoPromise} from "../../../service/GlobalFifoPromise"
 import {OtherConstants} from "../../../lib/other_constants"
 import {Key, verify} from "../../../lib/common-libs/crypto/keyring"
@@ -681,11 +681,11 @@ export class WS2PCluster {
       let uuids = Object.keys(this.ws2pClients)
       uuids = _.shuffle(uuids)
       let lowPriorityConnectionUUID:string = uuids[0]
-      let minPriorityLevel = this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
+      let minPriorityLevel = await this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
       for (const uuid of uuids) {
         const client = this.ws2pClients[uuid]
           if (uuid !== lowPriorityConnectionUUID) {
-            let uuidPriorityLevel = this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
+            let uuidPriorityLevel = await this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
             if (uuidPriorityLevel < minPriorityLevel) {
               lowPriorityConnectionUUID = uuid
               minPriorityLevel = uuidPriorityLevel
diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts
index 002d0cf82c52c82c136b950e92aa6b808bd0c010..ce08493018528ec69b57f2e2c84efc1d1a838685 100644
--- a/app/modules/ws2p/lib/WS2PServer.ts
+++ b/app/modules/ws2p/lib/WS2PServer.ts
@@ -7,7 +7,6 @@ import {WS2PConstants} from "./constants"
 import {WS2PMessageHandler} from "./impl/WS2PMessageHandler"
 import {WS2PStreamer} from "./WS2PStreamer"
 import {WS2PSingleWriteStream} from "./WS2PSingleWriteStream"
-import { WS2PCluster } from './WS2PCluster';
 
 const WebSocketServer = require('ws').Server
 
@@ -159,10 +158,10 @@ export class WS2PServer extends events.EventEmitter {
 
   async removeLowPriorityConnection(privilegedKeys:string[]) {
     let lowPriorityConnection:WS2PConnection = this.connections[0]
-    let minPriorityLevel = this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
+    let minPriorityLevel = await this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
     for (const c of this.connections) {
       if (c !== lowPriorityConnection) {
-        let cPriorityLevel = this.keyPriorityLevel(c.pubkey, privilegedKeys)
+        let cPriorityLevel = await this.keyPriorityLevel(c.pubkey, privilegedKeys)
         if (cPriorityLevel < minPriorityLevel) {
           lowPriorityConnection = c
           minPriorityLevel = cPriorityLevel
diff --git a/app/service/BlockchainService.ts b/app/service/BlockchainService.ts
index ba652048307abf4a5d3bbacc9da9f89edd562bce..e8148d81d2945f241a7f7be44384050b6b75991d 100644
--- a/app/service/BlockchainService.ts
+++ b/app/service/BlockchainService.ts
@@ -217,8 +217,9 @@ export class BlockchainService extends FIFOService {
         } catch (e) {
           this.logger.error(e)
           added = false
+          const theError = e && (e.message || e)
           this.push({
-            blockResolutionError: e && e.message
+            blockResolutionError: theError
           })
         }
         i++
diff --git a/app/service/TransactionsService.ts b/app/service/TransactionsService.ts
index e6ddf1263234920e509ca684438cc19cec1ecb5f..3068c1b71430fb521ad15146ff6fae11fb879965 100644
--- a/app/service/TransactionsService.ts
+++ b/app/service/TransactionsService.ts
@@ -41,7 +41,7 @@ export class TransactionService extends FIFOService {
         // Start checks...
         const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
         const dto = TransactionDTO.fromJSONObject(tx)
-        await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto)
+        await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto, this.conf)
         await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
         await GLOBAL_RULES_HELPERS.checkSingleTransaction(dto, nextBlockWithFakeTimeVariation, this.conf, this.dal, CHECK_PENDING_TRANSACTIONS);
         const server_pubkey = this.conf.pair && this.conf.pair.pub;
diff --git a/appveyor.yml b/appveyor.yml
index 8fbf7baf69c2f8d27ec6c67b7cdfeff7b293b55e..1a271d5e4c01d8f303faad4f390484c16c87b2eb 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,6 @@
 environment:
   matrix:
-    - nodejs_version: "6.9.2"
+    - nodejs_version: "8.9.2"
       ADDON_VERSION: "48"
 
 platform:
diff --git a/doc/Protocol.md b/doc/Protocol.md
index b657553f46cbe40dcdc0a1a8541732ea4a5f4c15..68a5300a4c3744b878f4c44cf058d8c89764e801 100644
--- a/doc/Protocol.md
+++ b/doc/Protocol.md
@@ -1549,6 +1549,40 @@ TRUE
 > Functionally: we cannot create nor lose money through transactions. We can only transfer coins we own.
 > Functionally: also, we cannot convert a superior unit base into a lower one.
 
+##### Transactions chaining max depth
+
+    FUNCTION `getTransactionDepth(txHash, LOCAL_DEPTH)`:
+
+        INPUTS = LOCAL_SINDEX[op='UPDATE',tx=txHash]
+        DEPTH = LOCAL_DEPTH
+
+        FOR EACH `INPUT` OF `INPUTS`
+            CONSUMED = LOCAL_SINDEX[op='CREATE',identifier=INPUT.identifier,pos=INPUT.pos]
+            IF (CONSUMED != NULL)
+                IF (LOCAL_DEPTH < 5)
+                    DEPTH = MAX(DEPTH, getTransactionDepth(CONSUMED.tx, LOCAL_DEPTH + 1))
+                ELSE
+                    DEPTH++
+                END_IF
+            END_IF
+        END_FOR
+
+        RETURN DEPTH
+
+    END_FUNCTION
+
+Then:
+
+    maxTxChainingDepth = 0
+
+For each `TX_HASH` of `UNIQ(PICK(LOCAL_SINDEX, 'tx'))`:
+
+    maxTxChainingDepth = MAX(maxTxChainingDepth, getTransactionDepth(TX_HASH, 0))
+
+Rule:
+
+    maxTxChainingDepth <= 5
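+
+For illustration only (this sketch is not part of the protocol), the rule above translates to the following TypeScript, assuming a minimal `SindexEntry` shape:
+
+    // Non-normative sketch of getTransactionDepth over a local SINDEX
+    interface SindexEntry { tx: string, op: 'CREATE' | 'UPDATE', identifier: string, pos: number }
+
+    function getTransactionDepth(txHash: string, sindex: SindexEntry[], localDepth = 0): number {
+      // Inputs consumed by this transaction
+      const inputs = sindex.filter(s => s.op === 'UPDATE' && s.tx === txHash)
+      let depth = localDepth
+      for (const input of inputs) {
+        // Was this input created by another transaction of the same block?
+        const consumed = sindex.find(s => s.op === 'CREATE' && s.identifier === input.identifier && s.pos === input.pos)
+        if (consumed) {
+          if (localDepth < 5) {
+            depth = Math.max(depth, getTransactionDepth(consumed.tx, sindex, localDepth + 1))
+          } else {
+            depth++
+          }
+        }
+      }
+      return depth
+    }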
+
 #### Global
 
 Global validation verifies the coherence of a locally-validated block, in the context of the whole blockchain, including the block.
@@ -1580,6 +1614,7 @@ Function references:
 > If values count is even, the median is computed over the 2 centered values by an arithmetical median on them, *NOT* rounded.
 
 * *UNIQ* returns a list of the unique values in a list of values
+* *PICK* returns the list of values obtained by picking a particular property on each record
 * *INTEGER_PART* return the integer part of a number
 * *FIRST* return the first element in a list of values matching the given condition
 * *REDUCE* merges a set of elements into a single one, by extending the non-null properties from each record into the resulting record.
@@ -2244,7 +2279,7 @@ Else:
 
 ####### BR_G102 - ENTRY.age
 
-For each ENTRY in local IINDEX where `op = 'UPDATE'`:
+For each ENTRY in local SINDEX where `op = 'UPDATE'`:
 
     REF_BLOCK = HEAD~<HEAD~1.number + 1 - NUMBER(ENTRY.hash)>[hash=HASH(ENTRY.created_on)]
     
@@ -2266,17 +2301,31 @@ EndIf
 
 For each `LOCAL_SINDEX[op='UPDATE'] as ENTRY`:
 
-    INPUT = REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base])
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
     ENTRY.conditions = INPUT.conditions
     ENTRY.available = INPUT.consumed == false
 
 ####### BR_G47 - ENTRY.isLocked
 
-    ENTRY.isLocked = TX_SOURCE_UNLOCK(REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).conditions, ENTRY)
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
+    ENTRY.isLocked = TX_SOURCE_UNLOCK(INPUT.conditions, ENTRY)
     
 ####### BR_G48 - ENTRY.isTimeLocked
 
-    ENTRY.isTimeLocked = ENTRY.written_time - REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).written_time < ENTRY.locktime
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
+    ENTRY.isTimeLocked = ENTRY.written_time - INPUT.written_time < ENTRY.locktime
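+
+> Note: BR_G46, BR_G47 and BR_G48 now look the consumed input up in LOCAL_SINDEX first, so that a transaction may spend an output created by another transaction of the same block (chained transactions), and only fall back to GLOBAL_SINDEX for sources written in previous blocks.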
 
 ##### Rules
 
diff --git a/index.ts b/index.ts
index ecf7be9d67231df6e4e63ef96c903457272b675d..e7e6a32e0194faf11b4035e741684784b0921f66 100644
--- a/index.ts
+++ b/index.ts
@@ -9,6 +9,7 @@ import {BmaDependency} from "./app/modules/bma/index"
 import {WS2PDependency} from "./app/modules/ws2p/index"
 import {ProverConstants} from "./app/modules/prover/lib/constants"
 import { ProxiesConf } from './app/lib/proxy';
+import {RouterDependency} from "./app/modules/router"
 
 const path = require('path');
 const _ = require('underscore');
@@ -25,9 +26,16 @@ const reapplyDependency   = require('./app/modules/reapply');
 const revertDependency    = require('./app/modules/revert');
 const daemonDependency    = require('./app/modules/daemon');
 const pSignalDependency   = require('./app/modules/peersignal');
-const routerDependency    = require('./app/modules/router');
 const pluginDependency    = require('./app/modules/plugin');
 
+let sigintListening = false
+
+// Trace errors
+process.on('unhandledRejection', (reason) => {
+  logger.error('Unhandled rejection: ' + reason);
+  logger.error(reason);
+});
+
 class Stacks {
 
   static todoOnRunDone:() => any = () => process.exit()
@@ -102,7 +110,7 @@ const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([
   { name: 'duniter-revert',    required: revertDependency },
   { name: 'duniter-daemon',    required: daemonDependency },
   { name: 'duniter-psignal',   required: pSignalDependency },
-  { name: 'duniter-router',    required: routerDependency },
+  { name: 'duniter-router',    required: RouterDependency },
   { name: 'duniter-plugin',    required: pluginDependency },
   { name: 'duniter-prover',    required: ProverDependency },
   { name: 'duniter-keypair',   required: KeypairDependency },
@@ -157,6 +165,8 @@ export interface TransformableDuniterService extends DuniterService, stream.Tran
 
 class Stack {
 
+  private injectedServices = false
+
   private cli:any
   private configLoadingCallbacks:any[]
   private configBeforeSaveCallbacks:any[]
@@ -279,10 +289,12 @@ class Stack {
     }
 
     const server = new Server(home, program.memory === true, commandLineConf(program));
+    let piped = false
 
     // If ever the process gets interrupted
     let isSaving = false;
-    process.on('SIGINT', async () => {
+    if (!sigintListening) {
+      process.on('SIGINT', async () => {
         if (!isSaving) {
           isSaving = true;
           // Save DB
@@ -294,7 +306,9 @@ class Stack {
             process.exit(3);
           }
         }
-    });
+      })
+      sigintListening = true
+    }
 
     // Config or Data reset hooks
     server.resetDataHook = async () => {
@@ -366,26 +380,30 @@ class Stack {
        * Service injection
        * -----------------
        */
-      for (const def of this.definitions) {
-        if (def.service) {
-          // To feed data coming from some I/O (network, disk, other module, ...)
-          if (def.service.input) {
-            this.streams.input.push(def.service.input(server, conf, logger));
-          }
-          // To handle data this has been submitted by INPUT stream
-          if (def.service.process) {
-            this.streams.process.push(def.service.process(server, conf, logger));
-          }
-          // To handle data this has been validated by PROCESS stream
-          if (def.service.output) {
-            this.streams.output.push(def.service.output(server, conf, logger));
-          }
-          // Special service which does not stream anything particular (ex.: piloting the `server` object)
-          if (def.service.neutral) {
-            this.streams.neutral.push(def.service.neutral(server, conf, logger));
+      if (!this.injectedServices) {
+        this.injectedServices = true
+        for (const def of this.definitions) {
+          if (def.service) {
+            // To feed data coming from some I/O (network, disk, other module, ...)
+            if (def.service.input) {
+              this.streams.input.push(def.service.input(server, conf, logger));
+            }
+            // To handle data that has been submitted by the INPUT stream
+            if (def.service.process) {
+              this.streams.process.push(def.service.process(server, conf, logger));
+            }
+            // To handle data that has been validated by the PROCESS stream
+            if (def.service.output) {
+              this.streams.output.push(def.service.output(server, conf, logger));
+            }
+            // Special service which does not stream anything in particular (e.g. piloting the `server` object)
+            if (def.service.neutral) {
+              this.streams.neutral.push(def.service.neutral(server, conf, logger));
+            }
           }
         }
       }
+      piped = true
       // All inputs write to global INPUT stream
       for (const module of this.streams.input) module.pipe(this.INPUT);
       // All processes read from global INPUT stream
@@ -408,13 +426,6 @@ class Stack {
           const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral);
           // Any streaming module must implement a `stopService` method
           await Promise.all(modules.map((module:DuniterService) => module.stopService()))
-          // // Stop reading inputs
-          // for (const module of streams.input) module.unpipe();
-          // Stop reading from global INPUT
-          // INPUT.unpipe();
-          // for (const module of streams.process) module.unpipe();
-          // // Stop reading from global PROCESS
-          // PROCESS.unpipe();
         },
 
         this);
@@ -422,17 +433,20 @@ class Stack {
     } catch (e) {
       server.disconnect();
       throw e;
+    } finally {
+      if (piped) {
+        // Unpipe everything, as the command is done
+        for (const module of this.streams.input) module.unpipe()
+        for (const module of this.streams.process) module.unpipe()
+        for (const module of this.streams.output) module.unpipe()
+        this.INPUT.unpipe()
+        this.PROCESS.unpipe()
+      }
     }
   }
 
   executeStack(argv:string[]) {
 
-    // Trace these errors
-    process.on('unhandledRejection', (reason) => {
-      logger.error('Unhandled rejection: ' + reason);
-      logger.error(reason);
-    });
-
     // Executes the command
     return this.cli.execute(argv);
   }
diff --git a/release/arch/arm/build-arm.sh b/release/arch/arm/build-arm.sh
index 68d0bcf4526d2219e0e3a88db36e66e3fd4c48be..fc0891fd38fdbf083984dafac5a972f7219606ae 100755
--- a/release/arch/arm/build-arm.sh
+++ b/release/arch/arm/build-arm.sh
@@ -86,7 +86,7 @@ mkdir -p duniter_release
 cp -R ${SRC}/* duniter_release/
 
 # Creating DEB packaging
-mv duniter_release/release/arch/debian/package duniter-${ARCH}
+mv duniter_release/release/extra/debian/package duniter-${ARCH}
 mkdir -p duniter-${ARCH}/opt/duniter/
 chmod 755 duniter-${ARCH}/DEBIAN/post*
 chmod 755 duniter-${ARCH}/DEBIAN/pre*
diff --git a/release/arch/debian/Vagrantfile b/release/arch/debian/Vagrantfile
deleted file mode 100644
index da912f7fbaa6332b1081e4f486ca5e24dc3086ea..0000000000000000000000000000000000000000
--- a/release/arch/debian/Vagrantfile
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# All Vagrant configuration is done below. The "2" in Vagrant.configure
-# configures the configuration version (we support older styles for
-# backwards compatibility). Please don't change it unless you know what
-# you're doing.
-Vagrant.configure("2") do |config|
-  # The most common configuration options are documented and commented below.
-  # For a complete reference, please see the online documentation at
-  # https://docs.vagrantup.com.
-
-  # Every Vagrant development environment requires a box. You can search for
-  # boxes at https://atlas.hashicorp.com/search.
-  config.vm.box = "https://s3.eu-central-1.amazonaws.com/duniter/vagrant/duniter_trusty64.box"
-  config.vm.provision :shell, path: "bootstrap.sh"
-
-  # Disable automatic box update checking. If you disable this, then
-  # boxes will only be checked for updates when the user runs
-  # `vagrant box outdated`. This is not recommended.
-  # config.vm.box_check_update = false
-
-  # Create a forwarded port mapping which allows access to a specific port
-  # within the machine from a port on the host machine. In the example below,
-  # accessing "localhost:8080" will access port 80 on the guest machine.
-  # config.vm.network "forwarded_port", guest: 80, host: 8080
-
-  # Create a private network, which allows host-only access to the machine
-  # using a specific IP.
-  # config.vm.network "private_network", ip: "192.168.33.10"
-
-  # Create a public network, which generally matched to bridged network.
-  # Bridged networks make the machine appear as another physical device on
-  # your network.
-  # config.vm.network "public_network"
-
-  # Share an additional folder to the guest VM. The first argument is
-  # the path on the host to the actual folder. The second argument is
-  # the path on the guest to mount the folder. And the optional third
-  # argument is a set of non-required options.
-  # config.vm.synced_folder "../data", "/vagrant_data"
-
-  # Provider-specific configuration so you can fine-tune various
-  # backing providers for Vagrant. These expose provider-specific options.
-  # Example for VirtualBox:
-  #
-   config.vm.provider "virtualbox" do |vb|
-     # Display the VirtualBox GUI when booting the machine
-     #vb.gui = true
-  
-     # Customize the amount of memory on the VM:
-     vb.memory = "2048"
-   end
-  #
-  # View the documentation for the provider you are using for more
-  # information on available options.
-
-  # Define a Vagrant Push strategy for pushing to Atlas. Other push strategies
-  # such as FTP and Heroku are also available. See the documentation at
-  # https://docs.vagrantup.com/v2/push/atlas.html for more information.
-  # config.push.define "atlas" do |push|
-  #   push.app = "YOUR_ATLAS_USERNAME/YOUR_APPLICATION_NAME"
-  # end
-
-  # Enable provisioning with a shell script. Additional provisioners such as
-  # Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the
-  # documentation for more information about their specific syntax and use.
-  # config.vm.provision "shell", inline: <<-SHELL
-  #   apt-get update
-  #   apt-get install -y apache2
-  # SHELL
-end
diff --git a/release/arch/debian/bootstrap.sh b/release/arch/debian/bootstrap.sh
deleted file mode 100644
index 6666f97b5365f01b41da099362a4e4ae51301e2f..0000000000000000000000000000000000000000
--- a/release/arch/debian/bootstrap.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# Yarn
-curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
-echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
-
-# System tools
-apt-get update
-apt-get install --yes git curl build-essential yarn python-minimal zip
-
-# User installation
-sudo su vagrant -c "bash /vagrant/user-bootstrap.sh"
diff --git a/release/arch/debian/build-deb.sh b/release/arch/debian/build-deb.sh
deleted file mode 100644
index 528b044cb93565a64a955afb5e8de84af9f8db60..0000000000000000000000000000000000000000
--- a/release/arch/debian/build-deb.sh
+++ /dev/null
@@ -1,210 +0,0 @@
-#!/bin/bash
-
-# NVM
-export NVM_DIR="$HOME/.nvm"
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
-
-# Prepare
-NODE_VERSION=8.9.1
-NVER="v$NODE_VERSION"
-DUNITER_TAG=$1
-ADDON_VERSION=57
-NW_VERSION=0.24.4
-NW_RELEASE="v${NW_VERSION}"
-NW="nwjs-${NW_RELEASE}-linux-x64"
-NW_GZ="${NW}.tar.gz"
-
-nvm install ${NVER}
-nvm use ${NVER}
-npm install -g node-pre-gyp
-npm install -g nw-gyp
-# Folders
-ROOT=`pwd`
-DOWNLOADS="$ROOT/downloads"
-RELEASES="$ROOT/releases"
-
-mkdir -p "$DOWNLOADS"
-
-# -----------
-# Clean sources + releases
-# -----------
-rm -rf "$DOWNLOADS/duniter"
-rm -rf "$RELEASES"
-rm -rf /vagrant/*.deb
-rm -rf /vagrant/*.tar.gz
-
-# -----------
-# Downloads
-# -----------
-
-cd "$DOWNLOADS"
-
-if [ ! -d "$DOWNLOADS/duniter" ]; then
-  mv /vagrant/duniter-source duniter
-  cd duniter
-  git checkout "v${DUNITER_TAG}"
-  cd ..
-fi
-
-DUNITER_DEB_VER=" $DUNITER_TAG"
-DUNITER_TAG="v$DUNITER_TAG"
-
-if [ ! -f "$DOWNLOADS/$NW_GZ" ]; then
-  wget https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ}
-  tar xvzf ${NW_GZ}
-fi
-
-if [ ! -f "$DOWNLOADS/node-${NVER}-linux-x64.tar.gz" ]; then
-  # Download Node.js and package it with the sources
-  wget http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz
-  tar xzf node-${NVER}-linux-x64.tar.gz
-fi
-
-# -----------
-# Releases
-# -----------
-
-rm -rf "$RELEASES"
-mkdir -p "$RELEASES"
-
-cp -r "$DOWNLOADS/duniter" "$RELEASES/duniter"
-cd "$RELEASES"
-
-# NPM build
-cp -r duniter _npm
-
-# Releases builds
-cd ${RELEASES}/duniter
-# Remove git files
-rm -Rf .git
-[[ $? -eq 0 ]] && echo ">> VM: building modules..."
-[[ $? -eq 0 ]] && npm install
-
-# Duniter UI
-[[ $? -eq 0 ]] && npm install duniter-ui@1.6.x
-[[ $? -eq 0 ]] && npm prune --production
-
-cp -r "$RELEASES/duniter" "$RELEASES/desktop_"
-cp -r "$RELEASES/duniter" "$RELEASES/server_"
-
-# -------------------------------------------------
-# Build Desktop version (Nw.js is embedded)
-# -------------------------------------------------
-
-cd "$RELEASES/desktop_"
-echo "$NW_RELEASE"
-
-cd "$RELEASES/desktop_/node_modules/wotb"
-
-# FIX: bug of nw.js, we need to patch first.
-# TODO: remove this patch once a correct version of Nw.js is out (NodeJS 8 or 9 if the above modules are compliant)
-cp /vagrant/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi
-
-#yarn --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/wotb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/wotb.node
-cd "$RELEASES/desktop_/node_modules/naclb"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/naclb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/naclb.node
-cd "$RELEASES/desktop_/node_modules/scryptb"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/scryptb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/scryptb.node
-cd "$RELEASES/desktop_/node_modules/sqlite3"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/node-webkit-$NW_RELEASE-linux-x64/node_sqlite3.node lib/binding/node-v$ADDON_VERSION-linux-x64/node_sqlite3.node
-
-# Unused binaries
-cd "$RELEASES/desktop_/"
-rm -rf node_modules/sqlite3/build
-#rm -rf node_modules/naclb/build
-#rm -rf node_modules/wotb/build
-#rm -rf node_modules/scryptb/build
-
-## Install Nw.js
-mkdir -p "$RELEASES/desktop_release"
-
-# -------------------------------------------------
-# Build Desktop version .tar.gz
-# -------------------------------------------------
-
-cp -r $DOWNLOADS/${NW}/* "$RELEASES/desktop_release/"
-# Embed Node.js with Nw.js to make Duniter modules installable
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/lib "$RELEASES/desktop_release/"
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/include "$RELEASES/desktop_release/"
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/bin "$RELEASES/desktop_release/"
-# Add some specific files for GUI
-cp ${RELEASES}/desktop_/gui/* "$RELEASES/desktop_release/"
-# Add Duniter sources
-cp -R $RELEASES/desktop_/* "$RELEASES/desktop_release/"
-## Insert Nw specific fields while they do not exist (1.3.3)
-sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "$RELEASES/desktop_release/package.json"
-# Add links for Node.js + NPM
-cd "$RELEASES/desktop_release/bin"
-ln -s ../lib/node_modules/npm/bin/npm-cli.js ./npm -f
-cd ..
-ln -s ./bin/node node -f
-ln -s ./bin/npm npm -f
-#sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
-# Create a copy for TGZ binary
-cp -R "$RELEASES/desktop_release" "$RELEASES/desktop_release_tgz"
-#cd "$RELEASES/desktop_release_tgz/"
-#rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-cd "$RELEASES/desktop_release_tgz"
-tar czf /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz * --exclude ".git" --exclude "coverage" --exclude "test"
-
-# -------------------------------------------------
-# Build Desktop version .deb
-# -------------------------------------------------
-
-# Create .deb tree + package it
-#cp -r "$RELEASES/desktop_release/release/arch/debian/package" "$RELEASES/duniter-x64"
-cp -r "/vagrant/package" "$RELEASES/duniter-x64"
-mkdir -p "$RELEASES/duniter-x64/opt/duniter/"
-chmod 755 ${RELEASES}/duniter-x64/DEBIAN/post*
-chmod 755 ${RELEASES}/duniter-x64/DEBIAN/pre*
-sed -i "s/Version:.*/Version:$DUNITER_DEB_VER/g" ${RELEASES}/duniter-x64/DEBIAN/control
-cd ${RELEASES}/desktop_release/
-#rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/sqlite3/lib/binding/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-zip -qr ${RELEASES}/duniter-x64/opt/duniter/duniter-desktop.nw *
-
-sed -i "s/Package: .*/Package: duniter-desktop/g" ${RELEASES}/duniter-x64/DEBIAN/control
-cd ${RELEASES}/
-fakeroot dpkg-deb --build duniter-x64
-mv duniter-x64.deb /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.deb
-
-# -------------------------------------------------
-# Build Server version (Node.js is embedded, not Nw.js)
-# -------------------------------------------------
-
-cd ${RELEASES}
-rm -rf duniter-server-x64
-cp -r duniter-x64 duniter-server-x64
-
-# Remove Nw.js
-rm -rf duniter-server-x64/opt/duniter/duniter-desktop.nw*
-
-cd ${RELEASES}/server_
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64 node
-zip -qr ${RELEASES}/duniter-server-x64/opt/duniter/duniter-desktop.nw *
-cd ${RELEASES}
-sed -i "s/Package: .*/Package: duniter/g" ${RELEASES}/duniter-server-x64/DEBIAN/control
-rm -rf ${RELEASES}/duniter-server-x64/usr
-fakeroot dpkg-deb --build duniter-server-x64
-mv duniter-server-x64.deb /vagrant/duniter-server-${DUNITER_TAG}-linux-x64.deb
diff --git a/release/arch/debian/user-bootstrap.sh b/release/arch/debian/user-bootstrap.sh
deleted file mode 100644
index 38df75d12426297d394d40c3113496de092d6718..0000000000000000000000000000000000000000
--- a/release/arch/debian/user-bootstrap.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# NVM
-curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.1/install.sh | bash
-export NVM_DIR="$HOME/.nvm"
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
-
-# Node.js
-nvm install 6
-
-# node-pre-gyp
-npm install -g nw-gyp node-pre-gyp
diff --git a/release/arch/debian/0.24.4_common.gypi b/release/arch/linux/0.24.4_common.gypi
similarity index 100%
rename from release/arch/debian/0.24.4_common.gypi
rename to release/arch/linux/0.24.4_common.gypi
diff --git a/release/arch/linux/build-lin.sh b/release/arch/linux/build-lin.sh
new file mode 100644
index 0000000000000000000000000000000000000000..6abf5c16fa4a80be1b7cf4d0f24143c0c8fbb5d8
--- /dev/null
+++ b/release/arch/linux/build-lin.sh
@@ -0,0 +1,196 @@
+#!/bin/bash
+
+if [[ -z "${1}" ]]; then
+	echo "Fatal: no version given to build script"
+	exit 1
+fi
+if [[ -s "$NVM_DIR/nvm.sh" ]]; then
+	source "$NVM_DIR/nvm.sh"
+else
+	echo "Fatal: could not load nvm"
+	exit 1
+fi
+
+# -----------
+# Prepare
+# -----------
+
+NODE_VERSION=8.9.1
+NVER="v${NODE_VERSION}"
+DUNITER_TAG="v${1}"
+DUNITER_DEB_VER=" ${1}"
+ADDON_VERSION=57
+NW_VERSION=0.24.4
+NW_RELEASE="v${NW_VERSION}"
+NW="nwjs-${NW_RELEASE}-linux-x64"
+NW_GZ="${NW}.tar.gz"
+DUNITER_UI_VER="1.6.x"
+
+nvm install ${NVER} || exit 1
+nvm use ${NVER} || exit 1
+npm install -g node-pre-gyp || exit 1
+npm install -g nw-gyp || exit 1
+
+# -----------
+# Folders
+# -----------
+
+ROOT="${PWD}"
+WORK_NAME=work
+WORK="${ROOT}/${WORK_NAME}"
+DOWNLOADS="${WORK}/downloads"
+RELEASES="${WORK}/releases"
+BIN="${WORK}/bin"
+
+mkdir -p "${DOWNLOADS}" "${RELEASES}" "${BIN}" || exit 1
+rm -rf "${BIN}/"*.{deb,tar.gz} # Clean up
+
+# -----------
+# Downloads
+# -----------
+
+cd "${DOWNLOADS}"
+curl -O https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ} || exit 1
+tar xzf ${NW_GZ} || exit 1
+rm ${NW_GZ}
+curl -O http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz || exit 1
+tar xzf node-${NVER}-linux-x64.tar.gz || exit 1
+rm node-${NVER}-linux-x64.tar.gz
+
+# -----------
+# Releases
+# -----------
+
+# Prepare sources
+mkdir -p "${RELEASES}/duniter" || exit 1
+cp -r $(find "${ROOT}" -mindepth 1 -maxdepth 1 ! -name "${WORK_NAME}") "${RELEASES}/duniter" || exit 1
+cd "${RELEASES}/duniter"
+rm -Rf .gitignore .git || exit 1 # Remove git files
+
+# Build
+echo ">> VM: building modules..."
+npm install || exit 1
+
+# Duniter UI
+npm install "duniter-ui@${DUNITER_UI_VER}" || exit 1
+npm prune --production || exit 1
+
+rm -rf release coverage test # Non production folders
+cp -r "${RELEASES}/duniter" "${RELEASES}/desktop_" || exit 1
+cp -r "${RELEASES}/duniter" "${RELEASES}/server_" || exit 1
+
+# -------------------------------------
+# Build Desktop version against nw.js
+# -------------------------------------
+
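+# Helpers for the nw.js build:
+# - nw_copy / nw_copy_node: copy the node-webkit build of a native module next to
+#   its plain Node.js binding so both runtimes can load it
+# - nw_compile: rebuild a native module against nw.js with node-pre-gyp, then
+#   apply the given copy function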
+nw_copy() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/Release/node-webkit-v${NW_VERSION}-linux-x64/${1}.node \
+		lib/binding/Release/node-v${ADDON_VERSION}-linux-x64/${1}.node || exit 1
+}
+
+nw_copy_node() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/node-webkit-v${NW_VERSION}-linux-x64/node_${1}.node \
+		lib/binding/node-v${ADDON_VERSION}-linux-x64/node_${1}.node || exit 1
+}
+
+nw_compile() {
+	[[ -z ${1} ]] && exit 1
+	cd ${1} || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} configure || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} build || exit 1
+	[[ -z ${2} ]] || ${2} ${1}
+	cd ..
+}
+
+echo "${NW_RELEASE}"
+
+# FIX: bug of nw.js, we need to patch first.
+# TODO: remove this patch once a correct version of Nw.js is out (NodeJS 8 or 9 if the above modules are compliant)
+cd "${RELEASES}/desktop_/node_modules/wotb"
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure \
+  || echo "This failure is expected"
+cp ${ROOT}/release/arch/linux/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi || exit 1
+
+cd "${RELEASES}/desktop_/node_modules/"
+nw_compile wotb nw_copy
+nw_compile naclb nw_copy
+nw_compile scryptb nw_copy
+nw_compile sqlite3 nw_copy_node
+
+# Unused binaries
+cd "${RELEASES}/desktop_/"
+rm -rf node_modules/sqlite3/build
+
+# --------------------------------
+# Embed nw.js in desktop version
+# --------------------------------
+
+# Install Nw.js
+mkdir -p "${RELEASES}/desktop_release" || exit 1
+cp -r "${DOWNLOADS}/${NW}/"* "${RELEASES}/desktop_release/" || exit 1
+# Embed Node.js with Nw.js to make Duniter modules installable
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/lib" "${RELEASES}/desktop_release/" || exit 1
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/include" "${RELEASES}/desktop_release/" || exit 1
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/bin" "${RELEASES}/desktop_release/" || exit 1
+# Add some specific files for GUI
+cp "${RELEASES}/desktop_/gui/"* "${RELEASES}/desktop_release/" || exit 1
+# Add Duniter sources
+cp -R "${RELEASES}/desktop_/"* "${RELEASES}/desktop_release/" || exit 1
+# Insert Nw specific fields while they do not exist (1.3.3)
+sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "${RELEASES}/desktop_release/package.json" || exit 1
+# Add links for Node.js + NPM
+cd "${RELEASES}/desktop_release/bin"
+ln -s "../lib/node_modules/npm/bin/npm-cli.js" "./npm" -f || exit 1
+cd ..
+ln -s "./bin/node" "node" -f || exit 1
+ln -s "./bin/npm" "npm" -f || exit 1
+#sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
+rm -rf "${RELEASES}/desktop_"
+mv "${RELEASES}/desktop_release" "${RELEASES}/desktop_"
+
+# ---------------------------------
+# Embed node.js in server version
+# ---------------------------------
+
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64" "${RELEASES}/server_/node" || exit 1
+
+# ---------------
+# Build .tar.gz
+# ---------------
+
+cd "${RELEASES}/desktop_"
+tar czf "${BIN}/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz" * || exit 1
+
+# -----------------------
+# Build Debian packages
+# -----------------------
+
+# Parameters
+# 1: either "server" or "desktop".
+# 2: package name for Debian.
+build_deb_pack() {
+	rm -rf "${RELEASES}/duniter-x64"
+	mkdir "${RELEASES}/duniter-x64" || exit 1
+	cp -r "${ROOT}/release/extra/debian/package/"* "${RELEASES}/duniter-x64" || exit 1
+	if [[ "${1}" == "desktop" ]]; then
+		cp -r "${ROOT}/release/extra/desktop/"* "${RELEASES}/duniter-x64" || exit 1
+	fi
+	mkdir -p "${RELEASES}/duniter-x64/opt/duniter/" || exit 1
+	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"post* || exit 1
+	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"pre* || exit 1
+	sed -i "s/Version:.*/Version:${DUNITER_DEB_VER}/g" "${RELEASES}/duniter-x64/DEBIAN/control" || exit 1
+
+	cd "${RELEASES}/${1}_/"
+	zip -qr "${RELEASES}/duniter-x64/opt/duniter/duniter.zip" * || exit 1
+
+	sed -i "s/Package: .*/Package: ${2}/g" "${RELEASES}/duniter-x64/DEBIAN/control" || exit 1
+
+	cd "${RELEASES}"
+	fakeroot dpkg-deb --build duniter-x64 || exit 1
+	mv duniter-x64.deb "${BIN}/duniter-${1}-${DUNITER_TAG}-linux-x64.deb" || exit 1
+}
+
+build_deb_pack desktop duniter-desktop
+build_deb_pack server duniter
diff --git a/release/arch/debian/package/DEBIAN/control b/release/extra/debian/package/DEBIAN/control
similarity index 100%
rename from release/arch/debian/package/DEBIAN/control
rename to release/extra/debian/package/DEBIAN/control
diff --git a/release/arch/debian/package/DEBIAN/postinst b/release/extra/debian/package/DEBIAN/postinst
similarity index 88%
rename from release/arch/debian/package/DEBIAN/postinst
rename to release/extra/debian/package/DEBIAN/postinst
index 8938ddb32899360b05791ebe244a871f88765bd0..dcf677ed0117132e4c5d92fd39f14abb3867d983 100755
--- a/release/arch/debian/package/DEBIAN/postinst
+++ b/release/extra/debian/package/DEBIAN/postinst
@@ -5,9 +5,9 @@ DUN_SOURCES=$DUN_ROOT/
 mkdir -p $DUN_SOURCES
 
 # Duniter sources extraction
-if [[ -f $DUN_ROOT/duniter-desktop.nw ]]; then
-  unzip -q -d $DUN_SOURCES/ $DUN_ROOT/duniter-desktop.nw
-  rm -rf $DUN_ROOT/duniter-desktop.nw
+if [[ -f $DUN_ROOT/duniter.zip ]]; then
+  unzip -q -d $DUN_SOURCES/ $DUN_ROOT/duniter.zip
+  rm -rf $DUN_ROOT/duniter.zip
 fi
 
 # Duniter-Desktop
diff --git a/release/arch/debian/package/DEBIAN/prerm b/release/extra/debian/package/DEBIAN/prerm
similarity index 100%
rename from release/arch/debian/package/DEBIAN/prerm
rename to release/extra/debian/package/DEBIAN/prerm
diff --git a/release/arch/debian/package/usr/share/applications/duniter.desktop b/release/extra/desktop/usr/share/applications/duniter.desktop
similarity index 100%
rename from release/arch/debian/package/usr/share/applications/duniter.desktop
rename to release/extra/desktop/usr/share/applications/duniter.desktop
diff --git a/release/new_prerelease.sh b/release/new_prerelease.sh
index bd5d78caa6a0c3d2b6612f3200e4011a9bb6e2e1..1a3a0a5206507451297dcf16e40c3ebc1cd6bf8d 100755
--- a/release/new_prerelease.sh
+++ b/release/new_prerelease.sh
@@ -42,13 +42,13 @@ for asset in $EXPECTED_ASSETS; do
 
     echo "Missing asset: $asset"
 
-    # Debian
+    # Linux
     if [[ $asset == *"linux-x64.deb" ]] || [[ $asset == *"linux-x64.tar.gz" ]]; then
       if [[ $ARCH == "x86_64" ]]; then
-        echo "Starting Debian build..."
-        ./release/scripts/build.sh make deb $TAG
-        DEB_PATH="$PWD/release/arch/debian/$asset"
-        node ./release/scripts/upload-release.js $TOKEN $TAG $DEB_PATH
+        echo "Starting Linux build..."
+        ./release/scripts/build.sh make lin $TAG
+        LIN_PATH="$PWD/release/arch/linux/$asset"
+        node ./release/scripts/upload-release.js $TOKEN $TAG $LIN_PATH
       else
         echo "This computer cannot build this asset, required architecture is 'x86_64'. Skipping."
       fi
diff --git a/release/new_version.sh b/release/new_version.sh
index fbbda1bdcf6dc7121e9980b3b17b8c53de176f16..6195860cb8be24a6e9715d1da33356498d8b4c06 100755
--- a/release/new_version.sh
+++ b/release/new_version.sh
@@ -8,7 +8,7 @@ if [[ $1 =~ ^[0-9]+.[0-9]+.[0-9]+((a|b)[0-9]+)?$ ]]; then
   echo "Changing to version: $1"
   # Change the version in package.json and test file
   sed -i "s/version\": .*/version\": \"$1\",/g" package.json
-  sed -i "s/Version: .*/Version: $1/g" release/arch/debian/package/DEBIAN/control
+  sed -i "s/Version: .*/Version: $1/g" release/extra/debian/package/DEBIAN/control
   sed -i "s/version').equal('.*/version').equal('$1');/g" test/integration/branches.js
   sed -i "s/ release: .*/ release: v$1/g" appveyor.yml
 
@@ -21,7 +21,7 @@ if [[ $1 =~ ^[0-9]+.[0-9]+.[0-9]+((a|b)[0-9]+)?$ ]]; then
 
   # Commit
   git reset HEAD
-  git add package.json test/integration/branches.js gui/index.html release/arch/debian/package/DEBIAN/control release/arch/windows/duniter.iss
+  git add package.json test/integration/branches.js gui/index.html release/extra/debian/package/DEBIAN/control release/arch/windows/duniter.iss
   git commit -m "v$1"
   git tag "v$1"
 else
diff --git a/release/scripts/build.sh b/release/scripts/build.sh
index 973cc774326940dc2a6f2a9c2544bc5efd1fcedc..6a0ae2940c38b69a1e1c81226a894f35fd5b35b7 100755
--- a/release/scripts/build.sh
+++ b/release/scripts/build.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
 
+BUILDER_TAG="v1.0.1"
+
 TAG="$3"
 ORIGIN="$4"
 IS_LOCAL_TAG=0
@@ -41,34 +43,36 @@ make)
       echo ">> Build success."
     fi
     ;;
-  deb)
-    cd release/arch/debian
+  lin)
+    cd release/arch/linux
     if [[ ! -f "duniter-desktop-$TAG-linux-x64.deb" ]]; then
 
       #### PREPARE SOURCE CODE ####
-      rm -rf duniter-source
       # Clone from remote
       echo ">> VM: Cloning sources from ${ORIGIN}..."
       git clone "${ORIGIN}" duniter-source
-      if [ ${IS_LOCAL_TAG} -eq 1 ]; then
-        cd duniter-source
-        ./release/new_version.sh "$TAG"
-        cd ..
-      fi
+      cd duniter-source
+      [[ ${IS_LOCAL_TAG} -eq 1 ]] && ./release/new_version.sh "${TAG}"
+      git checkout "v${TAG}"
+      cd ..
 
-      [[ $? -eq 0 ]] && echo ">> Starting Vagrant Ubuntu VM..."
-      [[ $? -eq 0 ]] && vagrant up
-      [[ $? -eq 0 ]] && echo ">> VM: building Duniter..."
-      [[ $? -eq 0 ]] && vagrant ssh -- "bash -s ${TAG}" < ./build-deb.sh
+      docker pull duniter/release-builder:${BUILDER_TAG}
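+      # The heredoc below is piped as stdin to `docker run -i`; the container's shell executes these commands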
+cat <<EOF |
+      cd /builds/duniter-source
+      bash "release/arch/linux/build-lin.sh" "${TAG}" || exit 1
+      exit 0
+EOF
+      docker run --rm -i -v ${PWD}/duniter-source:/builds/duniter-source duniter/release-builder:${BUILDER_TAG}
       if [ ! $? -eq 0 ]; then
         echo ">> Something went wrong. Stopping build."
       else
+        mv duniter-source/work/bin/* .
         echo ">> Build success. Shutting the VM down."
       fi
-      vagrant halt
+      rm -rf duniter-source
       echo ">> VM closed."
     else
-      echo "Debian binaries already built. Ready for upload."
+      echo "Linux binaries already built. Ready for upload."
     fi
     ;;
   win)
diff --git a/test/fast/protocol-local-rule-chained-tx-depth.ts b/test/fast/protocol-local-rule-chained-tx-depth.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a13f0fb3c45bc7d979aa3c98e7c914fe8657a640
--- /dev/null
+++ b/test/fast/protocol-local-rule-chained-tx-depth.ts
@@ -0,0 +1,45 @@
+import {LOCAL_RULES_HELPERS} from "../../app/lib/rules/local_rules"
+
+const _ = require('underscore')
+const assert = require('assert')
+
+describe("Protocol BR_G110 - chained tx depth", () => {
+
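+  // A chain of transactions: A consumes a UD and creates TXA, B consumes TXA and creates TXB, and so on up to H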
+  const sindex = [
+    { tx: 'A', op: 'UPDATE', identifier: 'UD1', pos: 0 },
+    { tx: 'A', op: 'CREATE', identifier: 'TXA', pos: 0 },
+    { tx: 'B', op: 'UPDATE', identifier: 'TXA', pos: 0 },
+    { tx: 'B', op: 'CREATE', identifier: 'TXB', pos: 0 },
+    { tx: 'C', op: 'UPDATE', identifier: 'TXB', pos: 0 },
+    { tx: 'C', op: 'CREATE', identifier: 'TXC', pos: 0 },
+    { tx: 'D', op: 'UPDATE', identifier: 'TXC', pos: 0 },
+    { tx: 'D', op: 'CREATE', identifier: 'TXD', pos: 0 },
+    { tx: 'E', op: 'UPDATE', identifier: 'TXD', pos: 0 },
+    { tx: 'E', op: 'CREATE', identifier: 'TXE', pos: 0 },
+    { tx: 'F', op: 'UPDATE', identifier: 'TXE', pos: 0 },
+    { tx: 'F', op: 'CREATE', identifier: 'TXF', pos: 0 },
+    { tx: 'G', op: 'UPDATE', identifier: 'TXF', pos: 0 },
+    { tx: 'G', op: 'CREATE', identifier: 'TXG', pos: 0 },
+    { tx: 'H', op: 'UPDATE', identifier: 'TXG', pos: 0 },
+    { tx: 'H', op: 'CREATE', identifier: 'TXH', pos: 0 },
+  ]
+
+  it('should detect normal depth', () => {
+    assert.equal(0, LOCAL_RULES_HELPERS.getTransactionDepth('A', sindex))
+    assert.equal(1, LOCAL_RULES_HELPERS.getTransactionDepth('B', sindex))
+    assert.equal(2, LOCAL_RULES_HELPERS.getTransactionDepth('C', sindex))
+    assert.equal(3, LOCAL_RULES_HELPERS.getTransactionDepth('D', sindex))
+    assert.equal(4, LOCAL_RULES_HELPERS.getTransactionDepth('E', sindex))
+    assert.equal(5, LOCAL_RULES_HELPERS.getTransactionDepth('F', sindex))
+    assert.equal(6, LOCAL_RULES_HELPERS.getTransactionDepth('G', sindex))
+  })
+
+  it('should cap the detected depth at 6', () => {
+    assert.equal(6, LOCAL_RULES_HELPERS.getTransactionDepth('H', sindex))
+  })
+
+  it('should find the max depth globally', () => {
+    assert.equal(6, LOCAL_RULES_HELPERS.getMaxTransactionDepth(sindex))
+  })
+})
+
diff --git a/test/fast/prover/pow-1-cluster.js b/test/fast/prover/pow-1-cluster.ts
similarity index 63%
rename from test/fast/prover/pow-1-cluster.js
rename to test/fast/prover/pow-1-cluster.ts
index 96d58c12b9f36dd1391c91d3b649bb4f12f25bbc..a2e76947e030785b11c2396118d011fb32f433ee 100644
--- a/test/fast/prover/pow-1-cluster.js
+++ b/test/fast/prover/pow-1-cluster.ts
@@ -1,16 +1,19 @@
-"use strict";
+import {Master} from "../../../app/modules/prover/lib/powCluster"
 
 const co = require('co')
-const should = require('should')
-const PowCluster = require('../../../app/modules/prover/lib/powCluster').Master
+require('should')
 const logger = require('../../../app/lib/logger').NewLogger()
 
-let master
+let master:Master
 
 describe('PoW Cluster', () => {
 
   before(() => {
-    master = new PowCluster(1, logger)
+    master = new Master(1, logger)
+  })
+
+  after(() => {
+    return master.shutDownWorkers()
   })
 
   it('should have an empty cluster if no PoW was asked', () => {
@@ -73,4 +76,35 @@ describe('PoW Cluster', () => {
     delay.should.be.below(50)
   }))
 
+  it('should be able to stop all the cores on cancel', async () => {
+    master.proveByWorkers({
+      initialTestsPerRound: 100,
+      maxDuration: 1000,
+      newPoW: {
+        block: {
+          number: 0
+        },
+        zeros: 10,
+        highMark: 'F',
+        conf: {
+          medianTimeBlocks: 1,
+          avgGenTime: 100,
+          cpu: 0.8,
+          prefix: '8',
+          nbCores: 1
+        },
+        pair: {
+          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+        }
+      }
+    })
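+    // Wait for a first info message from the workers, signalling that the proof computation has actually started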
+    await new Promise(res => {
+      master.onInfoMessage = () => res()
+    })
+    await master.cancelWork()
+    await new Promise(res => setTimeout(res, 100))
+    master.nbCancels.should.equal(1)
+  })
+
 });
diff --git a/test/fast/prover/pow-2-engine.js b/test/fast/prover/pow-2-engine.js
index 8238438d02c1866d4bf7acc0d26625f2f3549a32..743744ba550dd7db189020a2949321393c844e52 100644
--- a/test/fast/prover/pow-2-engine.js
+++ b/test/fast/prover/pow-2-engine.js
@@ -10,6 +10,7 @@ describe('PoW Engine', () => {
   it('should be configurable', () => co(function*(){
     const e1 = new PowEngine({ nbCores: 1 }, logger);
     (yield e1.setConf({ cpu: 0.2, prefix: '34' })).should.deepEqual({ cpu: 0.2, prefix: '34' });
+    yield e1.shutDown()
   }));
 
   it('should be able to make a proof', () => co(function*(){
@@ -52,6 +53,7 @@ describe('PoW Engine', () => {
         pow: '009A52E6E2E4EA7DE950A2DA673114FA55B070EBE350D75FF0C62C6AAE9A37E5'
       }
     });
+    yield e1.shutDown()
   }));
 
   it('should be able to stop a proof', () => co(function*(){
@@ -85,5 +87,6 @@ describe('PoW Engine', () => {
     yield e1.cancel()
     // const proof = yield proofPromise;
     // should.not.exist(proof);
+    yield e1.shutDown()
   }));
 });
diff --git a/test/integration/branches.js b/test/integration/branches.js
deleted file mode 100644
index dadf77a17461865a6b6a4b9adff1026220653e81..0000000000000000000000000000000000000000
--- a/test/integration/branches.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-
-const _         = require('underscore');
-const co        = require('co');
-const should    = require('should');
-const duniter   = require('../../index');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const rp        = require('request-promise');
-const httpTest  = require('./tools/http');
-const shutDownEngine  = require('./tools/shutDownEngine');
-
-const expectAnswer   = httpTest.expectAnswer;
-
-const MEMORY_MODE = true;
-const commonConf = {
-  ipv4: '127.0.0.1',
-  currency: 'bb',
-  httpLogs: true,
-  forksize: 3,
-  sigQty: 1
-};
-
-let s1
-
-describe("Branches", () => co(function*() {
-
-  before(() => co(function*() {
-
-    s1 = duniter(
-      '/bb1',
-      MEMORY_MODE,
-      _.extend({
-        port: '7778',
-        pair: {
-          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-        }
-      }, commonConf));
-
-    const server = yield s1.initWithDAL();
-    const bmapi = yield bma(server);
-    yield bmapi.openConnections();
-  }));
-
-  after(() => {
-    return shutDownEngine(s1)
-  })
-
-  describe("Server 1 /blockchain", function() {
-
-    it('should have a 3 blocks fork window size', function() {
-      return expectAnswer(rp('http://127.0.0.1:7778/node/summary', { json: true }), function(res) {
-        res.should.have.property('duniter').property('software').equal('duniter');
-        res.should.have.property('duniter').property('version').equal('1.6.14');
-        res.should.have.property('duniter').property('forkWindowSize').equal(3);
-      });
-    });
-  });
-}));
diff --git a/test/integration/branches2.js b/test/integration/branches2.js
deleted file mode 100644
index 18d0555795268fae555bad1a150e1a087db3d873..0000000000000000000000000000000000000000
--- a/test/integration/branches2.js
+++ /dev/null
@@ -1,214 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const other_constants_1 = require("../../app/lib/other_constants");
-const logger_1 = require("../../app/lib/logger");
-const index_1 = require("../../app/modules/bma/index");
-const index_2 = require("../../app/modules/crawler/index");
-const toolbox_1 = require("./tools/toolbox");
-const TestUser_1 = require("./tools/TestUser");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = index_1.BmaDependency.duniter.methods.bma;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const expectHttpCode = httpTest.expectHttpCode;
-if (other_constants_1.OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS) {
-    logger_1.NewLogger().mute();
-}
-// Trace these errors
-process.on('unhandledRejection', (reason) => {
-    console.error('Unhandled rejection: ' + reason);
-    console.error(reason);
-});
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 10,
-    switchOnHeadAdvance: 6,
-    avgGenTime: 30 * 60,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-const now = Math.round(new Date().getTime() / 1000);
-describe("SelfFork", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb4', MEMORY_MODE, _.extend({
-            port: '7781',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            }
-        }, commonConf));
-        s2 = duniter('/bb5', MEMORY_MODE, _.extend({
-            port: '7782',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser_1.TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser_1.TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        const commitS1 = commit(s1);
-        const commitS2 = commit(s2, {
-            time: now + 37180
-        });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        // Server 1
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commitS1({
-            time: now
-        });
-        yield commitS1();
-        yield commitS1();
-        yield commitS1();
-        // Server 2
-        yield sync(0, 2, s1, s2);
-        yield toolbox_1.waitToHaveBlock(s2, 2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield Promise.all([
-            toolbox_1.waitForkResolution(s1, 9),
-            index_2.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey)
-        ]);
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), {
-                number: 0,
-                issuersCount: 0,
-                issuersFrame: 1,
-                issuersFrameVar: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), {
-                number: 1,
-                issuersCount: 1,
-                issuersFrame: 1,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/2 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), {
-                number: 2,
-                issuersCount: 1,
-                issuersFrame: 2,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/3 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), {
-                number: 3,
-                issuersCount: 1,
-                issuersFrame: 3,
-                issuersFrameVar: 3
-            });
-        });
-        it('/block/4 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), {
-                number: 4,
-                issuersCount: 2,
-                issuersFrame: 4,
-                issuersFrameVar: 7
-            });
-        });
-        it('/block/5 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), {
-                number: 5,
-                issuersCount: 2,
-                issuersFrame: 5,
-                issuersFrameVar: 6
-            });
-        });
-        it('/block/6 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), {
-                number: 6,
-                issuersCount: 2,
-                issuersFrame: 6,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/7 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), {
-                number: 7,
-                issuersCount: 2,
-                issuersFrame: 7,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 1 branch', () => __awaiter(this, void 0, void 0, function* () {
-            const branches = yield s1.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-    describe("Server 2 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), {
-                number: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), {
-                number: 1
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 1 branch', () => co(function* () {
-            const branches = yield s2.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-});
-//# sourceMappingURL=branches2.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
deleted file mode 100644
index 353ef8113098859a1f9e67a0b9592211759b1194..0000000000000000000000000000000000000000
--- a/test/integration/branches_switch.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const index_1 = require("../../app/modules/crawler/index");
-const index_2 = require("../../app/modules/bma/index");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const TestUser = require('./tools/TestUser').TestUser;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 30,
-    avgGenTime: 1,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-describe("Switch", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb11', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7788',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            },
-            rootoffset: 10,
-            sigQty: 1, dt: 1, ud0: 120
-        }, commonConf));
-        s2 = duniter('/bb12', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7789',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield sync(0, 2, s1, s2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        // So we now have:
-        // S1 01234
-        // S2   `3456789
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield index_1.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey);
-        // S1 should have switched to the other branch
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/8 should exist on S1', function () {
-            return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-        it('/block/8 should exist on S2', function () {
-            return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-    });
-});
-//# sourceMappingURL=branches_switch.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.ts b/test/integration/branches_switch.ts
index 2eb7fae68263581106b049d653402e133aa6508a..ecce9de72ba20fb22fbaeb610ea56ca95f02f8a2 100644
--- a/test/integration/branches_switch.ts
+++ b/test/integration/branches_switch.ts
@@ -11,6 +11,7 @@ const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
 const sync      = require('./tools/sync');
+const cluster   = require('cluster')
 const shutDownEngine  = require('./tools/shutDownEngine');
 
 const expectJSON     = httpTest.expectJSON;
@@ -31,6 +32,8 @@ describe("Switch", function() {
 
   before(() => co(function *() {
 
+    cluster.setMaxListeners(6)
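+    // Allow extra listeners on the shared cluster module while both test servers run; dropped back to 3 in after()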
+
     s1 = duniter(
       '/bb11',
       MEMORY_MODE,
@@ -97,6 +100,7 @@ describe("Switch", function() {
   }));
 
   after(() => {
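+    // Shrink the listener allowance again once the two extra servers are shut down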
+    cluster.setMaxListeners(3)
     return Promise.all([
       shutDownEngine(s1),
       shutDownEngine(s2)
diff --git a/test/integration/continuous-proof.js b/test/integration/continuous-proof.js
index 289d5b694b3f5ceeef79fb2f479af64c6080548f..157477a80f3dca199030a6ba498ea6d0cf218e44 100644
--- a/test/integration/continuous-proof.js
+++ b/test/integration/continuous-proof.js
@@ -37,6 +37,7 @@ describe("Continous proof-of-work", function() {
     yield i1.join();
     yield i2.join();
     yield s1.commit();
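+    // Free the PoW cluster workers spawned by the commit above; the tests below restart block computation explicitly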
+    yield s1.closeCluster();
   }));
 
   it('should automatically stop waiting if nothing happens', () => co(function*() {
@@ -104,7 +105,7 @@ describe("Continous proof-of-work", function() {
     s2.conf.cpu = 1.0;
     s2.startBlockComputation();
     yield s2.until('block', 15);
-    s2.stopBlockComputation();
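+    // stopBlockComputation is awaited so the prover is fully stopped before blocks are pulled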
+    yield s2.stopBlockComputation();
     yield [
       require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s3),
       new Promise(res => {
@@ -121,11 +122,6 @@ describe("Continous proof-of-work", function() {
     const current = yield s3.get('/blockchain/current')
     yield s3.stopBlockComputation();
     current.number.should.be.aboveOrEqual(14)
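+    // The cluster is now closed inside this final test, replacing the removed after() hook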
+    yield s1.closeCluster()
   }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
 });
diff --git a/test/integration/forwarding.js b/test/integration/forwarding.js
deleted file mode 100644
index 993247afcae309b74a8001914f99576823085ed0..0000000000000000000000000000000000000000
--- a/test/integration/forwarding.js
+++ /dev/null
@@ -1,184 +0,0 @@
-"use strict";
-const should = require('should');
-const assert = require('assert');
-const async  = require('async');
-const _      = require('underscore');
-const co     = require('co');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const jspckg = require('../../package');
-const constants = require('../../app/lib/constants');
-
-require('../../app/modules/bma').BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
-
-if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
-  require('../../app/lib/logger').NewLogger().mute();
-}
-
-describe("Forwarding", function() {
-
-  describe("Nodes", function() {
-
-    const common = { currency: 'bb', nobma: false, bmaWithCrawler:true, ws2p: { upnp: false }, ipv4: '127.0.0.1', remoteipv4: '127.0.0.1', rootoffset: 0, sigQty: 1 };
-
-    const node1 = node('db_1', _({ upnp: false, httplogs: false, port: 9600, remoteport: 9600, pair: { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'} }).extend(common));
-    const node2 = node('db_2', _({ upnp: false, httplogs: false, port: 9601, remoteport: 9601, pair: { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'} }).extend(common));
-
-    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
-    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
-    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
-    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
-
-    before(() => co(function*(){
-      yield [node1, node2].map((theNode) => theNode.startTesting());
-      yield new Promise(function(resolve, reject){
-        async.waterfall([
-          function(next) {
-            node2.peering(next);
-          },
-          function(peer, next) {
-            node1.submitPeer(peer, function(err) {
-              next(err);
-            });
-          },
-          function(next) {
-            node1.peering(next);
-          },
-          function(peer, next) {
-            node2.submitPeer(peer, next);
-          }
-        ], function(err) {
-          err ? reject(err) : resolve();
-        });
-      });
-      yield [
-        node2.until('identity', 4),
-        node2.until('certification', 2),
-        node2.until('block', 1),
-        co(function *() {
-
-          // Self certifications
-          yield cat.createIdentity();
-          yield tac.createIdentity();
-          yield tic.createIdentity();
-          yield toc.createIdentity();
-          // Certifications
-          yield cat.cert(tac);
-          yield tac.cert(cat);
-          yield cat.join();
-          yield tac.join();
-          yield node1.commitP();
-        })
-      ];
-      yield [
-        node2.until('revocation', 1),
-        co(function *() {
-          yield cat.revoke();
-        })
-      ];
-    }));
-
-    describe("Testing technical API", function(){
-
-      it('Node1 should be up and running', node1.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-
-      it('Node2 should be up and running', node2.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-    });
-
-    describe('Node 1', doTests(node1));
-    describe('Node 2', doTests(node2));
-
-  });
-});
-
-function doTests(theNode) {
-
-  return function(){
-
-    describe("user cat", function(){
-
-      it('should give only 1 result', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          should.exists(res.results[0].signed[0].isMember);
-          should.exists(res.results[0].signed[0].wasMember);
-          assert.equal(res.results[0].signed[0].isMember, true);
-          assert.equal(res.results[0].signed[0].wasMember, true);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    describe("user tac", function(){
-
-      it('should give only 1 result', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].uids[0].others.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    it('toc should give no result', theNode.lookup('toc', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-
-    it('tic should give no results', theNode.lookup('tic', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-  };
-}
diff --git a/test/integration/forwarding.ts b/test/integration/forwarding.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8adbba5eecbbb90794e343be1a6b6dab41e603a5
--- /dev/null
+++ b/test/integration/forwarding.ts
@@ -0,0 +1,136 @@
+import {NewLogger} from "../../app/lib/logger"
+import {BmaDependency} from "../../app/modules/bma/index"
+import {TestUser} from "./tools/TestUser"
+import {simpleTestingConf, simpleTestingServer, TestingServer} from "./tools/toolbox"
+import {RouterDependency} from "../../app/modules/router"
+
+const should = require('should'); // bound explicitly: the bare should.exists(...) calls below need it
+const assert = require('assert');
+const jspckg = require('../../package');
+const constants = require('../../app/lib/constants');
+
+BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
+
+if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
+  NewLogger().mute()
+}
+
+describe("Forwarding", function() {
+
+  describe("Nodes", function() {
+
+    const now = 1500000000
+    const conf1 = simpleTestingConf(now, { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'})
+    const conf2 = simpleTestingConf(now, { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'})
+
+    const node1 = simpleTestingServer(conf1)
+    const node2 = simpleTestingServer(conf2)
+
+    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
+    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
+    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
+    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
+
+    before(async () => {
+      await node1.initDalBmaConnections()
+      await node2.initDalBmaConnections()
+      await node1.sharePeeringWith(node2)
+      await node2.sharePeeringWith(node1)
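+      // Route each node's documents to the network layer so node1's submissions get forwarded to node2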
+      RouterDependency.duniter.methods.routeToNetwork(node1._server)
+      RouterDependency.duniter.methods.routeToNetwork(node2._server)
+      await Promise.all([
+        node2.until('identity', 4),
+        node2.until('certification', 2),
+        node2.until('block', 1),
+        (async () => {
+
+          // Self certifications
+          await cat.createIdentity();
+          await tac.createIdentity();
+          await tic.createIdentity();
+          await toc.createIdentity();
+          // Certifications
+          await cat.cert(tac);
+          await tac.cert(cat);
+          await cat.join();
+          await tac.join();
+          await node1.commit({ time: now })
+        })()
+      ])
+      await Promise.all([
+        node2.until('revocation', 1),
+        cat.revoke()
+      ])
+    })
+
+    describe("Testing technical API", function(){
+
+      it('Node1 should be up and running', () => node1.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+
+      it('Node2 should be up and running', () => node2.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+    });
+
+    describe('Node 1', doTests(node1));
+    describe('Node 2', doTests(node2));
+
+  });
+});
+
+function doTests(theNode:TestingServer) {
+
+  return () => {
+
+    describe("user cat", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }));
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+        should.exists(res.results[0].signed[0].isMember);
+        should.exists(res.results[0].signed[0].wasMember);
+        assert.equal(res.results[0].signed[0].isMember, true);
+        assert.equal(res.results[0].signed[0].wasMember, true);
+      }));
+    });
+
+    describe("user tac", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }))
+
+      it('should have 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].uids[0].others.length, 1);
+      }))
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+      }))
+    })
+
+    it('toc should give no result', () => theNode.expectError('/wot/lookup/toc', 404, 'No matching identity'))
+
+    it('tic should give no results', () => theNode.expectError('/wot/lookup/tic', 404, 'No matching identity'))
+  }
+}
diff --git a/test/integration/http_api.js b/test/integration/http_api.js
index 83cd3ccb202b102879c48a2bbed2170436992c67..9454b78d613b86f0b88ea912648efb70e36ce5fe 100644
--- a/test/integration/http_api.js
+++ b/test/integration/http_api.js
@@ -336,7 +336,6 @@ function expectJSON(promise, json) {
 
 function postBlock(server2) {
   return function(block) {
-    console.log(typeof block == 'string' ? block : block.getRawSigned())
     return post(server2, '/blockchain/block')({
       block: typeof block == 'string' ? block : block.getRawSigned()
     })
diff --git a/test/integration/network-update.js b/test/integration/network-update.js
index 44c7ce9293b128fd57563f2c44527f610e265adb..7da00b7cf58913dda10b67898e0ccbbbe001a299 100644
--- a/test/integration/network-update.js
+++ b/test/integration/network-update.js
@@ -60,7 +60,7 @@ describe("Network updating", function() {
       yield [s1, s2].reduce((p, server) => co(function*() {
         yield p;
         yield server.initDalBmaConnections()
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
       }), Promise.resolve());
 
       // Server 1
diff --git a/test/integration/peer-outdated.js b/test/integration/peer-outdated.js
index 3f2cb0e1f4b37553114a501b8b8ceb66e34c4ab5..1855c043183697cb8a64986b9368e26dce22ad8c 100644
--- a/test/integration/peer-outdated.js
+++ b/test/integration/peer-outdated.js
@@ -42,7 +42,7 @@ describe("Peer document expiry", function() {
     yield [s1, s2].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/peerings.js b/test/integration/peerings.js
index 4b227f6317b951c2776dbd8b32a672bf31ca4237..5fc5d49bfa55b94f4614a3280c9c8aa1e95652b6 100644
--- a/test/integration/peerings.js
+++ b/test/integration/peerings.js
@@ -93,7 +93,7 @@ describe("Network", function() {
               return bmaAPI.openConnections()
                 .then(() => {
                   server.bma = bmaAPI;
-                  require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+                  require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
                 });
             });
         });
diff --git a/test/integration/peers-same-pubkey.js b/test/integration/peers-same-pubkey.js
index 41c4b9c19edb5c6362a53784f1120d73fae53cb4..6375127aa429522f1f265815dcacccb9395fde6f 100644
--- a/test/integration/peers-same-pubkey.js
+++ b/test/integration/peers-same-pubkey.js
@@ -36,7 +36,7 @@ describe("Peer document", function() {
     yield [s1, s2, s3].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js
index 3c79c48341eadb7b7948f71467ac79fd44259b78..a8d813cb9e8fecda634057b5edb7094563ac569f 100644
--- a/test/integration/start_generate_blocks.js
+++ b/test/integration/start_generate_blocks.js
@@ -76,7 +76,7 @@ describe("Generation", function() {
         yield server.initWithDAL();
         server.bma = yield bma(server);
         yield server.bma.openConnections();
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
         yield server.PeeringService.generateSelfPeer(server.conf);
         const prover = require('../../app/modules/prover').ProverDependency.duniter.methods.prover(server);
         server.startBlockComputation = () => prover.startService();
diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts
index ea81eb794f4235b5456151abe0b7f4f6c8422c61..af71bd3ae15e46c3b4a0fc1168a2d61014fc80a3 100644
--- a/test/integration/tools/toolbox.ts
+++ b/test/integration/tools/toolbox.ts
@@ -23,6 +23,7 @@ import {WS2PCluster} from "../../../app/modules/ws2p/lib/WS2PCluster"
 import {WS2PServer} from "../../../app/modules/ws2p/lib/WS2PServer"
 import {WS2PServerMessageHandler} from "../../../app/modules/ws2p/lib/interface/WS2PServerMessageHandler"
 import {TestUser} from "./TestUser"
+import {RouterDependency} from "../../../app/modules/router"
 
 const assert      = require('assert');
 const _           = require('underscore');
@@ -100,8 +101,8 @@ export const simpleNetworkOf2NodesAnd2Users = async (options:any) => {
   await tac.join();
 
   // Each server forwards to each other
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s1);
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s2);
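+  // routeToNetwork expects the underlying Server instance, hence the _server accessor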
+  RouterDependency.duniter.methods.routeToNetwork(s1._server)
+  RouterDependency.duniter.methods.routeToNetwork(s2._server)
 
   return { s1, s2, cat, tac };
 }
@@ -601,7 +602,7 @@ export class TestingServer {
     const bmaAPI = await bma(this.server);
     await bmaAPI.openConnections();
     this.bma = bmaAPI;
-    require('../../../app/modules/router').duniter.methods.routeToNetwork(this.server);
+    RouterDependency.duniter.methods.routeToNetwork(this.server)
     // Extra: for /wot/requirements URL
     require('../../../app/modules/prover').ProverDependency.duniter.methods.hookServer(this.server);
   }
diff --git a/test/integration/transactions-chaining.js b/test/integration/transactions-chaining.js
deleted file mode 100644
index 66a02c1ca402f6046a8ece5faaa057bbd26efba0..0000000000000000000000000000000000000000
--- a/test/integration/transactions-chaining.js
+++ /dev/null
@@ -1,92 +0,0 @@
-"use strict";
-
-const co = require('co');
-const _ = require('underscore');
-const should = require('should');
-const assert = require('assert');
-const constants = require('../../app/lib/constants');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const CommonConstants = require('../../app/lib/common-libs/constants').CommonConstants
-const toolbox   = require('./tools/toolbox');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const unit   = require('./tools/unit');
-const http   = require('./tools/http');
-
-describe("Transaction chaining", function() {
-
-  const now = 1456644632;
-
-  let s1, tic, toc
-
-  before(() => co(function*() {
-
-    s1 = toolbox.server({
-      pair: {
-        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
-        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
-      },
-      dt: 3600,
-      udTime0: now + 3600,
-      ud0: 1200,
-      c: 0.1
-    });
-
-    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
-    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
-
-    yield s1.initDalBmaConnections();
-    yield tic.createIdentity();
-    yield toc.createIdentity();
-    yield tic.cert(toc);
-    yield toc.cert(tic);
-    yield tic.join();
-    yield toc.join();
-    yield s1.commit({ time: now });
-    yield s1.commit({ time: now + 7210 });
-    yield s1.commit({ time: now + 7210 });
-  }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
-
-  describe("Sources", function(){
-
-    it('block#2 should exist with a UD of 1200', () => s1.expect('/blockchain/block/2', (block) => {
-      should.exists(block);
-      assert.equal(block.number, 2);
-      assert.equal(block.dividend, 1200);
-    }));
-  });
-
-  describe("Chaining", function(){
-
-    it('with SIG and XHX', () => co(function *() {
-      // Current state
-      let current = yield s1.get('/blockchain/current');
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      let tx1 = yield toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
-      let tx2 = yield toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
-        comment: 'also take the remaining 160 units',
-        blockstamp: [current.number, current.hash].join('-'),
-        theseOutputsStart: 1
-      });
-      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
-      CommonConstants.TRANSACTION_MAX_TRIES = 2;
-      yield unit.shouldNotFail(toc.sendTX(tx1));
-      yield unit.shouldNotFail(toc.sendTX(tx2));
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1);
-      yield s1.commit({ time: now + 7210 }); // TX1 committed
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // The 160 remaining units
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2); // The UD + 1040 units sent by toc
-      yield s1.commit({ time: now + 7210 }); // TX2 committed
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
-      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
-    }));
-  });
-});
diff --git a/test/integration/transactions-chaining.ts b/test/integration/transactions-chaining.ts
new file mode 100644
index 0000000000000000000000000000000000000000..cbcbddf8fad80dcdbefe0440839365cde84efa9d
--- /dev/null
+++ b/test/integration/transactions-chaining.ts
@@ -0,0 +1,109 @@
+import {CommonConstants} from "../../app/lib/common-libs/constants"
+import {TestUser} from "./tools/TestUser"
+import {TestingServer} from "./tools/toolbox"
+import {NewLogger} from "../../app/lib/logger"
+
+const should = require('should');
+const assert = require('assert');
+const toolbox   = require('./tools/toolbox');
+const unit   = require('./tools/unit');
+
+describe("Transaction chaining", () => {
+
+  const now = 1456644632;
+
+  let s1:TestingServer, tic:TestUser, toc:TestUser
+
+  before(async () => {
+
+    s1 = toolbox.server({
+      pair: {
+        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
+        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
+      },
+      dt: 3600,
+      udTime0: now + 3600,
+      ud0: 1200,
+      c: 0.1
+    });
+
+    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
+    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
+
+    await s1.initDalBmaConnections();
+    await tic.createIdentity();
+    await toc.createIdentity();
+    await tic.cert(toc);
+    await toc.cert(tic);
+    await tic.join();
+    await toc.join();
+    await s1.commit({ time: now });
+    await s1.commit({ time: now + 7210 });
+    await s1.commit({ time: now + 7210 });
+  })
+
+  after(() => {
+    return s1.closeCluster()
+  })
+
+  describe("Sources", () => {
+
+    it('block#2 should exist with a UD of 1200', () => s1.expect('/blockchain/block/2', (block: { number:number, dividend:number }) => {
+      should.exists(block);
+      assert.equal(block.number, 2);
+      assert.equal(block.dividend, 1200);
+    }))
+  })
+
+  describe("Chaining", () => {
+
+    it('with SIG and XHX', async () => {
+      // Current state
+      let current = await s1.get('/blockchain/current');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      let tx1 = await toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
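+      // Tx2 chains on tx1: it spends the 160 remaining units before tx1 has even been written into a block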
+      let tx2 = await toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
+        comment: 'also take the remaining 160 units',
+        blockstamp: [current.number, current.hash].join('-'),
+        theseOutputsStart: 1
+      });
+      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
+      CommonConstants.TRANSACTION_MAX_TRIES = 2;
+      await unit.shouldNotFail(toc.sendTX(tx1));
+      await unit.shouldNotFail(toc.sendTX(tx2));
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1);
+      await s1.commit({ time: now + 7210 }); // TX1 + TX2 committed
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
+      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
+    })
+
+    it('should refuse a block with more than 5 chained tx in it', async () => {
+      // Current state
+      let current = await s1.get('/blockchain/current');
+      const blockstamp = [current.number, current.hash].join('-');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
+      // Ping-pong of 1200 units
+      let tx1 = await tic.prepareITX(1200, toc, "PING-PONG TX1");
+      let tx2 = await toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX2" });
+      let tx3 = await tic.prepareUTX(tx2, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX3" });
+      let tx4 = await toc.prepareUTX(tx3, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX4" });
+      let tx5 = await tic.prepareUTX(tx4, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX5" });
+      let tx6 = await toc.prepareUTX(tx5, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX6" });
+      let tx7 = await tic.prepareUTX(tx6, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX7" });
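+      // Seven chained TXs exceed the per-block chaining limit of 5, so the commit below must fail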
+      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
+      CommonConstants.TRANSACTION_MAX_TRIES = 2;
+      await unit.shouldNotFail(toc.sendTX(tx1));
+      await unit.shouldNotFail(toc.sendTX(tx2));
+      await unit.shouldNotFail(toc.sendTX(tx3));
+      await unit.shouldNotFail(toc.sendTX(tx4));
+      await unit.shouldNotFail(toc.sendTX(tx5));
+      await unit.shouldNotFail(toc.sendTX(tx6));
+      await unit.shouldNotFail(toc.sendTX(tx7));
+      await s1.commitWaitError({ dontCareAboutChaining: true }, 'The maximum transaction chaining length per block is 5')
+      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
+    })
+  });
+});