diff --git a/.eslintignore b/.eslintignore
index d25a2f246707fc5599e607933c70bf26171f3e2e..052737334621d89f18f1b6e66abf9c0fa60712a4 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -43,5 +43,7 @@ app/modules/bma/lib/entity/*.js
 app/modules/bma/lib/controllers/*.js
 app/modules/crawler/*.js
 app/modules/crawler/lib/*.js
+app/ProcessCpuProfiler.js
+app/lib/common/package.js
 test/*.js
 test/**/*.js
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 0fdf9f4d60fe86d44c161a39da1f7b8d1484dd74..0a79bb0ce1ddaf1033f74641aff5c1ba8f6ce180 100644
--- a/.gitignore
+++ b/.gitignore
@@ -18,7 +18,11 @@ gui/nw
 vagrant/*.log
 vagrant/duniter
 
+# Python compiled
+*.pyc
+
 # Releases
+/work
 *.deb
 *.tar.gz
 *.log
@@ -56,6 +60,10 @@ test/integration/tools/TestUser.js*
 test/integration/tools/TestUser.d.ts
 test/integration/documents-currency.js*
 test/integration/documents-currency.d.ts
+test/integration/forwarding.js
+test/integration/branches_switch.js
+test/integration/branches2.js
+test/integration/transactions-chaining.js
 test/fast/modules/crawler/block_pulling.js*
 test/fast/modules/crawler/block_pulling.d.ts
 test/fast/fork*.js*
@@ -66,3 +74,9 @@ test/fast/modules/ws2p/*.js*
 test/fast/modules/ws2p/*.d.ts
 test/fast/modules/common/grammar.js*
 test/fast/modules/common/grammar.d.ts
+test/fast/prover/pow-1-cluster.d.ts
+test/fast/prover/pow-1-cluster.js
+test/fast/prover/pow-1-cluster.js.map
+test/fast/protocol-local-rule-chained-tx-depth.js
+test/fast/protocol-local-rule-chained-tx-depth.js.map
+test/fast/protocol-local-rule-chained-tx-depth.d.ts
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 6433af71b6e25e4e39e8a14d12f0ea9715bffc59..d75bd05da15a7dd66f0c65fee3450f34f25ab961 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,20 +1,103 @@
 stages:
-    - github-sync
+  - github-sync
+  - build
+  - test
+  - package
+  - prerelease
+  - release
 
 push_to_github:
-    stage: github-sync
-    variables:
-        GIT_STRATEGY: none
-    tags:
-        - github
-    script:
-        - rm -rf ./*
-        - rm -rf .git
-        - git clone --mirror $CI_REPOSITORY_URL .
-        - git remote add github $GITHUB_URL_AND_KEY
-        - git config --global user.email "contact@duniter.org"
-        - git config --global user.name "Duniter"
-        # Job would fail if we don't remove refs about pull requests
-        - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
-        - mv packed-refs-new packed-refs
-        - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
+  stage: github-sync
+  variables:
+    GIT_STRATEGY: none
+  tags:
+    - redshift
+  script:
+    - rm -rf ./*
+    - rm -rf .git
+    - git clone --mirror $CI_REPOSITORY_URL .
+    - git remote add github $GITHUB_URL_AND_KEY
+    - git config --global user.email "contact@duniter.org"
+    - git config --global user.name "Duniter"
+    # Job would fail if we don't remove refs about pull requests
+    - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
+    - mv packed-refs-new packed-refs
+    - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
+  only:
+    - nodes/typescript/duniter
+
+.nvm_env: &nvm_env
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+
+build:
+  <<: *nvm_env
+  stage: build
+  script:
+    - yarn
+
+test:
+  <<: *nvm_env
+  stage: test
+  script:
+    - yarn
+    - yarn test
+
+.build_releases: &build_releases
+  stage: package
+  allow_failure: false
+  image: duniter/release-builder:v1.0.1
+  tags:
+    - redshift-duniter-builder
+  when: manual
+  artifacts:
+    paths: &releases_artifacts
+      - work/bin/
+
+releases:test:
+  <<: *build_releases
+  script:
+    - bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
+  artifacts:
+    paths: *releases_artifacts
+    expire_in: 4h
+  except:
+    - tags
+
+releases:x64:
+  <<: *build_releases
+  script:
+    - bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
+  artifacts:
+    paths: *releases_artifacts
+    expire_in: 2 weeks
+  only:
+    - tags
+
+.release_jobs: &release_jobs
+  image: tensorflow/tensorflow:latest-py3
+  tags:
+    - redshift-duniter-builder
+  script:
+    - python3 .gitlab/releaser
+  only:
+    - tags
+
+prerelease:
+  <<: *release_jobs
+  stage: prerelease
+  variables:
+    RELEASE_BIN_DIR: work/bin/
+    SOURCE_EXT: '["tar.gz", "zip"]'
+
+publish:
+  <<: *release_jobs
+  stage: release
+  variables:
+    RELEASE_BIN_DIR: work/bin/
+    WIKI_RELEASE: Releases
+  allow_failure: false
+  when: manual
diff --git a/.gitlab/release_template.md b/.gitlab/release_template.md
new file mode 100644
index 0000000000000000000000000000000000000000..3b6b85120b0044464f44567dcf140d8af9a92e8b
--- /dev/null
+++ b/.gitlab/release_template.md
@@ -0,0 +1,34 @@
+{% block prerelease %}
+# :gift: Pre-release
+
+[Go to Pipeline page :arrow_forward:](https://git.duniter.org/nodes/typescript/duniter/pipelines/{{pipeline}})
+
+{% endblock %}
+
+{% block release %}
+# :white_check_mark: Release
+
+{% endblock %}
+
+{% block notebody %}
+<placeholder content="end-title" />
+<placeholder content="note">
+{{current_message}}
+</placeholder>
+
+## Downloads
+
+| Category | Arch | Type | Size | File |
+|----------|------|------|------|------|
+{% for artifact in artifacts %}
+| {{artifact.category}} | {{artifact.arch}} | {{artifact.type}} | {{artifact.size}} | [{{artifact.icon}} {{artifact.name}}]({{artifact.url}}) |
+{% endfor %}
+{% endblock %}
+
+{% block previouswiki %}
+
+
+## {{tag}}
+
+{{body}}
+{% endblock %}
diff --git a/.gitlab/releaser/__init__.py b/.gitlab/releaser/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..4f0a11e46162aa197c5f61768335d949955ae02b
--- /dev/null
+++ b/.gitlab/releaser/__init__.py
@@ -0,0 +1,12 @@
+'''
+This module is meant to add release notes in GitLab for the current project.
+It expects to find the following variables in the environment:
+  - CI_PROJECT_URL - Automatically set by gitlab-ci
+  - CI_PROJECT_ID - Automatically set by gitlab-ci
+  - CI_COMMIT_TAG - Automatically set by gitlab-ci
+  - CI_PIPELINE_ID - Automatically set by gitlab-ci
+  - RELEASE_BIN_DIR - Directory where releases are to be found
+  - SOURCE_EXT - Source extensions (pre-release only)
+  - WIKI_RELEASE - Wiki page where releases are stored (release only)
+  - RELEASER_TOKEN - Token used by technical user
+'''
diff --git a/.gitlab/releaser/__main__.py b/.gitlab/releaser/__main__.py
new file mode 100644
index 0000000000000000000000000000000000000000..83a061b83f7f74905faeee23b9fa4702ffe3ab11
--- /dev/null
+++ b/.gitlab/releaser/__main__.py
@@ -0,0 +1,3 @@
+from releaser import Releaser
+
+Releaser().release()
diff --git a/.gitlab/releaser/artifact.py b/.gitlab/releaser/artifact.py
new file mode 100644
index 0000000000000000000000000000000000000000..e0d37f33a7d6896b471a708bf20a5c9e43ce8082
--- /dev/null
+++ b/.gitlab/releaser/artifact.py
@@ -0,0 +1,84 @@
+class Artifact:
+    '''
+    An artifact to be uploaded.
+    '''
+
+    def __init__(self, file_name, category, arch, dtype, icon):
+        '''
+        :param file_name: The name of the artifact file (may have directory).
+        :param category: The category (OS, distrib) for the artifact.
+        :param arch: The architecture name.
+        :param dtype: The delivery type (either server or desktop).
+        :param icon: The name of the icon to be used for artifact representation.
+        :type file_name: str
+        :type category: str
+        :type arch: str
+        :type dtype: str
+        :type icon: str
+        '''
+        self.file_name = file_name
+        self.category = category
+        self.arch = arch
+        self.dtype = dtype
+        self.icon = icon
+
+    def __lt__(self, other):
+        if not isinstance(other, Artifact): raise TypeError()
+        return self.category < other.category or \
+            (self.category == other.category and self.arch < other.arch) or \
+            (self.category == other.category and self.arch == other.arch and self.dtype < other.dtype)
+
+    def __le__(self, other):
+        if not isinstance(other, Artifact): raise TypeError()
+        # Lexicographic order: earlier fields compare strictly, only the last may be equal
+        return self.category < other.category or \
+            (self.category == other.category and self.arch < other.arch) or \
+            (self.category == other.category and self.arch == other.arch and self.dtype <= other.dtype)
+
+    def __eq__(self, other):
+        if not isinstance(other, Artifact): raise TypeError()
+        return self.category == other.category and self.arch == other.arch and self.dtype == other.dtype
+
+    def __ne__(self, other):
+        if not isinstance(other, Artifact): raise TypeError()
+        return self.category != other.category or self.arch != other.arch or self.dtype != other.dtype
+
+    def __gt__(self, other):
+        if not isinstance(other, Artifact): raise TypeError()
+        return self.category > other.category or \
+            (self.category == other.category and self.arch > other.arch) or \
+            (self.category == other.category and self.arch == other.arch and self.dtype > other.dtype)
+
+    def __ge__(self, other):
+        if not isinstance(other, Artifact): raise TypeError()
+        # Lexicographic order: earlier fields compare strictly, only the last may be equal
+        return self.category > other.category or \
+            (self.category == other.category and self.arch > other.arch) or \
+            (self.category == other.category and self.arch == other.arch and self.dtype >= other.dtype)
+
+    def to_dict(self):
+        '''
+        :return: A dictionary containing artifact data.
+        :rtype: dict
+        '''
+        return {
+            'name': self.file_name.split('/')[-1],
+            'category': self.category,
+            'arch': self.arch,
+            'type': self.dtype,
+            'url': self._build_url(),
+            'size': self._get_size(),
+            'icon': ':{}:'.format(self.icon)
+        }
+
+    def _get_size(self):
+        '''
+        :return: The size of the artifact.
+        :rtype: FSItemSize
+        '''
+        raise NotImplementedError()
+
+    def _build_url(self):
+        '''
+        :return: The URL which can be used to get this artifact.
+        :rtype: str
+        '''
+        raise NotImplementedError()
diff --git a/.gitlab/releaser/binartifact.py b/.gitlab/releaser/binartifact.py
new file mode 100644
index 0000000000000000000000000000000000000000..cc3ab04e05fa1386e93d745ff610dd5ea9def5a4
--- /dev/null
+++ b/.gitlab/releaser/binartifact.py
@@ -0,0 +1,37 @@
+import json
+import os
+
+from artifact import Artifact
+from fsitemsize import FSItemSize
+
+class BinArtifact(Artifact):
+    '''
+    A binary artifact.
+    '''
+
+    def __init__(self, folder, desc_file, desc_ext):
+        '''
+        :param folder: The folder where files can be found.
+        :param desc_file: The name of the description file.
+        :param desc_ext: The extension of the description file.
+        :type folder: str
+        :type desc_file: str
+        :type desc_ext: str
+        '''
+        try:
+            description = json.load(open(desc_file))
+        except json.decoder.JSONDecodeError:
+            print('CRITICAL Description file {} could not be read'.format(desc_file))
+            exit(1)
+
+        self.tag = description['version']
+        self.job = description['job']
+        file_name = desc_file[:-len(desc_ext)]
+        Artifact.__init__(self, file_name, description['category'], description['arch'], description['type'], 'package')
+
+    def _get_size(self):
+        return FSItemSize(int(os.path.getsize(self.file_name)))
+
+    def _build_url(self):
+        return '{}/-/jobs/{}/artifacts/raw/{}'.format(
+            os.environ['CI_PROJECT_URL'], self.job, self.file_name)
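For context, a plausible shape for a `.desc` file consumed by `BinArtifact`: the field names (`version`, `job`, `category`, `arch`, `type`) come from the code above, but the values, the file name, and the producing script are assumptions — the build script that writes these files is not shown in this diff.

```python
import json

# Hypothetical description file for one released binary. BinArtifact reads
# 'version', 'job', 'category', 'arch' and 'type'; everything else here is
# illustrative only.
desc = {
    "version": "v1.6.0",   # compared against CI_COMMIT_TAG by the releaser
    "job": 12345,          # CI job id whose artifacts contain the binary
    "category": "Linux",
    "arch": "x64",
    "type": "server",
}
with open("work/bin/duniter-server-v1.6.0-linux-x64.deb.desc", "w") as f:
    json.dump(desc, f)
```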
diff --git a/.gitlab/releaser/fsitemsize.py b/.gitlab/releaser/fsitemsize.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6c8be232cdaf2ae00cebef732c225c27b427a3c
--- /dev/null
+++ b/.gitlab/releaser/fsitemsize.py
@@ -0,0 +1,28 @@
+import math
+
+class FSItemSize:
+    '''
+    The size of a file system item.
+    '''
+
+    def __init__(self, bsize = None):
+        '''
+        :param bsize: Size of item in bytes.
+        :type bsize: int
+        '''
+        self.bsize = bsize
+
+    def __str__(self):
+        '''
+        :return: Human readable size.
+        :rtype: str
+        '''
+        if self.bsize is None:
+            return '(unknown)'
+        elif self.bsize == 0:
+            return '0 B'
+        size_name = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
+        i = int(math.floor(math.log(self.bsize, 1024)))
+        power = math.pow(1024, i)
+        size = round(self.bsize / power, 2)
+        return '{} {}'.format(size, size_name[i])
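A quick sanity check of the formatting above — the expected outputs are derived by hand from the code and are not part of the diff:

```python
# Assuming fsitemsize.py is importable:
from fsitemsize import FSItemSize

print(FSItemSize())         # -> (unknown)
print(FSItemSize(0))        # -> 0 B
print(FSItemSize(1023))     # -> 1023.0 B  (i = 0, so the byte count is shown rounded)
print(FSItemSize(1536))     # -> 1.5 KiB
print(FSItemSize(1234567))  # -> 1.18 MiB  (1234567 / 1024**2 ≈ 1.18)
```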
diff --git a/.gitlab/releaser/job.py b/.gitlab/releaser/job.py
new file mode 100644
index 0000000000000000000000000000000000000000..2dc55c08fcb14a76074b597b81f892a8aa339646
--- /dev/null
+++ b/.gitlab/releaser/job.py
@@ -0,0 +1,22 @@
+import urllib.request
+
+from projectapi import ProjectApi
+
+class Job(ProjectApi):
+    '''
+    Job data API.
+    '''
+
+    def __init__(self, job_id):
+        '''
+        :param job_id: The job id.
+        :type job_id: int
+        '''
+        ProjectApi.__init__(self, '/jobs/{}'.format(job_id))
+
+    def keep_artifacts(self):
+        '''
+        Force artifacts to be kept forever.
+        '''
+        request = self.build_request('/artifacts/keep', method='POST')
+        urllib.request.urlopen(request)
diff --git a/.gitlab/releaser/placeholder.py b/.gitlab/releaser/placeholder.py
new file mode 100644
index 0000000000000000000000000000000000000000..a3624f0b3b94ac494f3451820fbbb2ec127aba9a
--- /dev/null
+++ b/.gitlab/releaser/placeholder.py
@@ -0,0 +1,120 @@
+class PlaceHolder:
+    '''
+    Placeholder tags in Markdown texts.
+    '''
+    __PLACEHOLDER_PART = '<placeholder'
+    __PLACEHOLDER_START = '<placeholder content="{}">'
+    __PLACEHOLDER_STOP = '</placeholder>'
+    __PLACEHOLDER_FULL = '<placeholder content="{}" />'
+
+    def __init__(self, content_id):
+        '''
+        :param content_id: The identifier to be used for placeholder content.
+        :type content_id: str
+        '''
+        self.ph_start = PlaceHolder.__PLACEHOLDER_START.format(content_id)
+        self.ph_stop = PlaceHolder.__PLACEHOLDER_STOP
+        self.ph_full = PlaceHolder.__PLACEHOLDER_FULL.format(content_id)
+
+    def get_content(self, text):
+        '''
+        :param text: The text in which to extract content.
+        :type text: str
+        :return: The content between placeholder markers.
+        :rtype: str
+        '''
+        pos = text.find(self.ph_start)
+        if pos >= 0:
+            text = text[pos + len(self.ph_start):]
+            pos = text.find(self.ph_stop)
+            if pos >= 0: text = text[:pos]
+        return text
+
+    def get_before(self, text, keep_mark=False):
+        '''
+        :param text: The text in which to extract content.
+        :param keep_mark: If true, the mark is kept in final text.
+        :type text: str
+        :type keep_mark: bool
+        :return: The content before (full) placeholder marker.
+        :rtype: str
+        '''
+        pos = text.find(self.ph_full)
+        if pos >= 0:
+            if keep_mark: pos += len(self.ph_full)
+            text = text[:pos]
+        return text
+
+    def get_after(self, text, keep_mark=False):
+        '''
+        :param text: The text in which to extract content.
+        :param keep_mark: If true, the mark is kept in final text.
+        :type text: str
+        :type keep_mark: bool
+        :return: The content after (full) placeholder marker.
+        :rtype: str
+        '''
+        pos = text.find(self.ph_full)
+        if pos >= 0:
+            if not keep_mark: pos += len(self.ph_full)
+            text = text[pos:]
+        return text
+
+    def replace_content(self, text, content):
+        '''
+        :param text: The text in which to replace content.
+        :param content: The new content to insert.
+        :type text: str
+        :type content: str
+        :return: The text where content has been replaced.
+        :rtype: str
+        '''
+        pos = text.find(self.ph_start)
+        if pos >= 0:
+            pos += len(self.ph_start)
+            text_before = text[:pos]
+        else:
+            pos = 0
+            text_before = ''
+        pos = text.find(self.ph_stop, pos)
+        if pos >= 0:
+            text_after = text[pos:]
+        else:
+            text_after = ''
+        return text_before + content + text_after
+
+    def insert_after(self, text, content):
+        '''
+        :param text: The text in which to insert content.
+        :param content: The new content to insert.
+        :type text: str
+        :type content: str
+        :return: The text where content has been inserted.
+        :rtype: str
+        '''
+        pos = text.find(self.ph_full)
+        if pos >= 0: pos += len(self.ph_full)
+        else: pos = 0
+        text_before = text[:pos]
+        text_after = text[pos:]
+        return text_before + content + text_after
+
+    @staticmethod
+    def clear_all(text):
+        '''
+        Clear all placeholders from given text.
+        :param text: The text to clear.
+        :type text: str
+        :return: The clean text.
+        :rtype: str
+        '''
+        while True:
+            pos = text.find(PlaceHolder.__PLACEHOLDER_PART)
+            if pos < 0: break
+            end = text.find('>', pos)  # close of the tag that starts at pos
+            if end < 0: end = len(text)
+            text = text[:pos] + text[end + 1:]
+        while True:
+            pos = text.find(PlaceHolder.__PLACEHOLDER_STOP)
+            if pos < 0: break
+            text = text[:pos] + text[pos + len(PlaceHolder.__PLACEHOLDER_STOP):]
+        return text
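A short usage sketch of these markers (the strings are illustrative):

```python
from placeholder import PlaceHolder

note = PlaceHolder('note')
text = 'intro <placeholder content="note">old</placeholder> outro'

note.get_content(text)             # -> 'old'
note.replace_content(text, 'new')  # -> 'intro <placeholder content="note">new</placeholder> outro'
PlaceHolder.clear_all(text)        # -> 'intro old outro'
```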
diff --git a/.gitlab/releaser/projectapi.py b/.gitlab/releaser/projectapi.py
new file mode 100644
index 0000000000000000000000000000000000000000..eea07e769b95af92960e6538d7f2af60f3c699e8
--- /dev/null
+++ b/.gitlab/releaser/projectapi.py
@@ -0,0 +1,31 @@
+import os
+import urllib.request
+
+class ProjectApi:
+    '''
+    Gitlab API project access.
+    '''
+    __PROJECT_URL = 'https://git.duniter.org/api/v4/projects/{}'
+
+    def __init__(self, url=''):
+        '''
+        :param url: The URL portion to add to base project URL (if needed).
+        :type url: str
+        '''
+        self.base_url = ProjectApi.__PROJECT_URL.format(os.environ['CI_PROJECT_ID'])
+        self.base_url += url
+        self.token = ('Private-Token', os.environ['RELEASER_TOKEN'])
+
+    def build_request(self, url='', **params):
+        '''
+        Create the request to send to project API.
+        :param url: The portion of URL to add to base URL (if needed).
+        :param params: The optional parameters.
+        :type url: str
+        :type params: dict
+        :return: The request, ready to be used.
+        :rtype: urllib.request.Request
+        '''
+        request = urllib.request.Request(self.base_url + url, **params)
+        request.add_header(*self.token)
+        return request
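A minimal sketch of how the subclasses below use this base class. It assumes `CI_PROJECT_ID` and `RELEASER_TOKEN` are set as described; `name` is a standard field of GitLab's project API response.

```python
import json
import urllib.request

from projectapi import ProjectApi

api = ProjectApi()               # base project URL, no extra path
request = api.build_request()    # GET by default, token header added
data = json.loads(urllib.request.urlopen(request).read().decode())
print(data['name'])
```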
diff --git a/.gitlab/releaser/releasenote.py b/.gitlab/releaser/releasenote.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4fff9a756273d38fed03907fb585e3db7fa90b1
--- /dev/null
+++ b/.gitlab/releaser/releasenote.py
@@ -0,0 +1,74 @@
+import json
+import os
+import urllib.request
+
+from placeholder import PlaceHolder
+from projectapi import ProjectApi
+
+class ReleaseNote(ProjectApi):
+    '''
+    Release note API.
+    '''
+    __PH_TITLE = PlaceHolder('end-title')
+    __PH_NOTE = PlaceHolder('note')
+
+    def __init__(self):
+        ProjectApi.__init__(self, '/repository/tags/{}'.format(os.environ['CI_COMMIT_TAG']))
+        self.message_read = False
+
+    def get_note(self):
+        '''
+        Get full release note.
+        :return: The note if it exists, None otherwise.
+        :rtype: str or None
+        '''
+        request = self.build_request()
+        response = urllib.request.urlopen(request)
+        response_data = response.read().decode()
+        data = json.loads(response_data)
+        if data['release'] is None:
+            return None
+        else:
+            self.message_read = True
+            return data['release']['description']
+
+    def get_message(self):
+        '''
+        Get release message. Message is extracted from full note.
+        :return: The message if it exists, empty string otherwise.
+        :rtype: str
+        '''
+        data = self.get_note()
+        if data is None:
+            return ''
+        else:
+            return ReleaseNote.__PH_NOTE.get_content(data)
+
+    def get_note_body(self):
+        '''
+        Get release note body (without title). Body is extracted from full note.
+        :return: The body.
+        :rtype: str
+        '''
+        data = self.get_note()
+        if data is None:
+            print('CRITICAL No release information to publish')
+            exit(1)
+        return ReleaseNote.__PH_TITLE.get_after(data, True)
+
+    def send_note(self, note):
+        '''
+        Send the full release note. The current message should have been read
+        beforehand, unless you are sure there is none.
+        :param note: The full note to send.
+        :type note: str
+        '''
+        method = 'PUT' if self.message_read else 'POST'
+        send_data = {
+            'tag_name': os.environ['CI_COMMIT_TAG'],
+            'description': note
+        }
+        send_data_serialized = json.dumps(send_data).encode('utf-8')
+        request = self.build_request('/release', data=send_data_serialized, method=method)
+        request.add_header('Content-Type', 'application/json')
+        urllib.request.urlopen(request)
diff --git a/.gitlab/releaser/releaser.py b/.gitlab/releaser/releaser.py
new file mode 100644
index 0000000000000000000000000000000000000000..dca04456186c0c187cdb899c67ee6f8dd58c5429
--- /dev/null
+++ b/.gitlab/releaser/releaser.py
@@ -0,0 +1,107 @@
+import glob
+import json
+import os
+
+from binartifact import BinArtifact
+from job import Job
+from placeholder import PlaceHolder
+from releasenote import ReleaseNote
+from releasewikipage import ReleaseWikiPage
+from sourceartifact import SourceArtifact
+from template import Template
+
+class Releaser:
+    '''
+    The main releaser class
+    '''
+
+    def __init__(self):
+        self.template = Template('release_template.md')
+        if 'RELEASE_BIN_DIR' in os.environ:
+            self.release_bin_dir = os.environ['RELEASE_BIN_DIR']
+            if not self.release_bin_dir.endswith('/'): self.release_bin_dir += '/'
+        else:
+            print('CRITICAL RELEASE_BIN_DIR environment variable not set')
+            exit(1)
+        if 'SOURCE_EXT' in os.environ:
+            self.source_ext = os.environ['SOURCE_EXT']
+            try:
+                self.source_ext = json.loads(self.source_ext)
+            except json.decoder.JSONDecodeError:
+                print('CRITICAL SOURCE_EXT environment variable contains malformed JSON')
+                print('CRITICAL Correct: \'["zip","tar.gz"]\'')
+                print('CRITICAL Incorrect: "[\'zip\',\'tar.gz\']"')
+                exit(1)
+        else: self.source_ext = None
+
+    def release(self):
+        if self.source_ext is None:
+            self.publish_release()
+        else:
+            self.publish_prerelease()
+
+    def publish_prerelease(self):
+        '''
+        Main job to publish a pre-release.
+        '''
+        releaseNote = ReleaseNote()
+        current_message = releaseNote.get_message()
+        artifacts_list = []
+
+        # Get releases
+        artifacts_list += self._get_bin_artifacts()
+        artifacts_list.sort()
+        artifacts_list += list(map(lambda e: SourceArtifact(e), self.source_ext))
+
+        # Send result
+        note = self.template.render('notebody', {
+            'current_message': current_message,
+            'artifacts': list(map(lambda a: a.to_dict(), artifacts_list))
+        })
+        title_line = self.template.render('prerelease', {
+            'tag': os.environ['CI_COMMIT_TAG'],
+            'pipeline': os.environ['CI_PIPELINE_ID']
+        })
+        releaseNote.send_note(title_line + note)
+
+        print('Pre-release published')
+
+    def publish_release(self):
+        '''
+        Main job to publish the final release.
+        '''
+        # Change release note
+        releaseNote = ReleaseNote()
+        note = releaseNote.get_note_body()
+        title_line = self.template.render('release', {
+            'tag': os.environ['CI_COMMIT_TAG'],
+            'pipeline': os.environ['CI_PIPELINE_ID']
+        })
+        releaseNote.send_note(title_line + note)
+
+        # Update Wiki release page
+        wiki_page = ReleaseWikiPage(self.template)
+        wiki_page.add_release(os.environ['CI_COMMIT_TAG'], PlaceHolder.clear_all(note))
+        wiki_page.save()
+
+        # Keep artifacts
+        jobs = []
+        for artifact in self._get_bin_artifacts():
+            if not artifact.job in jobs:
+                jobs.append(artifact.job)
+        for job_id in jobs: Job(job_id).keep_artifacts()
+
+        print('Release published')
+
+    def _get_bin_artifacts(self):
+        '''
+        Get the binary artifacts for the current tag.
+        :return: The list of binary artifacts, based on found descriptions.
+        :rtype: list of BinArtifact
+        '''
+        DESC_EXT = '.desc'
+        artifacts = glob.glob('{}*{}'.format(self.release_bin_dir, DESC_EXT))
+        artifacts = map(lambda d: BinArtifact(self.release_bin_dir, d, DESC_EXT), artifacts)
+        artifacts = filter(lambda a: a.tag == os.environ['CI_COMMIT_TAG'], artifacts)
+        return list(artifacts)
diff --git a/.gitlab/releaser/releasewikipage.py b/.gitlab/releaser/releasewikipage.py
new file mode 100644
index 0000000000000000000000000000000000000000..c76479610d4eba3ee0244df475f632c9fc9818fd
--- /dev/null
+++ b/.gitlab/releaser/releasewikipage.py
@@ -0,0 +1,59 @@
+import json
+import os
+import urllib.request
+
+from placeholder import PlaceHolder
+from projectapi import ProjectApi
+
+class ReleaseWikiPage(ProjectApi):
+    '''
+    Release Wiki page API.
+    '''
+    __PH_TAG = PlaceHolder('tag')
+    __PH_NOTE = PlaceHolder('note')
+    __PH_PREVIOUS = PlaceHolder('previous-beg')
+
+    def __init__(self, template):
+        '''
+        :param template: The template to use.
+        :type template: Template
+        '''
+        if 'WIKI_RELEASE' not in os.environ:
+            print('CRITICAL WIKI_RELEASE variable is not defined')
+            exit(1)
+        ProjectApi.__init__(self, '/wikis/{}'.format(os.environ['WIKI_RELEASE']))
+        self.template = template
+
+        # Query existing page
+        request = self.build_request()
+        response = urllib.request.urlopen(request)
+        response_data = response.read().decode()
+        data = json.loads(response_data)
+        self.page_content = data['content']
+
+    def add_release(self, tag, note):
+        '''
+        Add the release to the Wiki page.
+        '''
+        prev_tag = ReleaseWikiPage.__PH_TAG.get_content(self.page_content)
+        prev_note = ReleaseWikiPage.__PH_NOTE.get_content(self.page_content)
+        self.page_content = ReleaseWikiPage.__PH_TAG.replace_content(self.page_content, tag)
+        self.page_content = ReleaseWikiPage.__PH_NOTE.replace_content(self.page_content, note)
+        previous = self.template.render('previouswiki', {
+            'tag': prev_tag,
+            'body': prev_note
+        })
+        self.page_content = ReleaseWikiPage.__PH_PREVIOUS.insert_after(
+            self.page_content, previous)
+
+    def save(self):
+        send_data = {
+            'content': self.page_content,
+            'format': 'markdown',
+            'slug': os.environ['WIKI_RELEASE'],
+            'title': os.environ['WIKI_RELEASE']
+        }
+        send_data_serialized = json.dumps(send_data).encode('utf-8')
+        request = self.build_request(data=send_data_serialized, method='PUT')
+        request.add_header('Content-Type', 'application/json')
+        urllib.request.urlopen(request)
diff --git a/.gitlab/releaser/sourceartifact.py b/.gitlab/releaser/sourceartifact.py
new file mode 100644
index 0000000000000000000000000000000000000000..b5eda1c81d26cbed6ced8339aff1001bdc562c70
--- /dev/null
+++ b/.gitlab/releaser/sourceartifact.py
@@ -0,0 +1,23 @@
+import os
+
+from artifact import Artifact
+from fsitemsize import FSItemSize
+
+class SourceArtifact(Artifact):
+    '''
+    A source artifact.
+    '''
+
+    def __init__(self, extension):
+        '''
+        :param extension: The extension of the source archive.
+        :type extension: str
+        '''
+        Artifact.__init__(self, 'archive.{}'.format(extension), 'Source code ({})'.format(extension), '', '', 'compression')
+
+    def _get_size(self):
+        return FSItemSize()
+
+    def _build_url(self):
+        return '{}/repository/{}/{}'.format(
+            os.environ['CI_PROJECT_URL'], os.environ['CI_COMMIT_TAG'], self.file_name)
diff --git a/.gitlab/releaser/template.py b/.gitlab/releaser/template.py
new file mode 100644
index 0000000000000000000000000000000000000000..0c607a059dade516d96db0a0a89b8d43b82921e6
--- /dev/null
+++ b/.gitlab/releaser/template.py
@@ -0,0 +1,31 @@
+import jinja2
+import os
+
+class Template:
+    '''
+    Manages the template file. The template file is split into blocks.
+    '''
+    def __init__(self, fname):
+        '''
+        :param fname: The name of the template file.
+        :type fname: str
+        '''
+        path = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+        environment = jinja2.Environment(
+            loader=jinja2.FileSystemLoader(path),
+            trim_blocks=True
+        )
+        self.template = environment.get_template(fname)
+
+    def render(self, block, params):
+        '''
+        Render a block from the template file.
+        :param block: The name of the block to render.
+        :param params: The parameters to be used in the block.
+        :type block: str
+        :type params: dict
+        :return: The rendered block.
+        :rtype: str
+        '''
+        context = self.template.new_context(params)
+        return jinja2.utils.concat(self.template.blocks[block](context))
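Sketch of how the releaser renders a single block out of `release_template.md` (the `pipeline` parameter comes from the template shown earlier; the value 42 is illustrative):

```python
from template import Template

template = Template('release_template.md')
title = template.render('prerelease', { 'pipeline': 42 })
# -> '# :gift: Pre-release' plus the pipeline link for pipeline 42
```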
diff --git a/.travis.yml b/.travis.yml
index 32368217c48147b7d3adc318b5d47af12bdc8662..a3f3862e0e3586fec203e2daa1561f787c4051a5 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@
 language: node_js
 
 node_js:
-  - 6.11.1
+  - 8.9.2
 env:
   - CXX=g++-4.8
 addons:
diff --git a/app/ProcessCpuProfiler.ts b/app/ProcessCpuProfiler.ts
new file mode 100644
index 0000000000000000000000000000000000000000..31bbf45117b8a38b3b5a6e447f1896025738a2a2
--- /dev/null
+++ b/app/ProcessCpuProfiler.ts
@@ -0,0 +1,72 @@
+const SAMPLING_PERIOD = 150 // milliseconds
+const MAX_SAMPLES_DISTANCE = 20 * 1000000 // 20 seconds, expressed in microseconds
+
+function getMicrosecondsTime() {
+  const [ seconds, nanoseconds ] = process.hrtime()
+  return seconds * 1000000 + nanoseconds / 1000
+}
+
+interface CpuUsage {
+  user: number
+  system:number
+}
+
+interface CpuUsageAt {
+  usage:number
+  at:number // microseconds timestamp
+  elapsed:number // microseconds elapsed for this result
+}
+
+export class ProcessCpuProfiler {
+
+  private cumulatedUsage: CpuUsage
+  private startedAt:number // microseconds timestamp
+  private samples:CpuUsageAt[] = []
+
+  constructor(samplingPeriod = SAMPLING_PERIOD) {
+    // Initial state
+    const start = getMicrosecondsTime()
+    this.startedAt = start
+    this.cumulatedUsage = process.cpuUsage()
+    this.samples.push({ usage: 0, at: start, elapsed: 1 })
+    // Periodic sample
+    setInterval(() => {
+      const newSampleAt = getMicrosecondsTime()
+      const newUsage:CpuUsage = process.cpuUsage()
+      const elapsed = newSampleAt - this.lastSampleAt
+      const userDiff = newUsage.user - this.cumulatedUsage.user
+      const usagePercent = userDiff / elapsed // Fraction (0..1) of user CPU time consumed since the last sample
+      this.samples.push({ usage: usagePercent, at: newSampleAt, elapsed })
+      while(this.samplesDistance > MAX_SAMPLES_DISTANCE) {
+        this.samples.shift()
+      }
+      this.cumulatedUsage = newUsage
+      // console.log('Time elapsed: %s microseconds, = %s %CPU', elapsed, (usagePercent*100).toFixed(2))
+    }, samplingPeriod)
+  }
+
+  private get lastSampleAt() {
+    return this.samples[this.samples.length - 1].at
+  }
+
+  private get samplesDistance() {
+    return this.samples[this.samples.length - 1].at - this.samples[0].at
+  }
+
+  cpuUsageOverLastMilliseconds(elapsedMilliseconds:number) {
+    return this.cpuUsageOverLastX(elapsedMilliseconds * 1000)
+  }
+
+  private cpuUsageOverLastX(nbMicrosecondsElapsed:number) {
+    return this.getSamplesResult(getMicrosecondsTime() - nbMicrosecondsElapsed)
+  }
+
+  private getSamplesResult(minTimestamp:number) {
+    const matchingSamples = this.samples.filter(s => s.at >= minTimestamp - SAMPLING_PERIOD * 1000)
+    const cumulativeElapsed = matchingSamples.reduce((sum, s) => sum + s.elapsed, 0)
+    return matchingSamples.reduce((cumulated, percent) => {
+      const weight = percent.elapsed / cumulativeElapsed
+      return cumulated + percent.usage * weight
+    }, 0)
+  }
+}
\ No newline at end of file
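A usage sketch for the profiler (the timings are illustrative):

```typescript
// Let the profiler gather samples, then ask for the average CPU usage
// over the last 5 seconds.
const profiler = new ProcessCpuProfiler()
setTimeout(() => {
  const usage = profiler.cpuUsageOverLastMilliseconds(5000)
  console.log('CPU over the last 5s: %s%', (usage * 100).toFixed(2))
}, 6000)
```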
diff --git a/app/cli.ts b/app/cli.ts
index 3c2e495153853106e8b04d3253c36be1703752c1..887724f418e72399566968f7c645c14fb0c2598d 100644
--- a/app/cli.ts
+++ b/app/cli.ts
@@ -1,7 +1,7 @@
 const Command = require('commander').Command;
 const pjson = require('../package.json');
 const duniter = require('../index');
-
+ 
 export const ExecuteCommand = () => {
 
   const options:any = [];
diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts
index f9cda71f7a61d29af6075eeb98a09c2b9264a49b..e1be3f6661f221523d06aad8460cb82d3c36f6a4 100644
--- a/app/lib/common-libs/constants.ts
+++ b/app/lib/common-libs/constants.ts
@@ -32,8 +32,10 @@ const CONDITIONS   = "(&&|\\|\\|| |[()]|(SIG\\(" + PUBKEY + "\\)|(XHX\\([A-F0-9]
 
 const BMA_REGEXP  = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/
 const BMATOR_REGEXP = /^BMATOR( ([a-z0-9]{16})\.onion)( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/
-const WS2P_REGEXP = /^WS2P ([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/
-const WS2PTOR_REGEXP = /^WS2PTOR ([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/
+const WS2P_REGEXP = /^WS2P (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/
+const WS2P_V2_REGEXP = /^WS2P ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/
+const WS2PTOR_REGEXP = /^WS2PTOR (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/
+const WS2PTOR_V2_REGEXP = /^WS2PTOR ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/
 const WS_FULL_ADDRESS_ONION_REGEX = /^(?:wss?:\/\/)(?:www\.)?([0-9a-z]{16}\.onion)(:[0-9]+)?$/
 const IPV4_REGEXP = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/;
 const IPV6_REGEXP = /^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(([0-9A-Fa-f]{1,4}:){0,5}:((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(::([0-9A-Fa-f]{1,4}:){0,5}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/;
@@ -81,7 +83,8 @@ export const CommonConstants = {
     SOFTWARE,
     SOFT_VERSION,
     POW_PREFIX,
-    ZERO_OR_POSITIVE_INT
+    ZERO_OR_POSITIVE_INT,
+    SIGNATURE
   },
 
   BLOCK_GENERATED_VERSION: 10,
@@ -95,7 +98,9 @@ export const CommonConstants = {
   BMA_REGEXP,
   BMATOR_REGEXP,
   WS2P_REGEXP,
+  WS2P_V2_REGEXP,
   WS2PTOR_REGEXP,
+  WS2PTOR_V2_REGEXP,
   WS_FULL_ADDRESS_ONION_REGEX,
   IPV4_REGEXP,
   IPV6_REGEXP,
@@ -280,6 +285,8 @@ export const CommonConstants = {
     BLOCK: find("Block: (" + INTEGER + "-" + FINGERPRINT + ")"),
     SPECIAL_BLOCK
   },
+
+  BLOCK_MAX_TX_CHAINING_DEPTH: 5
 }
 
 function exact (regexpContent:string) {
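For reference, how the new patterns split v1 and v2 endpoints — the endpoint strings are hypothetical:

```typescript
const v1 = 'WS2P 1e8c01f9 g1.example.org 20901'         // no version field
const v2 = 'WS2P 2 1e8c01f9 g1.example.org 20901 ws2p'  // explicit version 2

CommonConstants.WS2P_REGEXP.test(v1)     // true (the version prefix is optional)
CommonConstants.WS2P_REGEXP.test(v2)     // true (v2 strings still satisfy the v1 pattern)
CommonConstants.WS2P_V2_REGEXP.test(v1)  // false (an explicit version is required)
CommonConstants.WS2P_V2_REGEXP.test(v2)  // true, capture group 1 is '2'
```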
diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts
index a79dc8d8dabfe41814656c40f7f9d9b5297afebd..d4a5d7481923c17816907793556a4f8a70cf434c 100644
--- a/app/lib/dal/fileDAL.ts
+++ b/app/lib/dal/fileDAL.ts
@@ -16,6 +16,7 @@ import {DBMembership} from "./sqliteDAL/MembershipDAL"
 import {MerkleDTO} from "../dto/MerkleDTO"
 import {CommonConstants} from "../common-libs/constants"
 import { ProxiesConf } from '../proxy';
+import {PowDAL} from "./fileDALs/PowDAL";
 
 const fs      = require('fs')
 const path    = require('path')
@@ -40,6 +41,7 @@ export class FileDAL {
   wotb:any
   profile:string
 
+  powDAL:PowDAL
   confDAL:any
   metaDAL:any
   peerDAL:any
@@ -68,6 +70,7 @@ export class FileDAL {
     this.profile = 'DAL'
 
     // DALs
+    this.powDAL = new PowDAL(this.rootPath, this.myFS)
     this.confDAL = new ConfDAL(this.rootPath, this.myFS)
     this.metaDAL = new (require('./sqliteDAL/MetaDAL').MetaDAL)(this.sqliteDriver);
     this.peerDAL = new (require('./sqliteDAL/PeerDAL').PeerDAL)(this.sqliteDriver);
@@ -85,6 +88,7 @@ export class FileDAL {
     this.cindexDAL = new (require('./sqliteDAL/index/CIndexDAL').CIndexDAL)(this.sqliteDriver);
 
     this.newDals = {
+      'powDAL': this.powDAL,
       'metaDAL': this.metaDAL,
       'blockDAL': this.blockDAL,
       'certDAL': this.certDAL,
@@ -503,13 +507,13 @@ export class FileDAL {
     return this.sindexDAL.getSource(identifier, pos)
   }
 
-  async isMember(pubkey:string) {
+  async isMember(pubkey:string):Promise<boolean> {
     try {
       const idty = await this.iindexDAL.getFromPubkey(pubkey);
-      if (!idty) {
+      if (idty === null) {
         return false
       }
-      return idty.member;
+      return true;
     } catch (err) {
       return false;
     }
diff --git a/app/lib/dal/fileDALs/PowDAL.ts b/app/lib/dal/fileDALs/PowDAL.ts
new file mode 100644
index 0000000000000000000000000000000000000000..6a3d0102a6b5eb6986da81cc038bf34043526efc
--- /dev/null
+++ b/app/lib/dal/fileDALs/PowDAL.ts
@@ -0,0 +1,22 @@
+import {AbstractCFS} from "./AbstractCFS"
+
+export class PowDAL extends AbstractCFS {
+
+  private static POW_FILE = "pow.txt"
+
+  constructor(rootPath:string, qioFS:any) {
+    super(rootPath, qioFS)
+  }
+
+  init() {
+    return this.coreFS.remove(PowDAL.POW_FILE, false).catch(() => {})
+  }
+
+  async getCurrent() {
+    return await this.coreFS.read(PowDAL.POW_FILE);
+  }
+
+  async writeCurrent(current:string) {
+    await this.coreFS.write(PowDAL.POW_FILE, current, false);
+  }
+}
diff --git a/app/lib/dal/sqliteDAL/DocumentDAL.ts b/app/lib/dal/sqliteDAL/DocumentDAL.ts
index d2097d7f025f31ee507e1b83051bfd36ca3f2f55..e5ce3c562c98d6f3830523d06a1b9fe67eb846b8 100644
--- a/app/lib/dal/sqliteDAL/DocumentDAL.ts
+++ b/app/lib/dal/sqliteDAL/DocumentDAL.ts
@@ -1,3 +1,3 @@
 export interface DBDocument {
-  issuers?: string[]
+  issuers: string[]
 }
\ No newline at end of file
diff --git a/app/lib/dal/sqliteDAL/SandBox.ts b/app/lib/dal/sqliteDAL/SandBox.ts
index 09431c3aada77ab98f4282737d0270eb5be85ff3..d8e9f3d96c7fa07438baf8cca9b117a549f63232 100644
--- a/app/lib/dal/sqliteDAL/SandBox.ts
+++ b/app/lib/dal/sqliteDAL/SandBox.ts
@@ -14,7 +14,7 @@ export class SandBox<T extends DBDocument> {
   
   async acceptNewSandBoxEntry(element:T, pubkey:string) {
     // Accept any document which has the exception pubkey (= the node pubkey)
-    if (element.issuers !== undefined && element.issuers.indexOf(pubkey) !== -1) {
+    if (element.issuers.indexOf(pubkey) !== -1) {
       return true;
     }
     const elements = await this.findElements()
diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts
index 2e77e92fc70336c82b99bbb7b908ee9251918d09..f9405f1f4efbbe7a2017f8dee565dea38be47854 100644
--- a/app/lib/dto/ConfDTO.ts
+++ b/app/lib/dto/ConfDTO.ts
@@ -8,6 +8,10 @@ export interface Keypair {
   sec: string
 }
 
+export interface PowDTO {
+  powNoSecurity:boolean
+}
+
 export interface BranchingDTO {
   switchOnHeadAdvance:number
   avgGenTime:number
@@ -82,7 +86,7 @@ export interface WS2PConfDTO {
   }
 }
 
-export class ConfDTO implements CurrencyConfDTO, KeypairConfDTO, NetworkConfDTO, BranchingDTO, WS2PConfDTO {
+export class ConfDTO implements CurrencyConfDTO, KeypairConfDTO, NetworkConfDTO, BranchingDTO, WS2PConfDTO, PowDTO {
 
   constructor(
     public loglevel: string,
@@ -158,7 +162,8 @@ export class ConfDTO implements CurrencyConfDTO, KeypairConfDTO, NetworkConfDTO,
       privilegedOnly: boolean
       maxPublic?:number
       maxPrivate?:number
-    }
+    },
+    public powNoSecurity = false
 ) {}
 
   static mock() {
diff --git a/app/lib/dto/PeerDTO.ts b/app/lib/dto/PeerDTO.ts
index dc5754b6893349a0009b66e1cbb21c78d7b116a7..4b9555bfc617bb4ceff21c2c4ef2104f4ae97cb5 100644
--- a/app/lib/dto/PeerDTO.ts
+++ b/app/lib/dto/PeerDTO.ts
@@ -2,11 +2,14 @@ import {DBPeer} from "../dal/sqliteDAL/PeerDAL"
 import {hashf} from "../common"
 import {CommonConstants} from "../common-libs/constants"
 import {Cloneable} from "./Cloneable"
+import { WS2PConstants } from '../../modules/ws2p/lib/constants';
 
 export interface WS2PEndpoint {
+  version:number
   uuid:string
   host:string
   port:number
+  path:string
 }
 
 export class PeerDTO implements Cloneable {
@@ -94,29 +97,59 @@ export class PeerDTO implements Cloneable {
     return bma || {};
   }
 
-  getWS2P(canReachTorEp:boolean, canReachClearEp:boolean) {
-    let api:{ uuid:string, host:string, port:number, path:string }|null = null
-    const endpointRegexp = (canReachTorEp) ? CommonConstants.WS2PTOR_REGEXP:CommonConstants.WS2P_REGEXP
+  getOnceWS2PEndpoint(canReachTorEp:boolean, canReachClearEp:boolean, uuidExcluded:string[] = []) {
+    let api:WS2PEndpoint|null = null
+    let bestWS2PVersionAvailable:number = 0
+    let bestWS2PTORVersionAvailable:number = 0
     for (const ep of this.endpoints) {
       if (canReachTorEp) {
-        const matches:any = ep.match(CommonConstants.WS2PTOR_REGEXP)
-        if (matches) {
-          return {
-            uuid: matches[1],
-            host: matches[2] || '',
-            port: parseInt(matches[3]) || 0,
-            path: matches[4]
+        let matches:RegExpMatchArray | null = ep.match(CommonConstants.WS2PTOR_V2_REGEXP)
+        if (matches && parseInt(matches[1]) > bestWS2PTORVersionAvailable && (uuidExcluded.indexOf(matches[2]) === -1)) {
+          bestWS2PTORVersionAvailable = parseInt(matches[1])
+          api = {
+            version: parseInt(matches[1]),
+            uuid: matches[2],
+            host: matches[3] || '',
+            port: parseInt(matches[4]) || 0,
+            path: matches[5]
+          }
+        } else {
+          matches = ep.match(CommonConstants.WS2PTOR_REGEXP)
+          if (matches && bestWS2PTORVersionAvailable == 0 && (uuidExcluded.indexOf(matches[1]) === -1)) {
+            bestWS2PTORVersionAvailable = 1
+            api = {
+              version: 1,
+              uuid: matches[1],
+              host: matches[2] || '',
+              port: parseInt(matches[3]) || 0,
+              path: matches[4]
+            }
           }
         }
       }
-      if (canReachClearEp) {
-        const matches:any = !api && ep.match(CommonConstants.WS2P_REGEXP)
-        if (matches) {
+      // If the clear endpoint is reachable and no TOR endpoint was found
+      if (canReachClearEp && bestWS2PTORVersionAvailable == 0) {
+        let matches:any = ep.match(CommonConstants.WS2P_V2_REGEXP)
+        if (matches && parseInt(matches[1]) > bestWS2PVersionAvailable && (uuidExcluded.indexOf(matches[2]) === -1)) {
+          bestWS2PVersionAvailable = parseInt(matches[1])
           api = {
-            uuid: matches[1],
-            host: matches[2] || '',
-            port: parseInt(matches[3]) || 0,
-            path: matches[4]
+            version: parseInt(matches[1]),
+            uuid: matches[2],
+            host: matches[3] || '',
+            port: parseInt(matches[4]) || 0,
+            path: matches[5]
+          }
+        } else {
+          matches = ep.match(CommonConstants.WS2P_REGEXP)
+          if (matches && bestWS2PVersionAvailable == 0 && (uuidExcluded.indexOf(matches[1]) === -1)) {
+            bestWS2PVersionAvailable = 1
+            api = {
+              version: 1,
+              uuid: matches[1],
+              host: matches[2] || '',
+              port: parseInt(matches[3]) || 0,
+              path: matches[4]
+            }
           }
         }
       }
@@ -124,6 +157,18 @@ export class PeerDTO implements Cloneable {
     return api || null
   }
 
+  getAllWS2PEndpoints(canReachTorEp:boolean, canReachClearEp:boolean, myUUID:string) {
+    let apis:WS2PEndpoint[] = []
+    let uuidExcluded:string[] = [myUUID]
+    let api = this.getOnceWS2PEndpoint(canReachTorEp, canReachClearEp, uuidExcluded)
+    while (api !== null) {
+      uuidExcluded.push(api.uuid)
+      apis.push(api)
+      api = this.getOnceWS2PEndpoint(canReachTorEp, canReachClearEp, uuidExcluded)
+    }
+    return apis
+  }
+
   getDns() {
     const bma = this.getBMA();
     return bma.dns ? bma.dns : null;
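A sketch of the new enumeration API — the peer value and our own UUID (`'a0b1c2d3'`) are hypothetical:

```typescript
declare const peer: PeerDTO  // a peer record obtained elsewhere

// Collect every WS2P endpoint the peer advertises: the highest version
// per UUID wins, and our own UUID is excluded.
const endpoints = peer.getAllWS2PEndpoints(false, true, 'a0b1c2d3')
for (const ep of endpoints) {
  console.log('WS2P v%d %s:%d (uuid %s)', ep.version, ep.host, ep.port, ep.uuid)
}
```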
diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts
index ddb1cfa9988822c7b50fcb99465c4adf2d95aec3..b5f1c47be1b391cf00f6e99e7fa3a77d46db7375 100644
--- a/app/lib/indexer.ts
+++ b/app/lib/indexer.ts
@@ -865,21 +865,12 @@ export class Indexer {
       }
     }))
 
-    // BR_G46
-    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      const reducable = await dal.sindexDAL.sqlFind({
-        identifier: ENTRY.identifier,
-        pos: ENTRY.pos,
-        amount: ENTRY.amount,
-        base: ENTRY.base
-      });
-      ENTRY.conditions = reduce(reducable).conditions; // We valuate the input conditions, so we can map these records to a same account
-      ENTRY.available = reduce(reducable).consumed === false;
-    }))
-
-    // BR_G47
-    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      let source = _.filter(sindex, (src:SindexEntry) => src.identifier == ENTRY.identifier && src.pos == ENTRY.pos && src.conditions && src.op === constants.IDX_CREATE)[0];
+    const getInputLocalFirstOrFallbackGlobally = async (sindex:SindexEntry[], ENTRY:SindexEntry) => {
+      let source = _.filter(sindex, (src:SindexEntry) =>
+        src.identifier == ENTRY.identifier
+        && src.pos == ENTRY.pos
+        && src.conditions
+        && src.op === constants.IDX_CREATE)[0];
       if (!source) {
         const reducable = await dal.sindexDAL.sqlFind({
           identifier: ENTRY.identifier,
@@ -887,20 +878,29 @@ export class Indexer {
           amount: ENTRY.amount,
           base: ENTRY.base
         });
-        source = reduce(reducable);
+        source = reduce(reducable)
       }
+      return source
+    }
+
+    // BR_G46
+    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
+      ENTRY.conditions = source.conditions; // We evaluate the input conditions, so we can map these records to the same account
+      ENTRY.available = source.consumed === false;
+    }))
+
+    // BR_G47
+    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
       ENTRY.conditions = source.conditions;
       ENTRY.isLocked = !txSourceUnlock(ENTRY, source, HEAD);
     }))
 
     // BR_G48
     await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      ENTRY.isTimeLocked = ENTRY.written_time - reduce(await dal.sindexDAL.sqlFind({
-          identifier: ENTRY.identifier,
-          pos: ENTRY.pos,
-          amount: ENTRY.amount,
-          base: ENTRY.base
-        })).written_time < ENTRY.locktime;
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
+      ENTRY.isTimeLocked = ENTRY.written_time - source.written_time < ENTRY.locktime;
     }))
 
     return HEAD;
diff --git a/app/lib/rules/index.ts b/app/lib/rules/index.ts
index 44ca3d2535efb9cd8d8733317a255ad80f74cf5e..28d13899b467bb69fe4a8c9be2e9c0bcd021bcac 100644
--- a/app/lib/rules/index.ts
+++ b/app/lib/rules/index.ts
@@ -33,6 +33,7 @@ export const ALIAS = {
     await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block);
     await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block);
     await LOCAL_RULES_FUNCTIONS.checkTxSignature(block);
+    await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index);
   },
 
   ALL_LOCAL_BUT_POW_AND_SIGNATURE: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => {
@@ -60,6 +61,7 @@ export const ALIAS = {
     await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block);
     await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block);
     await LOCAL_RULES_FUNCTIONS.checkTxSignature(block);
+    await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index);
   }
 }
 
diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts
index 41d19f163d8b3a377fb2e6832c7be37dae36d2d3..93b8e4aa19df8131ab24ec43599e4a926badf6ce 100644
--- a/app/lib/rules/local_rules.ts
+++ b/app/lib/rules/local_rules.ts
@@ -379,28 +379,70 @@ export const LOCAL_RULES_FUNCTIONS = {
       }
     }
     return true;
+  },
+
+  checkMaxTransactionChainingDepth: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => {
+    const sindex = Indexer.sindex(index)
+    const max = getMaxTransactionDepth(sindex)
+    // Chained transactions are allowed (up to the configured depth) only since 2018-03-01 00:00 UTC (medianTime 1519862400)
+    const allowedMax = block.medianTime > 1519862400 ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 0
+    if (max > allowedMax) {
+      throw "The maximum transaction chaining length per block is " + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH
+    }
+    return true
+  }
+}
+
+export interface SindexShortEntry {
+  op:string,
+  identifier:string,
+  pos:number,
+  tx:string|null
+}
+
+function getMaxTransactionDepth(sindex:SindexShortEntry[]) {
+  const ids = _.uniq(_.pluck(sindex, 'tx'))
+  let maxTxChainingDepth = 0
+  for (let id of ids) {
+    maxTxChainingDepth = Math.max(maxTxChainingDepth, getTransactionDepth(id, sindex, 0))
   }
+  return maxTxChainingDepth
+}
+
+function getTransactionDepth(txHash:string, sindex:SindexShortEntry[], localDepth = 0) {
+  const inputs = _.filter(sindex, (s:SindexShortEntry) => s.op === 'UPDATE' && s.tx === txHash)
+  let depth = localDepth
+  for (let input of inputs) {
+    const consumedOutput = _.findWhere(sindex, { op: 'CREATE', identifier: input.identifier, pos: input.pos })
+    if (consumedOutput) {
+      if (localDepth < 5) { // bound the recursion: past the allowed maximum, the exact depth no longer matters
+        const subTxDepth = getTransactionDepth(consumedOutput.tx, sindex, localDepth + 1)
+        depth = Math.max(depth, subTxDepth)
+      } else {
+        depth++
+      }
+    }
+  }
+  return depth
 }
 
 function checkSingleMembershipSignature(ms:any) {
   return verify(ms.getRaw(), ms.signature, ms.issuer);
 }
 
-function checkBunchOfTransactions(transactions:TransactionDTO[], done:any = undefined){
-  const block:any = { transactions };
+function checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, options?:{ dontCareAboutChaining?:boolean }){
+  const block:any = { transactions, identities: [], joiners: [], actives: [], leavers: [], revoked: [], excluded: [], certifications: [] };
+  const index = Indexer.localIndex(block, conf)
   return (async () => {
-    try {
-      let local_rule = LOCAL_RULES_FUNCTIONS;
-      await local_rule.checkTxLen(block);
-      await local_rule.checkTxIssuers(block);
-      await local_rule.checkTxSources(block);
-      await local_rule.checkTxRecipients(block);
-      await local_rule.checkTxAmounts(block);
-      await local_rule.checkTxSignature(block);
-      done && done();
-    } catch (err) {
-      if (done) return done(err);
-      throw err;
+    let local_rule = LOCAL_RULES_FUNCTIONS;
+    await local_rule.checkTxLen(block);
+    await local_rule.checkTxIssuers(block);
+    await local_rule.checkTxSources(block);
+    await local_rule.checkTxRecipients(block);
+    await local_rule.checkTxAmounts(block);
+    await local_rule.checkTxSignature(block);
+    if (!options || !options.dontCareAboutChaining) {
+      await local_rule.checkMaxTransactionChainingDepth(block, conf, index);
     }
   })()
 }
@@ -411,9 +453,13 @@ export const LOCAL_RULES_HELPERS = {
 
   checkSingleMembershipSignature: checkSingleMembershipSignature,
 
-  checkBunchOfTransactions: checkBunchOfTransactions,
+  checkBunchOfTransactions,
+
+  getTransactionDepth,
+
+  getMaxTransactionDepth,
 
-  checkSingleTransactionLocally: (tx:any, done:any = undefined) => checkBunchOfTransactions([tx], done),
+  checkSingleTransactionLocally: (tx:any, conf:ConfDTO) => checkBunchOfTransactions([tx], conf),
 
   checkTxAmountsValidity: (tx:TransactionDTO) => {
     const inputs = tx.inputsAsObjects()
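A worked example of the depth computation above (identifiers and tx hashes are hypothetical): TX B consumes an output created by TX A in the same block, so its chaining depth is 1.

```typescript
const sindex = [
  { op: 'CREATE', identifier: 'S1', pos: 0, tx: 'A' }, // output created by TX A
  { op: 'UPDATE', identifier: 'S1', pos: 0, tx: 'B' }, // ... consumed by TX B
  { op: 'CREATE', identifier: 'S2', pos: 0, tx: 'B' }  // output created by TX B
]
LOCAL_RULES_HELPERS.getTransactionDepth('B', sindex)  // -> 1
LOCAL_RULES_HELPERS.getMaxTransactionDepth(sindex)    // -> 1
```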
diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts
index 3846c4b8b0aee27b91743164c4574506e03ad7c8..2c1a4d658347aca032e727dda062a01a69e0f1f7 100644
--- a/app/lib/system/directory.ts
+++ b/app/lib/system/directory.ts
@@ -28,7 +28,7 @@ const dir = module.exports = {
 
   getHome: (profile:string|null = null, directory:string|null = null) => getHomePath(profile, directory),
 
-  getHomeFS: async (isMemory:boolean, theHome:string) => {
+  getHomeFS: async (isMemory:boolean, theHome:string, makeTree = true) => {
     const home = theHome || dir.getHome();
     const params:any = {
       home: home
@@ -38,7 +38,9 @@ const dir = module.exports = {
     } else {
       params.fs = qfs;
     }
-    await params.fs.makeTree(home);
+    if (makeTree) {
+      await params.fs.makeTree(home)
+    }
     return params;
   },
 
diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts
index 1ea72f5a647af1c5df1ab99bbb47556ba2f78012..584920f3c4444c8d83859d85abbce5fce3d8a8f2 100644
--- a/app/modules/daemon.ts
+++ b/app/modules/daemon.ts
@@ -95,6 +95,9 @@ module.exports = {
 
         logger.info(">> Server starting...");
 
+        // Log NodeJS version
+        logger.info('NodeJS version: ' + process.version);
+
         await server.checkConfig();
         // Add signing & public key functions to PeeringService
         logger.info('Node version: ' + server.version);
diff --git a/app/modules/prover/lib/PowWorker.ts b/app/modules/prover/lib/PowWorker.ts
new file mode 100644
index 0000000000000000000000000000000000000000..bb0b73b000e1fff991045bdd7f6899539d85da13
--- /dev/null
+++ b/app/modules/prover/lib/PowWorker.ts
@@ -0,0 +1,95 @@
+import {Querable} from "./permanentProver"
+
+const querablep = require('querablep')
+
+/*********
+ *
+ * PoW worker
+ * ----------
+ *
+ * Its model is super simple: we ask it to find a proof, and we can wait for it.
+ * At any moment we can tell it to cancel the current proof, which makes it resolve with `null` as the proof value.
+ *
+ * The worker also provides two properties:
+ *
+ * - `worker.online`: a promise which is resolved when the worker gets « online » for the first time
+ * - `worker.exit`: a promise which is resolved when the worker exits (which occurs when the worker is being closed or killed)
+ *
+ ********/
+
+export class PowWorker {
+
+  private onlinePromise:Promise<void>
+  private onlineResolver:()=>void
+
+  private exitPromise:Promise<void>
+  private exitResolver:()=>void
+
+  private proofPromise:Querable<{ message: { answer:any }}|null>
+  private proofResolver:(proof:{ message: { answer:any }}|null)=>void
+
+  private messageHandler:((worker:any, msg:any)=>void)
+
+  constructor(
+    private nodejsWorker:any,
+    private onPowMessage:(message:any)=>void,
+    private onlineHandler:()=>void,
+    private exitHandler:(code:any, signal:any)=>void) {
+
+    // Handle "online" promise
+    this.onlinePromise = new Promise(res => this.onlineResolver = res)
+    nodejsWorker.on('online', () => {
+      this.onlineHandler()
+      this.onlineResolver()
+    })
+
+    // Handle "exit" promise
+    this.exitPromise = new Promise(res => this.exitResolver = res)
+    nodejsWorker.on('exit', (code:any, signal:any) => {
+      this.exitHandler(code, signal)
+      this.exitResolver()
+    })
+
+    nodejsWorker.on('message', (message:any) => {
+      if (message) {
+        this.onPowMessage(message)
+      }
+      if (this.proofPromise && message.uuid && !this.proofPromise.isResolved() && this.proofResolver) {
+        const result:{ message: { answer:any }}|null = message ? { message } : null
+        this.proofResolver(result)
+      }
+    })
+  }
+
+  get online() {
+    return this.onlinePromise
+  }
+
+  get exited() {
+    return this.exitPromise
+  }
+
+  get pid() {
+    return this.nodejsWorker.process.pid
+  }
+
+  askProof(commandMessage:{ uuid:string, command:string, value:any }) {
+    this.proofPromise = querablep(new Promise<{ message: { answer:any }}|null>(res => this.proofResolver = res))
+    this.nodejsWorker.send(commandMessage)
+    return this.proofPromise
+  }
+
+  sendConf(confMessage:{ rootPath: string, command:string, value:any }) {
+    this.nodejsWorker.send(confMessage)
+  }
+
+  sendCancel() {
+    this.nodejsWorker.send({
+      command: 'cancel'
+    })
+  }
+
+  kill() {
+    this.nodejsWorker.kill()
+  }
+}
\ No newline at end of file
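
A minimal sketch of how this promise-based API is meant to be driven from the master side; the real wiring lives in powCluster.ts below, and the handlers here are illustrative:

```ts
import {PowWorker} from "./PowWorker"

const cluster = require('cluster')

async function findOneProof(ask: { uuid:string, command:string, value:any }) {
  const worker = new PowWorker(
    cluster.fork(),
    (msg:any) => { /* PoW progress messages land here */ },
    () => { /* first 'online' event */ },
    (code:any, signal:any) => { /* underlying process exited */ }
  )
  await worker.online                 // wait for the fork to be ready
  const proof = await worker.askProof(ask)
  // `proof` carries no answer when the work was stopped via worker.sendCancel()
  worker.kill()
  await worker.exited                 // resolved once the process is gone
  return proof && proof.message.answer
}
```
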
diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts
index a941bbaeb7fd0325e07f772a48b0eed2f2df1ae8..39d9cb45aee91ab8d19df5bdd52f9ccee77944d3 100644
--- a/app/modules/prover/lib/blockGenerator.ts
+++ b/app/modules/prover/lib/blockGenerator.ts
@@ -65,7 +65,7 @@ export class BlockGenerator {
     const wereExcludeds = await this.dal.getRevokedPubkeys();
     const newCertsFromWoT = await generator.findNewCertsFromWoT(current);
     const newcomersLeavers = await this.findNewcomersAndLeavers(current, (joinersData:any) => generator.filterJoiners(joinersData));
-    const transactions = await this.findTransactions(current);
+    const transactions = await this.findTransactions(current, manualValues);
     const joinData = newcomersLeavers[2];
     const leaveData = newcomersLeavers[3];
     const newCertsFromNewcomers = newcomersLeavers[4];
@@ -104,7 +104,8 @@ export class BlockGenerator {
     return [cur, newWoTMembers, finalJoinData, leavers, updates];
   }
 
-  private async findTransactions(current:DBBlock) {
+  private async findTransactions(current:DBBlock, options:{ dontCareAboutChaining?:boolean }) {
+    const ALSO_CHECK_PENDING_TXS = true
     const versionMin = current ? Math.min(CommonConstants.LAST_VERSION_FOR_TX, current.version) : CommonConstants.DOCUMENTS_VERSION;
     const txs = await this.dal.getTransactionsPending(versionMin);
     const transactions = [];
@@ -113,14 +114,9 @@ export class BlockGenerator {
       obj.currency = this.conf.currency
       const tx = TransactionDTO.fromJSONObject(obj);
       try {
-        await new Promise((resolve, reject) => {
-          LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), (err:any, res:any) => {
-            if (err) return reject(err)
-            return resolve(res)
-          })
-        })
+        await LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), this.conf, options)
         const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
-        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal);
+        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal, ALSO_CHECK_PENDING_TXS);
         await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
         transactions.push(tx);
         passingTxs.push(tx);
@@ -279,7 +275,9 @@ export class BlockGenerator {
         const currentMembership = await this.dal.mindexDAL.getReducedMS(ms.issuer);
         const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
         if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) {
-          preJoinData[join.identity.pubkey] = join;
+          if (!preJoinData[join.identity.pubkey] || preJoinData[join.identity.pubkey].certs.length < join.certs.length) {
+            preJoinData[join.identity.pubkey] = join;
+          }
         }
       } catch (err) {
         if (err && !err.uerr) {
diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts
index 9cd215d98d478a456857b52f9f5c5388e1afa153..93647b3b1c035658a309b2b9e11cd7371511e969 100644
--- a/app/modules/prover/lib/blockProver.ts
+++ b/app/modules/prover/lib/blockProver.ts
@@ -22,7 +22,7 @@ export class WorkerFarm {
 
   constructor(private server:Server, private logger:any) {
 
-    this.theEngine = new PowEngine(server.conf, server.logger)
+    this.theEngine = new PowEngine(server.conf, server.logger, server.dal)
 
     // A utility method to filter the PoW notifications
     this.checkPoWandNotify = (hash:string, block:DBBlock, found:boolean) => {
@@ -44,6 +44,9 @@ export class WorkerFarm {
     })
   }
 
+  get nbWorkers() {
+    return this.theEngine.getNbWorkers()
+  }
 
   changeCPU(cpu:any) {
     return this.theEngine.setConf({ cpu })
@@ -70,7 +73,7 @@ export class WorkerFarm {
   }
 
   shutDownEngine() {
-    this.theEngine.shutDown()
+    return this.theEngine.shutDown()
   }
 
   /**
@@ -129,12 +132,7 @@ export class BlockProver {
     // If no farm was instantiated, there is nothing to do yet
     if (this.workerFarmPromise) {
       let farm = await this.getWorker();
-      if (farm.isComputing() && !farm.isStopping()) {
-        await farm.stopPoW()
-      } else {
-        // We force the stop anyway, just to be sure
-        await farm.stopPoW()
-      }
+      await farm.stopPoW()
       if (this.waitResolve) {
         this.waitResolve();
         this.waitResolve = null;
@@ -175,8 +173,8 @@ export class BlockProver {
       const start = Date.now();
       let result = await powFarm.askNewProof({
         newPoW: {
-          turnDuration: os.arch().match(/arm/) ? CommonConstants.POW_TURN_DURATION_ARM : CommonConstants.POW_TURN_DURATION_PC,
           conf: {
+            powNoSecurity: this.conf.powNoSecurity,
             cpu: this.conf.cpu,
             prefix: this.conf.prefix,
             avgGenTime: this.conf.avgGenTime,
@@ -194,10 +192,10 @@ export class BlockProver {
         throw 'Proof-of-work computation canceled because block received';
       } else {
         const proof = result.block;
-        const testsCount = result.testsCount;
+        const testsCount = result.testsCount * powFarm.nbWorkers
         const duration = (Date.now() - start);
-        const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2);
-        this.logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond);
+        const testsPerSecond = testsCount / (duration / 1000)
+        this.logger.info('Done: #%s, %s in %ss (~%s tests, ~%s tests/s, using %s cores, CPU %s%)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond.toFixed(2), powFarm.nbWorkers, Math.floor(100*this.conf.cpu))
         this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros);
         return BlockDTO.fromJSONObject(proof)
       }
diff --git a/app/modules/prover/lib/constants.ts b/app/modules/prover/lib/constants.ts
index 0a454d38fd9c85e58115c2f971bbb6cb1cea812e..bb0cfcf31821b76e4a86b066049431d345317613 100644
--- a/app/modules/prover/lib/constants.ts
+++ b/app/modules/prover/lib/constants.ts
@@ -13,6 +13,7 @@ export const ProverConstants = {
   NONCE_RANGE: 1000 * 1000 * 1000 * 100,
 
   POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64,
+  POW_NB_PAUSES_PER_ROUND: 10,
 
   // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds.
   POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000
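
`POW_NB_PAUSES_PER_ROUND` sizes the yields inside one proof round: proof.ts (below) computes `pausePeriod = Math.floor(testsPerRound / POW_NB_PAUSES_PER_ROUND)`, so with `testsPerRound = 200` the worker hands control back to the event loop every 20 hashes, which is what keeps it responsive to `cancel` messages.
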
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index cc83f46822764786bf91c1c02156c597966cc299..883e9a6f1b507ed72875ae5eec9d07eddf5a2693 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -1,6 +1,6 @@
-import {ProverConstants} from "./constants"
 import {Master as PowCluster} from "./powCluster"
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
+import {FileDAL} from "../../../lib/dal/fileDAL";
 
 const os         = require('os')
 
@@ -17,29 +17,24 @@ export class PowEngine {
   private cluster:PowCluster
   readonly id:number
 
-  constructor(private conf:ConfDTO, logger:any) {
+  constructor(private conf:ConfDTO, logger:any, private dal?:FileDAL) {
 
     // We use as many cores as available, but not more than CORES_MAXIMUM_USE_IN_PARALLEL
     this.nbWorkers = conf.nbCores
-    this.cluster = new PowCluster(this.nbWorkers, logger)
+    this.cluster = new PowCluster(this.nbWorkers, logger, dal)
     this.id = this.cluster.clusterId
   }
 
+  getNbWorkers() {
+    return this.cluster.nbWorkers
+  }
+
   forceInit() {
     return this.cluster.initCluster()
   }
 
   async prove(stuff:any) {
-
-    if (this.cluster.hasProofPending) {
-      await this.cluster.cancelWork()
-    }
-
-    const cpus = os.cpus()
-
-    if (os.arch().match(/arm/) || cpus[0].model.match(/Atom/)) {
-      stuff.newPoW.conf.nbCores /= 2; // Make sure that only once each physical core is used (for Hyperthreading).
-    }
+    await this.cluster.cancelWork()
     return await this.cluster.proveByWorkers(stuff)
   }
 
@@ -48,9 +43,6 @@ export class PowEngine {
   }
 
   setConf(value:any) {
-    if (os.arch().match(/arm/) && value.cpu !== undefined) {
-      value.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2
-    }
     return this.cluster.changeConf(value)
   }
 
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index 636a68b986b358b506e00b40714257b7180b82e3..d5c4fefedeceb4550d8a4972fddfc4b4a3a09e49 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -9,7 +9,7 @@ import {Server} from "../../../../server"
 
 const querablep = require('querablep');
 
-interface Querable<T> extends Promise<T> {
+export interface Querable<T> extends Promise<T> {
   isFulfilled(): boolean
   isResolved(): boolean
   isRejected(): boolean
@@ -213,6 +213,8 @@ export class PermanentProver {
     await this.prover.cancel();
     // If we were waiting, stop it and process the continuous generation
     this.blockchainChangedResolver && this.blockchainChangedResolver();
+    const farm = await this.prover.getWorker()
+    await farm.shutDownEngine()
   }
 
   private checkTrialIsNotTooHigh(trial:number, current:DBBlock, selfPubkey:string) {
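
`Querable<T>` is exported because PowWorker.ts (above) now types its proof promise with it. For reference, a minimal sketch of what the `querablep` wrapper is assumed to provide (my reading of its behaviour, not the library's source):

```ts
function makeQuerable<T>(promise: Promise<T>): Querable<T> {
  let resolved = false, rejected = false
  const q = promise.then(
    (val) => { resolved = true; return val },
    (err) => { rejected = true; throw err }
  ) as Querable<T>
  q.isResolved  = () => resolved              // settled with a value
  q.isRejected  = () => rejected              // settled with an error
  q.isFulfilled = () => resolved || rejected  // settled either way
  return q
}
```
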
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 4d4820777cb9b279aebea2d09680d1be0b3aab1f..069bfba8fd1b493f153990da33646fa97b0ce3dd 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -1,30 +1,43 @@
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 import {ProverConstants} from "./constants"
+import {createPowWorker} from "./proof"
+import {PowWorker} from "./PowWorker"
+import {FileDAL} from "../../../lib/dal/fileDAL";
 
 const _ = require('underscore')
 const nuuid = require('node-uuid');
-const moment = require('moment');
 const cluster = require('cluster')
 const querablep = require('querablep')
-const logger = require('../../../lib/logger').NewLogger()
 
 let clusterId = 0
+cluster.setMaxListeners(3)
+
+export interface SlaveWorker {
+  worker:PowWorker,
+  index:number,
+  online:Promise<void>,
+  nonceBeginning:number
+}
 
 /**
  * Cluster controller, handles the messages between the main program and the PoW cluster.
  */
 export class Master {
 
+  nbCancels = 0
+
   clusterId:number
   currentPromise:any|null = null
-  slaves:any[] = []
-  slavesMap:any = {}
+  slaves:SlaveWorker[] = []
+  slavesMap:{
+    [k:number]: SlaveWorker|null
+  } = {}
   conf:any = {}
   logger:any
   onInfoCallback:any
   workersOnline:Promise<any>[]
 
-  constructor(private nbCores:number, logger:any) {
+  constructor(private nbCores:number, logger:any, private dal?:FileDAL) {
     this.clusterId = clusterId++
     this.logger = logger || Master.defaultLogger()
     this.onInfoMessage = (message:any) => {
@@ -36,38 +49,51 @@ export class Master {
     return this.slaves.length
   }
 
-  get hasProofPending() {
-    return !!this.currentPromise
-  }
-
   set onInfoMessage(callback:any) {
     this.onInfoCallback = callback
   }
 
-  onWorkerMessage(worker:any, message:any) {
+  onWorkerMessage(workerIndex:number, message:any) {
     // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`)
-    if (message.pow && message.pow.pow) {
+    if (message && message.pow) {
       this.onInfoCallback && this.onInfoCallback(message)
     }
-    if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) {
-      this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`)
-      this.currentPromise.extras.resolve(message.answer)
-      // Stop the slaves' current work
-      this.cancelWork()
+    if (this.currentPromise && message.uuid && !this.currentPromise.isResolved() && message.answer) {
+      this.logger.info(`ENGINE c#${this.clusterId}#${workerIndex} HAS FOUND A PROOF #${message.answer.pow.pow}`)
+    } else if (message.canceled) {
+      this.nbCancels++
     }
     // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message)
   }
 
+  /*****************
+   * CLUSTER METHODS
+   ****************/
+
   initCluster() {
     // Setup master
     cluster.setupMaster({
-      exec: __filename
+      exec: __filename,
+      execArgv: [] // Do not try to debug forks
     })
 
     this.slaves = Array.from({ length: this.nbCores }).map((value, index) => {
-      const worker = cluster.fork()
-      this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`)
-      this.slavesMap[worker.id] = {
+      const nodejsWorker = cluster.fork()
+      const worker = new PowWorker(nodejsWorker, message => {
+        this.onWorkerMessage(index, message)
+      }, () => {
+        this.logger.info(`[online] worker c#${this.clusterId}#w#${index}`)
+        worker.sendConf({
+          rootPath: this.dal ? this.dal.rootPath : '',
+          command: 'conf',
+          value: this.conf
+        })
+      }, (code:any, signal:any) => {
+        this.logger.info(`worker ${worker.pid} died with code ${code} and signal ${signal}`)
+      })
+
+      this.logger.info(`Creating worker c#${this.clusterId}#w#${nodejsWorker.id}`)
+      const slave = {
 
         // The Node.js worker
         worker,
@@ -76,43 +102,16 @@ export class Master {
         index,
 
         // Worker ready
-        online: (function onlinePromise() {
-          let resolve
-          const p = querablep(new Promise(res => resolve = res))
-          p.extras = { resolve }
-          return p
-        })(),
+        online: worker.online,
 
         // Each worker has its own chunk of possible nonces
         nonceBeginning: this.nbCores === 1 ? 0 : (index + 1) * ProverConstants.NONCE_RANGE
       }
-      return this.slavesMap[worker.id]
+      this.slavesMap[nodejsWorker.id] = slave
+      return slave
     })
 
-    cluster.on('exit', (worker:any, code:any, signal:any) => {
-      this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`)
-    })
-
-    cluster.on('online', (worker:any) => {
-      // We just listen to the workers of this Master
-      if (this.slavesMap[worker.id]) {
-        this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`)
-        this.slavesMap[worker.id].online.extras.resolve()
-        worker.send({
-          command: 'conf',
-          value: this.conf
-        })
-      }
-    })
-
-    cluster.on('message', (worker:any, msg:any) => {
-      // Message for this cluster
-      if (this.slavesMap[worker.id]) {
-        this.onWorkerMessage(worker, msg)
-      }
-    })
-
-    this.workersOnline = this.slaves.map((s:any) => s.online)
+    this.workersOnline = this.slaves.map((s) => s.online)
     return Promise.all(this.workersOnline)
   }
 
@@ -121,7 +120,8 @@ export class Master {
     this.conf.cpu = conf.cpu || this.conf.cpu
     this.conf.prefix = this.conf.prefix || conf.prefix
     this.slaves.forEach(s => {
-      s.worker.send({
+      s.worker.sendConf({
+        rootPath: '',
         command: 'conf',
         value: this.conf
       })
@@ -129,53 +129,48 @@ export class Master {
     return Promise.resolve(_.clone(conf))
   }
 
-  cancelWork() {
-    this.logger.info(`Cancelling the work on PoW cluster`)
+  private cancelWorkersWork() {
     this.slaves.forEach(s => {
-      s.worker.send({
-        command: 'cancel'
-      })
+      s.worker.sendCancel()
     })
-
-    // Eventually force the end of current promise
-    if (this.currentPromise && !this.currentPromise.isFulfilled()) {
-      this.currentPromise.extras.resolve(null)
+    if (this.dal) {
+      this.dal.powDAL.writeCurrent("")
     }
+  }
 
+  async cancelWork() {
+    const workEnded = this.currentPromise
+    // Don't await the cancellation!
+    this.cancelWorkersWork()
     // Current promise is done
     this.currentPromise = null
-
-    return Promise.resolve()
-  }
-
-  newPromise(uuid:string) {
-    let resolve
-    const p = querablep(new Promise(res => resolve = res))
-    p.extras = { resolve, uuid }
-    return p
+    return await workEnded
   }
 
   async shutDownWorkers() {
     if (this.workersOnline) {
       await Promise.all(this.workersOnline)
-      await Promise.all(this.slaves.map(async (s:any) => {
+      await Promise.all(this.slaves.map(async (s) => {
         s.worker.kill()
       }))
     }
+    this.slaves = []
   }
 
-  proveByWorkers(stuff:any) {
+  async proveByWorkers(stuff:any) {
 
     // Spawn the workers if needed
     if (this.slaves.length === 0) {
       this.initCluster()
     }
 
+    if (this.dal) {
+      await this.dal.powDAL.writeCurrent([stuff.newPoW.block.number - 1, stuff.newPoW.block.previousHash].join('-'))
+    }
+
     // Register the new proof uuid
     const uuid = nuuid.v4()
-    this.currentPromise = this.newPromise(uuid)
-
-    return (async () => {
+    this.currentPromise = querablep((async () => {
       await Promise.all(this.workersOnline)
 
       if (!this.currentPromise) {
@@ -184,19 +179,22 @@ export class Master {
       }
 
       // Start the slaves' job
-      this.slaves.forEach((s:any, index) => {
-        s.worker.send({
+      const asks = this.slaves.map(async (s, index) => {
+        const proof = await s.worker.askProof({
           uuid,
           command: 'newPoW',
           value: {
+            rootPath: this.dal ? this.dal.rootPath : '',
+            initialTestsPerRound: stuff.initialTestsPerRound,
+            maxDuration: stuff.maxDuration,
             block: stuff.newPoW.block,
             nonceBeginning: s.nonceBeginning,
             zeros: stuff.newPoW.zeros,
             highMark: stuff.newPoW.highMark,
             pair: _.clone(stuff.newPoW.pair),
             forcedTime: stuff.newPoW.forcedTime,
-            turnDuration: stuff.newPoW.turnDuration,
             conf: {
+              powNoSecurity: stuff.newPoW.conf.powNoSecurity,
               medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks,
               avgGenTime: stuff.newPoW.conf.avgGenTime,
               cpu: stuff.newPoW.conf.cpu,
@@ -204,10 +202,30 @@ export class Master {
             }
           }
         })
+        this.logger.info(`[done] worker c#${this.clusterId}#w#${index}`)
+        return {
+          workerID: index,
+          proof
+        }
       })
 
-      return await this.currentPromise
-    })()
+      // Find a proof
+      const result = await Promise.race(asks)
+      // Don't await the cancellation!
+      this.cancelWorkersWork()
+      // Wait for all workers to have stopped looking for a proof
+      await Promise.all(asks)
+
+      if (!result.proof || !result.proof.message.answer) {
+        this.logger.info('No engine found the proof. It was probably cancelled.')
+        return null
+      } else {
+        this.logger.info(`ENGINE c#${this.clusterId}#${result.workerID} HAS FOUND A PROOF #${result.proof.message.answer.pow.pow}`)
+        return result.proof.message.answer
+      }
+    })())
+
+    return this.currentPromise
   }
 
   static defaultLogger() {
@@ -229,9 +247,8 @@ if (cluster.isMaster) {
 } else {
 
   process.on("SIGTERM", function() {
-    logger.info(`SIGTERM received, closing worker ${process.pid}`);
     process.exit(0)
   });
 
-  require('./proof')
+  createPowWorker()
 }
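
The new `proveByWorkers` control flow, condensed: race the per-worker asks, tell the losers to stop without awaiting, then wait for every ask to settle before resolving. A distilled sketch of that pattern:

```ts
async function firstAnswerWins<T>(asks: Promise<T>[], cancelAll: () => void): Promise<T> {
  const winner = await Promise.race(asks) // first worker to answer (a proof, or a cancel ack)
  cancelAll()                             // fire-and-forget: ask the other workers to stop
  await Promise.all(asks)                 // every worker has now stopped searching
  return winner
}
```

Each slave also gets a disjoint nonce range (`nonceBeginning = (index + 1) * NONCE_RANGE` in the multi-core case), so no two workers ever test the same candidate.
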
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 2108984c79fea9c1fefe14b814399a1e4b7bf678..855adb169858a77118674f622885e2868cba953f 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -6,296 +6,327 @@ import {ProverConstants} from "./constants"
 import {KeyGen} from "../../../lib/common-libs/crypto/keyring"
 import {dos2unix} from "../../../lib/common-libs/dos2unix"
 import {rawer} from "../../../lib/common-libs/index"
+import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler"
+import {PowDAL} from "../../../lib/dal/fileDALs/PowDAL";
 
 const moment = require('moment');
 const querablep = require('querablep');
+const directory = require('../../../lib/system/directory');
 
-const PAUSES_PER_TURN = 5;
+export function createPowWorker() {
 
-// This value can be changed
-let TURN_DURATION_IN_MILLISEC = 100;
-
-let computing = querablep(Promise.resolve(null));
-let askedStop = false;
+  let powDAL:PowDAL|null = null
+  let computing = querablep(Promise.resolve(null));
+  let askedStop = false;
 
 // By default, we do not prefix the PoW by any number
-let prefix = 0;
+  let prefix = 0;
 
-let signatureFunc:any, lastSecret:any, currentCPU = 1;
+  let signatureFunc:any, lastSecret:any, currentCPU = 1;
 
-process.on('uncaughtException', (err:any) => {
-  console.error(err.stack || Error(err))
-  if (process.send) {
-    process.send({error: err});
-  } else {
-    throw Error('process.send() is not defined')
-  }
-});
+  process.on('uncaughtException', (err:any) => {
+    console.error(err.stack || Error(err))
+    if (process.send) {
+      process.send({error: err});
+    } else {
+      throw Error('process.send() is not defined')
+    }
+  });
 
-process.on('message', async (message) => {
+  process.on('unhandledRejection', () => {
+    process.exit()
+  })
 
-  switch (message.command) {
+  process.on('message', async (message) => {
 
-    case 'newPoW':
-      (async () => {
-        askedStop = true
+    switch (message.command) {
 
-        // Very important: do not await if the computation is already done, to keep the lock on JS engine
-        if (!computing.isFulfilled()) {
-          await computing;
-        }
+      case 'newPoW':
+        (async () => {
+          askedStop = true
 
-        const res = await beginNewProofOfWork(message.value);
-        answer(message, res);
-      })()
-      break;
+          // Very important: do not await if the computation is already done, to keep the lock on JS engine
+          if (!computing.isFulfilled()) {
+            await computing;
+          }
 
-    case 'cancel':
-      if (!computing.isFulfilled()) {
-        askedStop = true;
-      }
-      break;
+          if (message.value.rootPath) {
+            const params = await directory.getHomeFS(false, message.value.rootPath, false)
+            powDAL = new PowDAL(message.value.rootPath, params.fs)
+          }
 
-    case 'conf':
-      if (message.value.cpu !== undefined) {
-        currentCPU = message.value.cpu
-      }
-      if (message.value.prefix !== undefined) {
-        prefix = message.value.prefix
-      }
-      answer(message, { currentCPU, prefix });
-      break;
-  }
+          const res = await beginNewProofOfWork(message.value);
+          answer(message, res);
+        })()
+        break;
 
-})
-
-function beginNewProofOfWork(stuff:any) {
-  askedStop = false;
-  computing = querablep((async () => {
-
-    /*****************
-     * PREPARE POW STUFF
-     ****************/
-
-    let nonce = 0;
-    const conf = stuff.conf;
-    const block = stuff.block;
-    const nonceBeginning = stuff.nonceBeginning;
-    const nbZeros = stuff.zeros;
-    const pair = stuff.pair;
-    const forcedTime = stuff.forcedTime;
-    currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
-    prefix = parseInt(conf.prefix || prefix)
-    if (prefix && prefix < ProverConstants.NONCE_RANGE) {
-      prefix *= 100 * ProverConstants.NONCE_RANGE
-    }
-    const highMark = stuff.highMark;
-    const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC
-    let sigFunc = null;
-    if (signatureFunc && lastSecret === pair.sec) {
-      sigFunc = signatureFunc;
-    }
-    else {
-      lastSecret = pair.sec;
-      sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      case 'cancel':
+        if (!computing.isFulfilled()) {
+          askedStop = true;
+        }
+        break;
+
+      case 'conf':
+        if (message.value.cpu !== undefined) {
+          currentCPU = message.value.cpu
+        }
+        if (message.value.prefix !== undefined) {
+          prefix = message.value.prefix
+        }
+        answer(message, { currentCPU, prefix });
+        break;
     }
-    signatureFunc = sigFunc;
-    let pow = "", sig = "", raw = "";
 
-    /*****************
-     * GO!
-     ****************/
+  })
 
-    let testsCount = 0;
-    let found = false;
-    let score = 0;
-    let turn = 0;
+  function beginNewProofOfWork(stuff:any) {
+    askedStop = false;
+    computing = querablep((async () => {
 
-    while (!found && !askedStop) {
+      /*****************
+       * PREPARE POW STUFF
+       ****************/
+
+      let nonce = 0;
+      const maxDuration = stuff.maxDuration || 1000
+      const conf = stuff.conf;
+      const block = stuff.block;
+      const nonceBeginning = stuff.nonceBeginning;
+      const nbZeros = stuff.zeros;
+      const pair = stuff.pair;
+      const forcedTime = stuff.forcedTime;
+      currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
+      prefix = parseInt(conf.prefix || prefix)
+      if (prefix && prefix < ProverConstants.NONCE_RANGE) {
+        prefix *= 100 * ProverConstants.NONCE_RANGE
+      }
+      const highMark = stuff.highMark;
+      let sigFunc = null;
+      if (signatureFunc && lastSecret === pair.sec) {
+        sigFunc = signatureFunc;
+      }
+      else {
+        lastSecret = pair.sec;
+        sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      }
+      signatureFunc = sigFunc;
+      let pow = "", sig = "", raw = "";
 
       /*****************
-       * A TURN
+       * GO!
        ****************/
 
-      await Promise.race([
+      let pausePeriod = 1;
+      let testsCount = 0;
+      let found = false;
+      let turn = 0;
+      const profiler = new ProcessCpuProfiler(100)
+      let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
+      // We limit the number of tests according to CPU usage
+      let testsPerRound = stuff.initialTestsPerRound || 1
+      let turnDuration = 20 // Start with short turns: up to 50 re-evaluations per second (1000 / 20)
 
-        // I. Stop the turn if it exceeds `turnDuration` ms
-        countDown(turnDuration),
+      while (!found && !askedStop) {
 
-        // II. Process the turn's PoW
-        (async () => {
+        /*****************
+         * A TURN (starts at ~20ms, grows up to maxDuration)
+         ****************/
 
-          /*****************
-           * A TURN OF POW ~= 100ms by default
-           * --------------------
-           *
-           * The concept of "turn" is required to limit the CPU usage.
-           * We need a time reference to have the speed = nb tests / period of time.
-           * Here we have:
-           *
-           *   - speed = testsCount / turn
-           *
-           * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the
-           * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set.
-           ****************/
+        await Promise.race([
 
-            // Prove
-          let i = 0;
-          const thisTurn = turn;
-          const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn
-          // We limit the number of tests according to CPU usage
-          const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000
-
-          // Time is updated regularly during the proof
-          block.time = getBlockTime(block, conf, forcedTime)
-          if (block.number === 0) {
-            block.medianTime = block.time
-          }
-          block.inner_hash = getBlockInnerHash(block);
+          // I. Stop the turn if it exceeds `turnDuration` ms
+          countDown(turnDuration),
 
-          /*****************
-           * Iterations of a turn
-           ****************/
+          // II. Process the turn's PoW
+          (async () => {
 
-          while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
+            // Prove
+            let i = 0;
+            const thisTurn = turn;
 
-            // Nonce change (what makes the PoW change if the time field remains the same)
-            nonce++
+            // Time is updated regularly during the proof
+            block.time = getBlockTime(block, conf, forcedTime)
+            if (block.number === 0) {
+              block.medianTime = block.time
+            }
+            block.inner_hash = getBlockInnerHash(block);
 
             /*****************
-             * A PROOF OF WORK
+             * Iterations of a turn
              ****************/
 
-            // The final nonce is composed of 3 parts
-            block.nonce = prefix + nonceBeginning + nonce
-            raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
-            sig = dos2unix(sigFunc(raw))
-            pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
+            while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
 
-            /*****************
-             * Check the POW result
-             ****************/
+              // Nonce change (what makes the PoW change if the time field remains the same)
+              nonce++
 
-            let j = 0, charOK = true;
-            while (j < nbZeros && charOK) {
-              charOK = pow[j] === '0';
-              j++;
-            }
-            if (charOK) {
-              found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
-            }
-            if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
-              pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
-            }
+              /*****************
+               * A PROOF OF WORK
+               ****************/
 
-            /*****************
-             * - Update local vars
-             * - Allow to receive stop signal
-             ****************/
+              // The final nonce is composed of 3 parts
+              block.nonce = prefix + nonceBeginning + nonce
+              raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
+              sig = dos2unix(sigFunc(raw))
+              pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
 
-            if (!found && !askedStop) {
-              i++;
-              testsCount++;
-              if (i % pausePeriod === 0) {
-                await countDown(0); // Very low pause, just the time to process eventual end of the turn
-              }
-            }
-          }
+              /*****************
+               * Check the POW result
+               ****************/
 
-          /*****************
-           * Check the POW result
-           ****************/
-          if (!found) {
+              let j = 0, charOK = true;
+              while (j < nbZeros && charOK) {
+                charOK = pow[j] === '0';
+                j++;
+              }
+              if (charOK) {
+                found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
+              }
+              if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
+                pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
+              }
 
-            // CPU speed recording
-            if (turn > 0 && !score) {
-              score = testsCount;
+              /*****************
+               * - Update local vars
+               * - Allow to receive stop signal
+               ****************/
+
+              if (!found && !askedStop) {
+                i++;
+                testsCount++;
+                if (i % pausePeriod === 0) {
+                  await countDown(1); // Very short pause, just long enough to process a possible end of the turn
+                }
+              }
             }
 
             /*****************
-             * UNLOAD CPU CHARGE
+             * Check the POW result
              ****************/
-            // We wait for a maximum time of `turnDuration`.
-            // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
-            // just does nothing: this gives of a bit of breath to the CPU. The amount of "breath" depends on the "cpu"
-            // parameter.
-            await countDown(turnDuration);
+            if (!found) {
+
+              // CPU speed recording
+              if (turn > 0) {
+                cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
+                if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
+                  let powVariationFactor
+                  // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // dampened to avoid extreme responses
+                  if (currentCPU > cpuUsage) {
+                    powVariationFactor = 1.01
+                    testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor))
+                  } else {
+                    powVariationFactor = 0.99
+                    testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
+                  }
+                  pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND)
+                }
+              }
+
+              /*****************
+               * UNLOAD CPU CHARGE FOR THIS TURN
+               ****************/
+              // We wait for a maximum time of `turnDuration`.
+              // This will trigger the end of the turn by the concurrent race I. During that time, this script
+              // just does nothing: this gives a bit of breath to the CPU. The amount of "breath" depends on the "cpu"
+              // parameter.
+              await countDown(turnDuration);
+            }
+          })()
+        ]);
+
+        // console.log('W#%s.powDAL = ', process.pid, powDAL)
+
+        if (powDAL && !conf.powNoSecurity) {
+          const currentProofCheck = await powDAL.getCurrent()
+          if (currentProofCheck !== null) {
+            if (currentProofCheck === "") {
+              askedStop = true
+            } else {
+              const [currentNumber, currentHash] = currentProofCheck.split('-')
+              if (block.number !== parseInt(currentNumber) + 1 || block.previousHash !== currentHash) {
+                askedStop = true
+              }
+            }
           }
-        })()
-      ]);
+        }
 
-      // Next turn
-      turn++
-    }
+        // Next turn
+        turn++
 
-    /*****************
-     * POW IS OVER
-     * -----------
-     *
-     * We either have found a valid POW or a stop event has been detected.
-     ****************/
+        turnDuration += 1
+        turnDuration = Math.min(turnDuration, maxDuration) // Cap the turn duration at maxDuration (1s by default)
+      }
+
+      /*****************
+       * POW IS OVER
+       * -----------
+       *
+       * We either have found a valid POW or a stop event has been detected.
+       ****************/
 
-    if (askedStop) {
+      if (askedStop) {
 
-      // PoW stopped
-      askedStop = false;
-      return null
+        // PoW stopped
+        askedStop = false;
+        pSend({ canceled: true })
+        return null
 
-    } else {
+      } else {
 
-      // PoW success
-      block.hash = pow
-      block.signature = sig
-      return {
-        pow: {
-          block: block,
-          testsCount: testsCount,
-          pow: pow
+        // PoW success
+        block.hash = pow
+        block.signature = sig
+        return {
+          pow: {
+            block: block,
+            testsCount: testsCount,
+            pow: pow
+          }
         }
       }
-    }
-  })())
+    })())
 
-  return computing;
-}
+    return computing;
+  }
 
-function countDown(duration:number) {
-  return new Promise((resolve) => setTimeout(resolve, duration));
-}
+  function countDown(duration:number) {
+    return new Promise((resolve) => setTimeout(resolve, duration));
+  }
 
-function getBlockInnerHash(block:DBBlock) {
-  const raw = rawer.getBlockInnerPart(block);
-  return hashf(raw)
-}
+  function getBlockInnerHash(block:DBBlock) {
+    const raw = rawer.getBlockInnerPart(block);
+    return hashf(raw)
+  }
 
-function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
-  if (forcedTime) {
-    return forcedTime;
+  function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
+    if (forcedTime) {
+      return forcedTime;
+    }
+    const now = moment.utc().unix();
+    const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
+    const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
+    const medianTime = block.medianTime;
+    const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
+    return Math.max(medianTime, upperBound);
   }
-  const now = moment.utc().unix();
-  const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
-  const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
-  const medianTime = block.medianTime;
-  const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
-  return Math.max(medianTime, upperBound);
-}
 
-function answer(message:any, theAnswer:any) {
-  return pSend({
-    uuid: message.uuid,
-    answer: theAnswer
-  })
-}
+  function answer(message:any, theAnswer:any) {
+    return pSend({
+      uuid: message.uuid,
+      answer: theAnswer
+    })
+  }
 
-function pSend(stuff:any) {
-  return new Promise(function (resolve, reject) {
-    if (process.send) {
-      process.send(stuff, function (error:any) {
-        !error && resolve();
-        error && reject();
-      })
-    } else {
-      reject('process.send() is not defined')
-    }
-  });
+  function pSend(stuff:any) {
+    return new Promise(function (resolve, reject) {
+      if (process.send) {
+        process.send(stuff, function (error:any) {
+          !error && resolve();
+          error && reject();
+        })
+      } else {
+        reject('process.send() is not defined')
+      }
+    });
+  }
 }
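
The self-regulation above, condensed: after each turn, the measured CPU usage nudges `testsPerRound` by one percent toward the configured target, inside a ±0.5% dead band, and the pause frequency follows. A distilled sketch:

```ts
const POW_NB_PAUSES_PER_ROUND = 10 // from ./constants

function adjustThrottle(testsPerRound: number, pausePeriod: number, cpuUsage: number, targetCPU: number) {
  if (cpuUsage > targetCPU + 0.005 || cpuUsage < targetCPU - 0.005) {
    if (targetCPU > cpuUsage) {
      testsPerRound = Math.max(1, Math.ceil(testsPerRound * 1.01))  // under target: speed up by 1%
    } else {
      testsPerRound = Math.max(1, Math.floor(testsPerRound * 0.99)) // over target: slow down by 1%
    }
    pausePeriod = Math.floor(testsPerRound / POW_NB_PAUSES_PER_ROUND)
  }
  return { testsPerRound, pausePeriod }
}
```

This replaces the old fixed `turnDuration`/`score` scheme with a feedback loop on actual process CPU time (via `ProcessCpuProfiler`).
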
diff --git a/app/modules/router.ts b/app/modules/router.ts
index d6484f05ba8d3ef1a74e8440c44c25aae80af128..9c7f2b14d4eded2c706118fdb1a6d3ee0c0b621a 100644
--- a/app/modules/router.ts
+++ b/app/modules/router.ts
@@ -5,9 +5,7 @@ import * as stream from "stream"
 import {Multicaster} from "../lib/streams/multicaster"
 import {RouterStream} from "../lib/streams/router"
 
-const constants = require('../lib/constants');
-
-module.exports = {
+export const RouterDependency = {
   duniter: {
     service: {
       output: (server:Server, conf:ConfDTO, logger:any) => new Router(server)
@@ -26,7 +24,7 @@ module.exports = {
  * Service which triggers the server's peering generation (actualization of the Peer document).
  * @constructor
  */
-class Router extends stream.Transform {
+export class Router extends stream.Transform {
 
   theRouter:any
   theMulticaster:Multicaster = new Multicaster()
diff --git a/app/modules/ws2p/lib/WS2PClient.ts b/app/modules/ws2p/lib/WS2PClient.ts
index 569609f6e1c06bbdef4b7c8c9fadfcf48569972c..af885353d0a0afe5f573cd5322dcced2b2f1a638 100644
--- a/app/modules/ws2p/lib/WS2PClient.ts
+++ b/app/modules/ws2p/lib/WS2PClient.ts
@@ -13,19 +13,22 @@ export class WS2PClient {
 
   private constructor(public connection:WS2PConnection) {}
 
-  static async connectTo(server:Server, fullEndpointAddress:string, uuid:string, messageHandler:WS2PMessageHandler, expectedPub:string, allowKey:(pub:string)=>Promise<boolean> ) {
+  static async connectTo(server:Server, fullEndpointAddress:string, endpointVersion:number, expectedWS2PUID:string, messageHandler:WS2PMessageHandler, expectedPub:string, allowKey:(pub:string)=>Promise<boolean> ) {
     const k2 = new Key(server.conf.pair.pub, server.conf.pair.sec)
+    const myWs2pId = (server.conf.ws2p && server.conf.ws2p.uuid) ? server.conf.ws2p.uuid:""
     const c = WS2PConnection.newConnectionToAddress(
+      Math.min(endpointVersion, WS2PConstants.WS2P_API_VERSION),
       fullEndpointAddress,
       messageHandler,
-      new WS2PPubkeyLocalAuth(server.conf.currency , k2, allowKey),
+      new WS2PPubkeyLocalAuth(server.conf.currency , k2, myWs2pId, allowKey),
       new WS2PPubkeyRemoteAuth(server.conf.currency, k2, allowKey),
       ProxiesConf.wsProxy(fullEndpointAddress, server.conf.proxiesConf),
       {
         connectionTimeout: WS2PConstants.REQUEST_TIMEOUT,
         requestTimeout: WS2PConstants.REQUEST_TIMEOUT
       },
-      expectedPub
+      expectedPub,
+      expectedWS2PUID
     )
     const singleWriteProtection = new WS2PSingleWriteStream()
     const streamer = new WS2PStreamer(c)
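
Version negotiation here is simply the lower of the two sides: `Math.min(endpointVersion, WS2PConstants.WS2P_API_VERSION)`. For example, a v2 node dialing a v1 endpoint opens a v1 connection (`Math.min(1, 2) === 1`), which is why the v2-only fields (`messageV2`, `sigV2`, `step`) stay optional on `WS2PHead` in WS2PCluster.ts below.
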
diff --git a/app/modules/ws2p/lib/WS2PCluster.ts b/app/modules/ws2p/lib/WS2PCluster.ts
index bb76f0b4a44d53e6209b90f4232a49af16116c20..3b912104fab9b77a56d5d4ad89e258afc77c4863 100644
--- a/app/modules/ws2p/lib/WS2PCluster.ts
+++ b/app/modules/ws2p/lib/WS2PCluster.ts
@@ -1,3 +1,4 @@
+import {DEFAULT_ENCODING} from 'crypto';
 import {WS2PServer} from "./WS2PServer"
 import {Server} from "../../../../server"
 import {WS2PClient} from "./WS2PClient"
@@ -7,16 +8,16 @@ import {CrawlerConstants} from "../../crawler/lib/constants"
 import {WS2PBlockPuller} from "./WS2PBlockPuller"
 import {WS2PDocpoolPuller} from "./WS2PDocpoolPuller"
 import {WS2PConstants} from "./constants"
-import {PeerDTO} from "../../../lib/dto/PeerDTO"
+import {PeerDTO, WS2PEndpoint} from '../../../lib/dto/PeerDTO';
 import {GlobalFifoPromise} from "../../../service/GlobalFifoPromise"
 import {OtherConstants} from "../../../lib/other_constants"
 import {Key, verify} from "../../../lib/common-libs/crypto/keyring"
 import {WS2PServerMessageHandler} from "./interface/WS2PServerMessageHandler"
 import {WS2PMessageHandler} from "./impl/WS2PMessageHandler"
-import { CommonConstants } from '../../../lib/common-libs/constants';
-import { Package } from "../../../lib/common/package";
-import { ProverConstants } from "../../prover/lib/constants";
-import { ProxiesConf } from '../../../lib/proxy';
+import {CommonConstants} from '../../../lib/common-libs/constants';
+import {Package} from "../../../lib/common/package";
+import {ProverConstants} from "../../prover/lib/constants";
+import {ProxiesConf} from '../../../lib/proxy';
 
 const es = require('event-stream')
 const nuuid = require('node-uuid')
@@ -25,6 +26,13 @@ const _ = require('underscore')
 export interface WS2PHead {
   message:string
   sig:string
+  messageV2?:string
+  sigV2?:string
+  step?:number
+}
+
+export interface WS2pHeadCache extends WS2PHead {
+  blockstamp:string
 }
 
 export class WS2PCluster {
@@ -51,7 +59,7 @@ export class WS2PCluster {
   }
 
   private ws2pServer:WS2PServer|null = null
-  private ws2pClients:{[k:string]:WS2PClient} = {}
+  private ws2pClients:{[ws2puid:string]:WS2PClient} = {}
   private host:string|null = null
   private port:number|null = null
   private syncBlockInterval:NodeJS.Timer
@@ -69,11 +77,11 @@ export class WS2PCluster {
   // A cache to know whether a pubkey is a member or not
   private memberkeysCache:{ [k:string]: number } = {}
 
-  // A cache of the current HEAD for a given pubkey
-  private headsCache:{ [ws2pFullId:string]: { blockstamp:string, message:string, sig:string } } = {}
+  // A cache of the current HEAD for a given ws2pFullId
+  private headsCache:{ [ws2pFullId:string]:WS2pHeadCache } = {}
 
   // A buffer of "to be sent" heads
-  private newHeads:{ message:string, sig:string }[] = []
+  private newHeads:WS2PHead[] = []
 
   // The triggerer of a buffer of heads' sending
   private headsTimeout:NodeJS.Timer|null = null
@@ -93,115 +101,120 @@ export class WS2PCluster {
     const heads:WS2PHead[] = []
     const ws2pId = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) || '000000'
     const localPub = this.server.conf.pair.pub
-    const fullId = [localPub, ws2pId].join('-')
-    if (!this.headsCache[fullId]) {
+    const myFullId = [localPub, ws2pId].join('-')
+    if (!this.headsCache[myFullId]) {
       const current = await this.server.dal.getCurrentBlockOrNull()
       if (current) {
-        const { sig, message } = this.sayHeadChangedTo(current.number, current.hash)
+        const myHead = await this.sayHeadChangedTo(current.number, current.hash)
         const blockstamp = [current.number, current.hash].join('-')
-        this.headsCache[fullId] = { blockstamp, message, sig }
+        this.headsCache[myFullId] = { blockstamp, message: myHead.message, sig: myHead.sig, messageV2: myHead.messageV2, sigV2: myHead.sigV2, step:myHead.step  }
+
       }
     }
     for (const ws2pFullId of Object.keys(this.headsCache)) {
       heads.push({
         message: this.headsCache[ws2pFullId].message,
-        sig: this.headsCache[ws2pFullId].sig
+        sig: this.headsCache[ws2pFullId].sig,
+        messageV2: this.headsCache[ws2pFullId].messageV2,
+        sigV2: this.headsCache[ws2pFullId].sigV2,
+        step: this.headsCache[ws2pFullId].step
       })
     }
     return heads
   }
 
-  async headsReceived(heads:[{ message:string, sig:string }]) {
-    const added:{ message:string, sig:string }[] = []
-    await Promise.all(heads.map(async (h:{ message:string, sig:string }) => {
+  async headsReceived(heads:WS2PHead[]) {
+    await Promise.all(heads.map(async (h:WS2PHead) => {
       try {
-        const message = h.message
-        const sig = h.sig
-        if (!message) {
-          throw "EMPTY_MESSAGE_FOR_HEAD"
-        }
-        if (message.match(WS2PConstants.HEAD_V0_REGEXP)) {
-          const [,, pub, blockstamp]:string[] = message.split(':')
-          const ws2pId = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) || '000000'
-          const fullId = [pub, ws2pId].join('-')
-          const sigOK = verify(message, sig, pub)
-          if (sigOK) {
-            // Already known?
-            if (!this.headsCache[fullId] || this.headsCache[fullId].blockstamp !== blockstamp) {
-              // More recent?
-              if (!this.headsCache[fullId] || parseInt(this.headsCache[fullId].blockstamp) < parseInt(blockstamp)) {
-                // Check that issuer is a member and that the block exists
-                const isAllowed = pub === this.server.conf.pair.pub || this.isConnectedKey(pub) || (await this.isMemberKey(pub))
-                if (isAllowed) {
-                  const exists = await this.existsBlock(blockstamp)
-                  if (exists) {
-                    this.headsCache[fullId] = { blockstamp, message, sig }
-                    this.newHeads.push({message, sig})
-                    added.push({message, sig})
-                    // Cancel a pending "heads" to be spread
-                    if (this.headsTimeout) {
-                      clearTimeout(this.headsTimeout)
-                    }
-                    // Reprogram it a few moments later
-                    this.headsTimeout = setTimeout(async () => {
-                      const heads = this.newHeads.splice(0, this.newHeads.length)
-                      if (heads.length) {
-                        await this.spreadNewHeads(heads)
-                      }
-                    }, WS2PConstants.HEADS_SPREAD_TIMEOUT)
-                  }
-                }
-              }
-            }
-          } else {
+        // HEAD v2
+        if (h.messageV2 && h.messageV2.match(WS2PConstants.HEAD_V2_REGEXP)) {
+          if (!h.sigV2) {
             throw "HEAD_MESSAGE_WRONGLY_SIGNED"
+          } else {
+            const [,,, pub, blockstamp, ws2pId,,,,,]:string[] = h.messageV2.split(':')
+            await this.headReceived(h, pub, [pub, ws2pId].join('-'), blockstamp)
           }
-        }
-        else if (message.match(WS2PConstants.HEAD_V1_REGEXP)) {
-          const [,,, pub, blockstamp, ws2pId, software, softVersion, prefix]:string[] = message.split(':')
-          const sigOK = verify(message, sig, pub)
-          const fullId = [pub, ws2pId].join('-')
-          if (sigOK) {
-            // Already known?
-            if (!this.headsCache[fullId] || this.headsCache[fullId].blockstamp !== blockstamp) {
-              // More recent?
-              if (!this.headsCache[fullId] || parseInt(this.headsCache[fullId].blockstamp) < parseInt(blockstamp)) {
-                // Check that issuer is a member and that the block exists
-                const isAllowed = pub === this.server.conf.pair.pub || this.isConnectedKey(pub) || (await this.isMemberKey(pub))
-                if (isAllowed) {
-                  const exists = await this.existsBlock(blockstamp)
-                  if (exists) {
-                    this.headsCache[fullId] = { blockstamp, message, sig }
-                    this.newHeads.push({message, sig})
-                    added.push({message, sig})
-                    // Cancel a pending "heads" to be spread
-                    if (this.headsTimeout) {
-                      clearTimeout(this.headsTimeout)
-                    }
-                    // Reprogram it a few moments later
-                    this.headsTimeout = setTimeout(async () => {
-                      const heads = this.newHeads.splice(0, this.newHeads.length)
-                      if (heads.length) {
-                        await this.spreadNewHeads(heads)
-                      }
-                    }, WS2PConstants.HEADS_SPREAD_TIMEOUT)
-                  }
-                }
-              }
-            }
+        } 
+        // HEAD v1 and HEAD v0
+        else if (h.message && h.sig) {
+          if (h.message.match(WS2PConstants.HEAD_V1_REGEXP)) {
+            const [,,, pub, blockstamp, ws2pId,,,]:string[] = h.message.split(':')
+            await this.headReceived(h, pub, [pub, ws2pId].join('-'), blockstamp)
+          } else if (h.message.match(WS2PConstants.HEAD_V0_REGEXP)) {
+            const [,,pub, blockstamp]:string[] = h.message.split(':')
+            await this.headReceived(h, pub, [pub, "00000000"].join('-'), blockstamp)
           } else {
-            throw "HEAD_MESSAGE_WRONGLY_SIGNED"
+            throw "HEAD_WRONG_FORMAT"
           }
         }
+        else if (!h.message) {
+          throw "EMPTY_MESSAGE_FOR_HEAD"
+        } else if (!h.sig) {
+          throw "HEAD_MESSAGE_WRONGLY_SIGNED"
+        } else {
+          throw "HEAD_WRONG_FORMAT"
+        }
       } catch (e) {
-        this.server.logger.trace(e)
+          this.server.logger.trace(e)
       }
     }))
+    // Cancel a pending "heads" to be spread
+    if (this.headsTimeout) {
+      clearTimeout(this.headsTimeout)
+    }
+    // Reprogram it a few moments later
+    this.headsTimeout = setTimeout(async () => {
+      const heads = this.newHeads.splice(0, this.newHeads.length)
+      if (heads.length) {
+        await this.spreadNewHeads(heads)
+      }
+    }, WS2PConstants.HEADS_SPREAD_TIMEOUT)
+    
     this.server.push({
       ws2p: 'heads',
-      added
+      added: this.newHeads
     })
-    return added
+  }
+
+  private async headReceived(h:WS2PHead, pub:string, fullId:string, blockstamp:string) {
+    try {
+      // Prevent fields injection
+      if ( (h.message.match(WS2PConstants.HEAD_V1_REGEXP) || h.message.match(WS2PConstants.HEAD_V0_REGEXP))
+      && h.sig.match(WS2PConstants.HEAD_SIG_REGEXP)
+      && (!h.messageV2 || h.messageV2.match(WS2PConstants.HEAD_V2_REGEXP))
+      && (!h.sigV2 || h.sigV2.match(WS2PConstants.HEAD_SIG_REGEXP))
+      && (!h.step || h.step.toFixed(0).match(/^[0-9]*$/))
+      ) {
+        const head:WS2PHead = { message: h.message, sig: h.sig, messageV2: h.messageV2, sigV2: h.sigV2, step: h.step }
+
+        const sigOK = verify(head.message, head.sig, pub)
+        const sigV2OK = (head.messageV2 !== undefined && head.sigV2 !== undefined) ? verify(head.messageV2, head.sigV2, pub):false
+        if ((sigV2OK && sigOK) || sigOK) {
+          // Already known, more recent, or closer?
+          const step = (this.headsCache[fullId]) ? this.headsCache[fullId].step || 0:0
+          if (!this.headsCache[fullId] // unknown head
+            || parseInt(this.headsCache[fullId].blockstamp) < parseInt(blockstamp) // more recent head
+            || (head.step !== undefined && head.step < step && this.headsCache[fullId].blockstamp === blockstamp) // closer head
+          ) {
+            // Check that issuer is a member and that the block exists
+            const isAllowed = pub === this.server.conf.pair.pub || this.isConnectedKey(pub) || (await this.isMemberKey(pub))
+            if (isAllowed) {
+              const exists = await this.existsBlock(blockstamp)
+              if (exists) {
+                this.headsCache[fullId] = { blockstamp, message: head.message, sig: head.sig, messageV2: head.messageV2, sigV2: head.sigV2, step: head.step }
+                this.newHeads.push(head)
+              }
+            }
+          }
+        } else {
+          throw "HEAD_MESSAGE_WRONGLY_SIGNED"
+        }
+      } else {
+        throw "HEAD_WRONG_FORMAT"
+      }
+    } catch (e) {
+      this.server.logger.trace(e)
+    }
   }
 
   private async isMemberKey(pub:string) {
@@ -248,12 +261,6 @@ export class WS2PCluster {
     this.maxLevel1Size = Math.max(newValue, 0) || 0
   }
 
-  set maxLevel2Peers(newValue:number) {
-    if (this.ws2pServer) {
-      this.ws2pServer.maxLevel2Peers = Math.max(newValue, 0)
-    }
-  }
-
   get maxLevel2Peers() {
     if (this.ws2pServer) {
       return this.ws2pServer.maxLevel2Peers || 0
@@ -266,9 +273,8 @@ export class WS2PCluster {
       await this.ws2pServer.close()
     }
     this.ws2pServer = await WS2PServer.bindOn(this.server, host, port, this.fifo, (pubkey:string, connectedPubkeys:string[]) => {
-      const privilegedNodes = (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) ? this.server.conf.ws2p.privilegedNodes:[]
-      return this.acceptPubkey(pubkey, connectedPubkeys, () => this.servedCount(), this.maxLevel2Peers, privilegedNodes, (this.server.conf.ws2p && this.server.conf.ws2p.privilegedOnly || false))
-    }, this.messageHandler)
+      return this.acceptPubkey(pubkey, connectedPubkeys, [], () => this.servedCount(), this.maxLevel2Peers, this.privilegedNodes(), (this.server.conf.ws2p !== undefined && this.server.conf.ws2p.privilegedOnly)) 
+    }, this.keyPriorityLevel, this.messageHandler)
     this.host = host
     this.port = port
     return this.ws2pServer
@@ -283,23 +289,51 @@ export class WS2PCluster {
   }
 
   clientsCount() {
-    return Object.keys(this.ws2pClients).length
+    let count = 0
+    let connectedKeys:string[] = []
+    for (const ws2pid in this.ws2pClients) {
+      if (this.ws2pClients[ws2pid].connection.pubkey != this.server.conf.pair.pub
+        && connectedKeys.indexOf(this.ws2pClients[ws2pid].connection.pubkey) == -1) {
+        count++
+        connectedKeys.push(this.ws2pClients[ws2pid].connection.pubkey)
+      }
+    }
+    return count
+  }
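+
+  // Note: clientsCount() counts distinct remote keys only; outgoing connections to nodes
+  // holding our own key (siblings) are tracked by numberOfConnectedPublicNodesWithSameKey() below.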
+
+  numberOfConnectedPublicNodesWithSameKey() {
+    let count = 0
+    for (const ws2pid in this.ws2pClients) {
+      if (this.ws2pClients[ws2pid].connection.pubkey === this.server.conf.pair.pub) {
+        count++
+      }
+    }
+    return count
   }
 
   servedCount() {
-    return this.ws2pServer ? this.ws2pServer.getConnexions().length : 0
+    return (this.ws2pServer) ? this.ws2pServer.countConnexions():0
   }
 
-  async connectToRemoteWS(host: string, port: number, path:string, messageHandler:WS2PMessageHandler, expectedPub:string, ws2pEndpointUUID:string = ""): Promise<WS2PConnection> {
+  privilegedNodes() {
+    if (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) {
+      return this.server.conf.ws2p.privilegedNodes
+    } else {
+      return []
+    }
+  }
+
+  async connectToRemoteWS(endpointVersion:number, host: string, port: number, path:string, messageHandler:WS2PMessageHandler, expectedPub:string, ws2pEndpointUUID:string = ""): Promise<WS2PConnection> {
     const uuid = nuuid.v4()
     let pub = expectedPub.slice(0, 8)
     const api:string = (host.match(WS2PConstants.HOST_ONION_REGEX) !== null) ? 'WS2PTOR':'WS2P'
     try {
       const fullEndpointAddress = WS2PCluster.getFullAddress(host, port, path)
-      const ws2pc = await WS2PClient.connectTo(this.server, fullEndpointAddress, ws2pEndpointUUID, messageHandler, expectedPub, (pub:string) => {
+      const ws2pc = await WS2PClient.connectTo(this.server, fullEndpointAddress, endpointVersion, ws2pEndpointUUID, messageHandler, expectedPub, (pub:string) => {
         const connectedPubkeys = this.getConnectedPubkeys()
+        const connectedWS2PUID = this.getConnectedWS2PUID()
         const preferedNodes = (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) ? this.server.conf.ws2p.preferedNodes:[]
-        return this.acceptPubkey(expectedPub, connectedPubkeys, () => this.clientsCount(), this.maxLevel1Size, preferedNodes, (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || false, ws2pEndpointUUID)
+        return this.acceptPubkey(expectedPub, connectedPubkeys, connectedWS2PUID, () => this.clientsCount(), this.maxLevel1Size, preferedNodes, (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || false, ws2pEndpointUUID)
       })
       this.ws2pClients[uuid] = ws2pc
       pub = ws2pc.connection.pubkey
@@ -329,15 +363,30 @@ export class WS2PCluster {
   }
 
   async connectToWS2Peers() {
+    // If incoming connections quota is full, delete one low-priority connection
+    if (this.ws2pServer !== null && this.ws2pServer.countConnexions() === this.ws2pServer.maxLevel2Peers) {
+      const privilegedKeys = ((this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) || []).slice() // Copy
+      this.ws2pServer.removeLowPriorityConnection(privilegedKeys)
+    }
+    const myUUID = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) ? this.server.conf.ws2p.uuid:""
     const potentials = await this.server.dal.getWS2Peers()
     const peers:PeerDTO[] = potentials.map((p:any) => PeerDTO.fromJSONObject(p))
     const prefered = ((this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) || []).slice() // Copy
-    // Our key is also a prefered one, so we connect to our siblings
-    prefered.push(this.server.conf.pair.pub)
     const canReachTorEndpoint = ProxiesConf.canReachTorEndpoint(this.server.conf.proxiesConf)
+    const canReachClearEndpoint = ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf)
+    // dal.isMember() is asynchronous: resolve our own membership once here, because the
+    // sort comparator below must stay synchronous
+    const isMemberSelf = await this.server.dal.isMember(this.server.conf.pair.pub)
     peers.sort((a, b) => {
+      // Top priority to our own nodes
+      if (a.pubkey === this.server.conf.pair.pub && b.pubkey !== this.server.conf.pair.pub) {
+        return -1
+      } else if (a.pubkey !== this.server.conf.pair.pub && b.pubkey === this.server.conf.pair.pub) {
+        return 1
+      }
+
       const aIsPrefered = prefered.indexOf(a.pubkey) !== -1
       const bIsPrefered = prefered.indexOf(b.pubkey) !== -1
+      const aNumberOfFreeRooms = this.numberOfFreeRooms(a, canReachTorEndpoint, canReachClearEndpoint, isMemberSelf)
+      const bNumberOfFreeRooms = this.numberOfFreeRooms(b, canReachTorEndpoint, canReachClearEndpoint, isMemberSelf)
 
       if (canReachTorEndpoint) {
         const aAtWs2pTorEnpoint = a.endpoints.filter(function (element) { return element.match(CommonConstants.WS2PTOR_REGEXP); }).length > 0
@@ -345,50 +394,104 @@ export class WS2PCluster {
 
         if ( (aAtWs2pTorEnpoint && bAtWs2pTorEnpoint) || (!aAtWs2pTorEnpoint && !bAtWs2pTorEnpoint) ) {
           if ((aIsPrefered && bIsPrefered) || (!aIsPrefered && !bIsPrefered))  {
+            if (aNumberOfFreeRooms > bNumberOfFreeRooms) {
+              return -1
+            } else if (aNumberOfFreeRooms < bNumberOfFreeRooms) {
+              return 1
+            }
             return 0
           } else if (aIsPrefered) {
             return -1
-          } else {
-            return 1
           }
+          return 1
         } else {
           if (aAtWs2pTorEnpoint) {
             return -1
-          } else {
-            return 1
           }
+          return 1
         }
       } else {
         if ((aIsPrefered && bIsPrefered) || (!aIsPrefered && !bIsPrefered))  {
+          if (aNumberOfFreeRooms > bNumberOfFreeRooms) {
+            return -1
+          } else if (aNumberOfFreeRooms < bNumberOfFreeRooms) {
+            return 1
+          }
           return 0
         } else if (aIsPrefered) {
           return -1
-        } else {
-          return 1
         }
+        return 1
       }
     })
     let i = 0
-    const canReachClearEndpoint = ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf)
-    while (i < peers.length && this.clientsCount() < this.maxLevel1Size) {
-      const p = peers[i]
-      const api = p.getWS2P(canReachTorEndpoint, canReachClearEndpoint)
-      if (api) {
-        try {
-          // We do not connect to local host
-          if (!this.server.conf.ws2p || api.uuid !== this.server.conf.ws2p.uuid || p.pubkey !== this.server.conf.pair.pub || api.uuid === '11111111') {
-            await this.connectToRemoteWS(api.host, api.port, api.path, this.messageHandler, p.pubkey, api.uuid)
+    let countPublicNodesWithSameKey:number = 1 // Start at 1 so the loop below still runs when maxPrivate = 0
+    let endpointsNodesWithSameKey:WS2PEndpoint[] = []
+    // Group the peers by bunches
+    const bunchesOfPeers = peers.reduce((bundles:PeerDTO[][], p:PeerDTO) => {
+      let bundleIndex = (bundles.length || 1) - 1
+      // Maximum size of a bundle of peers
+      if (bundles[bundleIndex] && bundles[bundleIndex].length >= WS2PConstants.INITIAL_CONNECTION_PEERS_BUNDLE_SIZE) {
+        bundleIndex++
+      }
+      // We create the bundle if it doesn't exist yet
+      if (!bundles[bundleIndex]) {
+        bundles[bundleIndex] = []
+      }
+      // We feed it with this peer
+      bundles[bundleIndex].push(p)
+      return bundles
+    }, [])
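+    // Illustrative example: with INITIAL_CONNECTION_PEERS_BUNDLE_SIZE = 5, a list of 12 peers
+    // is grouped as [[p1..p5], [p6..p10], [p11, p12]] and processed one bundle at a time below.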
+    while (i < bunchesOfPeers.length && (this.clientsCount() < this.maxLevel1Size || this.numberOfConnectedPublicNodesWithSameKey() < countPublicNodesWithSameKey) ) {
+      this.server.logger.info("WS2P: init: bundle of peers %s/%s", i+1, bunchesOfPeers.length)
+      await Promise.all(bunchesOfPeers[i].map(async p => {
+        if (p.pubkey === this.server.conf.pair.pub) {
+          endpointsNodesWithSameKey = p.getAllWS2PEndpoints(canReachTorEndpoint, canReachClearEndpoint, myUUID)
+          countPublicNodesWithSameKey = endpointsNodesWithSameKey.length
+          for (const api of endpointsNodesWithSameKey) {
+            try {
+              // We do not connect to local host
+              if (api.uuid !== myUUID) {
+                await this.connectToRemoteWS(api.version, api.host, api.port, api.path, this.messageHandler, p.pubkey, api.uuid)
+              }
+            } catch (e) {
+              this.server.logger.debug('WS2P: init: failed connection')
+            }
+          }
+        } else {
+          const api = p.getOnceWS2PEndpoint(canReachTorEndpoint, canReachClearEndpoint)
+          if (api) {
+            try {
+              // Different key than ours: connect to the peer's single WS2P endpoint
+              await this.connectToRemoteWS(api.version, api.host, api.port, api.path, this.messageHandler, p.pubkey, api.uuid)
+            } catch (e) {
+              this.server.logger.debug('WS2P: init: failed connection')
+            }
           }
-        } catch (e) {
-          this.server.logger.debug('WS2P: init: failed connection')
         }
-      }
+      }))
       i++
       // Trim the eventual extra connections
-      setTimeout(() => this.trimClientConnections(), WS2PConstants.CONNEXION_TIMEOUT)
+      setTimeout(() => this.removeLowPriorityConnections(prefered), WS2PConstants.CONNEXION_TIMEOUT)
     }
   }
 
+  private numberOfFreeRooms(p:PeerDTO, canReachTorEndpoint:boolean, canReachClearEndpoint:boolean, isMemberSelf:boolean) {
+    const api = p.getOnceWS2PEndpoint(canReachTorEndpoint, canReachClearEndpoint)
+    if (api) {
+      for (const ws2pFullId in this.headsCache) {
+        if (ws2pFullId.slice(0, 8) == api.uuid) {
+          const messageV2 = this.headsCache[ws2pFullId].messageV2
+          if (messageV2 !== undefined) {
+            const [,,,,,,,,, freeMemberRoom, freeMirrorRoom]:string[] = messageV2.split(':')
+            // HEAD fields are strings: parse them before the numeric comparisons in the sort above
+            return parseInt((isMemberSelf) ? freeMemberRoom:freeMirrorRoom)
+          }
+        }
+      }
+    }
+    return 0
+  }
+
   listenServerFlow() {
     let connectingToNodesByFlow = false
 
@@ -399,15 +502,16 @@ export class WS2PCluster {
         // New peer
         if (data.endpoints) {
           const peer = PeerDTO.fromJSONObject(data)
-          const ws2pEnpoint = peer.getWS2P(ProxiesConf.canReachTorEndpoint(this.server.conf.proxiesConf), ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf))
+          const ws2pEnpoint = peer.getOnceWS2PEndpoint(ProxiesConf.canReachTorEndpoint(this.server.conf.proxiesConf), ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf))
           if (ws2pEnpoint) {
             // Check if already connected to the pubkey (in any way: server or client)
             const connectedPubkeys = this.getConnectedPubkeys()
-            const preferedNodes = (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) ? this.server.conf.ws2p.preferedNodes:[]
-            const shouldAccept = await this.acceptPubkey(peer.pubkey, connectedPubkeys, () => this.clientsCount(), this.maxLevel1Size, preferedNodes, (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || false, ws2pEnpoint.uuid)
-            if (shouldAccept && (!this.server.conf.ws2p || ws2pEnpoint.uuid !== this.server.conf.ws2p.uuid || peer.pubkey !== this.server.conf.pair.pub || ws2pEnpoint.uuid === '11111111')) {
-              await this.connectToRemoteWS(ws2pEnpoint.host, ws2pEnpoint.port, ws2pEnpoint.path, this.messageHandler, peer.pubkey)
-              await this.trimClientConnections()
+            const connectedWS2PUID = this.getConnectedWS2PUID()
+            const preferedKeys = (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) ? this.server.conf.ws2p.preferedNodes:[]
+            const shouldAccept = await this.acceptPubkey(peer.pubkey, connectedPubkeys, connectedWS2PUID, () => this.clientsCount(), this.maxLevel1Size, preferedKeys, (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || false, ws2pEnpoint.uuid)
+            if (shouldAccept && (!this.server.conf.ws2p || ws2pEnpoint.uuid !== this.server.conf.ws2p.uuid || peer.pubkey !== this.server.conf.pair.pub)) {
+              await this.connectToRemoteWS(ws2pEnpoint.version, ws2pEnpoint.host, ws2pEnpoint.port, ws2pEnpoint.path, this.messageHandler, peer.pubkey, ws2pEnpoint.uuid)
+              await this.removeLowPriorityConnections(preferedKeys)
             }
           }
         }
@@ -421,151 +525,211 @@ export class WS2PCluster {
         // HEAD changed
         else if (data.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || data.bcEvent === OtherConstants.BC_EVENT.SWITCHED) {
           // Propagate this change to the network
-          const { sig, message } = this.sayHeadChangedTo(data.block.number, data.block.hash)
+          const myHead = await this.sayHeadChangedTo(data.block.number, data.block.hash)
           try {
-            await this.broadcastHead(message, sig)
+            await this.broadcastHead(myHead)
           } catch (e) {
             this.server.logger.warn(e)
           }
         }
-
-        // WS2P disconnection
-        else if (data.ws2p === 'disconnected') {
-          const nbConnections = this.getAllConnections().length
-          if (nbConnections < WS2PConstants.CONNECTIONS_LOW_LEVEL && !connectingToNodesByFlow) {
-            try {
-              connectingToNodesByFlow = true
-              await this.connectToWS2Peers()
-            } catch (e) {
-              throw e
-            } finally {
-              connectingToNodesByFlow = false
-            }
-          }
-        }
       })()
 
       return data
     }))
   }
 
-  private async broadcastHead(message:string, sig:string) {
-    await this.headsReceived([{ message, sig }])
-    return this.spreadNewHeads([{ message, sig }])
+  private async broadcastHead(head:WS2PHead) {
+    await this.headsReceived([head])
+    return this.spreadNewHeads([head])
   }
 
-  private async spreadNewHeads(heads:{ message:string, sig:string }[]) {
+  private async spreadNewHeads(heads:WS2PHead[]) {
+    heads = this.incrementHeadsStep(heads)
     const connexions = this.getAllConnections()
     return Promise.all(connexions.map(async (c) => {
       try {
-        await c.pushHeads(heads)
+        await c.pushHeads(heads)
       } catch (e) {
-        this.server.logger.warn('Could not spread new HEAD info to %s WS2P %s %s', c.pubkey)
+        this.server.logger.warn('Could not spread new HEAD info to %s WS2PID %s', c.pubkey, c.uuid)
       }
     }))
   }
 
-  private sayHeadChangedTo(number:number, hash:string) {
-    const api = (this.server.conf.ws2p && this.server.conf.ws2p.remotehost && this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) ? 'WS2P':'WS2P'
+  private incrementHeadsStep(heads_:WS2PHead[]) {
+    let heads:WS2PHead[] = []
+    for (let head of heads_) {
+      if (head.step !== undefined) {
+        head.step++
+      }
+      // Prevent injections
+      heads.push({
+        message: head.message,
+        sig: head.sig,
+        messageV2: head.messageV2,
+        sigV2: head.sigV2,
+        step: head.step
+      })
+    }
+    return heads
+  }
+
+  private async sayHeadChangedTo(number:number, hash:string) {
+    const api = this.getApi()
     const key = new Key(this.server.conf.pair.pub, this.server.conf.pair.sec)
-    const pub = key.publicKey
     const software = 'duniter'
     const softVersion = Package.getInstance().version
     const ws2pId = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) || '00000000'
     const prefix = this.server.conf.prefix || ProverConstants.DEFAULT_PEER_ID
-    const message = `${api}:HEAD:1:${pub}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}`
+    const { freeMemberRoom, freeMirrorRoom } = await this.countFreeRooms()
+    const message = `${api}:HEAD:1:${key.publicKey}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}`
     const sig = key.signSync(message)
-    return { sig, message, pub }
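+    // Illustrative HEADs, with placeholder values (not real keys or hashes):
+    //   v1: "WS2POCAIC:HEAD:1:<pub>:<number>-<hash>:<ws2pId>:duniter:1.6.17:1"
+    //   v2: "WS2POCAIC:HEAD:2:<pub>:<number>-<hash>:<ws2pId>:duniter:1.6.17:1:<freeMemberRoom>:<freeMirrorRoom>"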
+    const messageV2 = `${api}:HEAD:2:${key.publicKey}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}:${freeMemberRoom}:${freeMirrorRoom}`
+    const sigV2 = key.signSync(messageV2)
+
+    const myHead:WS2PHead = {
+      message,
+      sig,
+      messageV2,
+      sigV2,
+      step: 0
+    }
+
+    return myHead
+  }
+
+  private getApi() {
+    let api = 'WS2P'
+    let network = {
+      in: WS2PConstants.NETWORK.INCOMING.DEFAULT,
+      out: WS2PConstants.NETWORK.OUTCOMING.DEFAULT,
+    }
+    let ws2pPrivate = ''
+    let ws2pPublic = ''
+    if (this.server.conf.ws2p) {
+      // Public access requires the flag plus either a static remote endpoint or UPnP
+      if (this.server.conf.ws2p.publicAccess &&
+          ((this.server.conf.ws2p.remotehost && this.server.conf.ws2p.remoteport)
+          ||
+          (this.server.conf.ws2p.upnp && this.server.conf.upnp))
+        )
+      {
+        ws2pPublic = 'I'
+        // Determine the network layer
+        if (this.server.conf.ws2p.remotehost && this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) {
+          network.in = WS2PConstants.NETWORK.INCOMING.TOR
+        }
+        // Apply the network layer
+        switch (network.in) {
+          case WS2PConstants.NETWORK.INCOMING.TOR: ws2pPublic += 'T'; break;
+          default: ws2pPublic += 'C'; break;
+        }
+      }
+      if (this.server.conf.ws2p.privateAccess) {
+        ws2pPrivate = 'O'
+        // Determine the network layer
+        if (this.server.conf.proxiesConf && (this.server.conf.proxiesConf.proxyTorAddress || this.server.conf.proxiesConf.forceTor)) {
+          network.out = WS2PConstants.NETWORK.OUTCOMING.TOR
+        }
+        // Apply the network layer
+        switch (network.out) {
+          case WS2PConstants.NETWORK.OUTCOMING.TOR: ws2pPrivate += 'T';
+            if (this.server.conf.proxiesConf && this.server.conf.proxiesConf.reachingClearEp) {
+              switch (this.server.conf.proxiesConf.reachingClearEp) {
+                case 'none': ws2pPrivate += 'S'; break;
+                case 'tor': ws2pPrivate += 'A'; break;
+                default: ws2pPrivate += 'M'; break;
+              }
+            }
+          break;
+          default: ws2pPrivate += 'CA'; break;
+        }
+      }
+    }
+
+    api += ws2pPrivate + ws2pPublic
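+    // e.g. "WS2POCAIC" = private access over clearnet, reaching any endpoint ("OCA"),
+    // plus public access over clearnet ("IC"); a Tor-only node would advertise "WS2POTSIT"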
+    return api
+  }
+
+  private async countFreeRooms() {
+    if (!this.ws2pServer) {
+      return {
+        freeMemberRoom: 0,
+        freeMirrorRoom: 0
+      }
+    }
+
+    let freeMirrorRoom = this.maxLevel2Peers - this.ws2pServer.countConnexions()
+    let freeMemberRoom = freeMirrorRoom
+    const privilegedNodes = (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) ? this.server.conf.ws2p.privilegedNodes:[]
+    for (const c of this.ws2pServer.getConnexions()) {
+      const connexionPriority = await this.keyPriorityLevel(c.pubkey, privilegedNodes)
+      if (connexionPriority < WS2PConstants.CONNECTIONS_PRIORITY.MEMBER_KEY_LEVEL) {
+        freeMemberRoom++
+      }
+    }
+
+    return {
+      freeMemberRoom,
+      freeMirrorRoom
+    }
   }
 
-  async trimClientConnections() {
+  async trimServerConnections() {
+    if (this.ws2pServer) {
+      await this.ws2pServer.removeExcessIncomingConnections()
+    }
+  }
+
+  async removeLowPriorityConnections(preferedKeys:string[]) {
     let serverPubkeys:string[] = []
     if (this.ws2pServer) {
       serverPubkeys = this.ws2pServer.getConnexions().map(c => c.pubkey)
     }
-    let disconnectedOne = true
     // Disconnect Private connexions already present under Public
-    while (disconnectedOne) {
-      disconnectedOne = false
-      let uuids = Object.keys(this.ws2pClients)
-      uuids = _.shuffle(uuids)
-      for (const uuid of uuids) {
-        const client = this.ws2pClients[uuid]
-        const pub = client.connection.pubkey
-        const isNotOurself = pub !== this.server.conf.pair.pub
-        const isAlreadyInPublic = serverPubkeys.indexOf(pub) !== -1
-        if (isNotOurself && isAlreadyInPublic) {
-          client.connection.close()
-          await client.connection.closed
-          disconnectedOne = true
-          if (this.ws2pClients[uuid]) {
-            delete this.ws2pClients[uuid]
-          }
-        }
-      }
-    }
-    // Disconnect non-members
-    while (disconnectedOne && this.clientsCount() > this.maxLevel1Size) {
-      disconnectedOne = false
-      let uuids = Object.keys(this.ws2pClients)
-      uuids = _.shuffle(uuids)
-      for (const uuid of uuids) {
-        const client = this.ws2pClients[uuid]
-        const pub = client.connection.pubkey
-        const isNotOurself = pub !== this.server.conf.pair.pub
-        const isMember = await this.server.dal.isMember(pub)
-        const isPrefered = this.getPreferedNodes().indexOf(pub) !== -1
-        if (isNotOurself && !isMember && !disconnectedOne && !isPrefered) {
-          client.connection.close()
-          await client.connection.closed
-          disconnectedOne = true
-          if (this.ws2pClients[uuid]) {
-            delete this.ws2pClients[uuid]
-          }
-        }
-      }
-    }
-    disconnectedOne = true
-    // Disconnect non-prefered members
-    while (disconnectedOne && this.clientsCount() > this.maxLevel1Size) {
-      disconnectedOne = false
-      let uuids = Object.keys(this.ws2pClients)
-      uuids = _.shuffle(uuids)
-      for (const uuid of uuids) {
-        const client = this.ws2pClients[uuid]
-        const pub = client.connection.pubkey
-        const isNotOurself = pub !== this.server.conf.pair.pub
-        const isPrefered = this.getPreferedNodes().indexOf(pub) !== -1
-        if (isNotOurself && !disconnectedOne && !isPrefered) {
-          client.connection.close()
-          disconnectedOne = true
-          await client.connection.closed
-          if (this.ws2pClients[uuid]) {
-            delete this.ws2pClients[uuid]
-          }
+    let uuids = Object.keys(this.ws2pClients)
+    uuids = _.shuffle(uuids)
+    for (const uuid of uuids) {
+      const client = this.ws2pClients[uuid]
+      const pub = client.connection.pubkey
+      const isNotOurself = pub !== this.server.conf.pair.pub
+      const isAlreadyInPublic = serverPubkeys.indexOf(pub) !== -1
+      if (isNotOurself && isAlreadyInPublic) {
+        client.connection.close()
+        await client.connection.closed
+        if (this.ws2pClients[uuid]) {
+          delete this.ws2pClients[uuid]
         }
       }
     }
-    // Disconnect anything
-    disconnectedOne = true
-    while (disconnectedOne && this.clientsCount() > this.maxLevel1Size) {
-      disconnectedOne = false
+    // Disconnect Private connexions until the maximum size is respected
+    while (this.clientsCount() > this.maxLevel1Size) {
       let uuids = Object.keys(this.ws2pClients)
       uuids = _.shuffle(uuids)
+      let lowPriorityConnectionUUID:string = uuids[0]
+      let minPriorityLevel = await this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
       for (const uuid of uuids) {
         const client = this.ws2pClients[uuid]
-        if (!disconnectedOne) {
-          client.connection.close()
-          disconnectedOne = true
-          await client.connection.closed
-          if (this.ws2pClients[uuid]) {
-            delete this.ws2pClients[uuid]
+          if (uuid !== lowPriorityConnectionUUID) {
+            let uuidPriorityLevel = await this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
+            if (uuidPriorityLevel < minPriorityLevel) {
+              lowPriorityConnectionUUID = uuid
+              minPriorityLevel = uuidPriorityLevel
+            }
           }
-        }
       }
+      this.ws2pClients[lowPriorityConnectionUUID].connection.close()
+      await this.ws2pClients[lowPriorityConnectionUUID].connection.closed
+      delete this.ws2pClients[lowPriorityConnectionUUID]
     }
   }
 
+  async keyPriorityLevel(pubkey:string, preferedOrPrivilegedKeys:string[]) {
+    const isMember = await this.server.dal.isMember(pubkey)
+    let priorityLevel = (isMember) ? WS2PConstants.CONNECTIONS_PRIORITY.MEMBER_KEY_LEVEL:0
+    priorityLevel += (preferedOrPrivilegedKeys.indexOf(pubkey) !== -1) ? WS2PConstants.CONNECTIONS_PRIORITY.PREFERED_PRIVILEGED_KEY_LEVEL:0
+    priorityLevel += (this.server.conf.pair.pub === pubkey) ? WS2PConstants.CONNECTIONS_PRIORITY.SELF_KEY_LEVEL:0
+    return priorityLevel
+  }
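+
+  // Priority levels combine additively: a plain mirror key scores 0, a member key 1, a prefered
+  // or privileged member 1 + 2 = 3, and a key combining all three flags 1 + 2 + 4 = 7
+  // (MAX_PRIORITY_LEVEL).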
+
   private getPreferedNodes(): string[] {
     return (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) || []
   }
@@ -573,64 +737,79 @@ export class WS2PCluster {
   protected async acceptPubkey(
     pub:string,
     connectedPubkeys:string[],
+    connectedWS2PUID:string[],
     getConcurrentConnexionsCount:()=>number,
     maxConcurrentConnexionsSize:number,
     priorityKeys:string[],
     priorityKeysOnly:boolean,
     targetWS2PUID = ""
   ) {
+    if (this.server.conf.pair.pub === pub) {
+      // We do not accept a connection to ourself
+      if ((this.server.conf.ws2p && this.server.conf.ws2p.uuid === targetWS2PUID) || targetWS2PUID === '11111111') {
+        return false
+      } else {
+        // We always accept self nodes, and they have a supreme priority (these are siblings)
+        if (targetWS2PUID === "" || this.isNewSiblingNode(pub, targetWS2PUID, connectedWS2PUID)) {
+          return true
+        } else {
+          // We are already connected to this self node (same WS2PUID)
+          return false
+        }
+      }
+    }
+
     // We do not accept banned keys
     if (this.banned[pub]) {
       this.server.logger.warn('Connection to %s refused, reason: %s', pub.slice(0, 8), this.banned[pub])
       return false
     }
-    let accept = priorityKeys.indexOf(pub) !== -1
-    if (!accept && !priorityKeysOnly && connectedPubkeys.indexOf(pub) === -1) {
-      // Do we have room?
-      if (this.server.conf.pair.pub === pub && this.server.conf.ws2p && this.server.conf.ws2p.uuid === targetWS2PUID) {
-        accept = false
-      }
-      else if (getConcurrentConnexionsCount() < maxConcurrentConnexionsSize) {
-        // Yes: just connect to it
-        accept = true
-      }
-      else {
-        // No: let's verify some peer has a lower priority
-        if (connectedPubkeys.indexOf(this.server.conf.pair.pub) !== -1) {
-          // Yes, we are connected to ourself. Let's replace this connexion
-          accept = true
-        }
-        else {
-          // Does this node have the priority over at least one node?
-          const isMemberPeer = await this.server.dal.isMember(pub)
-          if (isMemberPeer) {
-            // The node may have the priority over at least 1 other node
-            let i = 0, existsOneNonMemberNode = false
-            while (!existsOneNonMemberNode && i < connectedPubkeys.length) {
-              const isAlsoAMemberPeer = await this.server.dal.isMember(connectedPubkeys[i])
-              existsOneNonMemberNode = !isAlsoAMemberPeer
-              i++
-            }
-            if (existsOneNonMemberNode) {
-              // The node has the priority over a non-member peer: try to connect
-              accept = true
-            }
-          }
+
+    // Is this a priority key?
+    let isPriorityKey = priorityKeys.indexOf(pub) !== -1
+
+    // In priority-only mode, we refuse any non-priority key
+    if (priorityKeysOnly && !isPriorityKey && this.server.conf.pair.pub !== pub) {
+      return false
+    }
+
+    // We do not accept keys already connected
+    if (connectedPubkeys.indexOf(pub) !== -1) {
+      return false
+    }
+
+    // Do we have room?
+    if (getConcurrentConnexionsCount() < maxConcurrentConnexionsSize) {
+      // Yes: just connect to it
+      return true
+    }
+    else {
+      let minPriorityLevel = WS2PConstants.CONNECTIONS_PRIORITY.MAX_PRIORITY_LEVEL
+      for (const connectedPubkey of connectedPubkeys) {
+        const connectedPubkeyPriorityLevel = await this.keyPriorityLevel(connectedPubkey, priorityKeys)
+        if (connectedPubkeyPriorityLevel < minPriorityLevel) {
+          minPriorityLevel = connectedPubkeyPriorityLevel
         }
       }
-    } else {
-      // The pubkey is already connected: we accept only self nodes, and they have a supreme priority (these are siblings)
-      if (targetWS2PUID) {
-        if (this.isSiblingNode(pub, targetWS2PUID)) {
-          accept = true
-        }
+      const pubkeyPriorityLevel = await this.keyPriorityLevel(pub, priorityKeys)
+      if (pubkeyPriorityLevel > minPriorityLevel) {
+        return true
       }
     }
-    return accept
+
+    return false
   }
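+
+  // e.g. when the level-1 quota is full and the lowest-priority connected peer is a plain
+  // member (level 1), an incoming prefered member key (level 3) is still accepted; the extra
+  // connection is later trimmed by removeLowPriorityConnections().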
 
-  isSiblingNode(pub:string, uuid:string) {
-    return !!(this.server.conf.pair.pub === pub && this.server.conf.ws2p && this.server.conf.ws2p.uuid !== uuid)
+  isNewSiblingNode(pub:string, targetWS2PUID:string, connectedWS2PUID:string[]) {
+    for (const uuid of connectedWS2PUID) {
+      if (uuid === targetWS2PUID) {
+        return false
+      }
+    }
+    return true
   }
 
   async getLevel1Connections() {
@@ -655,7 +834,7 @@ export class WS2PCluster {
 
   async startCrawling(waitConnection = false) {
     // For connectivity
-    this.reconnectionInteval = setInterval(() => this.server.push({ ws2p: 'disconnected' }), 1000 * WS2PConstants.RECONNEXION_INTERVAL_IN_SEC)
+    this.reconnectionInteval = setInterval(() => this.connectToWS2Peers(), 1000 * WS2PConstants.RECONNEXION_INTERVAL_IN_SEC)
     // For blocks
     if (this.syncBlockInterval)
       clearInterval(this.syncBlockInterval);
@@ -742,6 +921,12 @@ export class WS2PCluster {
     return clients.concat(served)
   }
 
+  getConnectedWS2PUID() {
+    const clients = Object.keys(this.ws2pClients).map(k => this.ws2pClients[k].connection.uuid)
+    const served = this.ws2pServer ? this.ws2pServer.getConnexions().map(c => c.uuid) : []
+    return clients.concat(served)
+  }
+
   banConnection(c:WS2PConnection, reason:string) {
     this.server.logger.warn('Banning connections of %s for %ss, reason: %s', c.pubkey.slice(0, 8), WS2PConstants.BAN_DURATION_IN_SECONDS, reason)
     if (c.pubkey) {
diff --git a/app/modules/ws2p/lib/WS2PConnection.ts b/app/modules/ws2p/lib/WS2PConnection.ts
index 0595e5626b4e307fffdd2be5d149ce6d33323bd7..752a3cf408c9a309b9780a78edb46b396fd70d3d 100644
--- a/app/modules/ws2p/lib/WS2PConnection.ts
+++ b/app/modules/ws2p/lib/WS2PConnection.ts
@@ -6,7 +6,7 @@ import {CertificationDTO} from "../../../lib/dto/CertificationDTO"
 import {MembershipDTO} from "../../../lib/dto/MembershipDTO"
 import {TransactionDTO} from "../../../lib/dto/TransactionDTO"
 import {PeerDTO} from "../../../lib/dto/PeerDTO"
-import {WS2PConstants} from "./constants"
+import { WS2PConstants } from './constants';
 import { ProxiesConf } from '../../../lib/proxy';
 const ws = require('ws')
 const SocksProxyAgent = require('socks-proxy-agent');
@@ -49,15 +49,16 @@ export interface WS2PAuth {
 }
 
 export interface WS2PRemoteAuth extends WS2PAuth {
-  registerCONNECT(challenge:string, sig: string, pub: string): Promise<boolean>
+  registerCONNECT(ws2pVersion:number, challenge:string, sig: string, pub: string, ws2pId:string): Promise<boolean>
   sendACK(ws:any): Promise<void>
   registerOK(sig: string): Promise<boolean>
   isAuthenticatedByRemote(): boolean
   getPubkey(): string
+  getVersion(): number
 }
 
 export interface WS2PLocalAuth extends WS2PAuth {
-  sendCONNECT(ws:any): Promise<void>
+  sendCONNECT(ws:any, ws2pVersion:number): Promise<void>
   registerACK(sig: string, pub: string): Promise<boolean>
   sendOK(ws:any): Promise<void>
   isRemoteAuthenticated(): boolean
@@ -71,6 +72,8 @@ export class WS2PPubkeyRemoteAuth implements WS2PRemoteAuth {
   protected challenge:string
   protected authenticatedByRemote = false
   protected remotePub = ""
+  protected remoteWs2pId = ""
+  protected remoteVersion = 1
   protected serverAuth:Promise<void>
   protected serverAuthResolve:()=>void
   protected serverAuthReject:(err:any)=>void
@@ -87,6 +90,10 @@ export class WS2PPubkeyRemoteAuth implements WS2PRemoteAuth {
     })
   }
 
+  getVersion() {
+    return this.remoteVersion
+  }
+
   getPubkey() {
     return this.remotePub
   }
@@ -102,17 +109,19 @@ export class WS2PPubkeyRemoteAuth implements WS2PRemoteAuth {
     }))
   }
 
-  async registerCONNECT(challenge:string, sig: string, pub: string): Promise<boolean> {
+  async registerCONNECT(ws2pVersion:number, challenge:string, sig: string, pub: string, ws2pId:string = ""): Promise<boolean> {
     const allow = await this.tellIsAuthorizedPubkey(pub)
     if (!allow) {
       return false
     }
-    const challengeMessage = `WS2P:CONNECT:${this.currency}:${pub}:${challenge}`
+    // For protocol v2+, the signed CONNECT message includes the version and the WS2P ID (must mirror sendCONNECT)
+    const challengeMessage = (ws2pVersion > 1) ? `WS2P:${ws2pVersion}:CONNECT:${this.currency}:${pub}:${ws2pId}:${challenge}`:`WS2P:CONNECT:${this.currency}:${pub}:${challenge}`
     Logger.log('registerCONNECT >>> ' + challengeMessage)
     const verified = verify(challengeMessage, sig, pub)
     if (verified) {
+      this.remoteVersion = ws2pVersion
       this.challenge = challenge
       this.remotePub = pub
+      this.remoteWs2pId = ws2pId
     }
     return verified
   }
@@ -152,6 +161,7 @@ export class WS2PPubkeyLocalAuth implements WS2PLocalAuth {
   constructor(
     protected currency:string,
     protected pair:Key,
+    protected ws2pId:string,
     protected tellIsAuthorizedPubkey:(pub: string) => Promise<boolean> = () => Promise.resolve(true)
   ) {
     this.challenge = nuuid.v4() + nuuid.v4()
@@ -161,17 +171,32 @@ export class WS2PPubkeyLocalAuth implements WS2PLocalAuth {
     })
   }
 
-  async sendCONNECT(ws:any): Promise<void> {
-    const challengeMessage = `WS2P:CONNECT:${this.currency}:${this.pair.pub}:${this.challenge}`
-    Logger.log('sendCONNECT >>> ' + challengeMessage)
-    const sig = this.pair.signSync(challengeMessage)
-    await ws.send(JSON.stringify({
-      auth: 'CONNECT',
-      pub: this.pair.pub,
-      challenge: this.challenge,
-      sig
-    }))
-    return this.serverAuth
+  async sendCONNECT(ws:any, ws2pVersion:number): Promise<void> {
+    if (ws2pVersion > 1) {
+      const challengeMessage = `WS2P:${ws2pVersion}:CONNECT:${this.currency}:${this.pair.pub}:${this.ws2pId}:${this.challenge}`
+      Logger.log('sendCONNECT >>> ' + challengeMessage)
+      const sig = this.pair.signSync(challengeMessage)
+      await ws.send(JSON.stringify({
+        auth: 'CONNECT',
+        version: ws2pVersion,
+        pub: this.pair.pub,
+        ws2pid: this.ws2pId,
+        challenge: this.challenge,
+        sig
+      }))
+      return this.serverAuth
+    } else if (ws2pVersion == 1) {
+      const challengeMessage = `WS2P:CONNECT:${this.currency}:${this.pair.pub}:${this.challenge}`
+      Logger.log('sendCONNECT >>> ' + challengeMessage)
+      const sig = this.pair.signSync(challengeMessage)
+      await ws.send(JSON.stringify({
+        auth: 'CONNECT',
+        pub: this.pair.pub,
+        challenge: this.challenge,
+        sig
+      }))
+      return this.serverAuth
+    }
   }
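+
+  // Illustrative v2 CONNECT payload (placeholder values only):
+  //   { "auth": "CONNECT", "version": 2, "pub": "<pubkey>", "ws2pid": "<8-char id>",
+  //     "challenge": "<uuid+uuid>", "sig": "<signature of the CONNECT message>" }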
 
   async registerACK(sig: string, pub: string): Promise<boolean> {
@@ -243,6 +268,7 @@ export class WS2PConnection {
   } = {}
 
   constructor(
+    private ws2pVersion:number,
     private ws:any,
     private onWsOpened:Promise<void>,
     private onWsClosed:Promise<void>,
@@ -256,7 +282,8 @@ export class WS2PConnection {
       connectionTimeout: REQUEST_TIMEOUT_VALUE,
       requestTimeout: REQUEST_TIMEOUT_VALUE
     },
-    private expectedPub:string = ""
+    private expectedPub:string = "",
+    private expectedWS2PUID:string = ""
   ) {
     this.connectedp = new Promise((resolve, reject) => {
       this.connectedResolve = resolve
@@ -265,6 +292,7 @@ export class WS2PConnection {
   }
 
   static newConnectionToAddress(
+    ws2pVersion:number,
     address:string,
     messageHandler:WS2PMessageHandler,
     localAuth:WS2PLocalAuth,
@@ -277,7 +305,8 @@ export class WS2PConnection {
       connectionTimeout: REQUEST_TIMEOUT_VALUE,
       requestTimeout: REQUEST_TIMEOUT_VALUE
     },
-    expectedPub:string = "") {
+    expectedPub:string = "", 
+    expectedWS2PUID:string = "") {
       if (address.match(WS2PConstants.FULL_ADDRESS_ONION_REGEX)) {
         options = {
           connectionTimeout: WS2PConstants.CONNEXION_TOR_TIMEOUT,
@@ -292,7 +321,7 @@ export class WS2PConnection {
       websocket.on('close', () => res())
     })
     websocket.on('error', () => websocket.close())
-    return new WS2PConnection(websocket, onWsOpened, onWsClosed, messageHandler, localAuth, remoteAuth, options, expectedPub)
+    return new WS2PConnection(ws2pVersion, websocket, onWsOpened, onWsClosed, messageHandler, localAuth, remoteAuth, options, expectedPub, expectedWS2PUID)
   }
 
   static newConnectionFromWebSocketServer(
@@ -312,13 +341,21 @@ export class WS2PConnection {
     const onWsClosed:Promise<void> = new Promise(res => {
       websocket.on('close', () => res())
     })
-    return new WS2PConnection(websocket, onWsOpened, onWsClosed, messageHandler, localAuth, remoteAuth, options, expectedPub)
+    return new WS2PConnection(WS2PConstants.WS2P_DEFAULT_API_VERSION, websocket, onWsOpened, onWsClosed, messageHandler, localAuth, remoteAuth, options, expectedPub)
+  }
+
+  get version() {
+    return Math.min(WS2PConstants.WS2P_HEAD_VERSION, this.remoteAuth.getVersion())
   }
 
   get pubkey() {
     return this.remoteAuth.getPubkey()
   }
 
+  get uuid() {
+    return this.expectedWS2PUID
+  }
+
   get nbRequests() {
     return this.nbRequestsCount
   }
@@ -361,7 +398,7 @@ export class WS2PConnection {
             (async () => {
               await this.onWsOpened
               try {
-                await this.localAuth.sendCONNECT(this.ws)
+                await this.localAuth.sendCONNECT(this.ws, this.ws2pVersion)
                 await Promise.all([
                   this.localAuth.authenticationIsDone(),
                   this.remoteAuth.authenticationIsDone()
@@ -391,17 +428,24 @@ export class WS2PConnection {
                 if (data.auth && typeof data.auth === "string") {
 
                   if (data.auth === "CONNECT") {
+                    if (data.version) {
+                      if (typeof data.version !== "number") {
+                        await this.errorDetected(WS2P_ERR.AUTH_INVALID_ASK_FIELDS)
+                      } else {
+                        this.ws2pVersion = data.version
+                      }
+                    }
                     if (this.remoteAuth.isAuthenticatedByRemote()) {
                       return this.errorDetected(WS2P_ERR.ALREADY_AUTHENTICATED_BY_REMOTE)
                     }
                     else if (
-                      typeof data.pub !== "string" || typeof data.sig !== "string" || typeof data.challenge !== "string") {
+                      typeof data.pub !== "string" || typeof data.sig !== "string" || typeof data.challenge !== "string" || (this.ws2pVersion > 1 && typeof data.ws2pid !== "string") ) {
                       await this.errorDetected(WS2P_ERR.AUTH_INVALID_ASK_FIELDS)
                     } else {
                       if (this.expectedPub && data.pub !== this.expectedPub) {
                         await this.errorDetected(WS2P_ERR.INCORRECT_PUBKEY_FOR_REMOTE)
                       } else {
-                        const valid = await this.remoteAuth.registerCONNECT(data.challenge, data.sig, data.pub)
+                        const valid = await this.remoteAuth.registerCONNECT(this.ws2pVersion, data.challenge, data.sig, data.pub, (this.ws2pVersion > 1) ? data.ws2pid:"")
                         if (valid) {
                           await this.remoteAuth.sendACK(this.ws)
                         } else {
@@ -571,7 +615,7 @@ export class WS2PConnection {
     return this.pushData(WS2P_PUSH.PEER, 'peer', peer)
   }
 
-  async pushHeads(heads:{ message:string, sig:string }[]) {
+  async pushHeads(heads:{ message:string, sig:string, messageV2?:string, sigV2?:string, step?:number }[]) {
     return this.pushData(WS2P_PUSH.HEAD, 'heads', heads)
   }
 
diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts
index ae79815fe93af6f0c24ebb355e7d404681165748..ce08493018528ec69b57f2e2c84efc1d1a838685 100644
--- a/app/modules/ws2p/lib/WS2PServer.ts
+++ b/app/modules/ws2p/lib/WS2PServer.ts
@@ -14,33 +14,39 @@ export class WS2PServer extends events.EventEmitter {
 
   private wss:any
   private connections:WS2PConnection[] = []
-  private maxLevel2Size = WS2PConstants.MAX_LEVEL_2_PEERS
 
   private constructor(
     private server:Server,
     private host:string,
     private port:number,
     private fifo:GlobalFifoPromise,
-    private shouldAcceptConnection:(pubkey:string, connectedPubkeys:string[])=>Promise<boolean>) {
+    private shouldAcceptConnection:(pubkey:string, connectedPubkeys:string[])=>Promise<boolean>,
+    public keyPriorityLevel:(pubkey:string, privilegedKeys:string[])=>Promise<number>) {
     super()
-    // Conf: max public connections
-    if (this.server.conf.ws2p && this.server.conf.ws2p.maxPublic !== undefined) {
-      this.maxLevel2Size = this.server.conf.ws2p.maxPublic
-    }
   }
 
   get maxLevel2Peers() {
-    return this.maxLevel2Size || 0
-  }
-
-  set maxLevel2Peers(newValue:number) {
-    this.maxLevel2Size = Math.max(newValue, 0)
+    if (this.server.conf.ws2p && this.server.conf.ws2p.maxPublic !== undefined && this.server.conf.ws2p.maxPublic !== null) {
+      return this.server.conf.ws2p.maxPublic
+    }
+    return WS2PConstants.MAX_LEVEL_2_PEERS
   }
 
   getConnexions() {
     return this.connections.slice()
   }
 
+  countConnexions() {
+    const connections = this.getConnexions()
+    let count = 0
+    for (const c of connections) {
+      if (c.pubkey != this.server.conf.pair.pub) {
+        count++
+      }
+    }
+    return count
+  }
+
   private listenToWebSocketConnections(messageHandler:WS2PMessageHandler) {
     const key = new Key(this.server.conf.pair.pub, this.server.conf.pair.sec)
     this.wss = new WebSocketServer({ host: this.host, port: this.port })
@@ -73,11 +79,11 @@ export class WS2PServer extends events.EventEmitter {
           requestTimeout: WS2PConstants.REQUEST_TOR_TIMEOUT
         }
       }
-
+      const myWs2pId = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) ? this.server.conf.ws2p.uuid:""
       const c = WS2PConnection.newConnectionFromWebSocketServer(
         ws,
         messageHandler,
-        new WS2PPubkeyLocalAuth(this.server.conf.currency, key, acceptPubkey),
+        new WS2PPubkeyLocalAuth(this.server.conf.currency, key, myWs2pId, acceptPubkey),
         new WS2PPubkeyRemoteAuth(this.server.conf.currency, key, acceptPubkey),
         timeout
       )
@@ -92,7 +98,7 @@ export class WS2PServer extends events.EventEmitter {
         })
         this.connections.push(c)
         this.emit('newConnection', c)
-        this.server.logger.info('WS2P: established incoming connection from %s:%s', host, port)
+        this.server.logger.info('WS2P: established incoming connection from %s %s:%s', c.pubkey.slice(0, 8), host, port)
 
         // Broadcasting
         const singleWriteProtection = new WS2PSingleWriteStream()
@@ -108,6 +114,7 @@ export class WS2PServer extends events.EventEmitter {
         ws.on('close', () => {
           this.server.unpipe(singleWriteProtection)
           singleWriteProtection.unpipe(ws2pStreamer)
+          this.server.logger.info('WS2P: close incoming connection from %s %s:%s', c.pubkey.slice(0, 8), host, port)
           this.removeConnection(c)
           this.server.push({
             ws2p: 'disconnected',
@@ -117,7 +124,8 @@ export class WS2PServer extends events.EventEmitter {
           })
         })
 
-        await this.trimConnections()
+        // Remove excess incoming connections
+        await this.removeExcessIncomingConnections()
 
         await this.server.dal.setPeerUP(c.pubkey)
 
@@ -128,44 +136,39 @@ export class WS2PServer extends events.EventEmitter {
     })
   }
 
-  async trimConnections() {
-    /*** OVERFLOW TRIMMING ***/
-    let disconnectedOne = true
-    // Disconnect non-members
-    while (disconnectedOne && this.connections.length > this.maxLevel2Size) {
-      disconnectedOne = false
-      for (const c of this.connections) {
-        const isMember = await this.server.dal.isMember(c.pubkey)
-        if (!isMember && !disconnectedOne) {
-          c.close()
-          this.removeConnection(c)
-          disconnectedOne = true
-        }
-      }
+  async removeExcessIncomingConnections() {
+    await this.removeDuplicateConnections()
+    let privilegedKeys = (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) ? this.server.conf.ws2p.privilegedNodes:[]
+    while (this.countConnexions() > this.maxLevel2Peers) {
+      await this.removeLowPriorityConnection(privilegedKeys)
     }
-    // Disconnect members
-    while (this.connections.length > this.maxLevel2Size) {
-      for (const c of this.connections) {
-        c.close()
+  }
+
+  async removeDuplicateConnections() {
+    let connectedPubkeys:string[] = []
+    // Iterate over a copy: removeConnection() splices this.connections while we loop
+    for (const c of this.getConnexions()) {
+      if (connectedPubkeys.indexOf(c.pubkey) !== -1) {
         this.removeConnection(c)
+      } else if (c.pubkey !== this.server.conf.pair.pub) {
+        connectedPubkeys.push(c.pubkey)
       }
     }
-    /*** DUPLICATES TRIMMING ***/
-    disconnectedOne = true
-    while (disconnectedOne) {
-      disconnectedOne = false
-      const pubkeysFound = []
-      for (const c of this.connections) {
-        if (pubkeysFound.indexOf(c.pubkey) !== -1) {
-          c.close()
-          this.removeConnection(c)
-          disconnectedOne = true
-        }
-        else if (c.pubkey !== this.server.conf.pair.pub) {
-          pubkeysFound.push(c.pubkey)
+  }
+
+  async removeLowPriorityConnection(privilegedKeys:string[]) {
+    let lowPriorityConnection:WS2PConnection = this.connections[0]
+    let minPriorityLevel = await this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
+    for (const c of this.connections) {
+      if (c !== lowPriorityConnection) {
+        let cPriorityLevel = await this.keyPriorityLevel(c.pubkey, privilegedKeys)
+        if (cPriorityLevel < minPriorityLevel) {
+          lowPriorityConnection = c
+          minPriorityLevel = cPriorityLevel
         }
       }
     }
+    this.removeConnection(lowPriorityConnection)
   }
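+
+  // e.g. a mirror peer (priority 0) is evicted before a plain member (1) or a privileged member (3)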
 
   private removeConnection(c:WS2PConnection) {
@@ -173,6 +176,7 @@ export class WS2PServer extends events.EventEmitter {
     if (index !== -1) {
       // Remove the connection
       this.connections.splice(index, 1)
+      c.close()
     }
   }
 
@@ -201,8 +205,8 @@ export class WS2PServer extends events.EventEmitter {
     }))
   }
 
-  static async bindOn(server:Server, host:string, port:number, fifo:GlobalFifoPromise, shouldAcceptConnection:(pubkey:string, connectedPubkeys:string[])=>Promise<boolean>, messageHandler:WS2PMessageHandler) {
-    const ws2ps = new WS2PServer(server, host, port, fifo, shouldAcceptConnection)
+  static async bindOn(server:Server, host:string, port:number, fifo:GlobalFifoPromise, shouldAcceptConnection:(pubkey:string, connectedPubkeys:string[])=>Promise<boolean>, keyPriorityLevel:(pubkey:string, privilegedKeys:string[])=>Promise<number>, messageHandler:WS2PMessageHandler) {
+    const ws2ps = new WS2PServer(server, host, port, fifo, shouldAcceptConnection, keyPriorityLevel)
     await ws2ps.listenToWebSocketConnections(messageHandler)
     server.logger.info('WS2P server %s listening on %s:%s', server.conf.pair.pub, host, port)
     return ws2ps
diff --git a/app/modules/ws2p/lib/constants.ts b/app/modules/ws2p/lib/constants.ts
index 51972d7f7d1dca2c5406d8f7af8fea58bfe740a0..664026793d56ccf07033f930143ab2085b022ab0 100644
--- a/app/modules/ws2p/lib/constants.ts
+++ b/app/modules/ws2p/lib/constants.ts
@@ -1,6 +1,22 @@
 import {CommonConstants} from "../../../lib/common-libs/constants"
 export const WS2PConstants = {
 
+  NETWORK: {
+    INCOMING: {
+      DEFAULT: 0,
+      TOR: 1
+    },
+    OUTCOMING: {
+      DEFAULT: 0,
+      TOR: 1
+    },
+  },
+
+  WS2P_DEFAULT_API_VERSION: 1,
+  WS2P_DEFAULT_HEAD_VERSION: 1,
+  WS2P_API_VERSION: 1,
+  WS2P_HEAD_VERSION: 2,
+
   WS2P_UPNP_TTL: 600,
   WS2P_PORTS_START: 20900,
   WS2P_PORTS_END: 20999,
@@ -16,9 +32,15 @@ export const WS2PConstants = {
   DOCPOOL_PULLING_INTERVAL: 3600 * 4, // 4 hours
   SANDBOX_FIRST_PULL_DELAY: 300 * 2,  // 10 minutes after the start
 
-  MAX_LEVEL_1_PEERS: 10,
-  MAX_LEVEL_2_PEERS: 10,
-  CONNECTIONS_LOW_LEVEL: 3,
+  MAX_LEVEL_1_PEERS: 5,
+  MAX_LEVEL_2_PEERS: 20,
+
+  CONNECTIONS_PRIORITY: {
+    MEMBER_KEY_LEVEL: 1,
+    PREFERED_PRIVILEGED_KEY_LEVEL: 2,
+    SELF_KEY_LEVEL: 4,
+    MAX_PRIORITY_LEVEL: 7,
+  },
 
   BAN_DURATION_IN_SECONDS: 120,
   BAN_ON_REPEAT_THRESHOLD: 5,
@@ -30,7 +52,7 @@ export const WS2PConstants = {
     + CommonConstants.FORMATS.BLOCKSTAMP
     + '$'),
 
-  HEAD_V1_REGEXP: new RegExp('^WS2P(?:O[CT][SAM])?(?:I[CT])?:HEAD:1:'
+  HEAD_V1_REGEXP: new RegExp('^WS2P(?:O[CT][SAM]?)?(?:I[CT])?:HEAD:1:'
   + '(' + CommonConstants.FORMATS.PUBKEY + '):'
   + '(' + CommonConstants.FORMATS.BLOCKSTAMP + '):'
   + '(' + CommonConstants.FORMATS.WS2PID + '):'
@@ -39,8 +61,24 @@ export const WS2PConstants = {
   + '(' + CommonConstants.FORMATS.POW_PREFIX + ')'
   + '$'),
 
+  HEAD_V2_REGEXP: new RegExp('^WS2P(?:O[CT][SAM]?)?(?:I[CT])?:HEAD:2:'
+  + '(' + CommonConstants.FORMATS.PUBKEY + '):'
+  + '(' + CommonConstants.FORMATS.BLOCKSTAMP + '):'
+  + '(' + CommonConstants.FORMATS.WS2PID + '):'
+  + '(' + CommonConstants.FORMATS.SOFTWARE + '):'
+  + '(' + CommonConstants.FORMATS.SOFT_VERSION + '):'
+  + '(' + CommonConstants.FORMATS.POW_PREFIX + '):'
+  + '(' + CommonConstants.FORMATS.ZERO_OR_POSITIVE_INT + '):'
+  + '(' + CommonConstants.FORMATS.ZERO_OR_POSITIVE_INT + ')'
+  + '(?::' + CommonConstants.FORMATS.TIMESTAMP + ')?'
+  + '$'),
+  
+  HEAD_SIG_REGEXP: new RegExp(CommonConstants.FORMATS.SIGNATURE),
+
   HOST_ONION_REGEX: CommonConstants.HOST_ONION_REGEX,
   FULL_ADDRESS_ONION_REGEX: CommonConstants.WS_FULL_ADDRESS_ONION_REGEX,
 
+  INITIAL_CONNECTION_PEERS_BUNDLE_SIZE: 5,
+
   HEADS_SPREAD_TIMEOUT: 100 // Wait 100ms before sending a bunch of signed heads
 }
\ No newline at end of file
diff --git a/app/service/BlockchainService.ts b/app/service/BlockchainService.ts
index ba652048307abf4a5d3bbacc9da9f89edd562bce..e8148d81d2945f241a7f7be44384050b6b75991d 100644
--- a/app/service/BlockchainService.ts
+++ b/app/service/BlockchainService.ts
@@ -217,8 +217,9 @@ export class BlockchainService extends FIFOService {
         } catch (e) {
           this.logger.error(e)
           added = false
+          const theError = e && (e.message || e)
           this.push({
-            blockResolutionError: e && e.message
+            blockResolutionError: theError
           })
         }
         i++
diff --git a/app/service/TransactionsService.ts b/app/service/TransactionsService.ts
index e6ddf1263234920e509ca684438cc19cec1ecb5f..3068c1b71430fb521ad15146ff6fae11fb879965 100644
--- a/app/service/TransactionsService.ts
+++ b/app/service/TransactionsService.ts
@@ -41,7 +41,7 @@ export class TransactionService extends FIFOService {
         // Start checks...
         const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
         const dto = TransactionDTO.fromJSONObject(tx)
-        await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto)
+        await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto, this.conf)
         await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
         await GLOBAL_RULES_HELPERS.checkSingleTransaction(dto, nextBlockWithFakeTimeVariation, this.conf, this.dal, CHECK_PENDING_TRANSACTIONS);
         const server_pubkey = this.conf.pair && this.conf.pair.pub;
diff --git a/appveyor.yml b/appveyor.yml
index 8fbf7baf69c2f8d27ec6c67b7cdfeff7b293b55e..1a271d5e4c01d8f303faad4f390484c16c87b2eb 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,6 @@
 environment:
   matrix:
-    - nodejs_version: "6.9.2"
+    - nodejs_version: "8.9.2"
       ADDON_VERSION: "48"
 
 platform:
diff --git a/bin/duniter b/bin/duniter
index 6def37b84d026e4806a23d4db72b80dab3448c8e..6394374389e297938dfe9cf0aa4221343cb48918 100755
--- a/bin/duniter
+++ b/bin/duniter
@@ -1,7 +1,6 @@
 #!/usr/bin/env node
 "use strict";
 
-const heapdump = require('heapdump'); // Allow to take heap snapshots on will with "kill -USR2 <pid>" --> generates a heapdump-<id>.heapsnapshot file from where duniter was launched
 const co = require('co');
 const duniter = require('../index');
 const stack = duniter.statics.autoStack();
diff --git a/doc/Protocol.md b/doc/Protocol.md
index b657553f46cbe40dcdc0a1a8541732ea4a5f4c15..68a5300a4c3744b878f4c44cf058d8c89764e801 100644
--- a/doc/Protocol.md
+++ b/doc/Protocol.md
@@ -1549,6 +1549,40 @@ TRUE
 > Functionally: we cannot create nor lose money through transactions. We can only transfer coins we own.
 > Functionally: also, we cannot convert a superior unit base into a lower one.
 
+##### Transactions chaining max depth
+
+    FUNCTION `getTransactionDepth(txHash, LOCAL_DEPTH)`:
+
+        INPUTS = LOCAL_SINDEX[op='UPDATE',tx=txHash]
+        DEPTH = LOCAL_DEPTH
+
+        FOR EACH `INPUT` OF `INPUTS`
+            CONSUMED = LOCAL_SINDEX[op='CREATE',identifier=INPUT.identifier,pos=INPUT.pos]
+            IF (CONSUMED != NULL)
+                IF (LOCAL_DEPTH < 5)
+                    DEPTH = MAX(DEPTH, getTransactionDepth(CONSUMED.tx, LOCAL_DEPTH + 1))
+                ELSE
+                    DEPTH++
+                END_IF
+            END_IF
+        END_FOR
+
+        RETURN DEPTH
+
+    END_FUNCTION
+
+Then:
+
+    maxTxChainingDepth = 0
+
+For each `TX_HASH` of `UNIQ(PICK(LOCAL_SINDEX, 'tx'))`:
+
+    maxTxChainingDepth = MAX(maxTxChainingDepth, getTransactionDepth(TX_HASH, 0))
+
+Rule:
+
+    maxTxChainingDepth <= 5
+
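+As an illustration only (not normative), the rule can be sketched in TypeScript; `SindexEntry`
+and the in-memory index are assumptions made for the example, not actual Duniter types:
+
+    interface SindexEntry {
+      op: 'CREATE' | 'UPDATE'
+      tx: string          // hash of the transaction that wrote this entry
+      identifier: string
+      pos: number
+    }
+
+    // Depth of the chain of not-yet-written transactions consumed by `txHash`
+    function getTransactionDepth(txHash: string, sindex: SindexEntry[], localDepth = 0): number {
+      const inputs = sindex.filter(s => s.op === 'UPDATE' && s.tx === txHash)
+      let depth = localDepth
+      for (const input of inputs) {
+        const consumed = sindex.find(s =>
+          s.op === 'CREATE' && s.identifier === input.identifier && s.pos === input.pos)
+        if (consumed) {
+          depth = localDepth < 5
+            ? Math.max(depth, getTransactionDepth(consumed.tx, sindex, localDepth + 1))
+            : depth + 1
+        }
+      }
+      return depth
+    }
+
+    // The local rule: no transaction of the block may chain deeper than 5
+    function respectsMaxTxChainingDepth(sindex: SindexEntry[]): boolean {
+      const txHashes = Array.from(new Set(sindex.map(s => s.tx)))
+      return txHashes.every(tx => getTransactionDepth(tx, sindex) <= 5)
+    }
+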
 #### Global
 
 Global validation verifies the coherence of a locally-validated block, in the context of the whole blockchain, including the block.
@@ -1580,6 +1614,7 @@ Function references:
 > If values count is even, the median is computed over the 2 centered values by an arithmetical median on them, *NOT* rounded.
 
 * *UNIQ* returns a list of the unique values in a list of values
+* *PICK* returns a list of the values by picking a particular property on each record
 * *INTEGER_PART* return the integer part of a number
 * *FIRST* return the first element in a list of values matching the given condition
 * *REDUCE* merges a set of elements into a single one, by extending the non-null properties from each record into the resulting record.
@@ -2244,7 +2279,7 @@ Else:
 
 ####### BR_G102 - ENTRY.age
 
-For each ENTRY in local IINDEX where `op = 'UPDATE'`:
+For each ENTRY in local SINDEX where `op = 'UPDATE'`:
 
     REF_BLOCK = HEAD~<HEAD~1.number + 1 - NUMBER(ENTRY.hash)>[hash=HASH(ENTRY.created_on)]
     
@@ -2266,17 +2301,31 @@ EndIf
 
 For each `LOCAL_SINDEX[op='UPDATE'] as ENTRY`:
 
-    INPUT = REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base])
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
     ENTRY.conditions = INPUT.conditions
     ENTRY.available = INPUT.consumed == false
 
 ####### BR_G47 - ENTRY.isLocked
 
-    ENTRY.isLocked = TX_SOURCE_UNLOCK(REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).conditions, ENTRY)
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
+    ENTRY.isLocked = TX_SOURCE_UNLOCK(INPUT.conditions, ENTRY)
     
 ####### BR_G48 - ENTRY.isTimeLocked
 
-    ENTRY.isTimeLocked = ENTRY.written_time - REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).written_time < ENTRY.locktime
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
+    ENTRY.isTimeLocked = ENTRY.written_time - INPUT.written_time < ENTRY.locktime
 
 ##### Rules
 
diff --git a/doc/contribute-french.md b/doc/contribute-french.md
index 55dd4559a82390b41c6957ec5ec97ad3e5dfcac4..2301c838196b444e0fa62016075f46a0244cef92 100644
--- a/doc/contribute-french.md
+++ b/doc/contribute-french.md
@@ -1,4 +1,7 @@
 # Contribuer au code de Duniter
+
+  **WARNING: This tutorial is obsolete! The new tutorial is here: https://duniter.org/fr/wiki/duniter/tutoriel-dev/**
+
 ## Introduction
 
 Cet article est un tutoriel d'initiation au code source du logiciel Duniter. Celui-ci vous permettra, à travers une succession d'étapes, d'accéder à la maîtrise des outils et méthodes utilisés quotidiennement par les développeurs de Duniter pour créer et modifier le logiciel.
diff --git a/duniter.sh b/duniter.sh
index 23b5fba83e7978bdfc8751d1275bf96d786e3fbc..e40eb5654ee0db74c7bc24fe1a1bb0151486db36 100755
--- a/duniter.sh
+++ b/duniter.sh
@@ -35,8 +35,8 @@ duniter() {
 
 	VERSION=`$NODE -v`
 
-	if [[ $VERSION != v6* ]]; then
-	  echo "$NODE v6 is required";
+	if [[ $VERSION != v8* ]]; then
+	  echo "$NODE v8 is required";
 	else
 
 	  # Calls duniter JS command
diff --git a/gui/index.html b/gui/index.html
index 69f1cfb5bf9a1f7c105834732bc10d86ae8ffa74..a2b2c66d272b21b96d026f986d54307291df09b0 100644
--- a/gui/index.html
+++ b/gui/index.html
@@ -3,7 +3,7 @@
 <head>
   <meta charset="utf-8">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
-  <title>Duniter 1.6.14</title>
+  <title>Duniter 1.6.17</title>
   <style>
     html {
       font-family: "Courier New", Courier, monospace;
diff --git a/index.ts b/index.ts
index 7563b20575717ce26eea50d24505dd5a880f63c9..15903c38a8696907c22b174f13f33c57a3271d6a 100644
--- a/index.ts
+++ b/index.ts
@@ -9,6 +9,7 @@ import {BmaDependency} from "./app/modules/bma/index"
 import {WS2PDependency} from "./app/modules/ws2p/index"
 import {ProverConstants} from "./app/modules/prover/lib/constants"
 import { ProxiesConf } from './app/lib/proxy';
+import {RouterDependency} from "./app/modules/router"
 
 const path = require('path');
 const _ = require('underscore');
@@ -25,9 +26,16 @@ const reapplyDependency   = require('./app/modules/reapply');
 const revertDependency    = require('./app/modules/revert');
 const daemonDependency    = require('./app/modules/daemon');
 const pSignalDependency   = require('./app/modules/peersignal');
-const routerDependency    = require('./app/modules/router');
 const pluginDependency    = require('./app/modules/plugin');
 
+let sigintListening = false
+
+// Trace errors
+process.on('unhandledRejection', (reason) => {
+  logger.error('Unhandled rejection: ' + reason);
+  logger.error(reason);
+});
+
 class Stacks {
 
   static todoOnRunDone:() => any = () => process.exit()
@@ -102,7 +110,7 @@ const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([
   { name: 'duniter-revert',    required: revertDependency },
   { name: 'duniter-daemon',    required: daemonDependency },
   { name: 'duniter-psignal',   required: pSignalDependency },
-  { name: 'duniter-router',    required: routerDependency },
+  { name: 'duniter-router',    required: RouterDependency },
   { name: 'duniter-plugin',    required: pluginDependency },
   { name: 'duniter-prover',    required: ProverDependency },
   { name: 'duniter-keypair',   required: KeypairDependency },
@@ -157,6 +165,8 @@ export interface TransformableDuniterService extends DuniterService, stream.Tran
 
 class Stack {
 
+  private injectedServices = false
+
   private cli:any
   private configLoadingCallbacks:any[]
   private configBeforeSaveCallbacks:any[]
@@ -279,10 +289,12 @@ class Stack {
     }
 
     const server = new Server(home, program.memory === true, commandLineConf(program));
+    let piped = false
 
     // If ever the process gets interrupted
     let isSaving = false;
-    process.on('SIGINT', async () => {
+    if (!sigintListening) {
+      process.on('SIGINT', async () => {
         if (!isSaving) {
           isSaving = true;
           // Save DB
@@ -294,7 +306,9 @@ class Stack {
             process.exit(3);
           }
         }
-    });
+      })
+      sigintListening = true
+    }
 
     // Config or Data reset hooks
     server.resetDataHook = async () => {
@@ -366,26 +380,30 @@ class Stack {
        * Service injection
        * -----------------
        */
-      for (const def of this.definitions) {
-        if (def.service) {
-          // To feed data coming from some I/O (network, disk, other module, ...)
-          if (def.service.input) {
-            this.streams.input.push(def.service.input(server, conf, logger));
-          }
-          // To handle data this has been submitted by INPUT stream
-          if (def.service.process) {
-            this.streams.process.push(def.service.process(server, conf, logger));
-          }
-          // To handle data this has been validated by PROCESS stream
-          if (def.service.output) {
-            this.streams.output.push(def.service.output(server, conf, logger));
-          }
-          // Special service which does not stream anything particular (ex.: piloting the `server` object)
-          if (def.service.neutral) {
-            this.streams.neutral.push(def.service.neutral(server, conf, logger));
+      if (!this.injectedServices) {
+        this.injectedServices = true
+        for (const def of this.definitions) {
+          if (def.service) {
+            // To feed data coming from some I/O (network, disk, other module, ...)
+            if (def.service.input) {
+              this.streams.input.push(def.service.input(server, conf, logger));
+            }
+            // To handle data that has been submitted by the INPUT stream
+            if (def.service.process) {
+              this.streams.process.push(def.service.process(server, conf, logger));
+            }
+            // To handle data that has been validated by the PROCESS stream
+            if (def.service.output) {
+              this.streams.output.push(def.service.output(server, conf, logger));
+            }
+            // Special service which does not stream anything particular (ex.: piloting the `server` object)
+            if (def.service.neutral) {
+              this.streams.neutral.push(def.service.neutral(server, conf, logger));
+            }
           }
         }
       }
+      piped = true
       // All inputs write to global INPUT stream
       for (const module of this.streams.input) module.pipe(this.INPUT);
       // All processes read from global INPUT stream
@@ -408,13 +426,6 @@ class Stack {
           const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral);
           // Any streaming module must implement a `stopService` method
           await Promise.all(modules.map((module:DuniterService) => module.stopService()))
-          // // Stop reading inputs
-          // for (const module of streams.input) module.unpipe();
-          // Stop reading from global INPUT
-          // INPUT.unpipe();
-          // for (const module of streams.process) module.unpipe();
-          // // Stop reading from global PROCESS
-          // PROCESS.unpipe();
         },
 
         this);
@@ -422,17 +433,20 @@ class Stack {
     } catch (e) {
       server.disconnect();
       throw e;
+    } finally {
+      if (piped) {
+        // Unpipe everything, as the command is done
+        for (const module of this.streams.input) module.unpipe()
+        for (const module of this.streams.process) module.unpipe()
+        for (const module of this.streams.output) module.unpipe()
+        this.INPUT.unpipe()
+        this.PROCESS.unpipe()
+      }
     }
   }
 
   executeStack(argv:string[]) {
 
-    // Trace these errors
-    process.on('unhandledRejection', (reason) => {
-      logger.error('Unhandled rejection: ' + reason);
-      logger.error(reason);
-    });
-
     // Executes the command
     return this.cli.execute(argv);
   }
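
The index.ts changes above all serve one goal: letting a command run more than once in the same process (e.g. across integration tests) without stacking duplicate SIGINT listeners, re-injecting services, or leaking stream pipes. An illustrative sketch of the module-level guard pattern, using a plain EventEmitter instead of real process signals:

```typescript
import { EventEmitter } from 'events'

let sigintListening = false // module-level: survives repeated command runs

// Register the handler only once per process, however many times this is called
function listenOnce(bus: EventEmitter, onSigint: () => void) {
  if (!sigintListening) {
    bus.on('SIGINT', onSigint)
    sigintListening = true
  }
}

const bus = new EventEmitter()
listenOnce(bus, () => console.log('Saving DB before exit'))
listenOnce(bus, () => console.log('Saving DB before exit')) // no-op: already listening
bus.emit('SIGINT') // the handler fires exactly once
```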
diff --git a/package.json b/package.json
index b29a3135c768d50c2ad724f9093de01f27405fca..9bfe573d4f5b4bce75abf0540de0a12abeb9ed12 100644
--- a/package.json
+++ b/package.json
@@ -1,8 +1,8 @@
 {
   "name": "duniter",
-  "version": "1.6.14",
+  "version": "1.6.17",
   "engines": {
-    "node": ">=6 <9",
+    "node": ">=8.2.1 <9",
     "npm": ">=3.10"
   },
   "engineStrict": true,
@@ -12,7 +12,7 @@
   "node-main": "./bin/duniter",
   "window": {
     "icon": "duniter.png",
-    "title": "v1.6.14",
+    "title": "v1.6.17",
     "width": 800,
     "height": 800,
     "min_width": 750,
@@ -72,18 +72,17 @@
     "event-stream": "3.3.4",
     "express": "4.15.2",
     "express-fileupload": "0.0.5",
-    "heapdump": "^0.3.9",
     "inquirer": "3.0.6",
     "jison": "0.4.17",
     "js-yaml": "3.8.2",
     "merkle": "0.5.1",
-    "moment": "2.18.1",
+    "moment": "2.19.3",
     "morgan": "1.8.1",
     "multimeter": "0.1.1",
     "naclb": "1.3.9",
     "nnupnp": "1.0.2",
-    "node-uuid": "1.4.8",
     "node-pre-gyp": "0.6.34",
+    "node-uuid": "1.4.8",
     "optimist": "0.6.1",
     "q-io": "1.13.2",
     "querablep": "^0.1.0",
@@ -114,15 +113,14 @@
     "mocha-eslint": "0.1.7",
     "nyc": "^11.0.3",
     "sha1": "",
-    "should": "",
+    "should": "*",
     "source-map-support": "^0.4.15",
     "supertest": "",
     "tmp": "0.0.29",
     "ts-node": "^3.3.0",
     "typescript": "^2.4.1"
   },
-  "peerDependencies": {
-  },
+  "peerDependencies": {},
   "bin": {
     "duniter": "./bin/duniter"
   }
diff --git a/release/arch/arm/build-arm.sh b/release/arch/arm/build-arm.sh
index 07a68370b682640ef60e582e2610da17d344cd9d..9e1299c85be17b35d7ceab0ba86c738ccabb52b6 100755
--- a/release/arch/arm/build-arm.sh
+++ b/release/arch/arm/build-arm.sh
@@ -4,9 +4,12 @@
 export NVM_DIR="$HOME/.nvm"
 [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
 
+
 # Prepare
+NODE_VERSION=8.9.1
 ARCH="`uname -m | sed -e \"s/86_//\"`"
-NVER="v6.11.2"
+NVER="v$NODE_VERSION"
+DUNITER_TAG=$1
 
 # Folders
 INITIAL_DIRECTORY=`pwd`
@@ -14,6 +17,11 @@ ROOT="/tmp/build_duniter"
 DOWNLOADS="$ROOT/downloads"
 RELEASES="$ROOT/releases"
 
+nvm install ${NODE_VERSION}
+nvm use ${NODE_VERSION}
+
+echo "Version de NodeJS : `node -v`"
+
 # -----------
 # Clean sources + releases
 # -----------
@@ -28,10 +36,8 @@ mkdir -p "$DOWNLOADS"
 cd "$DOWNLOADS"
 
 if [ ! -d "$DOWNLOADS/duniter" ]; then
-  git clone https://github.com/duniter/duniter.git
+  mv "$INITIAL_DIRECTORY/duniter-source" duniter
   cd duniter
-  COMMIT=`git rev-list --tags --max-count=1`
-  DUNITER_TAG=`echo $(git describe --tags $COMMIT) | sed 's/^v//'`
   git checkout "v${DUNITER_TAG}"
   cd ..
 fi
@@ -40,10 +46,10 @@ DUNITER_VER="$DUNITER_TAG"
 DUNITER_DEB_VER=" $DUNITER_TAG"
 DUNITER_TAG="v$DUNITER_TAG"
 
-echo "$ARCH"
-echo "$NVER"
-echo "$DUNITER_VER"
-echo "$DUNITER_DEB_VER"
+echo "Arch: $ARCH"
+echo "Nver: $NVER"
+echo "DuniterVer: $DUNITER_VER"
+echo "DebianVer: $DUNITER_DEB_VER"
 
 if [ ! -f "$DOWNLOADS/node-${NVER}-linux-${ARCH}.tar.gz" ]; then
   # Download Node.js and package it with the sources
@@ -60,9 +66,9 @@ cd ${RELEASES}/duniter
 echo "Copying Nodejs"
 cp -R "$DOWNLOADS/node-${NVER}-linux-${ARCH}" node
 
-echo "yarn"
-yarn
-yarn add duniter-ui@1.6.x --save --production
+npm install
+
+npm install duniter-ui@1.6.x --save --production
 SRC=`pwd`
 echo $SRC
 
@@ -80,7 +86,7 @@ mkdir -p duniter_release
 cp -R ${SRC}/* duniter_release/
 
 # Creating DEB packaging
-mv duniter_release/release/arch/debian/package duniter-${ARCH}
+mv duniter_release/release/extra/debian/package duniter-${ARCH}
 mkdir -p duniter-${ARCH}/opt/duniter/
 chmod 755 duniter-${ARCH}/DEBIAN/post*
 chmod 755 duniter-${ARCH}/DEBIAN/pre*
@@ -89,9 +95,9 @@ cd duniter_release
 pwd
 rm -Rf .git
 echo "Zipping..."
-zip -qr ../duniter-desktop.nw *
+zip -qr ../duniter.zip *
 cd ../
-mv duniter-desktop.nw duniter-${ARCH}/opt/duniter/
+mv duniter.zip duniter-${ARCH}/opt/duniter/
 echo "Making package package"
 fakeroot dpkg-deb --build duniter-${ARCH}
 mv duniter-${ARCH}.deb "$INITIAL_DIRECTORY/duniter-server-v${DUNITER_VER}-linux-${ARCH}.deb"
diff --git a/release/arch/debian/Vagrantfile b/release/arch/debian/Vagrantfile
deleted file mode 100644
index da912f7fbaa6332b1081e4f486ca5e24dc3086ea..0000000000000000000000000000000000000000
--- a/release/arch/debian/Vagrantfile
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# All Vagrant configuration is done below. The "2" in Vagrant.configure
-# configures the configuration version (we support older styles for
-# backwards compatibility). Please don't change it unless you know what
-# you're doing.
-Vagrant.configure("2") do |config|
-  # The most common configuration options are documented and commented below.
-  # For a complete reference, please see the online documentation at
-  # https://docs.vagrantup.com.
-
-  # Every Vagrant development environment requires a box. You can search for
-  # boxes at https://atlas.hashicorp.com/search.
-  config.vm.box = "https://s3.eu-central-1.amazonaws.com/duniter/vagrant/duniter_trusty64.box"
-  config.vm.provision :shell, path: "bootstrap.sh"
-
-  # Disable automatic box update checking. If you disable this, then
-  # boxes will only be checked for updates when the user runs
-  # `vagrant box outdated`. This is not recommended.
-  # config.vm.box_check_update = false
-
-  # Create a forwarded port mapping which allows access to a specific port
-  # within the machine from a port on the host machine. In the example below,
-  # accessing "localhost:8080" will access port 80 on the guest machine.
-  # config.vm.network "forwarded_port", guest: 80, host: 8080
-
-  # Create a private network, which allows host-only access to the machine
-  # using a specific IP.
-  # config.vm.network "private_network", ip: "192.168.33.10"
-
-  # Create a public network, which generally matched to bridged network.
-  # Bridged networks make the machine appear as another physical device on
-  # your network.
-  # config.vm.network "public_network"
-
-  # Share an additional folder to the guest VM. The first argument is
-  # the path on the host to the actual folder. The second argument is
-  # the path on the guest to mount the folder. And the optional third
-  # argument is a set of non-required options.
-  # config.vm.synced_folder "../data", "/vagrant_data"
-
-  # Provider-specific configuration so you can fine-tune various
-  # backing providers for Vagrant. These expose provider-specific options.
-  # Example for VirtualBox:
-  #
-   config.vm.provider "virtualbox" do |vb|
-     # Display the VirtualBox GUI when booting the machine
-     #vb.gui = true
-  
-     # Customize the amount of memory on the VM:
-     vb.memory = "2048"
-   end
-  #
-  # View the documentation for the provider you are using for more
-  # information on available options.
-
-  # Define a Vagrant Push strategy for pushing to Atlas. Other push strategies
-  # such as FTP and Heroku are also available. See the documentation at
-  # https://docs.vagrantup.com/v2/push/atlas.html for more information.
-  # config.push.define "atlas" do |push|
-  #   push.app = "YOUR_ATLAS_USERNAME/YOUR_APPLICATION_NAME"
-  # end
-
-  # Enable provisioning with a shell script. Additional provisioners such as
-  # Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the
-  # documentation for more information about their specific syntax and use.
-  # config.vm.provision "shell", inline: <<-SHELL
-  #   apt-get update
-  #   apt-get install -y apache2
-  # SHELL
-end
diff --git a/release/arch/debian/bootstrap.sh b/release/arch/debian/bootstrap.sh
deleted file mode 100644
index 6666f97b5365f01b41da099362a4e4ae51301e2f..0000000000000000000000000000000000000000
--- a/release/arch/debian/bootstrap.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# Yarn
-curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
-echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
-
-# System tools
-apt-get update
-apt-get install --yes git curl build-essential yarn python-minimal zip
-
-# User installation
-sudo su vagrant -c "bash /vagrant/user-bootstrap.sh"
diff --git a/release/arch/debian/build-deb.sh b/release/arch/debian/build-deb.sh
deleted file mode 100644
index 7d8b27a814b0bf89e0c2f23de5e18a0318058ff2..0000000000000000000000000000000000000000
--- a/release/arch/debian/build-deb.sh
+++ /dev/null
@@ -1,218 +0,0 @@
-#!/bin/bash
-
-# NVM
-export NVM_DIR="$HOME/.nvm"
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
-
-# Prepare
-NVER=`node -v`
-DUNITER_TAG=
-ADDON_VERSION=48
-NW_VERSION=0.17.6
-NW_RELEASE="v${NW_VERSION}"
-NW="nwjs-${NW_RELEASE}-linux-x64"
-NW_GZ="${NW}.tar.gz"
-
-# Folders
-ROOT=`pwd`
-DOWNLOADS="$ROOT/downloads"
-RELEASES="$ROOT/releases"
-
-mkdir -p "$DOWNLOADS"
-
-# -----------
-# Clean sources + releases
-# -----------
-rm -rf "$DOWNLOADS/duniter"
-rm -rf "$RELEASES"
-rm -rf /vagrant/*.deb
-rm -rf /vagrant/*.tar.gz
-
-# -----------
-# Downloads
-# -----------
-
-cd "$DOWNLOADS"
-
-if [ ! -d "$DOWNLOADS/duniter" ]; then
-  git clone https://github.com/duniter/duniter.git
-  cd duniter
-  COMMIT=`git rev-list --tags --max-count=1`
-  DUNITER_TAG=`echo $(git describe --tags $COMMIT) | sed 's/^v//'`
-  git checkout "v${DUNITER_TAG}"
-  cd ..
-fi
-
-DUNITER_DEB_VER=" $DUNITER_TAG"
-DUNITER_TAG="v$DUNITER_TAG"
-
-if [ ! -f "$DOWNLOADS/$NW_GZ" ]; then
-  wget https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ}
-  tar xvzf ${NW_GZ}
-fi
-
-if [ ! -f "$DOWNLOADS/node-${NVER}-linux-x64.tar.gz" ]; then
-  # Download Node.js and package it with the sources
-  wget http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz
-  tar xzf node-${NVER}-linux-x64.tar.gz
-fi
-
-# -----------
-# Releases
-# -----------
-
-rm -rf "$RELEASES"
-mkdir -p "$RELEASES"
-
-cp -r "$DOWNLOADS/duniter" "$RELEASES/duniter"
-cd "$RELEASES"
-
-# NPM build
-cp -r duniter _npm
-
-# Releases builds
-cd ${RELEASES}/duniter
-# Remove git files
-rm -Rf .git
-[[ $? -eq 0 ]] && echo ">> VM: building modules..."
-[[ $? -eq 0 ]] && yarn
-#[[ $? -eq 0 ]] && echo ">> VM: running tests..."
-#[[ $? -eq 0 ]] && yarn test
-
-# Duniter UI
-[[ $? -eq 0 ]] && yarn add duniter-ui@1.6.x
-
-[[ $? -eq 0 ]] && npm prune --production
-
-
-# Specific modules that are not needed in a release
-rm -rf node_modules/materialize-css
-rm -rf node_modules/duniter-ui/app
-rm -rf node_modules/duniter-ui/vendor
-rm -rf node_modules/scryptb/node_modules/node-pre-gyp
-rm -rf node_modules/naclb/node_modules/node-pre-gyp
-rm -rf node_modules/wotb/node_modules/node-pre-gyp
-rm -rf node_modules/sqlite3/build
-
-cp -r "$RELEASES/duniter" "$RELEASES/desktop_"
-cp -r "$RELEASES/duniter" "$RELEASES/server_"
-
-# -------------------------------------------------
-# Build Desktop version (Nw.js is embedded)
-# -------------------------------------------------
-
-cd "$RELEASES/desktop_"
-echo "$NW_RELEASE"
-
-cd "$RELEASES/desktop_/node_modules/wotb"
-#yarn --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/wotb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/wotb.node
-cd "$RELEASES/desktop_/node_modules/naclb"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/naclb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/naclb.node
-cd "$RELEASES/desktop_/node_modules/scryptb"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/scryptb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/scryptb.node
-cd "$RELEASES/desktop_/node_modules/sqlite3"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/node-webkit-$NW_RELEASE-linux-x64/node_sqlite3.node lib/binding/node-v$ADDON_VERSION-linux-x64/node_sqlite3.node
-cd "$RELEASES/desktop_/node_modules/heapdump"
-nw-gyp --target=$NW_VERSION configure
-nw-gyp --target=$NW_VERSION build
-
-# Unused binaries
-cd "$RELEASES/desktop_/"
-rm -rf node_modules/sqlite3/build
-#rm -rf node_modules/naclb/build
-#rm -rf node_modules/wotb/build
-#rm -rf node_modules/scryptb/build
-
-## Install Nw.js
-mkdir -p "$RELEASES/desktop_release"
-
-# -------------------------------------------------
-# Build Desktop version .tar.gz
-# -------------------------------------------------
-
-cp -r $DOWNLOADS/${NW}/* "$RELEASES/desktop_release/"
-# Embed Node.js with Nw.js to make Duniter modules installable
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/lib "$RELEASES/desktop_release/"
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/include "$RELEASES/desktop_release/"
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/bin "$RELEASES/desktop_release/"
-# Add some specific files for GUI
-cp ${RELEASES}/desktop_/gui/* "$RELEASES/desktop_release/"
-# Add Duniter sources
-cp -R $RELEASES/desktop_/* "$RELEASES/desktop_release/"
-## Insert Nw specific fields while they do not exist (1.3.3)
-sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "$RELEASES/desktop_release/package.json"
-# Add links for Node.js + NPM
-cd "$RELEASES/desktop_release/bin"
-ln -s ../lib/node_modules/npm/bin/npm-cli.js ./npm -f
-cd ..
-ln -s ./bin/node node -f
-ln -s ./bin/npm npm -f
-#sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
-# Create a copy for TGZ binary
-cp -R "$RELEASES/desktop_release" "$RELEASES/desktop_release_tgz"
-#cd "$RELEASES/desktop_release_tgz/"
-#rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-cd "$RELEASES/desktop_release_tgz"
-tar czf /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz * --exclude ".git" --exclude "coverage" --exclude "test"
-
-# -------------------------------------------------
-# Build Desktop version .deb
-# -------------------------------------------------
-
-# Create .deb tree + package it
-#cp -r "$RELEASES/desktop_release/release/arch/debian/package" "$RELEASES/duniter-x64"
-cp -r "/vagrant/package" "$RELEASES/duniter-x64"
-mkdir -p "$RELEASES/duniter-x64/opt/duniter/"
-chmod 755 ${RELEASES}/duniter-x64/DEBIAN/post*
-chmod 755 ${RELEASES}/duniter-x64/DEBIAN/pre*
-sed -i "s/Version:.*/Version:$DUNITER_DEB_VER/g" ${RELEASES}/duniter-x64/DEBIAN/control
-cd ${RELEASES}/desktop_release/
-#rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/sqlite3/lib/binding/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-zip -qr ${RELEASES}/duniter-x64/opt/duniter/duniter-desktop.nw *
-
-sed -i "s/Package: .*/Package: duniter-desktop/g" ${RELEASES}/duniter-x64/DEBIAN/control
-cd ${RELEASES}/
-fakeroot dpkg-deb --build duniter-x64
-mv duniter-x64.deb /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.deb
-
-# -------------------------------------------------
-# Build Server version (Node.js is embedded, not Nw.js)
-# -------------------------------------------------
-
-cd ${RELEASES}
-rm -rf duniter-server-x64
-cp -r duniter-x64 duniter-server-x64
-
-# Remove Nw.js
-rm -rf duniter-server-x64/opt/duniter/duniter-desktop.nw*
-
-cd ${RELEASES}/server_
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64 node
-zip -qr ${RELEASES}/duniter-server-x64/opt/duniter/duniter-desktop.nw *
-cd ${RELEASES}
-sed -i "s/Package: .*/Package: duniter/g" ${RELEASES}/duniter-server-x64/DEBIAN/control
-rm -rf ${RELEASES}/duniter-server-x64/usr
-fakeroot dpkg-deb --build duniter-server-x64
-mv duniter-server-x64.deb /vagrant/duniter-server-${DUNITER_TAG}-linux-x64.deb
diff --git a/release/arch/debian/user-bootstrap.sh b/release/arch/debian/user-bootstrap.sh
deleted file mode 100644
index 38df75d12426297d394d40c3113496de092d6718..0000000000000000000000000000000000000000
--- a/release/arch/debian/user-bootstrap.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# NVM
-curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.1/install.sh | bash
-export NVM_DIR="$HOME/.nvm"
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
-
-# Node.js
-nvm install 6
-
-# node-pre-gyp
-npm install -g nw-gyp node-pre-gyp
diff --git a/release/arch/linux/0.24.4_common.gypi b/release/arch/linux/0.24.4_common.gypi
new file mode 100644
index 0000000000000000000000000000000000000000..d753e62ae583f9ee7aad7e686681647104568baa
--- /dev/null
+++ b/release/arch/linux/0.24.4_common.gypi
@@ -0,0 +1,521 @@
+{
+  'variables': {
+    'asan%': 0,
+    'werror': '',                     # Turn off -Werror in V8 build.
+    'visibility%': 'hidden',          # V8's visibility setting
+    'target_arch%': 'ia32',           # set v8's target architecture
+    'host_arch%': 'ia32',             # set v8's host architecture
+    'want_separate_host_toolset%': 0, # V8 should not build target and host
+    'library%': 'static_library',     # allow override to 'shared_library' for DLL/.so builds
+    'component%': 'static_library',   # NB. these names match with what V8 expects
+    'msvs_multi_core_compile': '0',   # we do enable multicore compiles, but not using the V8 way
+    'python%': 'python',
+
+    'node_shared%': 'true',
+    'force_dynamic_crt%': 0,
+    'node_use_v8_platform%': 'true',
+    'node_use_bundled_v8%': 'true',
+    'node_module_version%': '',
+    'mac_product_name': 'nwjs',
+
+    'node_tag%': '',
+    'uv_library%': 'static_library',
+
+    'openssl_fips': '',
+
+    # Default to -O0 for debug builds.
+    'v8_optimized_debug%': 0,
+
+    # Enable disassembler for `--print-code` v8 options
+    'v8_enable_disassembler': 1,
+    'v8_host_byteorder': '<!(python -c "import sys; print sys.byteorder")',
+
+    'v8_use_external_startup_data': 1,
+    'v8_enable_i18n_support%': 1,
+    #'icu_use_data_file_flag%': 1,
+    'win_fastlink': 0,
+
+    # Don't use ICU data file (icudtl.dat) from V8, we use our own.
+    'icu_use_data_file_flag%': 0,
+
+    'conditions': [
+      ['OS == "win"', {
+        'os_posix': 0,
+        'v8_postmortem_support%': 'false',
+        'OBJ_DIR': '<(PRODUCT_DIR)/obj',
+        'V8_BASE': '<(PRODUCT_DIR)/lib/v8_libbase.lib',
+      }, {
+        'os_posix': 1,
+        'v8_postmortem_support%': 'true',
+        'clang_dir': '<!(cd <(DEPTH) && pwd -P)/third_party/llvm-build/Release+Asserts',
+      }],
+      ['OS=="linux" and target_arch=="ia32"', {
+        'sysroot': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_jessie_i386-sysroot',
+      }],
+      ['OS=="linux" and target_arch=="x64"', {
+        'sysroot': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_jessie_amd64-sysroot',
+      }],
+      ['OS== "mac"', {
+        'conditions': [
+          ['GENERATOR=="ninja"', {
+            'OBJ_DIR': '<(PRODUCT_DIR)/obj',
+            'V8_BASE': '<(PRODUCT_DIR)/obj/deps/v8/src/libv8_base.a',
+          }, {
+            'OBJ_DIR%': '<(PRODUCT_DIR)/obj.target',
+            'V8_BASE%': '<(PRODUCT_DIR)/obj.target/deps/v8/src/libv8_base.a',
+          }],
+        ],
+      }],
+      ['openssl_fips != ""', {
+        'OPENSSL_PRODUCT': 'libcrypto.a',
+      }, {
+        'OPENSSL_PRODUCT': 'libopenssl.a',
+      }],
+      ['OS=="mac"', {
+        'clang%': 1,
+      }, {
+        'clang%': 0,
+      }],
+    ],
+  },
+
+  'conditions': [
+      [ 'clang==1 and OS != "mac"', {
+        'make_global_settings': [
+          ['CC', '<(clang_dir)/bin/clang'],
+          ['CXX', '<(clang_dir)/bin/clang++'],
+          ['CC.host', '$(CC)'],
+          ['CXX.host', '$(CXX)'],
+        ],
+      }],
+  ],
+  'target_defaults': {
+    'default_configuration': 'Release',
+    'variables': {
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+      ],
+    },
+    'configurations': {
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_settings':{
+          'VCCLCompilerTool': {
+            'AdditionalOptions': [
+              '/bigobj',
+              # Tell the compiler to crash on failures. This is undocumented
+              # and unsupported but very handy.
+              '/d2FastFail',
+            ],
+          },
+          'VCLinkerTool': {
+            # Add the default import libs.
+            'AdditionalDependencies': [
+              'kernel32.lib',
+              'gdi32.lib',
+              'winspool.lib',
+              'comdlg32.lib',
+              'advapi32.lib',
+              'shell32.lib',
+              'ole32.lib',
+              'oleaut32.lib',
+              'user32.lib',
+              'uuid.lib',
+              'odbc32.lib',
+              'odbccp32.lib',
+              'delayimp.lib',
+              'credui.lib',
+              'dbghelp.lib',
+              'shlwapi.lib',
+              'winmm.lib',
+            ],
+            'AdditionalOptions': [
+              # Suggested by Microsoft Devrel to avoid
+              #   LINK : fatal error LNK1248: image size (80000000) exceeds maximum allowable size (80000000)
+              # which started happening more regularly after VS2013 Update 4.
+              # Needs to be a bit lower for VS2015, or else errors out.
+              '/maxilksize:0x7ff00000',
+              # Tell the linker to crash on failures.
+              '/fastfail',
+            ],
+          },
+        },
+        'conditions': [
+          ['OS=="win" and win_fastlink==1 and MSVS_VERSION != "2013"', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # /PROFILE is incompatible with /debug:fastlink
+                'Profile': 'false',
+                'AdditionalOptions': [
+                  # Tell VS 2015+ to create a PDB that references debug
+                  # information in .obj and .lib files instead of copying
+                  # it all.
+                  '/DEBUG:FASTLINK',
+                ],
+              },
+            },
+          }],
+          ['OS=="win" and MSVS_VERSION == "2015"', {
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  # Work around crbug.com/526851, bug in VS 2015 RTM compiler.
+                  '/Zc:sizedDealloc-',
+                  # Disable thread-safe statics to avoid overhead and because
+                  # they are disabled on other platforms. See crbug.com/587210
+                  # and -fno-threadsafe-statics.
+                  '/Zc:threadSafeInit-',
+                ],
+              },
+            },
+          }],
+        ],
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'variables': {
+          'v8_enable_handle_zapping': 1,
+        },
+        'defines': [ 'DEBUG', '_DEBUG', 'V8_ENABLE_CHECKS' ],
+        'cflags': [ '-g', '-O0' ],
+        'conditions': [
+          ['target_arch=="x64"', {
+            'msvs_configuration_platform': 'x64',
+          }],
+          ['OS=="aix"', {
+            'cflags': [ '-gxcoff' ],
+            'ldflags': [ '-Wl,-bbigtoc' ],
+          }],
+          ['OS == "android"', {
+            'cflags': [ '-fPIE' ],
+            'ldflags': [ '-fPIE', '-pie' ]
+          }],
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)', # static debug
+            'Optimization': 0, # /Od, no optimization
+            'MinimalRebuild': 'false',
+            'OmitFramePointers': 'false',
+            'BasicRuntimeChecks': 3, # /RTC1
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': 2, # enable incremental linking
+          },
+        },
+        'xcode_settings': {
+          'GCC_OPTIMIZATION_LEVEL': '0', # stop gyp from defaulting to -Os
+        },
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'variables': {
+          'v8_enable_handle_zapping': 0,
+        },
+        'cflags': [ '-O3' ],
+        'conditions': [
+          ['target_arch=="x64"', {
+            'msvs_configuration_platform': 'x64',
+          }],
+          ['OS=="solaris"', {
+            # pull in V8's postmortem metadata
+            'ldflags': [ '-Wl,-z,allextract' ]
+          }],
+          ['OS!="mac" and OS!="win"', {
+            'cflags': [ '-fno-omit-frame-pointer' ],
+          }],
+          ['OS == "android"', {
+            'cflags': [ '-fPIE' ],
+            'ldflags': [ '-fPIE', '-pie' ]
+          }],
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)', # static release
+            'Optimization': 3, # /Ox, full optimization
+            'FavorSizeOrSpeed': 1, # /Ot, favour speed over size
+            'InlineFunctionExpansion': 2, # /Ob2, inline anything eligible
+            'WholeProgramOptimization': 'true', # /GL, whole program optimization, needed for LTCG
+            'OmitFramePointers': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'EnableIntrinsicFunctions': 'true',
+            'RuntimeTypeInfo': 'false',
+            'AdditionalOptions': [
+              '/MP', # compile across multiple CPUs
+            ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': [
+              '/LTCG', # link time code generation
+            ],
+          },
+          'VCLinkerTool': {
+            'LinkTimeCodeGeneration': 1, # link-time code generation
+            'OptimizeReferences': 2, # /OPT:REF
+            'EnableCOMDATFolding': 2, # /OPT:ICF
+            'LinkIncremental': 1, # disable incremental linking
+          },
+        },
+      },
+      'Debug': {
+        'inherit_from': ['Common_Base', 'Debug_Base'],
+      },
+      'Release': {
+        'inherit_from': ['Common_Base', 'Release_Base'],
+      },
+      'conditions': [
+        [ 'OS=="win"', {
+              'Debug_x64': { 'inherit_from': ['Debug'] },
+              'Release_x64': { 'inherit_from': ['Release'], },
+        }],
+      ],
+    },
+    # Forcibly disable -Werror.  We support a wide range of compilers, it's
+    # simply not feasible to squelch all warnings, never mind that the
+    # libraries in deps/ are not under our control.
+    'cflags!': ['-Werror'],
+    'msvs_settings': {
+      'VCCLCompilerTool': {
+        'StringPooling': 'true', # pool string literals
+        'DebugInformationFormat': 3, # Generate a PDB
+        'WarningLevel': 3,
+        'BufferSecurityCheck': 'true',
+        'ExceptionHandling': 0, # /EHsc
+        'SuppressStartupBanner': 'true',
+        # Disable "warning C4267: conversion from 'size_t' to 'int',
+        # possible loss of data".  Many originate from our dependencies
+        # and their sheer number drowns out other, more legitimate warnings.
+        'DisableSpecificWarnings': ['4267'],
+        'WarnAsError': 'false',
+      },
+      'VCLibrarianTool': {
+      },
+      'VCLinkerTool': {
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'TargetMachine' : 1, # /MACHINE:X86
+            'target_conditions': [
+              ['_type=="executable"', {
+                'AdditionalOptions': [ '/SubSystem:Console,"5.01"' ],
+              }],
+            ],
+          }],
+          ['target_arch=="x64"', {
+            'TargetMachine' : 17, # /MACHINE:AMD64
+            'target_conditions': [
+              ['_type=="executable"', {
+                'AdditionalOptions': [ '/SubSystem:Console,"5.02"' ],
+              }],
+            ],
+          }],
+        ],
+        'GenerateDebugInformation': 'true',
+        'GenerateMapFile': 'true', # /MAP
+        'MapExports': 'true', # /MAPINFO:EXPORTS
+        'RandomizedBaseAddress': 2, # enable ASLR
+        'DataExecutionPrevention': 2, # enable DEP
+        'AllowIsolation': 'true',
+        'SuppressStartupBanner': 'true',
+      },
+    },
+    'msvs_disabled_warnings': [4351, 4355, 4800, 4595],
+    'conditions': [
+      ['asan == 1 and OS != "mac"', {
+        'cflags+': [
+          '-fno-omit-frame-pointer',
+          '-fsanitize=address',
+          '-DLEAK_SANITIZER'
+        ],
+        'cflags!': [ '-fomit-frame-pointer' ],
+        'ldflags': [ '-fsanitize=address' ],
+      }],
+      ['asan == 1 and OS == "mac"', {
+        'xcode_settings': {
+          'OTHER_CFLAGS+': [
+            '-fno-omit-frame-pointer',
+            '-gline-tables-only',
+            '-fsanitize=address',
+            '-DLEAK_SANITIZER'
+          ],
+          'OTHER_CFLAGS!': [
+            '-fomit-frame-pointer',
+          ],
+        },
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-fsanitize=address']},
+          }],
+        ],
+      }],
+      ['OS == "win"', {
+        'msvs_cygwin_shell': 0, # prevent actions from trying to use cygwin
+        'defines': [
+          'WIN32',
+          # we don't really want VC++ warning us about
+          # how dangerous C functions are...
+          '_CRT_SECURE_NO_DEPRECATE',
+          # ... or that C implementations shouldn't use
+          # POSIX names
+          '_CRT_NONSTDC_NO_DEPRECATE',
+          # Make sure the STL doesn't try to use exceptions
+          '_HAS_EXCEPTIONS=0',
+          #'BUILDING_V8_SHARED=1',
+          'BUILDING_UV_SHARED=1',
+        ],
+      }],
+      [ 'OS in "linux freebsd openbsd solaris aix"', {
+        'cflags': [ '-pthread'],
+        'ldflags': [ '-pthread'],
+      }],
+      [ 'OS in "linux freebsd openbsd solaris android aix"', {
+        'cflags': [ '-Wall', '-Wextra', '-Wno-unused-parameter', ],
+        'cflags_cc': [ '-fno-rtti', '-fno-exceptions', '-std=gnu++0x' ],
+        'ldflags': [ '-rdynamic' ],
+        'target_conditions': [
+          # The 1990s toolchain on SmartOS can't handle thin archives.
+          ['_type=="static_library" and OS=="solaris"', {
+            'standalone_static_library': 1,
+          }],
+          ['OS=="openbsd"', {
+            'ldflags': [ '-Wl,-z,wxneeded' ],
+          }],
+        ],
+        'conditions': [
+          [ 'target_arch=="ia32"', {
+            'cflags': [ '-m32', '--sysroot=<(sysroot)' ],
+            'ldflags': [ '-m32','--sysroot=<(sysroot)','<!(<(DEPTH)/content/nw/tools/sysroot_ld_path.sh <(sysroot))' ],
+          }],
+          [ 'target_arch=="x32"', {
+            'cflags': [ '-mx32' ],
+            'ldflags': [ '-mx32' ],
+          }],
+          [ 'target_arch=="x64"', {
+            'cflags': [ '-m64' ],
+            'ldflags': [ '-m64' ],
+          }],
+          [ 'target_arch=="ppc" and OS!="aix"', {
+            'cflags': [ '-m32' ],
+            'ldflags': [ '-m32' ],
+          }],
+          [ 'target_arch=="ppc64" and OS!="aix"', {
+            'cflags': [ '-m64', '-mminimal-toc' ],
+            'ldflags': [ '-m64' ],
+          }],
+          [ 'target_arch=="s390"', {
+            'cflags': [ '-m31', '-march=z196' ],
+            'ldflags': [ '-m31', '-march=z196' ],
+          }],
+          [ 'target_arch=="s390x"', {
+            'cflags': [ '-m64', '-march=z196' ],
+            'ldflags': [ '-m64', '-march=z196' ],
+          }],
+          [ 'OS=="solaris"', {
+            'cflags': [ '-pthreads' ],
+            'ldflags': [ '-pthreads' ],
+            'cflags!': [ '-pthread' ],
+            'ldflags!': [ '-pthread' ],
+          }],
+          [ 'OS=="aix"', {
+            'conditions': [
+              [ 'target_arch=="ppc"', {
+                'ldflags': [ '-Wl,-bmaxdata:0x60000000/dsa' ],
+              }],
+              [ 'target_arch=="ppc64"', {
+                'cflags': [ '-maix64' ],
+                'ldflags': [ '-maix64' ],
+              }],
+            ],
+            'ldflags': [ '-Wl,-bbigtoc' ],
+            'ldflags!': [ '-rdynamic' ],
+          }],
+          [ 'node_shared=="true"', {
+            'cflags': [ '-fPIC' ],
+          }],
+        ],
+      }],
+      ['OS=="android"', {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'defines': [ '_GLIBCXX_USE_C99_MATH' ],
+            'libraries': [ '-llog' ],
+          }],
+        ],
+      }],
+      ['OS=="mac"', {
+        'defines': ['_DARWIN_USE_64_BIT_INODE=1'],
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'PREBINDING': 'NO',                       # No -Wl,-prebind
+          'MACOSX_DEPLOYMENT_TARGET': '10.7',       # -mmacosx-version-min=10.7
+          'USE_HEADERMAP': 'NO',
+          'OTHER_CFLAGS': [
+            '-fno-strict-aliasing',
+          ],
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-W',
+            '-Wno-unused-parameter',
+          ],
+        },
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-no_pie',
+                '-Wl,-search_paths_first',
+              ],
+            },
+          }],
+        ],
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'xcode_settings': {'ARCHS': ['i386']},
+          }],
+          ['target_arch=="x64"', {
+            'xcode_settings': {'ARCHS': ['x86_64']},
+          }],
+          ['clang==1', {
+            'xcode_settings': {
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++0x',  # -std=gnu++0x
+              'CLANG_CXX_LIBRARY': 'libc++',
+            },
+          }],
+        ],
+      }],
+      ['OS=="freebsd" and node_use_dtrace=="true"', {
+        'libraries': [ '-lelf' ],
+      }],
+      ['OS=="freebsd"', {
+        'conditions': [
+          ['llvm_version < "4.0"', {
+            # Use this flag because on FreeBSD std::pairs copy constructor is non-trivial.
+            # Doesn't apply to llvm 4.0 (FreeBSD 11.1) or later.
+            # Refs: https://lists.freebsd.org/pipermail/freebsd-toolchain/2016-March/002094.html
+            # Refs: https://svnweb.freebsd.org/ports/head/www/node/Makefile?revision=444555&view=markup
+            'cflags': [ '-D_LIBCPP_TRIVIAL_PAIR_COPY_CTOR=1' ],
+          }],
+        ],
+        'ldflags': [
+          '-Wl,--export-dynamic',
+        ],
+      }]
+    ],
+  }
+}
\ No newline at end of file
diff --git a/release/arch/linux/build-lin.sh b/release/arch/linux/build-lin.sh
new file mode 100644
index 0000000000000000000000000000000000000000..404135125e5fc3d1926d35c9e355243eb302e7df
--- /dev/null
+++ b/release/arch/linux/build-lin.sh
@@ -0,0 +1,250 @@
+#!/bin/bash
+
+if [[ -z "${1}" ]]; then
+	echo "Fatal: no version given to build script"
+	exit 1
+fi
+if [[ -s "$NVM_DIR/nvm.sh" ]]; then
+	source "$NVM_DIR/nvm.sh"
+else
+	echo "Fatal: could not load nvm"
+	exit 1
+fi
+
+# ---------
+# Functions
+# ---------
+
+# Copy an nw.js-compiled module's Release library into the node libraries.
+# -
+# Parameters:
+# 1. Module name.
+nw_copy() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/Release/node-webkit-v${NW_VERSION}-linux-x64/${1}.node \
+		lib/binding/Release/node-v${ADDON_VERSION}-linux-x64/${1}.node || exit 1
+}
+
+# Copy an nw.js-compiled module library into the node libraries, for bindings with a node_ prefix.
+# -
+# Parameters:
+# 1. Module name.
+nw_copy_node() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/node-webkit-v${NW_VERSION}-linux-x64/node_${1}.node \
+		lib/binding/node-v${ADDON_VERSION}-linux-x64/node_${1}.node || exit 1
+}
+
+# Compile the module with nw.js.
+# -
+# Parameters:
+# 1. Module name.
+# 2. Action to be done to module after compilation, if needed.
+nw_compile() {
+	[[ -z ${1} ]] && exit 1
+	cd ${1} || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} configure || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} build || exit 1
+	[[ -z ${2} ]] || ${2} ${1}
+	cd ..
+}
+
+# Create a release description (.desc) file.
+# -
+# Parameters:
+# 1. Initial file name.
+# 2. Building type (either “desktop” or “server”).
+# 3. Category (OS, distribution).
+create_desc() {
+	cat >"${1}".desc <<-EOF
+	{
+	  "version": "${DUNITER_TAG}",
+	  "job": "${CI_JOB_ID}",
+	  "type": "${2^}",
+	  "category": "${3}",
+	  "arch": "x64"
+	}
+	EOF
+}
+
+# Desktop-specific build phase.
+# -
+# Parameters:
+# 1. Building directory.
+build_extra_desktop() {
+	cp -r "${ROOT}/release/extra/desktop/"* "${1}" || exit 1
+}
+
+# Server-specific build phase.
+# -
+# Parameters:
+# 1. Building directory.
+build_extra_server() {
+	mkdir -p "${1}/lib/systemd/system" || exit 1
+	cp "${ROOT}/release/extra/systemd/duniter.service" "${1}/lib/systemd/system" || exit 1
+}
+
+# Debian package building.
+# -
+# Parameters:
+# 1. Building type (either “desktop” or “server”).
+# 2. Debian package name.
+build_deb_pack() {
+	rm -rf "${RELEASES}/duniter-x64"
+	mkdir "${RELEASES}/duniter-x64" || exit 1
+	cp -r "${ROOT}/release/extra/debian/package/"* "${RELEASES}/duniter-x64" || exit 1
+	build_extra_${1} "${RELEASES}/duniter-x64"
+	mkdir -p "${RELEASES}/duniter-x64/opt/duniter/" || exit 1
+	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"post* || exit 1
+	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"pre* || exit 1
+	sed -i "s/Version:.*/Version:${DUNITER_DEB_VER}/g" "${RELEASES}/duniter-x64/DEBIAN/control" || exit 1
+
+	cd "${RELEASES}/${1}_/"
+	zip -qr "${RELEASES}/duniter-x64/opt/duniter/duniter.zip" * || exit 1
+
+	sed -i "s/Package: .*/Package: ${2}/g" "${RELEASES}/duniter-x64/DEBIAN/control" || exit 1
+
+	cd "${RELEASES}"
+	fakeroot dpkg-deb --build duniter-x64 || exit 1
+	mv duniter-x64.deb "${BIN}/duniter-${1}-${DUNITER_TAG}-linux-x64.deb" || exit 1
+	create_desc "${BIN}/duniter-${1}-${DUNITER_TAG}-linux-x64.deb" "${1}" "Linux (Ubuntu/Debian)"
+}
+
+# -----------
+# Prepare
+# -----------
+
+NODE_VERSION=8.9.1
+NVER="v${NODE_VERSION}"
+DUNITER_TAG="v${1}"
+DUNITER_DEB_VER=" ${1}"
+ADDON_VERSION=57
+NW_VERSION=0.24.4
+NW_RELEASE="v${NW_VERSION}"
+NW="nwjs-${NW_RELEASE}-linux-x64"
+NW_GZ="${NW}.tar.gz"
+DUNITER_UI_VER="1.6.x"
+
+nvm install ${NVER} || exit 1
+nvm use ${NVER} || exit 1
+npm install -g node-pre-gyp || exit 1
+npm install -g nw-gyp || exit 1
+
+# -----------
+# Folders
+# -----------
+
+ROOT="${PWD}"
+WORK_NAME=work
+WORK="${ROOT}/${WORK_NAME}"
+DOWNLOADS="${WORK}/downloads"
+RELEASES="${WORK}/releases"
+BIN="${WORK}/bin"
+
+mkdir -p "${DOWNLOADS}" "${RELEASES}" "${BIN}" || exit 1
+rm -rf "${BIN}/"*.{deb,tar.gz}{,.desc} # Clean up
+
+# -----------
+# Downloads
+# -----------
+
+cd "${DOWNLOADS}"
+curl -O https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ} || exit 1
+tar xzf ${NW_GZ} || exit 1
+rm ${NW_GZ}
+curl -O http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz || exit 1
+tar xzf node-${NVER}-linux-x64.tar.gz || exit 1
+rm node-${NVER}-linux-x64.tar.gz
+
+# -----------
+# Releases
+# -----------
+
+# Prepare sources
+mkdir -p "${RELEASES}/duniter" || exit 1
+cp -r $(find "${ROOT}" -mindepth 1 -maxdepth 1 ! -name "${WORK_NAME}") "${RELEASES}/duniter" || exit 1
+cd "${RELEASES}/duniter"
+rm -Rf .gitignore .git || exit 1 # Remove git files
+
+# Build
+echo ">> VM: building modules..."
+npm install || exit 1
+
+# Duniter UI
+npm install "duniter-ui@${DUNITER_UI_VER}" || exit 1
+npm prune --production || exit 1
+
+rm -rf release coverage test # Non-production folders
+cp -r "${RELEASES}/duniter" "${RELEASES}/desktop_" || exit 1
+cp -r "${RELEASES}/duniter" "${RELEASES}/server_" || exit 1
+
+# -------------------------------------
+# Build Desktop version against nw.js
+# -------------------------------------
+
+echo "${NW_RELEASE}"
+
+# FIX: works around an nw.js bug; we need to patch its gyp config first.
+# TODO: remove this patch once a fixed version of Nw.js is out (Node.js 8 or 9, provided the modules above are compatible)
+cd "${RELEASES}/desktop_/node_modules/wotb"
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure \
+  || echo "This failure is expected"
+cp ${ROOT}/release/arch/linux/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi || exit 1
+
+cd "${RELEASES}/desktop_/node_modules/"
+nw_compile wotb nw_copy
+nw_compile naclb nw_copy
+nw_compile scryptb nw_copy
+nw_compile sqlite3 nw_copy_node
+
+# Unused binaries
+cd "${RELEASES}/desktop_/"
+rm -rf node_modules/sqlite3/build
+
+# --------------------------------
+# Embed nw.js in desktop version
+# --------------------------------
+
+# Install Nw.js
+mkdir -p "${RELEASES}/desktop_release" || exit 1
+cp -r "${DOWNLOADS}/${NW}/"* "${RELEASES}/desktop_release/" || exit 1
+# Embed Node.js with Nw.js to make Duniter modules installable
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/lib" "${RELEASES}/desktop_release/" || exit 1
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/include" "${RELEASES}/desktop_release/" || exit 1
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/bin" "${RELEASES}/desktop_release/" || exit 1
+# Add some specific files for GUI
+cp "${RELEASES}/desktop_/gui/"* "${RELEASES}/desktop_release/" || exit 1
+# Add Duniter sources
+cp -R "${RELEASES}/desktop_/"* "${RELEASES}/desktop_release/" || exit 1
+# Insert Nw specific fields while they do not exist (1.3.3)
+sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "${RELEASES}/desktop_release/package.json" || exit 1
+# Add links for Node.js + NPM
+cd "${RELEASES}/desktop_release/bin"
+ln -s "../lib/node_modules/npm/bin/npm-cli.js" "./npm" -f || exit 1
+cd ..
+ln -s "./bin/node" "node" -f || exit 1
+ln -s "./bin/npm" "npm" -f || exit 1
+#sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
+rm -rf "${RELEASES}/desktop_"
+mv "${RELEASES}/desktop_release" "${RELEASES}/desktop_"
+
+# ---------------------------------
+# Embed node.js in server version
+# ---------------------------------
+
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64" "${RELEASES}/server_/node" || exit 1
+
+# ---------------
+# Build .tar.gz
+# ---------------
+
+cd "${RELEASES}/desktop_"
+tar czf "${BIN}/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz" * || exit 1
+create_desc "${BIN}/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz" "Desktop" "Linux (generic)"
+
+# -----------------------
+# Build Debian packages
+# -----------------------
+
+build_deb_pack desktop duniter-desktop
+build_deb_pack server duniter
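
The `create_desc` helper above emits a small JSON descriptor next to each artifact. A TypeScript reading of its shape, with field names taken from the heredoc and illustrative values:

```typescript
// Shape of the .desc files written by create_desc; values are illustrative
interface ReleaseDesc {
  version: string            // the DUNITER_TAG, e.g. "v1.6.17"
  job: string                // the GitLab CI job identifier (CI_JOB_ID)
  type: 'Desktop' | 'Server' // "${2^}" capitalizes the build type
  category: string           // e.g. "Linux (Ubuntu/Debian)"
  arch: 'x64'
}

const example: ReleaseDesc = {
  version: 'v1.6.17',
  job: '12345',
  type: 'Server',
  category: 'Linux (Ubuntu/Debian)',
  arch: 'x64',
}
```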
diff --git a/release/arch/windows/0.24.4_common.gypi b/release/arch/windows/0.24.4_common.gypi
new file mode 100644
index 0000000000000000000000000000000000000000..d753e62ae583f9ee7aad7e686681647104568baa
--- /dev/null
+++ b/release/arch/windows/0.24.4_common.gypi
@@ -0,0 +1,521 @@
+{
+  'variables': {
+    'asan%': 0,
+    'werror': '',                     # Turn off -Werror in V8 build.
+    'visibility%': 'hidden',          # V8's visibility setting
+    'target_arch%': 'ia32',           # set v8's target architecture
+    'host_arch%': 'ia32',             # set v8's host architecture
+    'want_separate_host_toolset%': 0, # V8 should not build target and host
+    'library%': 'static_library',     # allow override to 'shared_library' for DLL/.so builds
+    'component%': 'static_library',   # NB. these names match with what V8 expects
+    'msvs_multi_core_compile': '0',   # we do enable multicore compiles, but not using the V8 way
+    'python%': 'python',
+
+    'node_shared%': 'true',
+    'force_dynamic_crt%': 0,
+    'node_use_v8_platform%': 'true',
+    'node_use_bundled_v8%': 'true',
+    'node_module_version%': '',
+    'mac_product_name': 'nwjs',
+
+    'node_tag%': '',
+    'uv_library%': 'static_library',
+
+    'openssl_fips': '',
+
+    # Default to -O0 for debug builds.
+    'v8_optimized_debug%': 0,
+
+    # Enable disassembler for `--print-code` v8 options
+    'v8_enable_disassembler': 1,
+    'v8_host_byteorder': '<!(python -c "import sys; print sys.byteorder")',
+
+    'v8_use_external_startup_data': 1,
+    'v8_enable_i18n_support%': 1,
+    #'icu_use_data_file_flag%': 1,
+    'win_fastlink': 0,
+
+    # Don't use ICU data file (icudtl.dat) from V8, we use our own.
+    'icu_use_data_file_flag%': 0,
+
+    'conditions': [
+      ['OS == "win"', {
+        'os_posix': 0,
+        'v8_postmortem_support%': 'false',
+        'OBJ_DIR': '<(PRODUCT_DIR)/obj',
+        'V8_BASE': '<(PRODUCT_DIR)/lib/v8_libbase.lib',
+      }, {
+        'os_posix': 1,
+        'v8_postmortem_support%': 'true',
+        'clang_dir': '<!(cd <(DEPTH) && pwd -P)/third_party/llvm-build/Release+Asserts',
+      }],
+      ['OS=="linux" and target_arch=="ia32"', {
+        'sysroot': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_jessie_i386-sysroot',
+      }],
+      ['OS=="linux" and target_arch=="x64"', {
+        'sysroot': '<!(cd <(DEPTH) && pwd -P)/build/linux/debian_jessie_amd64-sysroot',
+      }],
+      ['OS== "mac"', {
+        'conditions': [
+          ['GENERATOR=="ninja"', {
+            'OBJ_DIR': '<(PRODUCT_DIR)/obj',
+            'V8_BASE': '<(PRODUCT_DIR)/obj/deps/v8/src/libv8_base.a',
+          }, {
+            'OBJ_DIR%': '<(PRODUCT_DIR)/obj.target',
+            'V8_BASE%': '<(PRODUCT_DIR)/obj.target/deps/v8/src/libv8_base.a',
+          }],
+        ],
+      }],
+      ['openssl_fips != ""', {
+        'OPENSSL_PRODUCT': 'libcrypto.a',
+      }, {
+        'OPENSSL_PRODUCT': 'libopenssl.a',
+      }],
+      ['OS=="mac"', {
+        'clang%': 1,
+      }, {
+        'clang%': 0,
+      }],
+    ],
+  },
+
+  'conditions': [
+      [ 'clang==1 and OS != "mac"', {
+        'make_global_settings': [
+          ['CC', '<(clang_dir)/bin/clang'],
+          ['CXX', '<(clang_dir)/bin/clang++'],
+          ['CC.host', '$(CC)'],
+          ['CXX.host', '$(CXX)'],
+        ],
+      }],
+  ],
+  'target_defaults': {
+    'default_configuration': 'Release',
+    'variables': {
+      'conditions': [
+        ['OS=="win" and component=="shared_library"', {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '2', # 2 = /MD (nondebug DLL)
+          'win_debug_RuntimeLibrary%': '3',   # 3 = /MDd (debug DLL)
+        }, {
+          # See http://msdn.microsoft.com/en-us/library/aa652367.aspx
+          'win_release_RuntimeLibrary%': '0', # 0 = /MT (nondebug static)
+          'win_debug_RuntimeLibrary%': '1',   # 1 = /MTd (debug static)
+        }],
+      ],
+    },
+    'configurations': {
+      'Common_Base': {
+        'abstract': 1,
+        'msvs_settings':{
+          'VCCLCompilerTool': {
+            'AdditionalOptions': [
+              '/bigobj',
+              # Tell the compiler to crash on failures. This is undocumented
+              # and unsupported but very handy.
+              '/d2FastFail',
+            ],
+          },
+          'VCLinkerTool': {
+            # Add the default import libs.
+            'AdditionalDependencies': [
+              'kernel32.lib',
+              'gdi32.lib',
+              'winspool.lib',
+              'comdlg32.lib',
+              'advapi32.lib',
+              'shell32.lib',
+              'ole32.lib',
+              'oleaut32.lib',
+              'user32.lib',
+              'uuid.lib',
+              'odbc32.lib',
+              'odbccp32.lib',
+              'delayimp.lib',
+              'credui.lib',
+              'dbghelp.lib',
+              'shlwapi.lib',
+              'winmm.lib',
+            ],
+            'AdditionalOptions': [
+              # Suggested by Microsoft Devrel to avoid
+              #   LINK : fatal error LNK1248: image size (80000000) exceeds maximum allowable size (80000000)
+              # which started happening more regularly after VS2013 Update 4.
+              # Needs to be a bit lower for VS2015, or else errors out.
+              '/maxilksize:0x7ff00000',
+              # Tell the linker to crash on failures.
+              '/fastfail',
+            ],
+          },
+        },
+        'conditions': [
+          ['OS=="win" and win_fastlink==1 and MSVS_VERSION != "2013"', {
+            'msvs_settings': {
+              'VCLinkerTool': {
+                # /PROFILE is incompatible with /debug:fastlink
+                'Profile': 'false',
+                'AdditionalOptions': [
+                  # Tell VS 2015+ to create a PDB that references debug
+                  # information in .obj and .lib files instead of copying
+                  # it all.
+                  '/DEBUG:FASTLINK',
+                ],
+              },
+            },
+          }],
+          ['OS=="win" and MSVS_VERSION == "2015"', {
+            'msvs_settings': {
+              'VCCLCompilerTool': {
+                'AdditionalOptions': [
+                  # Work around crbug.com/526851, bug in VS 2015 RTM compiler.
+                  '/Zc:sizedDealloc-',
+                  # Disable thread-safe statics to avoid overhead and because
+                  # they are disabled on other platforms. See crbug.com/587210
+                  # and -fno-threadsafe-statics.
+                  '/Zc:threadSafeInit-',
+                ],
+              },
+            },
+          }],
+        ],
+      },
+      'Debug_Base': {
+        'abstract': 1,
+        'variables': {
+          'v8_enable_handle_zapping': 1,
+        },
+        'defines': [ 'DEBUG', '_DEBUG', 'V8_ENABLE_CHECKS' ],
+        'cflags': [ '-g', '-O0' ],
+        'conditions': [
+          ['target_arch=="x64"', {
+            'msvs_configuration_platform': 'x64',
+          }],
+          ['OS=="aix"', {
+            'cflags': [ '-gxcoff' ],
+            'ldflags': [ '-Wl,-bbigtoc' ],
+          }],
+          ['OS == "android"', {
+            'cflags': [ '-fPIE' ],
+            'ldflags': [ '-fPIE', '-pie' ]
+          }],
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_debug_RuntimeLibrary)', # static debug
+            'Optimization': 0, # /Od, no optimization
+            'MinimalRebuild': 'false',
+            'OmitFramePointers': 'false',
+            'BasicRuntimeChecks': 3, # /RTC1
+          },
+          'VCLinkerTool': {
+            'LinkIncremental': 2, # enable incremental linking
+          },
+        },
+        'xcode_settings': {
+          'GCC_OPTIMIZATION_LEVEL': '0', # stop gyp from defaulting to -Os
+        },
+      },
+      'Release_Base': {
+        'abstract': 1,
+        'variables': {
+          'v8_enable_handle_zapping': 0,
+        },
+        'cflags': [ '-O3' ],
+        'conditions': [
+          ['target_arch=="x64"', {
+            'msvs_configuration_platform': 'x64',
+          }],
+          ['OS=="solaris"', {
+            # pull in V8's postmortem metadata
+            'ldflags': [ '-Wl,-z,allextract' ]
+          }],
+          ['OS!="mac" and OS!="win"', {
+            'cflags': [ '-fno-omit-frame-pointer' ],
+          }],
+          ['OS == "android"', {
+            'cflags': [ '-fPIE' ],
+            'ldflags': [ '-fPIE', '-pie' ]
+          }],
+        ],
+        'msvs_settings': {
+          'VCCLCompilerTool': {
+            'RuntimeLibrary': '<(win_release_RuntimeLibrary)', # static release
+            'Optimization': 3, # /Ox, full optimization
+            'FavorSizeOrSpeed': 1, # /Ot, favour speed over size
+            'InlineFunctionExpansion': 2, # /Ob2, inline anything eligible
+            'WholeProgramOptimization': 'true', # /GL, whole program optimization, needed for LTCG
+            'OmitFramePointers': 'true',
+            'EnableFunctionLevelLinking': 'true',
+            'EnableIntrinsicFunctions': 'true',
+            'RuntimeTypeInfo': 'false',
+            'AdditionalOptions': [
+              '/MP', # compile across multiple CPUs
+            ],
+          },
+          'VCLibrarianTool': {
+            'AdditionalOptions': [
+              '/LTCG', # link time code generation
+            ],
+          },
+          'VCLinkerTool': {
+            'LinkTimeCodeGeneration': 1, # link-time code generation
+            'OptimizeReferences': 2, # /OPT:REF
+            'EnableCOMDATFolding': 2, # /OPT:ICF
+            'LinkIncremental': 1, # disable incremental linking
+          },
+        },
+      },
+      'Debug': {
+        'inherit_from': ['Common_Base', 'Debug_Base'],
+      },
+      'Release': {
+        'inherit_from': ['Common_Base', 'Release_Base'],
+      },
+      'conditions': [
+        [ 'OS=="win"', {
+              'Debug_x64': { 'inherit_from': ['Debug'] },
+              'Release_x64': { 'inherit_from': ['Release'], },
+        }],
+      ],
+    },
+    # Forcibly disable -Werror.  We support a wide range of compilers, it's
+    # simply not feasible to squelch all warnings, never mind that the
+    # libraries in deps/ are not under our control.
+    'cflags!': ['-Werror'],
+    'msvs_settings': {
+      'VCCLCompilerTool': {
+        'StringPooling': 'true', # pool string literals
+        'DebugInformationFormat': 3, # Generate a PDB
+        'WarningLevel': 3,
+        'BufferSecurityCheck': 'true',
+        'ExceptionHandling': 0, # exceptions disabled (no /EHsc)
+        'SuppressStartupBanner': 'true',
+        # Disable "warning C4267: conversion from 'size_t' to 'int',
+        # possible loss of data".  Many originate from our dependencies
+        # and their sheer number drowns out other, more legitimate warnings.
+        'DisableSpecificWarnings': ['4267'],
+        'WarnAsError': 'false',
+      },
+      'VCLibrarianTool': {
+      },
+      'VCLinkerTool': {
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'TargetMachine' : 1, # /MACHINE:X86
+            'target_conditions': [
+              ['_type=="executable"', {
+                'AdditionalOptions': [ '/SubSystem:Console,"5.01"' ],
+              }],
+            ],
+          }],
+          ['target_arch=="x64"', {
+            'TargetMachine' : 17, # /MACHINE:AMD64
+            'target_conditions': [
+              ['_type=="executable"', {
+                'AdditionalOptions': [ '/SubSystem:Console,"5.02"' ],
+              }],
+            ],
+          }],
+        ],
+        'GenerateDebugInformation': 'true',
+        'GenerateMapFile': 'true', # /MAP
+        'MapExports': 'true', # /MAPINFO:EXPORTS
+        'RandomizedBaseAddress': 2, # enable ASLR
+        'DataExecutionPrevention': 2, # enable DEP
+        'AllowIsolation': 'true',
+        'SuppressStartupBanner': 'true',
+      },
+    },
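+    # 4351: new[] default-initialization change, 4355: 'this' used in member
+    # initializer list, 4800: int-to-bool coercion, 4595: non-member inline
+    # operator new/delete.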
+    'msvs_disabled_warnings': [4351, 4355, 4800, 4595],
+    'conditions': [
+      ['asan == 1 and OS != "mac"', {
+        'cflags+': [
+          '-fno-omit-frame-pointer',
+          '-fsanitize=address',
+          '-DLEAK_SANITIZER'
+        ],
+        'cflags!': [ '-fomit-frame-pointer' ],
+        'ldflags': [ '-fsanitize=address' ],
+      }],
+      ['asan == 1 and OS == "mac"', {
+        'xcode_settings': {
+          'OTHER_CFLAGS+': [
+            '-fno-omit-frame-pointer',
+            '-gline-tables-only',
+            '-fsanitize=address',
+            '-DLEAK_SANITIZER'
+          ],
+          'OTHER_CFLAGS!': [
+            '-fomit-frame-pointer',
+          ],
+        },
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {'OTHER_LDFLAGS': ['-fsanitize=address']},
+          }],
+        ],
+      }],
+      ['OS == "win"', {
+        'msvs_cygwin_shell': 0, # prevent actions from trying to use cygwin
+        'defines': [
+          'WIN32',
+          # we don't really want VC++ warning us about
+          # how dangerous C functions are...
+          '_CRT_SECURE_NO_DEPRECATE',
+          # ... or that C implementations shouldn't use
+          # POSIX names
+          '_CRT_NONSTDC_NO_DEPRECATE',
+          # Make sure the STL doesn't try to use exceptions
+          '_HAS_EXCEPTIONS=0',
+          #'BUILDING_V8_SHARED=1',
+          'BUILDING_UV_SHARED=1',
+        ],
+      }],
+      [ 'OS in "linux freebsd openbsd solaris aix"', {
+        'cflags': [ '-pthread'],
+        'ldflags': [ '-pthread'],
+      }],
+      [ 'OS in "linux freebsd openbsd solaris android aix"', {
+        'cflags': [ '-Wall', '-Wextra', '-Wno-unused-parameter', ],
+        'cflags_cc': [ '-fno-rtti', '-fno-exceptions', '-std=gnu++0x' ],
+        'ldflags': [ '-rdynamic' ],
+        'target_conditions': [
+          # The 1990s toolchain on SmartOS can't handle thin archives.
+          ['_type=="static_library" and OS=="solaris"', {
+            'standalone_static_library': 1,
+          }],
+          ['OS=="openbsd"', {
+            'ldflags': [ '-Wl,-z,wxneeded' ],
+          }],
+        ],
+        'conditions': [
+          [ 'target_arch=="ia32"', {
+            'cflags': [ '-m32', '--sysroot=<(sysroot)' ],
+            'ldflags': [ '-m32','--sysroot=<(sysroot)','<!(<(DEPTH)/content/nw/tools/sysroot_ld_path.sh <(sysroot))' ],
+          }],
+          [ 'target_arch=="x32"', {
+            'cflags': [ '-mx32' ],
+            'ldflags': [ '-mx32' ],
+          }],
+          [ 'target_arch=="x64"', {
+            'cflags': [ '-m64' ],
+            'ldflags': [ '-m64' ],
+          }],
+          [ 'target_arch=="ppc" and OS!="aix"', {
+            'cflags': [ '-m32' ],
+            'ldflags': [ '-m32' ],
+          }],
+          [ 'target_arch=="ppc64" and OS!="aix"', {
+            'cflags': [ '-m64', '-mminimal-toc' ],
+            'ldflags': [ '-m64' ],
+          }],
+          [ 'target_arch=="s390"', {
+            'cflags': [ '-m31', '-march=z196' ],
+            'ldflags': [ '-m31', '-march=z196' ],
+          }],
+          [ 'target_arch=="s390x"', {
+            'cflags': [ '-m64', '-march=z196' ],
+            'ldflags': [ '-m64', '-march=z196' ],
+          }],
+          [ 'OS=="solaris"', {
+            'cflags': [ '-pthreads' ],
+            'ldflags': [ '-pthreads' ],
+            'cflags!': [ '-pthread' ],
+            'ldflags!': [ '-pthread' ],
+          }],
+          [ 'OS=="aix"', {
+            'conditions': [
+              [ 'target_arch=="ppc"', {
+                'ldflags': [ '-Wl,-bmaxdata:0x60000000/dsa' ],
+              }],
+              [ 'target_arch=="ppc64"', {
+                'cflags': [ '-maix64' ],
+                'ldflags': [ '-maix64' ],
+              }],
+            ],
+            'ldflags': [ '-Wl,-bbigtoc' ],
+            'ldflags!': [ '-rdynamic' ],
+          }],
+          [ 'node_shared=="true"', {
+            'cflags': [ '-fPIC' ],
+          }],
+        ],
+      }],
+      ['OS=="android"', {
+        'target_conditions': [
+          ['_toolset=="target"', {
+            'defines': [ '_GLIBCXX_USE_C99_MATH' ],
+            'libraries': [ '-llog' ],
+          }],
+        ],
+      }],
+      ['OS=="mac"', {
+        'defines': ['_DARWIN_USE_64_BIT_INODE=1'],
+        'xcode_settings': {
+          'ALWAYS_SEARCH_USER_PATHS': 'NO',
+          'GCC_CW_ASM_SYNTAX': 'NO',                # No -fasm-blocks
+          'GCC_DYNAMIC_NO_PIC': 'NO',               # No -mdynamic-no-pic
+                                                    # (Equivalent to -fPIC)
+          'GCC_ENABLE_CPP_EXCEPTIONS': 'NO',        # -fno-exceptions
+          'GCC_ENABLE_CPP_RTTI': 'NO',              # -fno-rtti
+          'GCC_ENABLE_PASCAL_STRINGS': 'NO',        # No -mpascal-strings
+          'GCC_THREADSAFE_STATICS': 'NO',           # -fno-threadsafe-statics
+          'PREBINDING': 'NO',                       # No -Wl,-prebind
+          'MACOSX_DEPLOYMENT_TARGET': '10.7',       # -mmacosx-version-min=10.7
+          'USE_HEADERMAP': 'NO',
+          'OTHER_CFLAGS': [
+            '-fno-strict-aliasing',
+          ],
+          'WARNING_CFLAGS': [
+            '-Wall',
+            '-Wendif-labels',
+            '-W',
+            '-Wno-unused-parameter',
+          ],
+        },
+        'target_conditions': [
+          ['_type!="static_library"', {
+            'xcode_settings': {
+              'OTHER_LDFLAGS': [
+                '-Wl,-no_pie',
+                '-Wl,-search_paths_first',
+              ],
+            },
+          }],
+        ],
+        'conditions': [
+          ['target_arch=="ia32"', {
+            'xcode_settings': {'ARCHS': ['i386']},
+          }],
+          ['target_arch=="x64"', {
+            'xcode_settings': {'ARCHS': ['x86_64']},
+          }],
+          ['clang==1', {
+            'xcode_settings': {
+              'GCC_VERSION': 'com.apple.compilers.llvm.clang.1_0',
+              'CLANG_CXX_LANGUAGE_STANDARD': 'gnu++0x',  # -std=gnu++0x
+              'CLANG_CXX_LIBRARY': 'libc++',
+            },
+          }],
+        ],
+      }],
+      ['OS=="freebsd" and node_use_dtrace=="true"', {
+        'libraries': [ '-lelf' ],
+      }],
+      ['OS=="freebsd"', {
+        'conditions': [
+          ['llvm_version < "4.0"', {
+            # Use this flag because on FreeBSD std::pairs copy constructor is non-trivial.
+            # Doesn't apply to llvm 4.0 (FreeBSD 11.1) or later.
+            # Refs: https://lists.freebsd.org/pipermail/freebsd-toolchain/2016-March/002094.html
+            # Refs: https://svnweb.freebsd.org/ports/head/www/node/Makefile?revision=444555&view=markup
+            'cflags': [ '-D_LIBCPP_TRIVIAL_PAIR_COPY_CTOR=1' ],
+          }],
+        ],
+        'ldflags': [
+          '-Wl,--export-dynamic',
+        ],
+      }]
+    ],
+  }
+}
\ No newline at end of file
diff --git a/release/arch/windows/build.bat b/release/arch/windows/build.bat
index c05f9f67947b9fb50b166bffbac04d74b85bdcdc..7a74e0756d9f515194b262f9492f5570f7c1331c 100644
--- a/release/arch/windows/build.bat
+++ b/release/arch/windows/build.bat
@@ -1,15 +1,18 @@
 
-set ADDON_VERSION=48
-set NW_VERSION=0.17.6
-set NODEJS_VERSION=6.11.1
+set ADDON_VERSION=57
+set NW_VERSION=0.24.4
+set NODEJS_VERSION=8.9.1
 
-set NW_RELEASE=v0.17.6
+set NW_RELEASE=v%NW_VERSION%
 set NW=nwjs-%NW_RELEASE%-win-x64
 set NW_GZ=%NW%.zip
 
 set NODE_RELEASE=v%NODEJS_VERSION%
 set NODE=node-v%NODEJS_VERSION%-win-x64
 set NODE_ZIP=node-v%NODEJS_VERSION%-win-x64.zip
+set NODE_MSI=node-v%NODEJS_VERSION%-x64.msi
+
+echo "Version courante de NodeJS : "
 node -v
 
 REM NPM
@@ -25,16 +28,36 @@ if not exist %NODE_ZIP% (
   call 7z x %NODE_ZIP%
 )
 
+if not exist %NODE_MSI% (
+  echo "Telechargement de %NODE_MSI%..."
+  powershell -Command "(New-Object System.Net.WebClient).DownloadFile(\"https://nodejs.org/dist/%NODE_RELEASE%/%NODE_MSI%\", \"%NODE_MSI%\")"
+  powershell -Command "Start-Process msiexec.exe -Wait -ArgumentList '/I %cd%\%NODE_MSI% /quiet'"
+)
+
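+REM Run the installer in any case, so NodeJS is present even if the MSI was already downloaded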
+powershell -Command "Start-Process msiexec.exe -Wait -ArgumentList '/I %cd%\%NODE_MSI% /quiet'"
+
+if not exist %NW_GZ% (
+  echo "Telechargement de %NW_GZ%..."
+  powershell -Command "(New-Object System.Net.WebClient).DownloadFile(\"https://dl.nwjs.io/%NW_RELEASE%/%NW_GZ%\", \"%NW_GZ%\")"
+  call 7z x %NW_GZ%
+)
+
+echo "Version courante de NodeJS : "
+node -v
+
+call npm install -g node-pre-gyp
+call npm install -g nw-gyp
+
 echo "Suppression des anciennes sources..."
 rd /s /q duniter
 rd /s /q duniter_release
 rd /s /q %NW%
 echo "Clonage de Duniter..."
-git clone https://github.com/duniter/duniter.git
+mkdir duniter
+xcopy C:\vagrant\duniter-source\* %cd%\duniter\* /s /e /Y
 cd duniter
 
-for /f "delims=" %%a in ('git rev-list --tags --max-count=1') do @set DUNITER_REV=%%a
-for /f "delims=" %%a in ('git describe --tags %DUNITER_REV%') do @set DUNITER_TAG=%%a
+for /f "delims=" %%x in (C:\vagrant\duniter_tag.txt) do set DUNITER_TAG=%%x
 echo %DUNITER_TAG%
 
 git checkout %DUNITER_TAG%
@@ -56,6 +79,11 @@ set SRC=%cd%
 echo %SRC%
 cd node_modules/wotb
 call npm install --build-from-source
+
+REM PREPARE common.gypi
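+REM The first configure run creates the .nw-gyp\0.24.4 folder; the patched
+REM common.gypi is then copied over the stock one before the final configure/build.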
+call node-pre-gyp --runtime=node-webkit --target=%NW_VERSION% --msvs_version=2015 configure
+xcopy C:\vagrant\0.24.4_common.gypi C:\Users\vagrant\.nw-gyp\0.24.4\common.gypi /s /e /Y
+
 call node-pre-gyp --runtime=node-webkit --target=%NW_VERSION% --msvs_version=2015 configure
 call node-pre-gyp --runtime=node-webkit --target=%NW_VERSION% --msvs_version=2015 build
 copy %cd%\lib\binding\Release\node-webkit-%NW_RELEASE%-win32-x64\wotb.node %cd%\lib\binding\Release\node-v%ADDON_VERSION%-win32-x64\wotb.node /Y
@@ -74,9 +102,6 @@ call npm install --build-from-source
 call node-pre-gyp --runtime=node-webkit --target=%NW_VERSION% --msvs_version=2015 configure
 call node-pre-gyp --runtime=node-webkit --target=%NW_VERSION% --msvs_version=2015 build
 copy %cd%\lib\binding\node-webkit-%NW_RELEASE%-win32-x64\node_sqlite3.node %cd%\lib\binding\node-v%ADDON_VERSION%-win32-x64\node_sqlite3.node /Y
-cd ../heapdump
-call nw-gyp --target=%NW_VERSION% --msvs_version=2015 configure
-call nw-gyp --target=%NW_VERSION% --msvs_version=2015 build
 cd ../../..
 mkdir duniter_release
 mkdir duniter_release\nodejs
diff --git a/release/arch/windows/duniter.iss b/release/arch/windows/duniter.iss
index 47c798f18d286aacdeefa9559f83346c299feac1..a90b4890bdeaa23cf6156b5903e85c47bf43a046 100644
--- a/release/arch/windows/duniter.iss
+++ b/release/arch/windows/duniter.iss
@@ -15,7 +15,7 @@
 #error "Unable to find MyAppExe"
 #endif
 
-#define MyAppVerStr "v1.6.14"
+#define MyAppVerStr "v1.6.17"
 
 [Setup]
 AppName={#MyAppName}
diff --git a/release/arch/debian/package/DEBIAN/control b/release/extra/debian/package/DEBIAN/control
similarity index 93%
rename from release/arch/debian/package/DEBIAN/control
rename to release/extra/debian/package/DEBIAN/control
index add5cf25e293f0adfb07af52470066ea9fcb8738..fc87698ca3bff5aec09715bff26741eb0565ee72 100644
--- a/release/arch/debian/package/DEBIAN/control
+++ b/release/extra/debian/package/DEBIAN/control
@@ -1,5 +1,5 @@
 Package: duniter
-Version: 1.6.14
+Version: 1.6.17
 Depends: unzip
 Section: misc
 Priority: optional
diff --git a/release/arch/debian/package/DEBIAN/postinst b/release/extra/debian/package/DEBIAN/postinst
similarity index 76%
rename from release/arch/debian/package/DEBIAN/postinst
rename to release/extra/debian/package/DEBIAN/postinst
index 8938ddb32899360b05791ebe244a871f88765bd0..ae7ac47e952891759c83eb36105980acc5e22589 100755
--- a/release/arch/debian/package/DEBIAN/postinst
+++ b/release/extra/debian/package/DEBIAN/postinst
@@ -5,9 +5,9 @@ DUN_SOURCES=$DUN_ROOT/
 mkdir -p $DUN_SOURCES
 
 # Duniter sources extraction
-if [[ -f $DUN_ROOT/duniter-desktop.nw ]]; then
-  unzip -q -d $DUN_SOURCES/ $DUN_ROOT/duniter-desktop.nw
-  rm -rf $DUN_ROOT/duniter-desktop.nw
+if [[ -f $DUN_ROOT/duniter.zip ]]; then
+  unzip -q -d $DUN_SOURCES/ $DUN_ROOT/duniter.zip
+  rm -rf $DUN_ROOT/duniter.zip
 fi
 
 # Duniter-Desktop
@@ -34,6 +34,10 @@ if [[ -d $DUN_SOURCES/node ]]; then
   cd $DUN_SOURCES
   cd node/bin/
   ln -s ../lib/node_modules/npm/bin/npm-cli.js ./npm -f
+  # Add duniter user for service
+  mkdir -p /var/lib/duniter
+  adduser --system --quiet --home /var/lib/duniter --no-create-home --disabled-password --group duniter
+  chown duniter:duniter /var/lib/duniter
 fi
 # Else will execute with environment node
 
diff --git a/release/arch/debian/package/DEBIAN/prerm b/release/extra/debian/package/DEBIAN/prerm
similarity index 100%
rename from release/arch/debian/package/DEBIAN/prerm
rename to release/extra/debian/package/DEBIAN/prerm
diff --git a/release/arch/debian/package/usr/share/applications/duniter.desktop b/release/extra/desktop/usr/share/applications/duniter.desktop
similarity index 100%
rename from release/arch/debian/package/usr/share/applications/duniter.desktop
rename to release/extra/desktop/usr/share/applications/duniter.desktop
diff --git a/release/extra/openrc/duniter.confd b/release/extra/openrc/duniter.confd
new file mode 100644
index 0000000000000000000000000000000000000000..af0c9a3a8c797e81534463b723d4fb7a9112f790
--- /dev/null
+++ b/release/extra/openrc/duniter.confd
@@ -0,0 +1,19 @@
+# File containing crypto keys
+#DUNITER_KEYS=/etc/duniter/keys.yml
+
+# Uncomment the following line to start the web GUI
+#DUNITER_WEB=yes
+
+# Parameters for the web GUI
+#DUNITER_WEB_HOST=localhost
+#DUNITER_WEB_PORT=9220
+
+# User and group of running process
+#DUNITER_GROUP=duniter
+#DUNITER_USER=duniter
+
+# Directory of duniter files
+#DUNITER_HOME=/var/lib/duniter
+
+# Directory of duniter data
+#DUNITER_DATA=duniter_default
diff --git a/release/extra/openrc/duniter.initd b/release/extra/openrc/duniter.initd
new file mode 100644
index 0000000000000000000000000000000000000000..6ce4c3d19c58e57b0b2cd0f0ddebdbde3a9b748c
--- /dev/null
+++ b/release/extra/openrc/duniter.initd
@@ -0,0 +1,50 @@
+#!/sbin/openrc-run
+
+: ${DUNITER_GROUP:=duniter}
+: ${DUNITER_USER:=duniter}
+
+: ${DUNITER_HOME:=/var/lib/duniter}
+: ${DUNITER_DATA:=duniter_default}
+
+command="/usr/bin/duniter"
+if yesno "${DUNITER_WEB}"; then
+	command_args="webstart"
+	if [[ ! -z ${DUNITER_WEB_HOST} ]]; then
+		command_args="${command_args} --webmhost \"${DUNITER_WEB_HOST}\""
+	fi
+	if [[ ! -z ${DUNITER_WEB_PORT} ]]; then
+		command_args="${command_args} --webmport \"${DUNITER_WEB_PORT}\""
+	fi
+else
+	command_args="start"
+fi
+if [[ ! -z ${DUNITER_KEYS} ]] && [[ -r ${DUNITER_KEYS} ]]; then
+	command_args="${command_args} --keyfile \"${DUNITER_KEYS}\""
+fi
+command_args="${command_args} --home \"${DUNITER_HOME}\" --mdb \"${DUNITER_DATA}\" ${DUNITER_SSD_OPTIONS}"
+start_stop_daemon_args="--user \"${DUNITER_USER}\":\"${DUNITER_GROUP}\""
+description="Duniter node"
+
+depend() {
+	need net
+}
+
+status() {
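+	# Query the node itself; openrc's own state may be stale after a crash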
+	if ${command} status --home "${DUNITER_HOME}" --mdb "${DUNITER_DATA}" | grep -q "is running"; then
+		einfo "status: started"
+		return 0
+	else
+		if service_started; then
+			mark_service_stopped
+			eerror "status: crashed"
+			return 32
+		else
+			einfo "status: stopped"
+			return 3
+		fi
+	fi
+}
+
+stop() {
+	${command} stop --home "${DUNITER_HOME}" --mdb "${DUNITER_DATA}"
+}
diff --git a/release/extra/systemd/duniter.service b/release/extra/systemd/duniter.service
new file mode 100644
index 0000000000000000000000000000000000000000..51b819620e3082f915a6529b2e37de487ee5a991
--- /dev/null
+++ b/release/extra/systemd/duniter.service
@@ -0,0 +1,22 @@
+[Unit]
+Description=Duniter node
+After=network.target
+
+[Service]
+# Should be set to web in order to start with web GUI
+Environment="DUNITER_WEB="
+Environment="DUNITER_HOME=/var/lib/duniter"
+Environment="DUNITER_DATA=duniter_default"
+# If using a key file, DUNITER_OPTS can be defined like so:
+#Environment="DUNITER_OPTS=--keyfile /etc/duniter/keys.yml"
+Environment="DUNITER_OPTS="
+Group=duniter
+User=duniter
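+# "duniter start" daemonizes and exits, hence a forking service type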
+Type=forking
+ExecStart=/usr/bin/duniter ${DUNITER_WEB}start --home ${DUNITER_HOME} --mdb ${DUNITER_DATA} $DUNITER_OPTS
+ExecReload=/usr/bin/duniter ${DUNITER_WEB}restart --home ${DUNITER_HOME} --mdb ${DUNITER_DATA} $DUNITER_OPTS
+ExecStop=/usr/bin/duniter stop --home ${DUNITER_HOME} --mdb ${DUNITER_DATA}
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target
diff --git a/release/new_prerelease.sh b/release/new_prerelease.sh
index bd5d78caa6a0c3d2b6612f3200e4011a9bb6e2e1..2b33c2889a4c062928618bef637228afb9ed8302 100755
--- a/release/new_prerelease.sh
+++ b/release/new_prerelease.sh
@@ -1,7 +1,6 @@
 #!/bin/bash
 
 TAG="v$1"
-TOKEN=`cat $HOME/.config/duniter/.github`
 ARCH=`uname -m`
 # Check that the tag exists remotely
 
@@ -31,36 +30,19 @@ fi
 echo "Remote tag: $REMOTE_TAG"
 
 echo "Creating the pre-release..."
-ASSETS=`node ./release/scripts/create-release.js $TOKEN $TAG create`
-EXPECTED_ASSETS="duniter-desktop-$TAG-linux-x64.deb
-duniter-desktop-$TAG-linux-x64.tar.gz
-duniter-server-$TAG-linux-x64.deb
-duniter-desktop-$TAG-windows-x64.exe
+EXPECTED_ASSETS="duniter-desktop-$TAG-windows-x64.exe
 duniter-server-$TAG-linux-armv7l.deb"
 for asset in $EXPECTED_ASSETS; do
   if [[ -z `echo $ASSETS | grep -F "$asset"` ]]; then
 
     echo "Missing asset: $asset"
 
-    # Debian
-    if [[ $asset == *"linux-x64.deb" ]] || [[ $asset == *"linux-x64.tar.gz" ]]; then
-      if [[ $ARCH == "x86_64" ]]; then
-        echo "Starting Debian build..."
-        ./release/scripts/build.sh make deb $TAG
-        DEB_PATH="$PWD/release/arch/debian/$asset"
-        node ./release/scripts/upload-release.js $TOKEN $TAG $DEB_PATH
-      else
-        echo "This computer cannot build this asset, required architecture is 'x86_64'. Skipping."
-      fi
-    fi
-
     # Windows
     if [[ $asset == *".exe" ]]; then
       if [[ $ARCH == "x86_64" ]]; then
         echo "Starting Windows build..."
         ./release/scripts/build.sh make win $TAG
-        WIN_PATH="$PWD/release/arch/windows/$asset"
-        node ./release/scripts/upload-release.js $TOKEN $TAG $WIN_PATH
+        echo "Windows asset has been successfully built, it is available here : $PWD/release/arch/windows/$asset"
       else
         echo "This computer cannot build this asset, required architecture is 'x86_64'. Skipping."
       fi
@@ -71,8 +53,7 @@ for asset in $EXPECTED_ASSETS; do
       if [[ $ARCH == "armv7l" ]]; then
         echo "Starting ARM build..."
         ./release/scripts/build.sh make arm $TAG
-        ARM_PATH="$PWD/release/arch/arm/$asset"
-        node ./release/scripts/upload-release.js $TOKEN $TAG $ARM_PATH
+        echo "Arm asset has been successfully built, it is available here : $PWD/release/arch/arm/$asset"
       else
         echo "This computer cannot build this asset, required architecture is 'armv7l'. Skipping."
       fi
diff --git a/release/new_version.sh b/release/new_version.sh
index fbbda1bdcf6dc7121e9980b3b17b8c53de176f16..fc5120c778830e984aec36d220a6d8771929fffb 100755
--- a/release/new_version.sh
+++ b/release/new_version.sh
@@ -8,8 +8,7 @@ if [[ $1 =~ ^[0-9]+.[0-9]+.[0-9]+((a|b)[0-9]+)?$ ]]; then
   echo "Changing to version: $1"
   # Change the version in package.json and test file
   sed -i "s/version\": .*/version\": \"$1\",/g" package.json
-  sed -i "s/Version: .*/Version: $1/g" release/arch/debian/package/DEBIAN/control
-  sed -i "s/version').equal('.*/version').equal('$1');/g" test/integration/branches.js
+  sed -i "s/Version: .*/Version: $1/g" release/extra/debian/package/DEBIAN/control
   sed -i "s/ release: .*/ release: v$1/g" appveyor.yml
 
   # Duniter.iss (Windows installer)
@@ -21,7 +20,7 @@ if [[ $1 =~ ^[0-9]+.[0-9]+.[0-9]+((a|b)[0-9]+)?$ ]]; then
 
   # Commit
   git reset HEAD
-  git add package.json test/integration/branches.js gui/index.html release/arch/debian/package/DEBIAN/control release/arch/windows/duniter.iss
+  git add package.json gui/index.html release/extra/debian/package/DEBIAN/control release/arch/windows/duniter.iss
   git commit -m "v$1"
   git tag "v$1"
 else
diff --git a/release/scripts/build.sh b/release/scripts/build.sh
index a341dc222766bfb49c21f264d967853077a0b59d..6a0ae2940c38b69a1e1c81226a894f35fd5b35b7 100755
--- a/release/scripts/build.sh
+++ b/release/scripts/build.sh
@@ -1,45 +1,104 @@
 #!/bin/bash
 
+BUILDER_TAG="v1.0.1"
+
 TAG="$3"
+ORIGIN="$4"
+IS_LOCAL_TAG=0
+
+if [[ -z "${TAG}" ]]; then
+  # Default tag = YEARMONTHDAY.HOURMINUTE.SECONDS
+  TAG="`date +\"%Y%m%d\"`.`date +\"%H%M\"`.`date +\"%S\"`"
+  IS_LOCAL_TAG=1
+fi
+
+if [[ -z "${ORIGIN}" ]]; then
+  # Default origin = the local repository (current directory)
+  ORIGIN="$(cd ./; pwd)"
+fi
 
 case "$1" in
 make)
   case "$2" in
   arm)
     cd release/arch/arm
-    ./build-arm.sh
+
+    #### PREPARE SOURCE CODE ####
+    rm -rf duniter-source
+    # Clone from remote
+    echo ">> VM: Cloning sources from ${ORIGIN}..."
+    git clone "${ORIGIN}" duniter-source
+    if [ ${IS_LOCAL_TAG} -eq 1 ]; then
+      cd duniter-source
+      echo ">> git tag v${TAG}..."
+      ./release/new_version.sh "$TAG"
+      cd ..
+    fi
+
+    ./build-arm.sh ${TAG}
+
     if [ ! $? -eq 0 ]; then
       echo ">> Something went wrong. Stopping build."
     else
       echo ">> Build success."
     fi
     ;;
-  deb)
-    cd release/arch/debian
+  lin)
+    cd release/arch/linux
     if [[ ! -f "duniter-desktop-$TAG-linux-x64.deb" ]]; then
-      [[ $? -eq 0 ]] && echo ">> Starting Vagrant Ubuntu VM..."
-      [[ $? -eq 0 ]] && vagrant up
-      [[ $? -eq 0 ]] && echo ">> VM: building Duniter..."
-      [[ $? -eq 0 ]] && vagrant ssh -- 'bash -s' < ./build-deb.sh
+
+      #### PREPARE SOURCE CODE ####
+      # Clone from remote
+      echo ">> VM: Cloning sources from ${ORIGIN}..."
+      git clone "${ORIGIN}" duniter-source
+      cd duniter-source
+      [[ ${IS_LOCAL_TAG} -eq 1 ]] && ./release/new_version.sh "${TAG}"
+      git checkout "v${TAG}"
+      cd ..
+
+      docker pull duniter/release-builder:${BUILDER_TAG}
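+      # Feed the build commands to the container via a heredoc piped to stdin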
+cat <<EOF |
+      cd /builds/duniter-source
+      bash "release/arch/linux/build-lin.sh" "${TAG}" || exit 1
+      exit 0
+EOF
+      docker run --rm -i -v ${PWD}/duniter-source:/builds/duniter-source duniter/release-builder:${BUILDER_TAG}
       if [ ! $? -eq 0 ]; then
         echo ">> Something went wrong. Stopping build."
       else
+        mv duniter-source/work/bin/* .
         echo ">> Build success. Shutting the VM down."
       fi
-      vagrant halt
+      rm -rf duniter-source
       echo ">> VM closed."
     else
-      echo "Debian binaries already built. Ready for upload."
+      echo "Linux binaries already built. Ready for upload."
     fi
     ;;
   win)
     cd release/arch/windows
     if [[ ! -f "duniter-desktop-$TAG-windows-x64.exe" ]]; then
+
+      #### PREPARE SOURCE CODE ####
+      rm -rf duniter-source
+      # Clone from remote
+      echo ">> VM: Cloning sources from ${ORIGIN}..."
+      git clone "${ORIGIN}" duniter-source
+      echo "${TAG}" > duniter_tag.txt
+      if [ ${IS_LOCAL_TAG} -eq 1 ]; then
+        cd duniter-source
+        ./release/new_version.sh "$TAG"
+        cd ..
+      fi
+
       [[ $? -eq 0 ]] && echo ">> Starting Vagrant Windows VM..."
       [[ $? -eq 0 ]] && vagrant up
+
       if [ ! $? -eq 0 ]; then
         echo ">> Something went wrong. Stopping build."
       fi
+      rm -f duniter_tag.txt
+      rm -rf ./duniter-source
       vagrant halt
       echo ">> VM closed."
     else
diff --git a/test/fast/protocol-local-rule-chained-tx-depth.ts b/test/fast/protocol-local-rule-chained-tx-depth.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a13f0fb3c45bc7d979aa3c98e7c914fe8657a640
--- /dev/null
+++ b/test/fast/protocol-local-rule-chained-tx-depth.ts
@@ -0,0 +1,45 @@
+import {LOCAL_RULES_HELPERS} from "../../app/lib/rules/local_rules"
+
+const _ = require('underscore')
+const assert = require('assert')
+
+describe("Protocol BR_G110 - chained tx depth", () => {
+
+  const sindex = [
+    { tx: 'A', op: 'UPDATE', identifier: 'UD1', pos: 0 },
+    { tx: 'A', op: 'CREATE', identifier: 'TXA', pos: 0 },
+    { tx: 'B', op: 'UPDATE', identifier: 'TXA', pos: 0 },
+    { tx: 'B', op: 'CREATE', identifier: 'TXB', pos: 0 },
+    { tx: 'C', op: 'UPDATE', identifier: 'TXB', pos: 0 },
+    { tx: 'C', op: 'CREATE', identifier: 'TXC', pos: 0 },
+    { tx: 'D', op: 'UPDATE', identifier: 'TXC', pos: 0 },
+    { tx: 'D', op: 'CREATE', identifier: 'TXD', pos: 0 },
+    { tx: 'E', op: 'UPDATE', identifier: 'TXD', pos: 0 },
+    { tx: 'E', op: 'CREATE', identifier: 'TXE', pos: 0 },
+    { tx: 'F', op: 'UPDATE', identifier: 'TXE', pos: 0 },
+    { tx: 'F', op: 'CREATE', identifier: 'TXF', pos: 0 },
+    { tx: 'G', op: 'UPDATE', identifier: 'TXF', pos: 0 },
+    { tx: 'G', op: 'CREATE', identifier: 'TXG', pos: 0 },
+    { tx: 'H', op: 'UPDATE', identifier: 'TXG', pos: 0 },
+    { tx: 'H', op: 'CREATE', identifier: 'TXH', pos: 0 },
+  ]
+
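+  // Each tx spends the output of the previous one, so depth grows along the
+  // chain A..H; BR_G110 caps the counted depth at 6.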
+  it('should detect normal depth', () => {
+    assert.equal(0, LOCAL_RULES_HELPERS.getTransactionDepth('A', sindex))
+    assert.equal(1, LOCAL_RULES_HELPERS.getTransactionDepth('B', sindex))
+    assert.equal(2, LOCAL_RULES_HELPERS.getTransactionDepth('C', sindex))
+    assert.equal(3, LOCAL_RULES_HELPERS.getTransactionDepth('D', sindex))
+    assert.equal(4, LOCAL_RULES_HELPERS.getTransactionDepth('E', sindex))
+    assert.equal(5, LOCAL_RULES_HELPERS.getTransactionDepth('F', sindex))
+    assert.equal(6, LOCAL_RULES_HELPERS.getTransactionDepth('G', sindex))
+  })
+
+  it('should cap the depth at 6', () => {
+    assert.equal(6, LOCAL_RULES_HELPERS.getTransactionDepth('H', sindex))
+  })
+
+  it('should find the max depth globally', () => {
+    assert.equal(6, LOCAL_RULES_HELPERS.getMaxTransactionDepth(sindex))
+  })
+})
+
diff --git a/test/fast/prover/pow-1-cluster.js b/test/fast/prover/pow-1-cluster.ts
similarity index 63%
rename from test/fast/prover/pow-1-cluster.js
rename to test/fast/prover/pow-1-cluster.ts
index 96d58c12b9f36dd1391c91d3b649bb4f12f25bbc..a2e76947e030785b11c2396118d011fb32f433ee 100644
--- a/test/fast/prover/pow-1-cluster.js
+++ b/test/fast/prover/pow-1-cluster.ts
@@ -1,16 +1,19 @@
-"use strict";
+import {Master} from "../../../app/modules/prover/lib/powCluster"
 
 const co = require('co')
-const should = require('should')
-const PowCluster = require('../../../app/modules/prover/lib/powCluster').Master
+require('should')
 const logger = require('../../../app/lib/logger').NewLogger()
 
-let master
+let master: Master
 
 describe('PoW Cluster', () => {
 
   before(() => {
-    master = new PowCluster(1, logger)
+    master = new Master(1, logger)
+  })
+
+  after(() => {
+    return master.shutDownWorkers()
   })
 
   it('should have an empty cluster if no PoW was asked', () => {
@@ -73,4 +76,35 @@ describe('PoW Cluster', () => {
     delay.should.be.below(50)
   }))
 
+  it('should be able to stop all the cores on cancel', async () => {
+    master.proveByWorkers({
+      initialTestsPerRound: 100,
+      maxDuration: 1000,
+      newPoW: {
+        block: {
+          number: 0
+        },
+        zeros: 10,
+        highMark: 'F',
+        conf: {
+          medianTimeBlocks: 1,
+          avgGenTime: 100,
+          cpu: 0.8,
+          prefix: '8',
+          nbCores: 1
+        },
+        pair: {
+          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+        }
+      }
+    })
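+    // wait for a worker to report activity before cancelling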
+    await new Promise(res => {
+      master.onInfoMessage = () => res()
+    })
+    await master.cancelWork()
+    await new Promise(res => setTimeout(res, 100))
+    master.nbCancels.should.equal(1)
+  })
+
 });
diff --git a/test/fast/prover/pow-2-engine.js b/test/fast/prover/pow-2-engine.js
index 8238438d02c1866d4bf7acc0d26625f2f3549a32..743744ba550dd7db189020a2949321393c844e52 100644
--- a/test/fast/prover/pow-2-engine.js
+++ b/test/fast/prover/pow-2-engine.js
@@ -10,6 +10,7 @@ describe('PoW Engine', () => {
   it('should be configurable', () => co(function*(){
     const e1 = new PowEngine({ nbCores: 1 }, logger);
     (yield e1.setConf({ cpu: 0.2, prefix: '34' })).should.deepEqual({ cpu: 0.2, prefix: '34' });
+    yield e1.shutDown()
   }));
 
   it('should be able to make a proof', () => co(function*(){
@@ -52,6 +53,7 @@ describe('PoW Engine', () => {
         pow: '009A52E6E2E4EA7DE950A2DA673114FA55B070EBE350D75FF0C62C6AAE9A37E5'
       }
     });
+    yield e1.shutDown()
   }));
 
   it('should be able to stop a proof', () => co(function*(){
@@ -85,5 +87,6 @@ describe('PoW Engine', () => {
     yield e1.cancel()
     // const proof = yield proofPromise;
     // should.not.exist(proof);
+    yield e1.shutDown()
   }));
 });
diff --git a/test/integration/branches.js b/test/integration/branches.js
deleted file mode 100644
index dadf77a17461865a6b6a4b9adff1026220653e81..0000000000000000000000000000000000000000
--- a/test/integration/branches.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-
-const _         = require('underscore');
-const co        = require('co');
-const should    = require('should');
-const duniter   = require('../../index');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const rp        = require('request-promise');
-const httpTest  = require('./tools/http');
-const shutDownEngine  = require('./tools/shutDownEngine');
-
-const expectAnswer   = httpTest.expectAnswer;
-
-const MEMORY_MODE = true;
-const commonConf = {
-  ipv4: '127.0.0.1',
-  currency: 'bb',
-  httpLogs: true,
-  forksize: 3,
-  sigQty: 1
-};
-
-let s1
-
-describe("Branches", () => co(function*() {
-
-  before(() => co(function*() {
-
-    s1 = duniter(
-      '/bb1',
-      MEMORY_MODE,
-      _.extend({
-        port: '7778',
-        pair: {
-          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-        }
-      }, commonConf));
-
-    const server = yield s1.initWithDAL();
-    const bmapi = yield bma(server);
-    yield bmapi.openConnections();
-  }));
-
-  after(() => {
-    return shutDownEngine(s1)
-  })
-
-  describe("Server 1 /blockchain", function() {
-
-    it('should have a 3 blocks fork window size', function() {
-      return expectAnswer(rp('http://127.0.0.1:7778/node/summary', { json: true }), function(res) {
-        res.should.have.property('duniter').property('software').equal('duniter');
-        res.should.have.property('duniter').property('version').equal('1.6.14');
-        res.should.have.property('duniter').property('forkWindowSize').equal(3);
-      });
-    });
-  });
-}));
diff --git a/test/integration/branches2.js b/test/integration/branches2.js
deleted file mode 100644
index 0970f97333bb4e18b9ede6bfb3beb3f5a06cfebf..0000000000000000000000000000000000000000
--- a/test/integration/branches2.js
+++ /dev/null
@@ -1,214 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const other_constants_1 = require("../../app/lib/other_constants");
-const logger_1 = require("../../app/lib/logger");
-const index_1 = require("../../app/modules/bma/index");
-const index_2 = require("../../app/modules/crawler/index");
-const toolbox_1 = require("./tools/toolbox");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = index_1.BmaDependency.duniter.methods.bma;
-const user = require('./tools/user');
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const expectHttpCode = httpTest.expectHttpCode;
-if (other_constants_1.OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS) {
-    logger_1.NewLogger().mute();
-}
-// Trace these errors
-process.on('unhandledRejection', (reason) => {
-    console.error('Unhandled rejection: ' + reason);
-    console.error(reason);
-});
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 10,
-    switchOnHeadAdvance: 6,
-    avgGenTime: 30 * 60,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-const now = Math.round(new Date().getTime() / 1000);
-describe("SelfFork", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb4', MEMORY_MODE, _.extend({
-            port: '7781',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            }
-        }, commonConf));
-        s2 = duniter('/bb5', MEMORY_MODE, _.extend({
-            port: '7782',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        const commitS1 = commit(s1);
-        const commitS2 = commit(s2, {
-            time: now + 37180
-        });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        // Server 1
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commitS1({
-            time: now
-        });
-        yield commitS1();
-        yield commitS1();
-        yield commitS1();
-        // Server 2
-        yield sync(0, 2, s1, s2);
-        yield toolbox_1.waitToHaveBlock(s2, 2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield Promise.all([
-            toolbox_1.waitForkResolution(s1, 9),
-            index_2.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey)
-        ]);
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), {
-                number: 0,
-                issuersCount: 0,
-                issuersFrame: 1,
-                issuersFrameVar: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), {
-                number: 1,
-                issuersCount: 1,
-                issuersFrame: 1,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/2 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), {
-                number: 2,
-                issuersCount: 1,
-                issuersFrame: 2,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/3 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), {
-                number: 3,
-                issuersCount: 1,
-                issuersFrame: 3,
-                issuersFrameVar: 3
-            });
-        });
-        it('/block/4 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), {
-                number: 4,
-                issuersCount: 2,
-                issuersFrame: 4,
-                issuersFrameVar: 7
-            });
-        });
-        it('/block/5 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), {
-                number: 5,
-                issuersCount: 2,
-                issuersFrame: 5,
-                issuersFrameVar: 6
-            });
-        });
-        it('/block/6 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), {
-                number: 6,
-                issuersCount: 2,
-                issuersFrame: 6,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/7 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), {
-                number: 7,
-                issuersCount: 2,
-                issuersFrame: 7,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 2 branch', () => __awaiter(this, void 0, void 0, function* () {
-            const branches = yield s1.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-    describe("Server 2 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), {
-                number: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), {
-                number: 1
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 1 branch', () => co(function* () {
-            const branches = yield s2.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-});
-//# sourceMappingURL=branches2.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
deleted file mode 100644
index 353ef8113098859a1f9e67a0b9592211759b1194..0000000000000000000000000000000000000000
--- a/test/integration/branches_switch.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const index_1 = require("../../app/modules/crawler/index");
-const index_2 = require("../../app/modules/bma/index");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const TestUser = require('./tools/TestUser').TestUser;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 30,
-    avgGenTime: 1,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-describe("Switch", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb11', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7788',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            },
-            rootoffset: 10,
-            sigQty: 1, dt: 1, ud0: 120
-        }, commonConf));
-        s2 = duniter('/bb12', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7789',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield sync(0, 2, s1, s2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        // So we now have:
-        // S1 01234
-        // S2   `3456789
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield index_1.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey);
-        // S1 should have switched to the other branch
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/8 should exist on S1', function () {
-            return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-        it('/block/8 should exist on S2', function () {
-            return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-    });
-});
-//# sourceMappingURL=branches_switch.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.ts b/test/integration/branches_switch.ts
index 2eb7fae68263581106b049d653402e133aa6508a..ecce9de72ba20fb22fbaeb610ea56ca95f02f8a2 100644
--- a/test/integration/branches_switch.ts
+++ b/test/integration/branches_switch.ts
@@ -11,6 +11,7 @@ const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
 const sync      = require('./tools/sync');
+const cluster   = require('cluster')
 const shutDownEngine  = require('./tools/shutDownEngine');
 
 const expectJSON     = httpTest.expectJSON;
@@ -31,6 +32,8 @@ describe("Switch", function() {
 
   before(() => co(function *() {
 
+    cluster.setMaxListeners(6)
+
     s1 = duniter(
       '/bb11',
       MEMORY_MODE,
@@ -97,6 +100,7 @@ describe("Switch", function() {
   }));
 
   after(() => {
+    cluster.setMaxListeners(3)
     return Promise.all([
       shutDownEngine(s1),
       shutDownEngine(s2)
diff --git a/test/integration/continuous-proof.js b/test/integration/continuous-proof.js
index 289d5b694b3f5ceeef79fb2f479af64c6080548f..157477a80f3dca199030a6ba498ea6d0cf218e44 100644
--- a/test/integration/continuous-proof.js
+++ b/test/integration/continuous-proof.js
@@ -37,6 +37,7 @@ describe("Continous proof-of-work", function() {
     yield i1.join();
     yield i2.join();
     yield s1.commit();
+    yield s1.closeCluster();
   }));
 
   it('should automatically stop waiting if nothing happens', () => co(function*() {
@@ -104,7 +105,7 @@ describe("Continous proof-of-work", function() {
     s2.conf.cpu = 1.0;
     s2.startBlockComputation();
     yield s2.until('block', 15);
-    s2.stopBlockComputation();
+    yield s2.stopBlockComputation();
     yield [
       require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s3),
       new Promise(res => {
@@ -121,11 +122,6 @@ describe("Continous proof-of-work", function() {
     const current = yield s3.get('/blockchain/current')
     yield s3.stopBlockComputation();
     current.number.should.be.aboveOrEqual(14)
+    yield s1.closeCluster()
   }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
 });
diff --git a/test/integration/documents-currency.ts b/test/integration/documents-currency.ts
index a97afd54206967a360beb54e2ba528117ae7c2eb..50327d07d860ac73031d52f6fb15808175197ff0 100644
--- a/test/integration/documents-currency.ts
+++ b/test/integration/documents-currency.ts
@@ -1,3 +1,4 @@
+
 import { NewTestingServer, TestingServer } from './tools/toolbox';
 import { unlock } from '../../app/lib/common-libs/txunlock';
 import { ConfDTO, CurrencyConfDTO } from '../../app/lib/dto/ConfDTO';
diff --git a/test/integration/forwarding.js b/test/integration/forwarding.js
deleted file mode 100644
index 993247afcae309b74a8001914f99576823085ed0..0000000000000000000000000000000000000000
--- a/test/integration/forwarding.js
+++ /dev/null
@@ -1,184 +0,0 @@
-"use strict";
-const should = require('should');
-const assert = require('assert');
-const async  = require('async');
-const _      = require('underscore');
-const co     = require('co');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const jspckg = require('../../package');
-const constants = require('../../app/lib/constants');
-
-require('../../app/modules/bma').BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
-
-if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
-  require('../../app/lib/logger').NewLogger().mute();
-}
-
-describe("Forwarding", function() {
-
-  describe("Nodes", function() {
-
-    const common = { currency: 'bb', nobma: false, bmaWithCrawler:true, ws2p: { upnp: false }, ipv4: '127.0.0.1', remoteipv4: '127.0.0.1', rootoffset: 0, sigQty: 1 };
-
-    const node1 = node('db_1', _({ upnp: false, httplogs: false, port: 9600, remoteport: 9600, pair: { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'} }).extend(common));
-    const node2 = node('db_2', _({ upnp: false, httplogs: false, port: 9601, remoteport: 9601, pair: { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'} }).extend(common));
-
-    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
-    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
-    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
-    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
-
-    before(() => co(function*(){
-      yield [node1, node2].map((theNode) => theNode.startTesting());
-      yield new Promise(function(resolve, reject){
-        async.waterfall([
-          function(next) {
-            node2.peering(next);
-          },
-          function(peer, next) {
-            node1.submitPeer(peer, function(err) {
-              next(err);
-            });
-          },
-          function(next) {
-            node1.peering(next);
-          },
-          function(peer, next) {
-            node2.submitPeer(peer, next);
-          }
-        ], function(err) {
-          err ? reject(err) : resolve();
-        });
-      });
-      yield [
-        node2.until('identity', 4),
-        node2.until('certification', 2),
-        node2.until('block', 1),
-        co(function *() {
-
-          // Self certifications
-          yield cat.createIdentity();
-          yield tac.createIdentity();
-          yield tic.createIdentity();
-          yield toc.createIdentity();
-          // Certifications
-          yield cat.cert(tac);
-          yield tac.cert(cat);
-          yield cat.join();
-          yield tac.join();
-          yield node1.commitP();
-        })
-      ];
-      yield [
-        node2.until('revocation', 1),
-        co(function *() {
-          yield cat.revoke();
-        })
-      ];
-    }));
-
-    describe("Testing technical API", function(){
-
-      it('Node1 should be up and running', node1.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-
-      it('Node2 should be up and running', node2.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-    });
-
-    describe('Node 1', doTests(node1));
-    describe('Node 2', doTests(node2));
-
-  });
-});
-
-function doTests(theNode) {
-
-  return function(){
-
-    describe("user cat", function(){
-
-      it('should give only 1 result', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          should.exists(res.results[0].signed[0].isMember);
-          should.exists(res.results[0].signed[0].wasMember);
-          assert.equal(res.results[0].signed[0].isMember, true);
-          assert.equal(res.results[0].signed[0].wasMember, true);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    describe("user tac", function(){
-
-      it('should give only 1 result', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].uids[0].others.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    it('toc should give only 1 result', theNode.lookup('toc', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-
-    it('tic should give only 1 results', theNode.lookup('tic', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-  };
-}
diff --git a/test/integration/forwarding.ts b/test/integration/forwarding.ts
new file mode 100644
index 0000000000000000000000000000000000000000..8adbba5eecbbb90794e343be1a6b6dab41e603a5
--- /dev/null
+++ b/test/integration/forwarding.ts
@@ -0,0 +1,136 @@
+import {NewLogger} from "../../app/lib/logger"
+import {BmaDependency} from "../../app/modules/bma/index"
+import {TestUser} from "./tools/TestUser"
+import {simpleTestingConf, simpleTestingServer, TestingServer} from "./tools/toolbox"
+import {RouterDependency} from "../../app/modules/router"
+
+const should = require('should');
+const assert = require('assert');
+const jspckg = require('../../package');
+const constants = require('../../app/lib/constants');
+
+BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
+
+if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
+  NewLogger().mute()
+}
+
+describe("Forwarding", function() {
+
+  describe("Nodes", function() {
+
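+    // Fixed reference time: blocks below are committed with explicit, deterministic timestamps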
+    const now = 1500000000
+    const conf1 = simpleTestingConf(now, { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'})
+    const conf2 = simpleTestingConf(now, { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'})
+
+    const node1 = simpleTestingServer(conf1)
+    const node2 = simpleTestingServer(conf2)
+
+    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
+    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
+    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
+    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
+
+    before(async () => {
+      await node1.initDalBmaConnections()
+      await node2.initDalBmaConnections()
+      await node1.sharePeeringWith(node2)
+      await node2.sharePeeringWith(node1)
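+      // Each node routes the documents it receives to the network, so they propagate to the other node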
+      RouterDependency.duniter.methods.routeToNetwork(node1._server)
+      RouterDependency.duniter.methods.routeToNetwork(node2._server)
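+      // Produce identities/certifications on node1 and wait until node2 has received them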
+      await Promise.all([
+        node2.until('identity', 4),
+        node2.until('certification', 2),
+        node2.until('block', 1),
+        (async () => {
+
+          // Self certifications
+          await cat.createIdentity();
+          await tac.createIdentity();
+          await tic.createIdentity();
+          await toc.createIdentity();
+          // Certifications
+          await cat.cert(tac);
+          await tac.cert(cat);
+          await cat.join();
+          await tac.join();
+          await node1.commit({ time: now })
+        })()
+      ])
+      await Promise.all([
+        node2.until('revocation', 1),
+        cat.revoke()
+      ])
+    })
+
+    describe("Testing technical API", function(){
+
+      it('Node1 should be up and running', () => node1.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+
+      it('Node2 should be up and running', () => node2.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+    });
+
+    describe('Node 1', doTests(node1));
+    describe('Node 2', doTests(node2));
+
+  });
+});
+
+function doTests(theNode:TestingServer) {
+
+  return () => {
+
+    describe("user cat", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }));
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+        should.exists(res.results[0].signed[0].isMember);
+        should.exists(res.results[0].signed[0].wasMember);
+        assert.equal(res.results[0].signed[0].isMember, true);
+        assert.equal(res.results[0].signed[0].wasMember, true);
+      }));
+    });
+
+    describe("user tac", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }))
+
+      it('should have 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].uids[0].others.length, 1);
+      }))
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+      }))
+    })
+
+    it('toc should give no result', () => theNode.expectError('/wot/lookup/toc', 404, 'No matching identity'))
+
+    it('tic should give no results', () => theNode.expectError('/wot/lookup/tic', 404, 'No matching identity'))
+  }
+}
diff --git a/test/integration/http_api.js b/test/integration/http_api.js
index 83cd3ccb202b102879c48a2bbed2170436992c67..9454b78d613b86f0b88ea912648efb70e36ce5fe 100644
--- a/test/integration/http_api.js
+++ b/test/integration/http_api.js
@@ -336,7 +336,6 @@ function expectJSON(promise, json) {
 
 function postBlock(server2) {
   return function(block) {
-    console.log(typeof block == 'string' ? block : block.getRawSigned())
     return post(server2, '/blockchain/block')({
       block: typeof block == 'string' ? block : block.getRawSigned()
     })
diff --git a/test/integration/network-update.js b/test/integration/network-update.js
index 44c7ce9293b128fd57563f2c44527f610e265adb..7da00b7cf58913dda10b67898e0ccbbbe001a299 100644
--- a/test/integration/network-update.js
+++ b/test/integration/network-update.js
@@ -60,7 +60,7 @@ describe("Network updating", function() {
       yield [s1, s2].reduce((p, server) => co(function*() {
         yield p;
         yield server.initDalBmaConnections()
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
       }), Promise.resolve());
 
       // Server 1
diff --git a/test/integration/peer-outdated.js b/test/integration/peer-outdated.js
index 3f2cb0e1f4b37553114a501b8b8ceb66e34c4ab5..1855c043183697cb8a64986b9368e26dce22ad8c 100644
--- a/test/integration/peer-outdated.js
+++ b/test/integration/peer-outdated.js
@@ -42,7 +42,7 @@ describe("Peer document expiry", function() {
     yield [s1, s2].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/peerings.js b/test/integration/peerings.js
index 4b227f6317b951c2776dbd8b32a672bf31ca4237..5fc5d49bfa55b94f4614a3280c9c8aa1e95652b6 100644
--- a/test/integration/peerings.js
+++ b/test/integration/peerings.js
@@ -93,7 +93,7 @@ describe("Network", function() {
               return bmaAPI.openConnections()
                 .then(() => {
                   server.bma = bmaAPI;
-                  require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+                  require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
                 });
             });
         });
diff --git a/test/integration/peers-same-pubkey.js b/test/integration/peers-same-pubkey.js
index 41c4b9c19edb5c6362a53784f1120d73fae53cb4..6375127aa429522f1f265815dcacccb9395fde6f 100644
--- a/test/integration/peers-same-pubkey.js
+++ b/test/integration/peers-same-pubkey.js
@@ -36,7 +36,7 @@ describe("Peer document", function() {
     yield [s1, s2, s3].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/server-import-export.js b/test/integration/server-import-export.js
index 4c56854060f461cae02c95fa7b4629b40e4b9966..0f3ab4344bfbdf3981dc90bc837ad2434b082c25 100644
--- a/test/integration/server-import-export.js
+++ b/test/integration/server-import-export.js
@@ -21,10 +21,10 @@ let s0, s1;
 describe('Import/Export', () => {
 
   before(() => co(function *() {
-    s0 = toolbox.server(_.extend({ homename: 'dev_unit_tests1' }, serverConfig));
+    s0 = toolbox.server(_.extend({ homename: 'dev_unit_tests1', powNoSecurity: true }, serverConfig));
     yield s0.resetHome();
 
-    s1 = toolbox.server(_.extend({ homename: 'dev_unit_tests1' }, serverConfig));
+    s1 = toolbox.server(_.extend({ homename: 'dev_unit_tests1', powNoSecurity: true }, serverConfig));
 
     const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 });
     const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, { server: s1 });
@@ -53,8 +53,6 @@ describe('Import/Export', () => {
     return new Promise((resolve, reject) => {
       archive.on('error', reject);
       output.on('close', function() {
-        console.log(archive.pointer() + ' total bytes');
-        console.log('archiver has been finalized and the output file descriptor has closed.');
         resolve();
       });
     });
diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js
index 3c79c48341eadb7b7948f71467ac79fd44259b78..a8d813cb9e8fecda634057b5edb7094563ac569f 100644
--- a/test/integration/start_generate_blocks.js
+++ b/test/integration/start_generate_blocks.js
@@ -76,7 +76,7 @@ describe("Generation", function() {
         yield server.initWithDAL();
         server.bma = yield bma(server);
         yield server.bma.openConnections();
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
         yield server.PeeringService.generateSelfPeer(server.conf);
         const prover = require('../../app/modules/prover').ProverDependency.duniter.methods.prover(server);
         server.startBlockComputation = () => prover.startService();
diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts
index 45a7ceb7bdeb018ee07b8e37cf1ed17a140eea83..af71bd3ae15e46c3b4a0fc1168a2d61014fc80a3 100644
--- a/test/integration/tools/toolbox.ts
+++ b/test/integration/tools/toolbox.ts
@@ -23,6 +23,7 @@ import {WS2PCluster} from "../../../app/modules/ws2p/lib/WS2PCluster"
 import {WS2PServer} from "../../../app/modules/ws2p/lib/WS2PServer"
 import {WS2PServerMessageHandler} from "../../../app/modules/ws2p/lib/interface/WS2PServerMessageHandler"
 import {TestUser} from "./TestUser"
+import {RouterDependency} from "../../../app/modules/router"
 
 const assert      = require('assert');
 const _           = require('underscore');
@@ -100,8 +101,8 @@ export const simpleNetworkOf2NodesAnd2Users = async (options:any) => {
   await tac.join();
 
   // Each server forwards to each other
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s1);
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s2);
+  RouterDependency.duniter.methods.routeToNetwork(s1._server)
+  RouterDependency.duniter.methods.routeToNetwork(s2._server)
 
   return { s1, s2, cat, tac };
 }
@@ -601,7 +602,7 @@ export class TestingServer {
     const bmaAPI = await bma(this.server);
     await bmaAPI.openConnections();
     this.bma = bmaAPI;
-    require('../../../app/modules/router').duniter.methods.routeToNetwork(this.server);
+    RouterDependency.duniter.methods.routeToNetwork(this.server)
     // Extra: for /wot/requirements URL
     require('../../../app/modules/prover').ProverDependency.duniter.methods.hookServer(this.server);
   }
@@ -643,7 +644,7 @@ export async function newWS2PBidirectionnalConnection(currency:string, k1:Key, k
     wss.on('connection', async (ws:any) => {
       switch (i) {
         case 1:
-          s1 = WS2PConnection.newConnectionFromWebSocketServer(ws, serverHandler, new WS2PPubkeyLocalAuth(currency, k1), new WS2PPubkeyRemoteAuth(currency, k1), {
+          s1 = WS2PConnection.newConnectionFromWebSocketServer(ws, serverHandler, new WS2PPubkeyLocalAuth(currency, k1, ""), new WS2PPubkeyRemoteAuth(currency, k1), {
             connectionTimeout: 100,
             requestTimeout: 100
           });
@@ -657,13 +658,13 @@ export async function newWS2PBidirectionnalConnection(currency:string, k1:Key, k
       })
       i++
     })
-    c1 = WS2PConnection.newConnectionToAddress('ws://localhost:' + port, new (class EmptyHandler implements WS2PMessageHandler {
+    c1 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:' + port, new (class EmptyHandler implements WS2PMessageHandler {
       async handlePushMessage(json: any): Promise<void> {
       }
       async answerToRequest(json: any): Promise<WS2PResponse> {
         return {}
       }
-    }), new WS2PPubkeyLocalAuth(currency, k2), new WS2PPubkeyRemoteAuth(currency, k2))
+    }), new WS2PPubkeyLocalAuth(currency, k2, ""), new WS2PPubkeyRemoteAuth(currency, k2))
   })
 }
 
@@ -678,7 +679,7 @@ export const simpleWS2PNetwork: (s1: TestingServer, s2: TestingServer) => Promis
   const connexionPromise = new Promise(res => {
     ws2ps.on('newConnection', res)
   })
-  const ws2pc = await cluster2.connectToRemoteWS('localhost', port, '', new WS2PServerMessageHandler(s2._server, cluster2), s1._server.conf.pair.pub)
+  const ws2pc = await cluster2.connectToRemoteWS(1, 'localhost', port, '', new WS2PServerMessageHandler(s2._server, cluster2), s1._server.conf.pair.pub)
 
   await connexionPromise
   w1 = await ws2ps.getConnection(clientPub)
diff --git a/test/integration/transactions-chaining.js b/test/integration/transactions-chaining.js
deleted file mode 100644
index 66a02c1ca402f6046a8ece5faaa057bbd26efba0..0000000000000000000000000000000000000000
--- a/test/integration/transactions-chaining.js
+++ /dev/null
@@ -1,92 +0,0 @@
-"use strict";
-
-const co = require('co');
-const _ = require('underscore');
-const should = require('should');
-const assert = require('assert');
-const constants = require('../../app/lib/constants');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const CommonConstants = require('../../app/lib/common-libs/constants').CommonConstants
-const toolbox   = require('./tools/toolbox');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const unit   = require('./tools/unit');
-const http   = require('./tools/http');
-
-describe("Transaction chaining", function() {
-
-  const now = 1456644632;
-
-  let s1, tic, toc
-
-  before(() => co(function*() {
-
-    s1 = toolbox.server({
-      pair: {
-        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
-        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
-      },
-      dt: 3600,
-      udTime0: now + 3600,
-      ud0: 1200,
-      c: 0.1
-    });
-
-    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
-    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
-
-    yield s1.initDalBmaConnections();
-    yield tic.createIdentity();
-    yield toc.createIdentity();
-    yield tic.cert(toc);
-    yield toc.cert(tic);
-    yield tic.join();
-    yield toc.join();
-    yield s1.commit({ time: now });
-    yield s1.commit({ time: now + 7210 });
-    yield s1.commit({ time: now + 7210 });
-  }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
-
-  describe("Sources", function(){
-
-    it('it should exist block#2 with UD of 1200', () => s1.expect('/blockchain/block/2', (block) => {
-      should.exists(block);
-      assert.equal(block.number, 2);
-      assert.equal(block.dividend, 1200);
-    }));
-  });
-
-  describe("Chaining", function(){
-
-    it('with SIG and XHX', () => co(function *() {
-      // Current state
-      let current = yield s1.get('/blockchain/current');
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      let tx1 = yield toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
-      let tx2 = yield toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
-        comment: 'also take the remaining 160 units',
-        blockstamp: [current.number, current.hash].join('-'),
-        theseOutputsStart: 1
-      });
-      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
-      CommonConstants.TRANSACTION_MAX_TRIES = 2;
-      yield unit.shouldNotFail(toc.sendTX(tx1));
-      yield unit.shouldNotFail(toc.sendTX(tx2));
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1);
-      yield s1.commit({ time: now + 7210 }); // TX1 commited
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // The 160 remaining units
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2); // The UD + 1040 units sent by toc
-      yield s1.commit({ time: now + 7210 }); // TX2 commited
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
-      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
-    }));
-  });
-});
diff --git a/test/integration/transactions-chaining.ts b/test/integration/transactions-chaining.ts
new file mode 100644
index 0000000000000000000000000000000000000000..3d58e6a4617e8869d0117d6aad60dad9c63887ae
--- /dev/null
+++ b/test/integration/transactions-chaining.ts
@@ -0,0 +1,111 @@
+import {CommonConstants} from "../../app/lib/common-libs/constants"
+import {TestUser} from "./tools/TestUser"
+import {TestingServer} from "./tools/toolbox"
+
+const should = require('should');
+const assert = require('assert');
+const toolbox   = require('./tools/toolbox');
+const unit   = require('./tools/unit');
+
+describe("Transaction chaining", () => {
+
+  const now = 1456644632;
+
+  let s1:TestingServer, tic:TestUser, toc:TestUser
+
+  before(async () => {
+
+    s1 = toolbox.server({
+      pair: {
+        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
+        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
+      },
+      dt: 3600,
+      udTime0: now + 3600,
+      ud0: 1200,
+      c: 0.1
+    });
+
+    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
+    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
+
+    await s1.initDalBmaConnections();
+    await tic.createIdentity();
+    await toc.createIdentity();
+    await tic.cert(toc);
+    await toc.cert(tic);
+    await tic.join();
+    await toc.join();
+    await s1.commit({ time: now });
+    await s1.commit({ time: now + 7210 });
+    await s1.commit({ time: now + 7210 });
+  })
+
+  after(() => {
+    return s1.closeCluster()
+  })
+
+  describe("Sources", () => {
+
+    it('should have block#2 with a UD of 1200', () => s1.expect('/blockchain/block/2', (block: { number:number, dividend:number }) => {
+      should.exists(block);
+      assert.equal(block.number, 2);
+      assert.equal(block.dividend, 1200);
+    }))
+  })
+
+  describe("Chaining", () => {
+
+    it('with SIG and XHX', async () => {
+      // Current state
+      let current = await s1.get('/blockchain/current');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      let tx1 = await toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
+      let tx2 = await toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
+        comment: 'also take the remaining 160 units',
+        blockstamp: [current.number, current.hash].join('-'),
+        theseOutputsStart: 1
+      });
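+      // Temporarily lower TRANSACTION_MAX_TRIES to 2 (restored at the end of the test)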
+      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
+      CommonConstants.TRANSACTION_MAX_TRIES = 2;
+      await unit.shouldNotFail(toc.sendTX(tx1));
+      await unit.shouldNotFail(toc.sendTX(tx2));
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // 1200
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1); // 1200
+      await s1.commit({ time: now + 7210 }); // TX1 committed only
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // 1200 - 1040 = 160 remaining
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2); // The UD + 1040 units sent by toc
+      await s1.commit({ time: now + 7210 }); // TX2 committed now (because it couldn't be chained into the previous block)
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
+      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
+    })
+
+    it('should refuse a block with more than 5 chained tx in it', async () => {
+      // Current state
+      let current = await s1.get('/blockchain/current');
+      const blockstamp = [current.number, current.hash].join('-');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
+      // Ping-pong of 1200 units
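+      // Build a chain of 7 dependent transactions, exceeding the 5-per-block chaining limit asserted below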
+      let tx1 = await tic.prepareITX(1200, toc, "PING-PONG TX1");
+      let tx2 = await toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX2" });
+      let tx3 = await tic.prepareUTX(tx2, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX3" });
+      let tx4 = await toc.prepareUTX(tx3, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX4" });
+      let tx5 = await tic.prepareUTX(tx4, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX5" });
+      let tx6 = await toc.prepareUTX(tx5, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX6" });
+      let tx7 = await tic.prepareUTX(tx6, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX7" });
+      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
+      CommonConstants.TRANSACTION_MAX_TRIES = 2;
+      await unit.shouldNotFail(toc.sendTX(tx1));
+      await unit.shouldNotFail(toc.sendTX(tx2));
+      await unit.shouldNotFail(toc.sendTX(tx3));
+      await unit.shouldNotFail(toc.sendTX(tx4));
+      await unit.shouldNotFail(toc.sendTX(tx5));
+      await unit.shouldNotFail(toc.sendTX(tx6));
+      await unit.shouldNotFail(toc.sendTX(tx7));
+      await s1.commitWaitError({ dontCareAboutChaining: true }, 'The maximum transaction chaining length per block is 5')
+      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
+    })
+  });
+});
diff --git a/test/integration/ws2p_connection.ts b/test/integration/ws2p_connection.ts
index c84d9a42ecc109cd4dd0033d27d4229cd4e15671..e444726b3494123b084e81a6b865c8b9d0547c6e 100644
--- a/test/integration/ws2p_connection.ts
+++ b/test/integration/ws2p_connection.ts
@@ -44,7 +44,7 @@ describe('WS2P', () => {
       })
 
       it('should be able to create a connection', async () => {
-        const ws2p = WS2PConnection.newConnectionToAddress('ws://localhost:' + portA, new WS2PMutedHandler(), new WS2PNoLocalAuth(), new WS2PNoRemoteAuth())
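+        // newConnectionToAddress now takes a leading number (presumably the WS2P version, 1 here)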
+        const ws2p = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:' + portA, new WS2PMutedHandler(), new WS2PNoLocalAuth(), new WS2PNoRemoteAuth())
         const res = await ws2p.request({ name: 'head' })
         assert.deepEqual({ bla: 'aa' }, res)
       })
@@ -95,7 +95,7 @@ describe('WS2P', () => {
 
       it('should refuse the connection if the server does not answer', async () => {
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const ws2p = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair), undefined, {
+        const ws2p = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair), undefined, {
           connectionTimeout: 100,
           requestTimeout: 100
         })
@@ -104,7 +104,7 @@ describe('WS2P', () => {
 
       it('should refuse the connection if the server answers with a wrong signature', async () => {
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const ws2p = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair), undefined, {
+        const ws2p = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair), undefined, {
           connectionTimeout: 100,
           requestTimeout: 100
         })
@@ -113,7 +113,7 @@ describe('WS2P', () => {
 
       it('should refuse the connection if the server refuses our signature', async () => {
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const ws2p = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair), undefined, {
+        const ws2p = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair), undefined, {
           connectionTimeout: 100,
           requestTimeout: 100
         })
@@ -123,7 +123,7 @@ describe('WS2P', () => {
 
       it('should accept the connection if the server answers with a good signature', async () => {
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const ws2p = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PNoRemoteAuth(), undefined, {
+        const ws2p = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PNoRemoteAuth(), undefined, {
           connectionTimeout: 1000,
           requestTimeout: 1000
         })
@@ -180,7 +180,7 @@ describe('WS2P', () => {
 
       it('should be able to create connections and make several requests', async () => {
         // connection 1
-        const c1 = WS2PConnection.newConnectionToAddress('ws://localhost:' + portB, new WS2PMutedHandler(), new WS2PNoLocalAuth(), new WS2PNoRemoteAuth())
+        const c1 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:' + portB, new WS2PMutedHandler(), new WS2PNoLocalAuth(), new WS2PNoRemoteAuth())
         assert.deepEqual({ answer: 'world' }, await c1.request({ name: 'hello!' }))
         assert.deepEqual({ answer: 'world' }, await c1.request({ name: 'hello2!' }))
         assert.equal(s1.nbRequests, 0)
@@ -192,7 +192,7 @@ describe('WS2P', () => {
         assert.equal(s1.nbPushsByRemote, 0)
         assert.equal(c1.nbPushsByRemote, 0)
         // connection 2
-        const c2 = WS2PConnection.newConnectionToAddress('ws://localhost:' + portB, new WS2PMutedHandler(), new WS2PNoLocalAuth(), new WS2PNoRemoteAuth())
+        const c2 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:' + portB, new WS2PMutedHandler(), new WS2PNoLocalAuth(), new WS2PNoRemoteAuth())
         assert.deepEqual({ answer: 'this is s2![j = 0]' }, await c2.request({ name: 'test?' }))
         assert.deepEqual({ answer: 'this is s2![j = 1]' }, await c2.request({ name: 'test!' }))
         assert.deepEqual({ answer: 'this is s2![j = 2]' }, await c2.request({ name: 'test!!!' }))
@@ -230,7 +230,7 @@ describe('WS2P', () => {
         wss.on('connection', async (ws:any) => {
           switch (i) {
             case 1:
-              resolveS1(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
+              resolveS1(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair, ""), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
                 connectionTimeout: 100,
                 requestTimeout: 100
               }));
@@ -244,7 +244,7 @@ describe('WS2P', () => {
               }
             }
 
-              resolveS2(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair), new WS2PPubkeyNotAnsweringWithOKAuth(gtest, serverKeypair), {
+              resolveS2(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair, ""), new WS2PPubkeyNotAnsweringWithOKAuth(gtest, serverKeypair), {
                 connectionTimeout: 100,
                 requestTimeout: 100
               }));
@@ -252,7 +252,7 @@ describe('WS2P', () => {
               break
             case 3:
 
-              resolveS3(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
+              resolveS3(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair, ""), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
                 connectionTimeout: 100,
                 requestTimeout: 100
               }));
@@ -260,7 +260,7 @@ describe('WS2P', () => {
               break
             case 4:
 
-              resolveS4(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
+              resolveS4(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair, ""), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
                 connectionTimeout: 100,
                 requestTimeout: 100
               }));
@@ -268,13 +268,13 @@ describe('WS2P', () => {
               break
 
             case 5:
-              resolveS5(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair), new WS2PPubkeyRemoteAuth(gtest, serverKeypair)));
+              resolveS5(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair, ""), new WS2PPubkeyRemoteAuth(gtest, serverKeypair)));
               (await s5p).connect().catch((e:any) => logger.error('WS2P: newConnectionFromWebSocketServer connection error'))
               break
 
             case 6:
 
-              resolveS6(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
+              resolveS6(WS2PConnection.newConnectionFromWebSocketServer(ws, new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, serverKeypair, ""), new WS2PPubkeyRemoteAuth(gtest, serverKeypair), {
                 connectionTimeout: 100,
                 requestTimeout: 100
               }));
@@ -298,7 +298,7 @@ describe('WS2P', () => {
         }
 
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const c1 = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyNotAnsweringWithACKAuth(gtest, keypair))
+        const c1 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyNotAnsweringWithACKAuth(gtest, keypair))
         c1.connect().catch((e:any) => logger.error('WS2P: connection error'))
         const s1 = await s1p
         await assertThrows(s1.request({ name: 'something' }), "WS2P connection timeout")
@@ -306,7 +306,7 @@ describe('WS2P', () => {
 
       it('should refuse the connection if the client not confirm with OK', async () => {
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair))
+        WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair))
         const s2 = await s2p
         await assertThrows(s2.request({ name: 'something' }), "WS2P connection timeout")
       })
@@ -326,7 +326,7 @@ describe('WS2P', () => {
         }
 
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const c3 = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyAnsweringWithWrongSigForACK(gtest, keypair))
+        const c3 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyAnsweringWithWrongSigForACK(gtest, keypair))
         c3.connect().catch((e:any) => logger.error('WS2P: connection error'))
         const s3 = await s3p
         await assertThrows(s3.request({ name: 'something' }), "Wrong signature from server ACK")
@@ -349,20 +349,20 @@ describe('WS2P', () => {
         }
 
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const c4 = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyRefusingACKSignature(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair))
+        const c4 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyRefusingACKSignature(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair))
         const s4 = await s4p
         await assertThrows(c4.connect(), "Wrong signature from server ACK")
       })
 
       it('should accept the connection if everything is OK on both side', async () => {
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const c5 = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new (class TmpHandler implements WS2PMessageHandler {
+        const c5 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new (class TmpHandler implements WS2PMessageHandler {
           async handlePushMessage(json: any): Promise<void> {
           }
           async answerToRequest(json: any): Promise<WS2PResponse> {
             return { answer: 'success!' }
           }
-        }), new WS2PPubkeyLocalAuth(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair))
+        }), new WS2PPubkeyLocalAuth(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair))
         await c5.connect().catch((e:any) => logger.error('WS2P: connection error'))
         const s5 = await s5p
         assert.deepEqual({ answer: 'success!' }, await s5.request({ name: 'connection?'} ))
@@ -376,7 +376,7 @@ describe('WS2P', () => {
         }
 
         const keypair = new Key('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP')
-        const c6 = WS2PConnection.newConnectionToAddress('ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyNotAnsweringWithOKAuth(gtest, keypair), new WS2PPubkeyRemoteAuth(gtest, keypair))
+        const c6 = WS2PConnection.newConnectionToAddress(1, 'ws://localhost:20903', new WS2PMutedHandler(), new WS2PPubkeyNotAnsweringWithOKAuth(gtest, keypair, ""), new WS2PPubkeyRemoteAuth(gtest, keypair))
         c6.connect().catch((e:any) => logger.error('WS2P: connection error'))
         const s6 = await s6p
         await assertThrows(s6.request({ name: 'something' }), "WS2P connection timeout")
@@ -415,6 +415,10 @@ class WS2PNoLocalAuth implements WS2PLocalAuth {
 
 class WS2PNoRemoteAuth implements WS2PRemoteAuth {
 
+  getVersion(): number {
+    return 1
+  }
+
   getPubkey(): string {
     return ""
   }
@@ -422,7 +426,7 @@ class WS2PNoRemoteAuth implements WS2PRemoteAuth {
   async sendACK(ws: any): Promise<void> {
   }
 
-  async registerCONNECT(challenge:string, sig: string, pub: string): Promise<boolean> {
+  async registerCONNECT(version:number, challenge:string, sig: string, pub: string, ws2pId:string): Promise<boolean> {
     return true
   }
 
diff --git a/test/integration/ws2p_heads.ts b/test/integration/ws2p_heads.ts
index 7f0e78e50fa47b3e4fa08c96f9eac99718480356..7d20af4b2e0ae9acd4e99620a6a753d524230351 100644
--- a/test/integration/ws2p_heads.ts
+++ b/test/integration/ws2p_heads.ts
@@ -86,7 +86,7 @@ describe("WS2P heads propagation", function() {
     b3 = s1.commit({ time: now })
     await Promise.all([
       s2.waitToHaveBlock(3),
-      s2.waitForHeads(1)
+      s2.waitForHeads(2) // head v2 + head v1
     ])
     await s1.expect('/network/ws2p/info', (res:any) => {
       assert.equal(res.peers.level1, 0)
diff --git a/test/integration/ws2p_server_limitations.ts b/test/integration/ws2p_server_limitations.ts
index 8130d186258fb6a94f927ad2e0baaf58bbf7f63d..fd4929955a38c1933b61c1787da6eb51e4b3b0c0 100644
--- a/test/integration/ws2p_server_limitations.ts
+++ b/test/integration/ws2p_server_limitations.ts
@@ -140,7 +140,7 @@ describe("WS2P server limitations", function() {
   })
 
   it('should not connect to s3 because of connection size limit', async () => {
-    cluster3.maxLevel2Peers = 0
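+    // conf.ws2p.maxPublic now plays the role of the former cluster3.maxLevel2Peers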
+    if (s3.conf.ws2p) s3.conf.ws2p.maxPublic = 0
     const p3 = await s3.getPeer()
     await s2.writePeer(p3)
     b3 = await s1.commit({ time: now })
@@ -164,29 +164,64 @@ describe("WS2P server limitations", function() {
     })
   })
 
-  it('should connect to s3 because of configuration favorism', async () => {
-    cluster3.maxLevel2Peers = 1
+  it('should be able to fully disconnect the WS2P network', async () => {
     if (s1._server.conf.ws2p) s1._server.conf.ws2p.privateAccess = true
-    if (s3._server.conf.ws2p) s3._server.conf.ws2p.publicAccess = true
+    if (s3._server.conf.ws2p) {
+      s3._server.conf.ws2p.publicAccess = true
+      s3._server.conf.ws2p.maxPublic = 1
+    }
     await s3.writeBlock(b3)
-    await s3._server.PeeringService.generateSelfPeer(s3._server.conf)
-    await s3._server.PeeringService.generateSelfPeer(s3._server.conf)
     await s1.waitToHaveBlock(3)
     await s2.waitToHaveBlock(3)
     // await waitForkWS2PConnection(s3._server, 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd')
     b4 = await s1.commit({ time: now })
     await s2.waitToHaveBlock(4)
+    // The actual test: drop both limits to zero and verify every node ends up with no WS2P peers
+    if (s3.conf.ws2p) s3.conf.ws2p.maxPublic = 0
+    if (s1.conf.ws2p) s1.conf.ws2p.maxPublic = 0 // <-- Breaks the connection s2 -> s1
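+    // Trim and prune existing connections so the lowered limits actually close the links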
+    await cluster1.trimServerConnections()
+    const s2PreferedKeys = (s2.conf.ws2p && s2.conf.ws2p.preferedNodes) ? s2.conf.ws2p.preferedNodes:[]
+    await cluster2.removeLowPriorityConnections(s2PreferedKeys)
+    await waitForkWS2PDisconnection(s1._server, '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc')
+    await cluster3.trimServerConnections()
+    await s1.expect('/network/ws2p/info', (res:any) => {
+      assert.equal(res.peers.level1, 0)
+      assert.equal(res.peers.level2, 0)
+    })
+    await s2.expect('/network/ws2p/info', (res:any) => {
+      assert.equal(res.peers.level1, 0)
+      assert.equal(res.peers.level2, 0)
+    })
+    await s3.expect('/network/ws2p/info', (res:any) => {
+      assert.equal(res.peers.level1, 0)
+      assert.equal(res.peers.level2, 0)
+    })
+    await s4.expect('/network/ws2p/info', (res:any) => {
+      assert.equal(res.peers.level1, 0)
+      assert.equal(res.peers.level2, 0)
+    })
+  })
+
+  it('should connect to s3 because of configuration favorism', async () => {
+    if (s1._server.conf.ws2p) s1._server.conf.ws2p.privateAccess = true
+    if (s3._server.conf.ws2p) {
+      s3._server.conf.ws2p.publicAccess = true
+      s3._server.conf.ws2p.maxPublic = 1
+    }
     await s3._server.PeeringService.generateSelfPeer(s3._server.conf)
     const p3 = await s3.getPeer()
     await s2.writePeer(p3)
+    await waitForkWS2PConnection(s3._server, '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc')
+    await s1.writePeer(p3)
     await waitForkWS2PConnection(s3._server, 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd')
-    await waitForkWS2PDisconnection(s3._server, '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc')
+    await waitForkWS2PDisconnection(s3._server, '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc') // <-- s2 is kicked! s1 is preferred
     await s1.expect('/network/ws2p/info', (res:any) => {
       assert.equal(res.peers.level1, 1) // <- New connection to s3
-      assert.equal(res.peers.level2, 1)
+      assert.equal(res.peers.level2, 0)
     })
     await s2.expect('/network/ws2p/info', (res:any) => {
-      assert.equal(res.peers.level1, 1)
+      assert.equal(res.peers.level1, 0)
       assert.equal(res.peers.level2, 0)
     })
     await s3.expect('/network/ws2p/info', (res:any) => {
diff --git a/tsconfig.json b/tsconfig.json
index 330af57dbdab513110b9a54bd45b6721b7921ef1..0d50c5895ccac504389e3c7489b2006394b331ac 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -1,7 +1,7 @@
 {
   "compilerOptions": {
     "sourceMap": true,
-    "target": "es6",
+    "target": "es2017",
     "declaration": true,
     "moduleResolution": "node",
     "module": "commonjs",
diff --git a/yarn.lock b/yarn.lock
index e31a5cf3ec6018745c32e2924443a74d95dfaf8d..fd48982082b1b8eca41cbd968fbe604762124be7 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3,12 +3,12 @@
 
 
 "@types/mocha@^2.2.41":
-  version "2.2.41"
-  resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-2.2.41.tgz#e27cf0817153eb9f2713b2d3f6c68f1e1c3ca608"
+  version "2.2.44"
+  resolved "https://registry.yarnpkg.com/@types/mocha/-/mocha-2.2.44.tgz#1d4a798e53f35212fd5ad4d04050620171cd5b5e"
 
 "@types/node@^8.0.9":
-  version "8.0.20"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-8.0.20.tgz#65c7375255c24b184c215a5d0b63247c32f01c91"
+  version "8.0.53"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-8.0.53.tgz#396b35af826fa66aad472c8cb7b8d5e277f4e6d8"
 
 "@types/should@^8.3.0":
   version "8.3.0"
@@ -23,18 +23,18 @@ JSONSelect@0.4.0:
   resolved "https://registry.yarnpkg.com/JSV/-/JSV-4.0.2.tgz#d077f6825571f82132f9dffaed587b4029feff57"
 
 abbrev@1:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.0.tgz#d0554c2256636e2f56e7c2e5ad183f859428d81f"
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
 
 accept-encoding@~0.1.0:
   version "0.1.0"
   resolved "https://registry.yarnpkg.com/accept-encoding/-/accept-encoding-0.1.0.tgz#5dd88b8df71f1dc2e5cc6b9565ecce1e399a333e"
 
 accepts@~1.3.3:
-  version "1.3.3"
-  resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.3.tgz#c3ca7434938648c3e0d9c1e328dd68b622c284ca"
+  version "1.3.4"
+  resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.4.tgz#86246758c7dd6d21a6474ff084a4740ec05eb21f"
   dependencies:
-    mime-types "~2.1.11"
+    mime-types "~2.1.16"
     negotiator "0.6.1"
 
 acorn-jsx@^3.0.0:
@@ -47,9 +47,15 @@ acorn@^3.0.4:
   version "3.3.0"
   resolved "https://registry.yarnpkg.com/acorn/-/acorn-3.3.0.tgz#45e37fb39e8da3f25baee3ff5369e2bb5f22017a"
 
-acorn@^5.1.1:
-  version "5.1.1"
-  resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.1.1.tgz#53fe161111f912ab999ee887a90a0bc52822fd75"
+acorn@^5.2.1:
+  version "5.2.1"
+  resolved "https://registry.yarnpkg.com/acorn/-/acorn-5.2.1.tgz#317ac7821826c22c702d66189ab8359675f135d7"
+
+agent-base@^4.1.0:
+  version "4.1.2"
+  resolved "https://registry.yarnpkg.com/agent-base/-/agent-base-4.1.2.tgz#80fa6cde440f4dcf9af2617cf246099b5d99f0c8"
+  dependencies:
+    es6-promisify "^5.0.0"
 
 adm-zip@0.4.7:
   version "0.4.7"
@@ -66,6 +72,15 @@ ajv@^4.7.0, ajv@^4.9.1:
     co "^4.6.0"
     json-stable-stringify "^1.0.1"
 
+ajv@^5.1.0:
+  version "5.5.0"
+  resolved "https://registry.yarnpkg.com/ajv/-/ajv-5.5.0.tgz#eb2840746e9dc48bd5e063a36e3fd400c5eab5a9"
+  dependencies:
+    co "^4.6.0"
+    fast-deep-equal "^1.0.0"
+    fast-json-stable-stringify "^2.0.0"
+    json-schema-traverse "^0.3.0"
+
 align-text@^0.1.1, align-text@^0.1.3:
   version "0.1.4"
   resolved "https://registry.yarnpkg.com/align-text/-/align-text-0.1.4.tgz#0cd90a561093f35d0a99256c22b7069433fad117"
@@ -104,6 +119,10 @@ ansi-styles@^3.1.0:
   dependencies:
     color-convert "^1.9.0"
 
+ansi-styles@~1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.0.0.tgz#cb102df1c56f5123eab8b67cd7b98027a0279178"
+
 ansi@^0.3.0, ansi@~0.3.1:
   version "0.3.1"
   resolved "https://registry.yarnpkg.com/ansi/-/ansi-0.3.1.tgz#0c42d4fb17160d5a9af1e484bace1c66922c1b21"
@@ -115,8 +134,8 @@ append-transform@^0.4.0:
     default-require-extensions "^1.0.0"
 
 aproba@^1.0.3:
-  version "1.1.2"
-  resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.1.2.tgz#45c6629094de4e96f693ef7eab74ae079c240fc1"
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
 
 archiver-utils@^1.3.0:
   version "1.3.0"
@@ -223,7 +242,7 @@ async@0.1.22:
   version "0.1.22"
   resolved "https://registry.yarnpkg.com/async/-/async-0.1.22.tgz#0fc1aaa088a0e3ef0ebe2d8831bab0dcf8845061"
 
-async@2.2.0, async@^2.0.0:
+async@2.2.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/async/-/async-2.2.0.tgz#c324eba010a237e4fbd55a12dee86367d5c0ef32"
   dependencies:
@@ -233,6 +252,12 @@ async@^1.4.0:
   version "1.5.2"
   resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
 
+async@^2.0.0:
+  version "2.6.0"
+  resolved "https://registry.yarnpkg.com/async/-/async-2.6.0.tgz#61a29abb6fcc026fea77e56d1c6ec53a795951f4"
+  dependencies:
+    lodash "^4.14.0"
+
 async@~0.9.0:
   version "0.9.2"
   resolved "https://registry.yarnpkg.com/async/-/async-0.9.2.tgz#aea74d5e61c1f899613bf64bda66d4c78f2fd17d"
@@ -253,29 +278,33 @@ aws-sign2@~0.6.0:
   version "0.6.0"
   resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.6.0.tgz#14342dd38dbcc94d0e5b87d763cd63612c0e794f"
 
-aws4@^1.2.1:
+aws-sign2@~0.7.0:
+  version "0.7.0"
+  resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8"
+
+aws4@^1.2.1, aws4@^1.6.0:
   version "1.6.0"
   resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.6.0.tgz#83ef5ca860b2b32e4a0deedee8c771b9db57471e"
 
-babel-code-frame@^6.16.0, babel-code-frame@^6.22.0:
-  version "6.22.0"
-  resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.22.0.tgz#027620bee567a88c32561574e7fd0801d33118e4"
+babel-code-frame@^6.16.0, babel-code-frame@^6.26.0:
+  version "6.26.0"
+  resolved "https://registry.yarnpkg.com/babel-code-frame/-/babel-code-frame-6.26.0.tgz#63fd43f7dc1e3bb7ce35947db8fe369a3f58c74b"
   dependencies:
-    chalk "^1.1.0"
+    chalk "^1.1.3"
     esutils "^2.0.2"
-    js-tokens "^3.0.0"
+    js-tokens "^3.0.2"
 
 babel-generator@^6.18.0:
-  version "6.25.0"
-  resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.25.0.tgz#33a1af70d5f2890aeb465a4a7793c1df6a9ea9fc"
+  version "6.26.0"
+  resolved "https://registry.yarnpkg.com/babel-generator/-/babel-generator-6.26.0.tgz#ac1ae20070b79f6e3ca1d3269613053774f20dc5"
   dependencies:
     babel-messages "^6.23.0"
-    babel-runtime "^6.22.0"
-    babel-types "^6.25.0"
+    babel-runtime "^6.26.0"
+    babel-types "^6.26.0"
     detect-indent "^4.0.0"
     jsesc "^1.3.0"
-    lodash "^4.2.0"
-    source-map "^0.5.0"
+    lodash "^4.17.4"
+    source-map "^0.5.6"
     trim-right "^1.0.1"
 
 babel-messages@^6.23.0:
@@ -284,49 +313,49 @@ babel-messages@^6.23.0:
   dependencies:
     babel-runtime "^6.22.0"
 
-babel-runtime@^6.22.0:
-  version "6.25.0"
-  resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.25.0.tgz#33b98eaa5d482bb01a8d1aa6b437ad2b01aec41c"
+babel-runtime@^6.22.0, babel-runtime@^6.26.0:
+  version "6.26.0"
+  resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
   dependencies:
     core-js "^2.4.0"
-    regenerator-runtime "^0.10.0"
+    regenerator-runtime "^0.11.0"
 
 babel-template@^6.16.0:
-  version "6.25.0"
-  resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.25.0.tgz#665241166b7c2aa4c619d71e192969552b10c071"
+  version "6.26.0"
+  resolved "https://registry.yarnpkg.com/babel-template/-/babel-template-6.26.0.tgz#de03e2d16396b069f46dd9fff8521fb1a0e35e02"
   dependencies:
-    babel-runtime "^6.22.0"
-    babel-traverse "^6.25.0"
-    babel-types "^6.25.0"
-    babylon "^6.17.2"
-    lodash "^4.2.0"
+    babel-runtime "^6.26.0"
+    babel-traverse "^6.26.0"
+    babel-types "^6.26.0"
+    babylon "^6.18.0"
+    lodash "^4.17.4"
 
-babel-traverse@^6.18.0, babel-traverse@^6.25.0:
-  version "6.25.0"
-  resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.25.0.tgz#2257497e2fcd19b89edc13c4c91381f9512496f1"
+babel-traverse@^6.18.0, babel-traverse@^6.26.0:
+  version "6.26.0"
+  resolved "https://registry.yarnpkg.com/babel-traverse/-/babel-traverse-6.26.0.tgz#46a9cbd7edcc62c8e5c064e2d2d8d0f4035766ee"
   dependencies:
-    babel-code-frame "^6.22.0"
+    babel-code-frame "^6.26.0"
     babel-messages "^6.23.0"
-    babel-runtime "^6.22.0"
-    babel-types "^6.25.0"
-    babylon "^6.17.2"
-    debug "^2.2.0"
-    globals "^9.0.0"
-    invariant "^2.2.0"
-    lodash "^4.2.0"
-
-babel-types@^6.18.0, babel-types@^6.25.0:
-  version "6.25.0"
-  resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.25.0.tgz#70afb248d5660e5d18f811d91c8303b54134a18e"
-  dependencies:
-    babel-runtime "^6.22.0"
+    babel-runtime "^6.26.0"
+    babel-types "^6.26.0"
+    babylon "^6.18.0"
+    debug "^2.6.8"
+    globals "^9.18.0"
+    invariant "^2.2.2"
+    lodash "^4.17.4"
+
+babel-types@^6.18.0, babel-types@^6.26.0:
+  version "6.26.0"
+  resolved "https://registry.yarnpkg.com/babel-types/-/babel-types-6.26.0.tgz#a3b073f94ab49eb6fa55cd65227a334380632497"
+  dependencies:
+    babel-runtime "^6.26.0"
     esutils "^2.0.2"
-    lodash "^4.2.0"
-    to-fast-properties "^1.0.1"
+    lodash "^4.17.4"
+    to-fast-properties "^1.0.3"
 
-babylon@^6.17.2, babylon@^6.17.4:
-  version "6.17.4"
-  resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.17.4.tgz#3e8b7402b88d22c3423e137a1577883b15ff869a"
+babylon@^6.18.0:
+  version "6.18.0"
+  resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
 
 balanced-match@^1.0.0:
   version "1.0.0"
@@ -384,8 +413,8 @@ block-stream@*:
     inherits "~2.0.0"
 
 bluebird@^3.5.0:
-  version "3.5.0"
-  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.0.tgz#791420d7f551eea2897453a8a77653f96606d67c"
+  version "3.5.1"
+  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.1.tgz#d9551f9de98f1fcda1e683d17ee91a0602ee2eb9"
 
 body-parser@1.17.1:
   version "1.17.1"
@@ -414,6 +443,18 @@ boom@2.x.x:
   dependencies:
     hoek "2.x.x"
 
+boom@4.x.x:
+  version "4.3.1"
+  resolved "https://registry.yarnpkg.com/boom/-/boom-4.3.1.tgz#4f8a3005cb4a7e3889f749030fd25b96e01d2e31"
+  dependencies:
+    hoek "4.x.x"
+
+boom@5.x.x:
+  version "5.2.0"
+  resolved "https://registry.yarnpkg.com/boom/-/boom-5.2.0.tgz#5dd9da6ee3a5f302077436290cb717d3f4a54e02"
+  dependencies:
+    hoek "4.x.x"
+
 brace-expansion@^1.0.0, brace-expansion@^1.1.7:
   version "1.1.8"
   resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.8.tgz#c07b211c7c952ec1f8efd51a77ef0d1d3990a292"
@@ -488,10 +529,6 @@ camelcase@^1.0.2:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-1.2.1.tgz#9bb5304d2e0b56698b2c758b08a3eaa9daa58a39"
 
-camelcase@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-3.0.0.tgz#32fc4b9fcdaf845fcdf7e73bb97cac2261f0ab0a"
-
 camelcase@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-4.1.0.tgz#d545635be1e33c542649c69173e5de6acfae34dd"
@@ -517,7 +554,7 @@ chainsaw@~0.1.0:
   dependencies:
     traverse ">=0.3.0 <0.4"
 
-chalk@^1.0, chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3:
+chalk@^1.0, chalk@^1.0.0, chalk@^1.1.1, chalk@^1.1.3:
   version "1.1.3"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-1.1.3.tgz#a8115c55e4a702fe4d150abd3872822a7e09fc98"
   dependencies:
@@ -528,13 +565,25 @@ chalk@^1.0, chalk@^1.0.0, chalk@^1.1.0, chalk@^1.1.1, chalk@^1.1.3:
     supports-color "^2.0.0"
 
 chalk@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.1.0.tgz#ac5becf14fa21b99c6c92ca7a7d7cfd5b17e743e"
+  version "2.3.0"
+  resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.0.tgz#b5ea48efc9c1793dccc9b4767c93914d3f2d52ba"
   dependencies:
     ansi-styles "^3.1.0"
     escape-string-regexp "^1.0.5"
     supports-color "^4.0.0"
 
+chalk@~0.4.0:
+  version "0.4.0"
+  resolved "https://registry.yarnpkg.com/chalk/-/chalk-0.4.0.tgz#5199a3ddcd0c1efe23bc08c1b027b06176e0c64f"
+  dependencies:
+    ansi-styles "~1.0.0"
+    has-color "~0.1.0"
+    strip-ansi "~0.1.0"
+
+chardet@^0.4.0:
+  version "0.4.2"
+  resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2"
+
 "charenc@>= 0.0.1":
   version "0.0.2"
   resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
@@ -570,8 +619,8 @@ cli-width@^1.0.1:
   resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-1.1.1.tgz#a4d293ef67ebb7b88d4a4d42c0ccf00c4d1e366d"
 
 cli-width@^2.0.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.1.0.tgz#b234ca209b29ef66fc518d9b98d5847b00edf00a"
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-2.2.0.tgz#ff19ede8a9a5e579324147b0c11f0fbcbabed639"
 
 cliui@^2.1.0:
   version "2.1.0"
@@ -604,8 +653,8 @@ collections@^0.2.0:
     weak-map "1.0.0"
 
 color-convert@^1.9.0:
-  version "1.9.0"
-  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a"
+  version "1.9.1"
+  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.1.tgz#c1261107aeb2f294ebffec9ed9ecad529a6097ed"
   dependencies:
     color-name "^1.1.1"
 
@@ -652,8 +701,8 @@ component-emitter@^1.2.0:
   resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6"
 
 compress-commons@^1.2.0:
-  version "1.2.0"
-  resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-1.2.0.tgz#58587092ef20d37cb58baf000112c9278ff73b9f"
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/compress-commons/-/compress-commons-1.2.2.tgz#524a9f10903f3a813389b0225d27c48bb751890f"
   dependencies:
     buffer-crc32 "^0.2.1"
     crc32-stream "^2.0.0"
@@ -687,12 +736,12 @@ content-disposition@0.5.2:
   resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.2.tgz#0cf68bb9ddf5f2be7961c3a85178cb85dba78cb4"
 
 content-type@~1.0.2:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.2.tgz#b7d113aee7a8dd27bd21133c4dc2529df1721eed"
+  version "1.0.4"
+  resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b"
 
 convert-source-map@^1.3.0:
-  version "1.5.0"
-  resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.0.tgz#9acd70851c6d5dfdd93d9282e5edf94a03ff46b5"
+  version "1.5.1"
+  resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.5.1.tgz#b8278097b9bc229365de5c62cf5fcaed8b5599e5"
 
 cookie-signature@1.0.6:
   version "1.0.6"
@@ -702,13 +751,13 @@ cookie@0.3.1:
   version "0.3.1"
   resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.3.1.tgz#e7e0a1f9ef43b4c8ba925c5c5a96e806d16873bb"
 
-cookiejar@^2.0.6:
+cookiejar@^2.1.0:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/cookiejar/-/cookiejar-2.1.1.tgz#41ad57b1b555951ec171412a81942b1e8200d34a"
 
 core-js@^2.4.0:
-  version "2.5.0"
-  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.0.tgz#569c050918be6486b3837552028ae0466b717086"
+  version "2.5.1"
+  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.5.1.tgz#ae6874dc66937789b80754ff5428df66819ca50b"
 
 core-util-is@1.0.2, core-util-is@~1.0.0:
   version "1.0.2"
@@ -738,8 +787,8 @@ crc32-stream@^2.0.0:
     readable-stream "^2.0.0"
 
 crc@^3.4.4:
-  version "3.4.4"
-  resolved "https://registry.yarnpkg.com/crc/-/crc-3.4.4.tgz#9da1e980e3bd44fc5c93bf5ab3da3378d85e466b"
+  version "3.5.0"
+  resolved "https://registry.yarnpkg.com/crc/-/crc-3.5.0.tgz#98b8ba7d489665ba3979f59b21381374101a1964"
 
 cross-spawn@^4:
   version "4.0.2"
@@ -772,6 +821,12 @@ cryptiles@2.x.x:
   dependencies:
     boom "2.x.x"
 
+cryptiles@3.x.x:
+  version "3.1.2"
+  resolved "https://registry.yarnpkg.com/cryptiles/-/cryptiles-3.1.2.tgz#a89fbb220f5ce25ec56e8c4aa8a4fd7b5b0d29fe"
+  dependencies:
+    boom "5.x.x"
+
 ctype@0.5.3:
   version "0.5.3"
   resolved "https://registry.yarnpkg.com/ctype/-/ctype-0.5.3.tgz#82c18c2461f74114ef16c135224ad0b9144ca12f"
@@ -807,18 +862,30 @@ debug-log@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/debug-log/-/debug-log-1.0.1.tgz#2307632d4c04382b8df8a32f70b895046d52745f"
 
-debug@2.6.1, debug@^2.1.1:
+debug@2.6.1:
   version "2.6.1"
   resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.1.tgz#79855090ba2c4e3115cc7d8769491d58f0491351"
   dependencies:
     ms "0.7.2"
 
-debug@2.6.8, debug@^2.2.0, debug@^2.6.3:
+debug@2.6.8:
   version "2.6.8"
   resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.8.tgz#e731531ca2ede27d188222427da17821d68ff4fc"
   dependencies:
     ms "2.0.0"
 
+debug@2.6.9, debug@^2.1.1, debug@^2.2.0, debug@^2.6.8:
+  version "2.6.9"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f"
+  dependencies:
+    ms "2.0.0"
+
+debug@^3.1.0:
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-3.1.0.tgz#5bb5a0672628b64149566ba16819e61518c67261"
+  dependencies:
+    ms "2.0.0"
+
 debug@~2.2.0:
   version "2.2.0"
   resolved "https://registry.yarnpkg.com/debug/-/debug-2.2.0.tgz#f87057e995b1a1f6ae6a4960664137bc56f039da"
@@ -881,6 +948,10 @@ detect-indent@^4.0.0:
   dependencies:
     repeating "^2.0.0"
 
+detect-libc@^1.0.2:
+  version "1.0.3"
+  resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
+
 dicer@0.2.5:
   version "0.2.5"
   resolved "https://registry.yarnpkg.com/dicer/-/dicer-0.2.5.tgz#5996c086bb33218c812c090bddc09cd12facb70f"
@@ -888,10 +959,14 @@ dicer@0.2.5:
     readable-stream "1.1.x"
     streamsearch "0.1.2"
 
-diff@3.2.0, diff@^3.1.0:
+diff@3.2.0:
   version "3.2.0"
   resolved "https://registry.yarnpkg.com/diff/-/diff-3.2.0.tgz#c9ce393a4b7cbd0b058a725c93df299027868ff9"
 
+diff@^3.1.0:
+  version "3.4.0"
+  resolved "https://registry.yarnpkg.com/diff/-/diff-3.4.0.tgz#b1d85507daf3964828de54b37d0d73ba67dda56c"
+
 doctrine@^0.6.2:
   version "0.6.4"
   resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-0.6.4.tgz#81428491a942ef18b0492056eda3800eee57d61d"
@@ -969,20 +1044,20 @@ errorhandler@1.5.0:
     accepts "~1.3.3"
     escape-html "~1.0.3"
 
-es5-ext@^0.10.14, es5-ext@^0.10.9, es5-ext@~0.10.14:
-  version "0.10.26"
-  resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.26.tgz#51b2128a531b70c4f6764093a73cbebb82186372"
+es5-ext@^0.10.14, es5-ext@^0.10.35, es5-ext@^0.10.9, es5-ext@~0.10.14:
+  version "0.10.37"
+  resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.37.tgz#0ee741d148b80069ba27d020393756af257defc3"
   dependencies:
-    es6-iterator "2"
-    es6-symbol "~3.1"
+    es6-iterator "~2.0.1"
+    es6-symbol "~3.1.1"
 
-es6-iterator@2, es6-iterator@^2.0.1, es6-iterator@~2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.1.tgz#8e319c9f0453bf575d374940a655920e59ca5512"
+es6-iterator@^2.0.1, es6-iterator@~2.0.1:
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/es6-iterator/-/es6-iterator-2.0.3.tgz#a7de889141a05a94b0854403b2d0a0fbfa98f3b7"
   dependencies:
     d "1"
-    es5-ext "^0.10.14"
-    es6-symbol "^3.1"
+    es5-ext "^0.10.35"
+    es6-symbol "^3.1.1"
 
 es6-map@^0.1.3:
   version "0.1.5"
@@ -995,6 +1070,16 @@ es6-map@^0.1.3:
     es6-symbol "~3.1.1"
     event-emitter "~0.3.5"
 
+es6-promise@^4.0.3:
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.1.1.tgz#8811e90915d9a0dba36274f0b242dbda78f9c92a"
+
+es6-promisify@^5.0.0:
+  version "5.0.0"
+  resolved "https://registry.yarnpkg.com/es6-promisify/-/es6-promisify-5.0.0.tgz#5109d62f3e56ea967c4b63505aef08291c8a5203"
+  dependencies:
+    es6-promise "^4.0.3"
+
 es6-set@~0.1.5:
   version "0.1.5"
   resolved "https://registry.yarnpkg.com/es6-set/-/es6-set-0.1.5.tgz#d2b3ec5d4d800ced818db538d28974db0a73ccb1"
@@ -1005,7 +1090,7 @@ es6-set@~0.1.5:
     es6-symbol "3.1.1"
     event-emitter "~0.3.5"
 
-es6-symbol@3.1.1, es6-symbol@^3.1, es6-symbol@^3.1.1, es6-symbol@~3.1, es6-symbol@~3.1.1:
+es6-symbol@3.1.1, es6-symbol@^3.1.1, es6-symbol@~3.1.1:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.1.tgz#bf00ef4fdab6ba1b46ecb7b629b4c7ed5715cc77"
   dependencies:
@@ -1124,10 +1209,10 @@ espree@^2.0.1:
   resolved "https://registry.yarnpkg.com/espree/-/espree-2.2.5.tgz#df691b9310889402aeb29cc066708c56690b854b"
 
 espree@^3.3.1:
-  version "3.5.0"
-  resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.0.tgz#98358625bdd055861ea27e2867ea729faf463d8d"
+  version "3.5.2"
+  resolved "https://registry.yarnpkg.com/espree/-/espree-3.5.2.tgz#756ada8b979e9dcfcdb30aad8d1a9304a905e1ca"
   dependencies:
-    acorn "^5.1.1"
+    acorn "^5.2.1"
     acorn-jsx "^3.0.0"
 
 esprima@1.1.x, esprima@~1.1.1:
@@ -1138,6 +1223,10 @@ esprima@^3.1.1:
   version "3.1.3"
   resolved "https://registry.yarnpkg.com/esprima/-/esprima-3.1.3.tgz#fdca51cee6133895e3c88d535ce49dbff62a4633"
 
+esprima@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.0.tgz#4499eddcd1110e0b218bacf2fa7f7f59f55ca804"
+
 "esprima@~ 1.0.2":
   version "1.0.4"
   resolved "https://registry.yarnpkg.com/esprima/-/esprima-1.0.4.tgz#9f557e08fc3b4d26ece9dd34f8fbf476b62585ad"
@@ -1178,8 +1267,8 @@ esutils@~1.0.0:
   resolved "https://registry.yarnpkg.com/esutils/-/esutils-1.0.0.tgz#8151d358e20c8acc7fb745e7472c0025fe496570"
 
 etag@~1.8.0:
-  version "1.8.0"
-  resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.0.tgz#6f631aef336d6c46362b51764044ce216be3c051"
+  version "1.8.1"
+  resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887"
 
 event-emitter@~0.3.5:
   version "0.3.5"
@@ -1269,17 +1358,17 @@ express@4.15.2:
     utils-merge "1.0.0"
     vary "~1.1.0"
 
-extend@^3.0.0, extend@~3.0.0:
+extend@^3.0.0, extend@~3.0.0, extend@~3.0.1:
   version "3.0.1"
   resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444"
 
 external-editor@^2.0.1:
-  version "2.0.4"
-  resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.0.4.tgz#1ed9199da9cbfe2ef2f7a31b2fde8b0d12368972"
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-2.1.0.tgz#3d026a21b7f95b5726387d4200ac160d372c3b48"
   dependencies:
+    chardet "^0.4.0"
     iconv-lite "^0.4.17"
-    jschardet "^1.4.2"
-    tmp "^0.0.31"
+    tmp "^0.0.33"
 
 extglob@^0.3.1:
   version "0.3.2"
@@ -1287,14 +1376,26 @@ extglob@^0.3.1:
   dependencies:
     is-extglob "^1.0.0"
 
-extsprintf@1.3.0, extsprintf@^1.2.0:
+extsprintf@1.3.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05"
 
+extsprintf@^1.2.0:
+  version "1.4.0"
+  resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f"
+
 eyes@0.1.x:
   version "0.1.8"
   resolved "https://registry.yarnpkg.com/eyes/-/eyes-0.1.8.tgz#62cf120234c683785d902348a800ef3e0cc20bc0"
 
+fast-deep-equal@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-1.0.0.tgz#96256a3bc975595eb36d82e9929d060d893439ff"
+
+fast-json-stable-stringify@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz#d5142c0caee6b1189f87d3a76111064f86c8bbf2"
+
 fast-levenshtein@~1.0.0:
   version "1.0.7"
   resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-1.0.7.tgz#0178dcdee023b92905193af0959e8a7639cfdcb9"
@@ -1338,14 +1439,14 @@ fill-range@^2.1.0:
     repeat-string "^1.5.2"
 
 finalhandler@~1.0.0:
-  version "1.0.4"
-  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.4.tgz#18574f2e7c4b98b8ae3b230c21f201f31bdb3fb7"
+  version "1.0.6"
+  resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.6.tgz#007aea33d1a4d3e42017f624848ad58d212f814f"
   dependencies:
-    debug "2.6.8"
+    debug "2.6.9"
     encodeurl "~1.0.1"
     escape-html "~1.0.3"
     on-finished "~2.3.0"
-    parseurl "~1.3.1"
+    parseurl "~1.3.2"
     statuses "~1.3.1"
     unpipe "~1.0.0"
 
@@ -1364,15 +1465,15 @@ find-up@^1.0.0:
     path-exists "^2.0.0"
     pinkie-promise "^2.0.0"
 
-find-up@^2.0.0, find-up@^2.1.0:
+find-up@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/find-up/-/find-up-2.1.0.tgz#45d1b7e506c717ddd482775a2b77920a3c0c57a7"
   dependencies:
     locate-path "^2.0.0"
 
 flat-cache@^1.2.1:
-  version "1.2.2"
-  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.2.2.tgz#fa86714e72c21db88601761ecf2f555d1abc6b96"
+  version "1.3.0"
+  resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-1.3.0.tgz#d3030b32b38154f4e3b7e9c709f490f7ef97c481"
   dependencies:
     circular-json "^0.3.1"
     del "^2.0.2"
@@ -1404,9 +1505,9 @@ forever-agent@~0.6.1:
   version "0.6.1"
   resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91"
 
-form-data@^2.1.1, form-data@~2.1.1:
-  version "2.1.4"
-  resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1"
+form-data@^2.3.1, form-data@~2.3.1:
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.1.tgz#6fb94fbd71885306d73d15cc497fe4cc4ecd44bf"
   dependencies:
     asynckit "^0.4.0"
     combined-stream "^1.0.5"
@@ -1420,13 +1521,21 @@ form-data@~0.1.0:
     combined-stream "~0.0.4"
     mime "~1.2.11"
 
+form-data@~2.1.1:
+  version "2.1.4"
+  resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.1.4.tgz#33c183acf193276ecaa98143a69e94bfee1750d1"
+  dependencies:
+    asynckit "^0.4.0"
+    combined-stream "^1.0.5"
+    mime-types "^2.1.12"
+
 formidable@^1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/formidable/-/formidable-1.1.1.tgz#96b8886f7c3c3508b932d6bd70c4d3a88f35f1a9"
 
 forwarded@~0.1.0:
-  version "0.1.0"
-  resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.0.tgz#19ef9874c4ae1c297bcf078fde63a09b66a84363"
+  version "0.1.2"
+  resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84"
 
 fresh@0.5.0:
   version "0.5.0"
@@ -1577,7 +1686,7 @@ globals@^6.1.0:
   version "6.4.1"
   resolved "https://registry.yarnpkg.com/globals/-/globals-6.4.1.tgz#8498032b3b6d1cc81eebc5f79690d8fe29fabf4f"
 
-globals@^9.0.0, globals@^9.14.0:
+globals@^9.14.0, globals@^9.18.0:
   version "9.18.0"
   resolved "https://registry.yarnpkg.com/globals/-/globals-9.18.0.tgz#aa3896b3e69b487f17e31ed2143d69a8e30c2d8a"
 
@@ -1615,8 +1724,8 @@ hammerjs@^2.0.4:
   resolved "https://registry.yarnpkg.com/hammerjs/-/hammerjs-2.0.8.tgz#04ef77862cff2bb79d30f7692095930222bf60f1"
 
 handlebars@^4.0.3:
-  version "4.0.10"
-  resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.10.tgz#3d30c718b09a3d96f23ea4cc1f403c4d3ba9ff4f"
+  version "4.0.11"
+  resolved "https://registry.yarnpkg.com/handlebars/-/handlebars-4.0.11.tgz#630a35dfe0294bc281edae6ffc5d329fc7982dcc"
   dependencies:
     async "^1.4.0"
     optimist "^0.6.1"
@@ -1628,6 +1737,10 @@ har-schema@^1.0.5:
   version "1.0.5"
   resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-1.0.5.tgz#d263135f43307c02c602afc8fe95970c0151369e"
 
+har-schema@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92"
+
 har-validator@~4.2.1:
   version "4.2.1"
   resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-4.2.1.tgz#33481d0f1bbff600dd203d75812a6a5fba002e2a"
@@ -1635,12 +1748,23 @@ har-validator@~4.2.1:
     ajv "^4.9.1"
     har-schema "^1.0.5"
 
+har-validator@~5.0.3:
+  version "5.0.3"
+  resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.0.3.tgz#ba402c266194f15956ef15e0fcf242993f6a7dfd"
+  dependencies:
+    ajv "^5.1.0"
+    har-schema "^2.0.0"
+
 has-ansi@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/has-ansi/-/has-ansi-2.0.0.tgz#34f5049ce1ecdf2b0649af3ef24e45ed35416d91"
   dependencies:
     ansi-regex "^2.0.0"
 
+has-color@~0.1.0:
+  version "0.1.7"
+  resolved "https://registry.yarnpkg.com/has-color/-/has-color-0.1.7.tgz#67144a5260c34fc3cca677d041daf52fe7b78b2f"
+
 has-flag@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa"
@@ -1668,7 +1792,7 @@ hawk@1.1.1:
     hoek "0.9.x"
     sntp "0.2.x"
 
-hawk@~3.1.3:
+hawk@3.1.3, hawk@~3.1.3:
   version "3.1.3"
   resolved "https://registry.yarnpkg.com/hawk/-/hawk-3.1.3.tgz#078444bd7c1640b0fe540d2c9b73d59678e8e1c4"
   dependencies:
@@ -1677,9 +1801,18 @@ hawk@~3.1.3:
     hoek "2.x.x"
     sntp "1.x.x"
 
-heapdump@^0.3.9:
-  version "0.3.9"
-  resolved "https://registry.yarnpkg.com/heapdump/-/heapdump-0.3.9.tgz#03c74eb0df5d67be0982e83429ba9c9d2b3b7f78"
+hawk@~6.0.2:
+  version "6.0.2"
+  resolved "https://registry.yarnpkg.com/hawk/-/hawk-6.0.2.tgz#af4d914eb065f9b5ce4d9d11c1cb2126eecc3038"
+  dependencies:
+    boom "4.x.x"
+    cryptiles "3.x.x"
+    hoek "4.x.x"
+    sntp "2.x.x"
+
+he@1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/he/-/he-1.1.1.tgz#93410fd21b009735151f8868c2f271f3427e23fd"
 
 hoek@0.9.x:
   version "0.9.1"
@@ -1689,6 +1822,16 @@ hoek@2.x.x:
   version "2.16.3"
   resolved "https://registry.yarnpkg.com/hoek/-/hoek-2.16.3.tgz#20bb7403d3cea398e91dc4710a8ff1b8274a25ed"
 
+hoek@4.x.x:
+  version "4.2.0"
+  resolved "https://registry.yarnpkg.com/hoek/-/hoek-4.2.0.tgz#72d9d0754f7fe25ca2d01ad8f8f9a9449a89526d"
+
+homedir-polyfill@^1.0.1:
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.1.tgz#4c2bbc8a758998feebf5ed68580f76d46768b4bc"
+  dependencies:
+    parse-passwd "^1.0.0"
+
 hosted-git-info@^2.1.4:
   version "2.5.0"
   resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.5.0.tgz#6d60e34b3abbc8313062c3b798ef8d901a07af3c"
@@ -1718,17 +1861,25 @@ http-signature@~1.1.0:
     jsprim "^1.2.2"
     sshpk "^1.7.0"
 
+http-signature@~1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1"
+  dependencies:
+    assert-plus "^1.0.0"
+    jsprim "^1.2.2"
+    sshpk "^1.7.0"
+
 iconv-lite@0.4.15:
   version "0.4.15"
   resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.15.tgz#fe265a218ac6a57cfe854927e9d04c19825eddeb"
 
 iconv-lite@^0.4.17:
-  version "0.4.18"
-  resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.18.tgz#23d8656b16aae6742ac29732ea8f0336a4789cf2"
+  version "0.4.19"
+  resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.19.tgz#f7468f60135f5e5dad3399c0a81be9a1603a082b"
 
 ignore@^3.2.0:
-  version "3.3.3"
-  resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.3.tgz#432352e57accd87ab3110e82d3fea0e47812156d"
+  version "3.3.7"
+  resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.7.tgz#612289bfb3c220e186a58118618d5be8c1bab021"
 
 imurmurhash@^0.1.4:
   version "0.1.4"
@@ -1746,8 +1897,8 @@ inherits@2, inherits@2.0.3, inherits@^2.0.3, inherits@~2.0.0, inherits@~2.0.1, i
   resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
 
 ini@~1.3.0:
-  version "1.3.4"
-  resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.4.tgz#0537cb79daf59b59a1a517dff706c86ec039162e"
+  version "1.3.5"
+  resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"
 
 inquirer@3.0.6:
   version "3.0.6"
@@ -1799,10 +1950,10 @@ inquirer@^0.8.2:
     through "^2.3.6"
 
 interpret@^1.0.0:
-  version "1.0.3"
-  resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.0.3.tgz#cbc35c62eeee73f19ab7b10a801511401afc0f90"
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/interpret/-/interpret-1.1.0.tgz#7ed1b1410c6a0e0f78cf95d3b8440c63f78b8614"
 
-invariant@^2.2.0:
+invariant@^2.2.2:
   version "2.2.2"
   resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.2.tgz#9e1f56ac0acdb6bf303306f338be3b204ae60360"
   dependencies:
@@ -1816,6 +1967,10 @@ ip@0.0.1:
   version "0.0.1"
   resolved "https://registry.yarnpkg.com/ip/-/ip-0.0.1.tgz#bbc68d7cc448560a63fbe99237a01bc50fdca7ec"
 
+ip@^1.1.4:
+  version "1.1.5"
+  resolved "https://registry.yarnpkg.com/ip/-/ip-1.1.5.tgz#bdded70114290828c0a039e72ef25f5aaec4354a"
+
 ipaddr.js@1.4.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.4.0.tgz#296aca878a821816e5b85d0a285a99bcff4582f0"
@@ -1825,8 +1980,8 @@ is-arrayish@^0.2.1:
   resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d"
 
 is-buffer@^1.1.5:
-  version "1.1.5"
-  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.5.tgz#1f3b26ef613b214b88cbca23cc6c01d87961eecc"
+  version "1.1.6"
+  resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
 
 is-builtin-module@^1.0.0:
   version "1.0.0"
@@ -1875,8 +2030,8 @@ is-glob@^2.0.0, is-glob@^2.0.1:
     is-extglob "^1.0.0"
 
 is-my-json-valid@^2.10.0:
-  version "2.16.0"
-  resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.16.0.tgz#f079dd9bfdae65ee2038aae8acbc86ab109e3693"
+  version "2.16.1"
+  resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.16.1.tgz#5a846777e2c2620d1e69104e5d3a03b1f6088f11"
   dependencies:
     generate-function "^2.0.0"
     generate-object-property "^1.1.0"
@@ -1906,8 +2061,8 @@ is-path-in-cwd@^1.0.0:
     is-path-inside "^1.0.0"
 
 is-path-inside@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.0.tgz#fc06e5a1683fbda13de667aff717bbc10a48f37f"
+  version "1.0.1"
+  resolved "https://registry.yarnpkg.com/is-path-inside/-/is-path-inside-1.0.1.tgz#8ef5b7de50437a3fdca6b4e865ef7aa55cb48036"
   dependencies:
     path-is-inside "^1.0.1"
 
@@ -1971,46 +2126,46 @@ istanbul-lib-coverage@^1.1.1:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-1.1.1.tgz#73bfb998885299415c93d38a3e9adf784a77a9da"
 
-istanbul-lib-hook@^1.0.7:
-  version "1.0.7"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.0.7.tgz#dd6607f03076578fe7d6f2a630cf143b49bacddc"
+istanbul-lib-hook@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-hook/-/istanbul-lib-hook-1.1.0.tgz#8538d970372cb3716d53e55523dd54b557a8d89b"
   dependencies:
     append-transform "^0.4.0"
 
-istanbul-lib-instrument@^1.7.4:
-  version "1.7.4"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.7.4.tgz#e9fd920e4767f3d19edc765e2d6b3f5ccbd0eea8"
+istanbul-lib-instrument@^1.9.1:
+  version "1.9.1"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-1.9.1.tgz#250b30b3531e5d3251299fdd64b0b2c9db6b558e"
   dependencies:
     babel-generator "^6.18.0"
     babel-template "^6.16.0"
     babel-traverse "^6.18.0"
     babel-types "^6.18.0"
-    babylon "^6.17.4"
+    babylon "^6.18.0"
     istanbul-lib-coverage "^1.1.1"
     semver "^5.3.0"
 
-istanbul-lib-report@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.1.tgz#f0e55f56655ffa34222080b7a0cd4760e1405fc9"
+istanbul-lib-report@^1.1.2:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-1.1.2.tgz#922be27c13b9511b979bd1587359f69798c1d425"
   dependencies:
     istanbul-lib-coverage "^1.1.1"
     mkdirp "^0.5.1"
     path-parse "^1.0.5"
     supports-color "^3.1.2"
 
-istanbul-lib-source-maps@^1.2.1:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.1.tgz#a6fe1acba8ce08eebc638e572e294d267008aa0c"
+istanbul-lib-source-maps@^1.2.2:
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-1.2.2.tgz#750578602435f28a0c04ee6d7d9e0f2960e62c1c"
   dependencies:
-    debug "^2.6.3"
+    debug "^3.1.0"
     istanbul-lib-coverage "^1.1.1"
     mkdirp "^0.5.1"
     rimraf "^2.6.1"
     source-map "^0.5.3"
 
-istanbul-reports@^1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.1.tgz#042be5c89e175bc3f86523caab29c014e77fee4e"
+istanbul-reports@^1.1.3:
+  version "1.1.3"
+  resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-1.1.3.tgz#3b9e1e8defb6d18b1d425da8e8b32c5a163f2d10"
   dependencies:
     handlebars "^4.0.3"
 
@@ -2034,11 +2189,8 @@ jison@0.4.17:
     lex-parser "~0.1.3"
     nomnom "1.5.2"
 
-jquery@^2.1.4:
-  version "2.2.4"
-  resolved "https://registry.yarnpkg.com/jquery/-/jquery-2.2.4.tgz#2c89d6889b5eac522a7eea32c14521559c6cbf02"
 
-js-tokens@^3.0.0:
+js-tokens@^3.0.0, js-tokens@^3.0.2:
   version "3.0.2"
   resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-3.0.2.tgz#9866df395102130e38f7f996bceb65443209c25b"
 
@@ -2049,25 +2201,32 @@ js-yaml@3.0.1:
     argparse "~ 0.1.11"
     esprima "~ 1.0.2"
 
-js-yaml@3.8.2, js-yaml@^3.2.5, js-yaml@^3.5.1:
+js-yaml@3.8.2:
   version "3.8.2"
   resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.8.2.tgz#02d3e2c0f6beab20248d412c352203827d786721"
   dependencies:
     argparse "^1.0.7"
     esprima "^3.1.1"
 
+js-yaml@^3.2.5, js-yaml@^3.5.1:
+  version "3.10.0"
+  resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.10.0.tgz#2e78441646bd4682e963f22b6e92823c309c62dc"
+  dependencies:
+    argparse "^1.0.7"
+    esprima "^4.0.0"
+
 jsbn@~0.1.0:
   version "0.1.1"
   resolved "https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513"
 
-jschardet@^1.4.2:
-  version "1.5.1"
-  resolved "https://registry.yarnpkg.com/jschardet/-/jschardet-1.5.1.tgz#c519f629f86b3a5bedba58a88d311309eec097f9"
-
 jsesc@^1.3.0:
   version "1.3.0"
   resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-1.3.0.tgz#46c3fec8c1892b12b0833db9bc7622176dbab34b"
 
+json-schema-traverse@^0.3.0:
+  version "0.3.1"
+  resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.3.1.tgz#349a6d44c53a51de89b40805c5d5e59b417d3340"
+
 json-schema@0.2.3:
   version "0.2.3"
   resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13"
@@ -2176,15 +2335,6 @@ load-json-file@^1.0.0:
     pinkie-promise "^2.0.0"
     strip-bom "^2.0.0"
 
-load-json-file@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/load-json-file/-/load-json-file-2.0.0.tgz#7947e42149af80d696cbf797bcaabcfe1fe29ca8"
-  dependencies:
-    graceful-fs "^4.1.2"
-    parse-json "^2.2.0"
-    pify "^2.0.0"
-    strip-bom "^3.0.0"
-
 locate-path@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-2.0.0.tgz#2b568b265eec944c6d9c0de9c3dbbbca0354cd8e"
@@ -2255,7 +2405,7 @@ lodash@^3.3.1:
   version "3.10.1"
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-3.10.1.tgz#5bf45e8e49ba4189e17d482789dfd15bd140b7b6"
 
-lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.2.0, lodash@^4.3.0, lodash@^4.8.0:
+lodash@^4.0.0, lodash@^4.13.1, lodash@^4.14.0, lodash@^4.17.4, lodash@^4.3.0, lodash@^4.8.0:
   version "4.17.4"
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.4.tgz#78203a4d1c328ae1d86dca6460e369b57f4055ae"
 
@@ -2362,15 +2512,15 @@ micromatch@^2.3.11:
     parse-glob "^3.0.4"
     regex-cache "^0.4.2"
 
-mime-db@~1.29.0:
-  version "1.29.0"
-  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.29.0.tgz#48d26d235589651704ac5916ca06001914266878"
+mime-db@~1.30.0:
+  version "1.30.0"
+  resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.30.0.tgz#74c643da2dd9d6a45399963465b26d5ca7d71f01"
 
-mime-types@^2.1.12, mime-types@~2.1.11, mime-types@~2.1.15, mime-types@~2.1.7:
-  version "2.1.16"
-  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.16.tgz#2b858a52e5ecd516db897ac2be87487830698e23"
+mime-types@^2.1.12, mime-types@~2.1.15, mime-types@~2.1.16, mime-types@~2.1.17, mime-types@~2.1.7:
+  version "2.1.17"
+  resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.17.tgz#09d7a393f03e995a79f8af857b70a9e0ab16557a"
   dependencies:
-    mime-db "~1.29.0"
+    mime-db "~1.30.0"
 
 mime-types@~1.0.1:
   version "1.0.2"
@@ -2380,9 +2530,9 @@ mime@1.3.4:
   version "1.3.4"
   resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.4.tgz#115f9e3b6b3daf2959983cb38f149a2d40eb5d53"
 
-mime@^1.2.11, mime@^1.3.4:
-  version "1.3.6"
-  resolved "https://registry.yarnpkg.com/mime/-/mime-1.3.6.tgz#591d84d3653a6b0b4a3b9df8de5aa8108e72e5e0"
+mime@^1.2.11, mime@^1.4.1:
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1"
 
 mime@~1.2.11:
   version "1.2.11"
@@ -2435,8 +2585,8 @@ mocha-eslint@0.1.7:
     glob "5.0.5"
 
 mocha@^3.4.2:
-  version "3.5.0"
-  resolved "https://registry.yarnpkg.com/mocha/-/mocha-3.5.0.tgz#1328567d2717f997030f8006234bce9b8cd72465"
+  version "3.5.3"
+  resolved "https://registry.yarnpkg.com/mocha/-/mocha-3.5.3.tgz#1e0480fe36d2da5858d1eb6acc38418b26eaa20d"
   dependencies:
     browser-stdout "1.3.0"
     commander "2.9.0"
@@ -2445,6 +2595,7 @@ mocha@^3.4.2:
     escape-string-regexp "1.0.5"
     glob "7.1.1"
     growl "1.9.2"
+    he "1.1.1"
     json3 "3.3.2"
     lodash.create "3.1.1"
     mkdirp "0.5.1"
@@ -2566,7 +2717,7 @@ node-pre-gyp@0.6.33:
     tar "~2.2.1"
     tar-pack "~3.3.0"
 
-node-pre-gyp@0.6.34, node-pre-gyp@~0.6.28:
+node-pre-gyp@0.6.34:
   version "0.6.34"
   resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.34.tgz#94ad1c798a11d7fc67381b50d47f8cc18d9799f7"
   dependencies:
@@ -2580,17 +2731,40 @@ node-pre-gyp@0.6.34, node-pre-gyp@~0.6.28:
     tar "^2.2.1"
     tar-pack "^3.4.0"
 
+node-pre-gyp@~0.6.28:
+  version "0.6.39"
+  resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.6.39.tgz#c00e96860b23c0e1420ac7befc5044e1d78d8649"
+  dependencies:
+    detect-libc "^1.0.2"
+    hawk "3.1.3"
+    mkdirp "^0.5.1"
+    nopt "^4.0.1"
+    npmlog "^4.0.2"
+    rc "^1.1.7"
+    request "2.81.0"
+    rimraf "^2.6.1"
+    semver "^5.3.0"
+    tar "^2.2.1"
+    tar-pack "^3.4.0"
+
 node-uuid@1.4.8, node-uuid@~1.4.0:
   version "1.4.8"
   resolved "https://registry.yarnpkg.com/node-uuid/-/node-uuid-1.4.8.tgz#b040eb0923968afabf8d32fb1f17f1167fdab907"
 
-nomnom@1.5.2, "nomnom@>= 1.5.x":
+nomnom@1.5.2:
   version "1.5.2"
   resolved "https://registry.yarnpkg.com/nomnom/-/nomnom-1.5.2.tgz#f4345448a853cfbd5c0d26320f2477ab0526fe2f"
   dependencies:
     colors "0.5.x"
     underscore "1.1.x"
 
+"nomnom@>= 1.5.x":
+  version "1.8.1"
+  resolved "https://registry.yarnpkg.com/nomnom/-/nomnom-1.8.1.tgz#2151f722472ba79e50a76fc125bb8c8f2e4dc2a7"
+  dependencies:
+    chalk "~0.4.0"
+    underscore "~1.6.0"
+
 nopt@^4.0.1:
   version "4.0.1"
   resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.1.tgz#d0d4685afd5415193c8c7505602d0d17cd64474d"
@@ -2647,8 +2821,8 @@ number-is-nan@^1.0.0:
   resolved "https://registry.yarnpkg.com/number-is-nan/-/number-is-nan-1.0.1.tgz#097b602b53422a522c1afb8790318336941a011d"
 
 nyc@^11.0.3:
-  version "11.1.0"
-  resolved "https://registry.yarnpkg.com/nyc/-/nyc-11.1.0.tgz#d6b3c5e16892a25af63138ba484676aa8a22eda7"
+  version "11.3.0"
+  resolved "https://registry.yarnpkg.com/nyc/-/nyc-11.3.0.tgz#a42bc17b3cfa41f7b15eb602bc98b2633ddd76f0"
   dependencies:
     archy "^1.0.0"
     arrify "^1.0.1"
@@ -2661,11 +2835,11 @@ nyc@^11.0.3:
     foreground-child "^1.5.3"
     glob "^7.0.6"
     istanbul-lib-coverage "^1.1.1"
-    istanbul-lib-hook "^1.0.7"
-    istanbul-lib-instrument "^1.7.4"
-    istanbul-lib-report "^1.1.1"
-    istanbul-lib-source-maps "^1.2.1"
-    istanbul-reports "^1.1.1"
+    istanbul-lib-hook "^1.1.0"
+    istanbul-lib-instrument "^1.9.1"
+    istanbul-lib-report "^1.1.2"
+    istanbul-lib-source-maps "^1.2.2"
+    istanbul-reports "^1.1.3"
     md5-hex "^1.2.0"
     merge-source-map "^1.0.2"
     micromatch "^2.3.11"
@@ -2673,16 +2847,16 @@ nyc@^11.0.3:
     resolve-from "^2.0.0"
     rimraf "^2.5.4"
     signal-exit "^3.0.1"
-    spawn-wrap "^1.3.8"
+    spawn-wrap "=1.3.8"
     test-exclude "^4.1.1"
-    yargs "^8.0.1"
-    yargs-parser "^5.0.0"
+    yargs "^10.0.3"
+    yargs-parser "^8.0.0"
 
 oauth-sign@~0.3.0:
   version "0.3.0"
   resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.3.0.tgz#cb540f93bb2b22a7d5941691a288d60e8ea9386e"
 
-oauth-sign@~0.8.1:
+oauth-sign@~0.8.1, oauth-sign@~0.8.2:
   version "0.8.2"
   resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.8.2.tgz#46a6ab7f0aead8deae9ec0565780b7d4efeb9d43"
 
@@ -2778,7 +2952,7 @@ os-locale@^2.0.0:
     lcid "^1.0.0"
     mem "^1.1.0"
 
-os-tmpdir@^1.0.0, os-tmpdir@~1.0.1:
+os-tmpdir@^1.0.0, os-tmpdir@~1.0.1, os-tmpdir@~1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
 
@@ -2822,9 +2996,13 @@ parse-json@^2.2.0:
   dependencies:
     error-ex "^1.2.0"
 
-parseurl@~1.3.1:
-  version "1.3.1"
-  resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.1.tgz#c8ab8c9223ba34888aa64a297b28853bec18da56"
+parse-passwd@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6"
+
+parseurl@~1.3.1, parseurl@~1.3.2:
+  version "1.3.2"
+  resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.2.tgz#fc289d4ed8993119460c156253262cdc8de65bf3"
 
 path-exists@^2.0.0:
   version "2.1.0"
@@ -2864,12 +3042,6 @@ path-type@^1.0.0:
     pify "^2.0.0"
     pinkie-promise "^2.0.0"
 
-path-type@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/path-type/-/path-type-2.0.0.tgz#f012ccb8415b7096fc2daa1054c3d72389594c73"
-  dependencies:
-    pify "^2.0.0"
-
 pause-stream@0.0.11:
   version "0.0.11"
   resolved "https://registry.yarnpkg.com/pause-stream/-/pause-stream-0.0.11.tgz#fe5a34b0cbce12b5aa6a2b403ee2e73b602f1445"
@@ -2880,6 +3052,10 @@ performance-now@^0.2.0:
   version "0.2.0"
   resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-0.2.0.tgz#33ef30c5c77d4ea21c5a53869d91b56d8f2555e5"
 
+performance-now@^2.1.0:
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
+
 pify@^2.0.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
@@ -2955,11 +3131,11 @@ q-io@1.13.2:
     qs "^1.2.1"
     url2 "^0.0.0"
 
-q@1.5.0, q@^1.0.1:
-  version "1.5.0"
-  resolved "https://registry.yarnpkg.com/q/-/q-1.5.0.tgz#dd01bac9d06d30e6f219aecb8253ee9ebdc308f1"
+q@^1.0.1:
+  version "1.5.1"
+  resolved "https://registry.yarnpkg.com/q/-/q-1.5.1.tgz#7e32f75b41381291d04611f1bf14109ac00651d7"
 
-qs@6.4.0, qs@^6.1.0, qs@~6.4.0:
+qs@6.4.0, qs@~6.4.0:
   version "6.4.0"
   resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233"
 
@@ -2967,6 +3143,10 @@ qs@^1.2.1:
   version "1.2.2"
   resolved "https://registry.yarnpkg.com/qs/-/qs-1.2.2.tgz#19b57ff24dc2a99ce1f8bdf6afcda59f8ef61f88"
 
+qs@^6.5.1, qs@~6.5.1:
+  version "6.5.1"
+  resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8"
+
 qs@~1.0.0:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/qs/-/qs-1.0.2.tgz#50a93e2b5af6691c31bcea5dae78ee6ea1903768"
@@ -2999,8 +3179,8 @@ raw-body@~2.2.0:
     unpipe "1.0.0"
 
 rc@^1.1.7:
-  version "1.2.1"
-  resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.1.tgz#2e03e8e42ee450b8cb3dce65be1bf8974e1dfd95"
+  version "1.2.2"
+  resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.2.tgz#d8ce9cb57e8d64d9c7badd9876c7c34cbe3c7077"
   dependencies:
     deep-extend "~0.4.0"
     ini "~1.3.0"
@@ -3023,13 +3203,6 @@ read-pkg-up@^1.0.1:
     find-up "^1.0.0"
     read-pkg "^1.0.0"
 
-read-pkg-up@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-2.0.0.tgz#6b72a8048984e0c41e79510fd5e9fa99b3b549be"
-  dependencies:
-    find-up "^2.0.0"
-    read-pkg "^2.0.0"
-
 read-pkg@^1.0.0:
   version "1.1.0"
   resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-1.1.0.tgz#f5ffaa5ecd29cb31c0474bca7d756b6bb29e3f28"
@@ -3038,14 +3211,6 @@ read-pkg@^1.0.0:
     normalize-package-data "^2.3.2"
     path-type "^1.0.0"
 
-read-pkg@^2.0.0:
-  version "2.0.0"
-  resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-2.0.0.tgz#8ef1c0623c6a6db0dc6713c4bfac46332b2368f8"
-  dependencies:
-    load-json-file "^2.0.0"
-    normalize-package-data "^2.3.2"
-    path-type "^2.0.0"
-
 readable-stream@1.1.x:
   version "1.1.14"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-1.1.14.tgz#7cf4c54ef648e3813084c636dd2079e166c081d9"
@@ -3109,20 +3274,19 @@ rechoir@^0.6.2:
   dependencies:
     resolve "^1.1.6"
 
-regenerator-runtime@^0.10.0:
-  version "0.10.5"
-  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.10.5.tgz#336c3efc1220adcedda2c9fab67b5a7955a33658"
+regenerator-runtime@^0.11.0:
+  version "0.11.0"
+  resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.0.tgz#7e54fe5b5ccd5d6624ea6255c3473be090b802e1"
 
 regex-cache@^0.4.2:
-  version "0.4.3"
-  resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.3.tgz#9b1a6c35d4d0dfcef5711ae651e8e9d3d7114145"
+  version "0.4.4"
+  resolved "https://registry.yarnpkg.com/regex-cache/-/regex-cache-0.4.4.tgz#75bdc58a2a1496cec48a12835bc54c8d562336dd"
   dependencies:
     is-equal-shallow "^0.1.3"
-    is-primitive "^2.0.0"
 
 remove-trailing-separator@^1.0.1:
-  version "1.0.2"
-  resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.0.2.tgz#69b062d978727ad14dc6b56ba4ab772fd8d70511"
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef"
 
 repeat-element@^1.1.2:
   version "1.1.2"
@@ -3175,7 +3339,7 @@ request@2.40.0:
     tough-cookie ">=0.12.0"
     tunnel-agent "~0.4.0"
 
-request@2.81.0, request@2.x, request@^2.79.0, request@^2.81.0:
+request@2.81.0:
   version "2.81.0"
   resolved "https://registry.yarnpkg.com/request/-/request-2.81.0.tgz#c6928946a0e06c5f8d6f8a9333469ffda46298a0"
   dependencies:
@@ -3202,6 +3366,33 @@ request@2.81.0, request@2.x, request@^2.79.0, request@^2.81.0:
     tunnel-agent "^0.6.0"
     uuid "^3.0.0"
 
+request@2.x, request@^2.79.0, request@^2.81.0:
+  version "2.83.0"
+  resolved "https://registry.yarnpkg.com/request/-/request-2.83.0.tgz#ca0b65da02ed62935887808e6f510381034e3356"
+  dependencies:
+    aws-sign2 "~0.7.0"
+    aws4 "^1.6.0"
+    caseless "~0.12.0"
+    combined-stream "~1.0.5"
+    extend "~3.0.1"
+    forever-agent "~0.6.1"
+    form-data "~2.3.1"
+    har-validator "~5.0.3"
+    hawk "~6.0.2"
+    http-signature "~1.2.0"
+    is-typedarray "~1.0.0"
+    isstream "~0.1.2"
+    json-stringify-safe "~5.0.1"
+    mime-types "~2.1.17"
+    oauth-sign "~0.8.2"
+    performance-now "^2.1.0"
+    qs "~6.5.1"
+    safe-buffer "^5.1.1"
+    stringstream "~0.0.5"
+    tough-cookie "~2.3.3"
+    tunnel-agent "^0.6.0"
+    uuid "^3.1.0"
+
 require-directory@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42"
@@ -3226,8 +3417,8 @@ resolve-from@^2.0.0:
   resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-2.0.0.tgz#9480ab20e94ffa1d9e80a804c7ea147611966b57"
 
 resolve@^1.1.6:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.4.0.tgz#a75be01c53da25d934a98ebd0e4c4a7312f92a86"
+  version "1.5.0"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.5.0.tgz#1f09acce796c9a762579f31b2c1cc4c3cddf9f36"
   dependencies:
     path-parse "^1.0.5"
 
@@ -3260,9 +3451,9 @@ right-align@^0.1.1:
   dependencies:
     align-text "^0.1.1"
 
-rimraf@2, rimraf@2.6.1, rimraf@^2.2.8, rimraf@^2.3.3, rimraf@^2.5.1, rimraf@^2.5.4, rimraf@^2.6.1:
-  version "2.6.1"
-  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.1.tgz#c2338ec643df7a1b7fe5c54fa86f57428a55f33d"
+rimraf@2, rimraf@^2.2.8, rimraf@^2.3.3, rimraf@^2.5.1, rimraf@^2.5.4, rimraf@^2.6.1:
+  version "2.6.2"
+  resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.2.tgz#2ed8150d24a16ea8651e6d6ef0f47c4158ce7a36"
   dependencies:
     glob "^7.0.5"
 
@@ -3296,7 +3487,7 @@ rx@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/rx/-/rx-4.1.0.tgz#a5f13ff79ef3b740fe30aa803fb09f98805d4782"
 
-safe-buffer@^5.0.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
+safe-buffer@^5.0.1, safe-buffer@^5.1.1, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
   version "5.1.1"
   resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.1.tgz#893312af69b2123def71f57889001671eeb2c853"
 
@@ -3392,13 +3583,13 @@ shelljs@^0.7.5:
     interpret "^1.0.0"
     rechoir "^0.6.2"
 
-should-equal@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/should-equal/-/should-equal-1.0.1.tgz#0b6e9516f2601a9fb0bb2dcc369afa1c7e200af7"
+should-equal@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/should-equal/-/should-equal-2.0.0.tgz#6072cf83047360867e68e98b09d71143d04ee0c3"
   dependencies:
-    should-type "^1.0.0"
+    should-type "^1.4.0"
 
-should-format@^3.0.2:
+should-format@^3.0.3:
   version "3.0.3"
   resolved "https://registry.yarnpkg.com/should-format/-/should-format-3.0.3.tgz#9bfc8f74fa39205c53d38c34d717303e277124f1"
   dependencies:
@@ -3412,7 +3603,7 @@ should-type-adaptors@^1.0.1:
     should-type "^1.3.0"
     should-util "^1.0.0"
 
-should-type@^1.0.0, should-type@^1.3.0, should-type@^1.4.0:
+should-type@^1.3.0, should-type@^1.4.0:
   version "1.4.0"
   resolved "https://registry.yarnpkg.com/should-type/-/should-type-1.4.0.tgz#0756d8ce846dfd09843a6947719dfa0d4cff5cf3"
 
@@ -3420,12 +3611,12 @@ should-util@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/should-util/-/should-util-1.0.0.tgz#c98cda374aa6b190df8ba87c9889c2b4db620063"
 
-should@:
-  version "11.2.1"
-  resolved "https://registry.yarnpkg.com/should/-/should-11.2.1.tgz#90f55145552d01cfc200666e4e818a1c9670eda2"
+should@*:
+  version "13.1.3"
+  resolved "https://registry.yarnpkg.com/should/-/should-13.1.3.tgz#a089bdf7979392a8272a712c8b63acbaafb7948f"
   dependencies:
-    should-equal "^1.0.0"
-    should-format "^3.0.2"
+    should-equal "^2.0.0"
+    should-format "^3.0.3"
     should-type "^1.4.0"
     should-type-adaptors "^1.0.1"
     should-util "^1.0.0"
@@ -3448,6 +3639,10 @@ slide@^1.1.5:
   version "1.1.6"
   resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707"
 
+smart-buffer@^1.0.13:
+  version "1.1.15"
+  resolved "https://registry.yarnpkg.com/smart-buffer/-/smart-buffer-1.1.15.tgz#7f114b5b65fab3e2a35aa775bb12f0d1c649bf16"
+
 sntp@0.2.x:
   version "0.2.4"
   resolved "https://registry.yarnpkg.com/sntp/-/sntp-0.2.4.tgz#fb885f18b0f3aad189f824862536bceeec750900"
@@ -3460,9 +3655,29 @@ sntp@1.x.x:
   dependencies:
     hoek "2.x.x"
 
+sntp@2.x.x:
+  version "2.1.0"
+  resolved "https://registry.yarnpkg.com/sntp/-/sntp-2.1.0.tgz#2c6cec14fedc2222739caf9b5c3d85d1cc5a2cc8"
+  dependencies:
+    hoek "4.x.x"
+
+socks-proxy-agent@^3.0.1:
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/socks-proxy-agent/-/socks-proxy-agent-3.0.1.tgz#2eae7cf8e2a82d34565761539a7f9718c5617659"
+  dependencies:
+    agent-base "^4.1.0"
+    socks "^1.1.10"
+
+socks@^1.1.10:
+  version "1.1.10"
+  resolved "https://registry.yarnpkg.com/socks/-/socks-1.1.10.tgz#5b8b7fc7c8f341c53ed056e929b7bf4de8ba7b5a"
+  dependencies:
+    ip "^1.1.4"
+    smart-buffer "^1.0.13"
+
 source-map-support@^0.4.0, source-map-support@^0.4.15:
-  version "0.4.15"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.15.tgz#03202df65c06d2bd8c7ec2362a193056fef8d3b1"
+  version "0.4.18"
+  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.4.18.tgz#0286a6de8be42641338594e97ccea75f0a2c585f"
   dependencies:
     source-map "^0.5.6"
 
@@ -3472,9 +3687,9 @@ source-map@^0.4.4:
   dependencies:
     amdefine ">=0.0.4"
 
-source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1:
-  version "0.5.6"
-  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412"
+source-map@^0.5.3, source-map@^0.5.6, source-map@~0.5.1:
+  version "0.5.7"
+  resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
 
 source-map@~0.1.33:
   version "0.1.43"
@@ -3482,7 +3697,7 @@ source-map@~0.1.33:
   dependencies:
     amdefine ">=0.0.4"
 
-spawn-wrap@^1.3.8:
+spawn-wrap@=1.3.8:
   version "1.3.8"
   resolved "https://registry.yarnpkg.com/spawn-wrap/-/spawn-wrap-1.3.8.tgz#fa2a79b990cbb0bb0018dca6748d88367b19ec31"
   dependencies:
@@ -3542,7 +3757,11 @@ stack-trace@0.0.x:
   version "0.0.10"
   resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0"
 
-"statuses@>= 1.3.1 < 2", statuses@~1.3.1:
+"statuses@>= 1.3.1 < 2":
+  version "1.4.0"
+  resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.4.0.tgz#bb73d446da2796106efcc1b601a253d6c46bd087"
+
+statuses@~1.3.1:
   version "1.3.1"
   resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e"
 
@@ -3589,7 +3808,7 @@ string_decoder@~1.0.3:
   dependencies:
     safe-buffer "~5.1.0"
 
-stringstream@~0.0.4:
+stringstream@~0.0.4, stringstream@~0.0.5:
   version "0.0.5"
   resolved "https://registry.yarnpkg.com/stringstream/-/stringstream-0.0.5.tgz#4e484cd4de5a0bbbee18e46307710a8a81621878"
 
@@ -3611,6 +3830,10 @@ strip-ansi@^4.0.0:
   dependencies:
     ansi-regex "^3.0.0"
 
+strip-ansi@~0.1.0:
+  version "0.1.1"
+  resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-0.1.1.tgz#39e8a98d044d150660abe4a6808acf70bb7bc991"
+
 strip-bom@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-2.0.0.tgz#6219a85616520491f35788bdbf1447a99c7e6b0e"
@@ -3634,18 +3857,18 @@ strip-json-comments@~1.0.1:
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-1.0.4.tgz#1e15fbcac97d3ee99bf2d73b4c656b082bbafb91"
 
 superagent@^3.0.0:
-  version "3.5.2"
-  resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.5.2.tgz#3361a3971567504c351063abeaae0faa23dbf3f8"
+  version "3.8.1"
+  resolved "https://registry.yarnpkg.com/superagent/-/superagent-3.8.1.tgz#2571fd921f3fcdba43ac68c3b35c91951532701f"
   dependencies:
     component-emitter "^1.2.0"
-    cookiejar "^2.0.6"
-    debug "^2.2.0"
+    cookiejar "^2.1.0"
+    debug "^3.1.0"
     extend "^3.0.0"
-    form-data "^2.1.1"
+    form-data "^2.3.1"
     formidable "^1.1.1"
     methods "^1.1.1"
-    mime "^1.3.4"
-    qs "^6.1.0"
+    mime "^1.4.1"
+    qs "^6.5.1"
     readable-stream "^2.0.5"
 
 supertest@:
@@ -3655,7 +3878,7 @@ supertest@:
     methods "~1.1.2"
     superagent "^3.0.0"
 
-supports-color@3.1.2, supports-color@^3.1.2:
+supports-color@3.1.2:
   version "3.1.2"
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.1.2.tgz#72a262894d9d408b956ca05ff37b2ed8a6e2a2d5"
   dependencies:
@@ -3665,9 +3888,15 @@ supports-color@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
 
+supports-color@^3.1.2:
+  version "3.2.3"
+  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6"
+  dependencies:
+    has-flag "^1.0.0"
+
 supports-color@^4.0.0:
-  version "4.2.1"
-  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.2.1.tgz#65a4bb2631e90e02420dba5554c375a4754bb836"
+  version "4.5.0"
+  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-4.5.0.tgz#be7a0de484dec5c5cddf8b3d59125044912f635b"
   dependencies:
     has-flag "^2.0.0"
 
@@ -3683,12 +3912,12 @@ table@^3.7.8:
     string-width "^2.0.0"
 
 tail@^1.2.1:
-  version "1.2.2"
-  resolved "https://registry.yarnpkg.com/tail/-/tail-1.2.2.tgz#3c40a47d53e137c541a14cc133532ecb2a75cc51"
+  version "1.2.3"
+  resolved "https://registry.yarnpkg.com/tail/-/tail-1.2.3.tgz#b08d6fa79fb928869631a341a51c14497c1c4255"
 
 tar-pack@^3.4.0:
-  version "3.4.0"
-  resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.0.tgz#23be2d7f671a8339376cbdb0b8fe3fdebf317984"
+  version "3.4.1"
+  resolved "https://registry.yarnpkg.com/tar-pack/-/tar-pack-3.4.1.tgz#e1dbc03a9b9d3ba07e896ad027317eb679a10a1f"
   dependencies:
     debug "^2.2.0"
     fstream "^1.0.10"
@@ -3726,8 +3955,8 @@ tar-pack@~3.3.0:
     uid-number "~0.0.6"
 
 tar-stream@^1.5.0:
-  version "1.5.4"
-  resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.5.4.tgz#36549cf04ed1aee9b2a30c0143252238daf94016"
+  version "1.5.5"
+  resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.5.5.tgz#5cad84779f45c83b1f2508d96b09d88c7218af55"
   dependencies:
     bl "^1.0.0"
     end-of-stream "^1.0.0"
@@ -3770,19 +3999,19 @@ tmp@0.0.29:
   dependencies:
     os-tmpdir "~1.0.1"
 
-tmp@0.0.31, tmp@^0.0.31:
-  version "0.0.31"
-  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.31.tgz#8f38ab9438e17315e5dbd8b3657e8bfb277ae4a7"
+tmp@^0.0.33:
+  version "0.0.33"
+  resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9"
   dependencies:
-    os-tmpdir "~1.0.1"
+    os-tmpdir "~1.0.2"
 
-to-fast-properties@^1.0.1:
+to-fast-properties@^1.0.3:
   version "1.0.3"
   resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-1.0.3.tgz#b83571fa4d8c25b82e231b06e3a3055de4ca1a47"
 
-tough-cookie@>=0.12.0, tough-cookie@~2.3.0:
-  version "2.3.2"
-  resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a"
+tough-cookie@>=0.12.0, tough-cookie@~2.3.0, tough-cookie@~2.3.3:
+  version "2.3.3"
+  resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.3.tgz#0b618a5565b6dea90bf3425d04d55edc475a7561"
   dependencies:
     punycode "^1.4.1"
 
@@ -3834,10 +4063,14 @@ tunnel-agent@~0.4.0:
   version "0.4.3"
   resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.4.3.tgz#6373db76909fe570e08d73583365ed828a74eeeb"
 
-tweetnacl@0.14.3, tweetnacl@^0.14.3, tweetnacl@~0.14.0:
+tweetnacl@0.14.3:
   version "0.14.3"
   resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.3.tgz#3da382f670f25ded78d7b3d1792119bca0b7132d"
 
+tweetnacl@^0.14.3, tweetnacl@~0.14.0:
+  version "0.14.5"
+  resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64"
+
 type-check@~0.3.1, type-check@~0.3.2:
   version "0.3.2"
   resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72"
@@ -3856,8 +4089,8 @@ typedarray@^0.0.6:
   resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
 
 typescript@^2.4.1:
-  version "2.4.2"
-  resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.4.2.tgz#f8395f85d459276067c988aa41837a8f82870844"
+  version "2.6.2"
+  resolved "https://registry.yarnpkg.com/typescript/-/typescript-2.6.2.tgz#3c5b6fd7f6de0914269027f03c0946758f7673a4"
 
 uglify-js@^2.6:
   version "2.8.29"
@@ -3892,6 +4125,10 @@ underscore@1.8.3:
   version "1.8.3"
   resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.8.3.tgz#4f3fb53b106e6097fcf9cb4109f2a5e9bdfa5022"
 
+underscore@~1.6.0:
+  version "1.6.0"
+  resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8"
+
 underscore@~1.7.0:
   version "1.7.0"
   resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.7.0.tgz#6bbaf0877500d36be34ecaa584e0db9fef035209"
@@ -3926,7 +4163,7 @@ url2@^0.0.0:
   version "0.0.0"
   resolved "https://registry.yarnpkg.com/url2/-/url2-0.0.0.tgz#4eaabd1d5c3ac90d62ab4485c998422865a04b1a"
 
-user-home@^1.0.0, user-home@^1.1.1:
+user-home@^1.0.0:
   version "1.1.1"
   resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190"
 
@@ -3944,15 +4181,15 @@ utils-merge@1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.0.tgz#0294fb922bb9375153541c4f7096231f287c8af8"
 
-uuid@^3.0.0:
+uuid@^3.0.0, uuid@^3.1.0:
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.1.0.tgz#3dd3d3e790abc24d7b0d3a034ffababe28ebbc04"
 
 v8flags@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.0.0.tgz#4be9604488e0c4123645def705b1848d16b8e01f"
+  version "3.0.1"
+  resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.0.1.tgz#dce8fc379c17d9f2c9e9ed78d89ce00052b1b76b"
   dependencies:
-    user-home "^1.1.1"
+    homedir-polyfill "^1.0.1"
 
 validate-npm-package-license@^3.0.1:
   version "3.0.1"
@@ -3962,8 +4199,8 @@ validate-npm-package-license@^3.0.1:
     spdx-expression-parse "~1.0.0"
 
 vary@^1, vary@~1.1.0:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.1.tgz#67535ebb694c1d52257457984665323f587e8d37"
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc"
 
 verror@1.10.0:
   version "1.10.0"
@@ -4025,8 +4262,8 @@ wordwrap@~1.0.0:
   resolved "https://registry.yarnpkg.com/wordwrap/-/wordwrap-1.0.0.tgz#27584810891456a4171c8d0226441ade90cbcaeb"
 
 wotb@0.6.x:
-  version "0.6.2"
-  resolved "https://registry.yarnpkg.com/wotb/-/wotb-0.6.2.tgz#cfdb7f6c69b35b86c4798450d627748629e3fda8"
+  version "0.6.3"
+  resolved "https://registry.yarnpkg.com/wotb/-/wotb-0.6.3.tgz#9e64a1a2ad312f6b72b92a6f570ae963d13dd76a"
   dependencies:
     bindings "1.2.1"
     nan "2.2.0"
@@ -4057,9 +4294,9 @@ write@^0.2.1:
   dependencies:
     mkdirp "^0.5.1"
 
-ws@1.1.1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.1.tgz#082ddb6c641e85d4bb451f03d52f06eabdb1f018"
+ws@1.1.5:
+  version "1.1.5"
+  resolved "https://registry.yarnpkg.com/ws/-/ws-1.1.5.tgz#cbd9e6e75e09fc5d2c90015f21f0c40875e0dd51"
   dependencies:
     options ">=0.0.5"
     ultron "1.0.x"
@@ -4086,35 +4323,28 @@ yallist@^2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/yallist/-/yallist-2.1.2.tgz#1c11f9218f076089a47dd512f93c6699a6a81d52"
 
-yargs-parser@^5.0.0:
-  version "5.0.0"
-  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-5.0.0.tgz#275ecf0d7ffe05c77e64e7c86e4cd94bf0e1228a"
-  dependencies:
-    camelcase "^3.0.0"
-
-yargs-parser@^7.0.0:
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-7.0.0.tgz#8d0ac42f16ea55debd332caf4c4038b3e3f5dfd9"
+yargs-parser@^8.0.0:
+  version "8.0.0"
+  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-8.0.0.tgz#21d476330e5a82279a4b881345bf066102e219c6"
   dependencies:
     camelcase "^4.1.0"
 
-yargs@^8.0.1:
-  version "8.0.2"
-  resolved "https://registry.yarnpkg.com/yargs/-/yargs-8.0.2.tgz#6299a9055b1cefc969ff7e79c1d918dceb22c360"
+yargs@^10.0.3:
+  version "10.0.3"
+  resolved "https://registry.yarnpkg.com/yargs/-/yargs-10.0.3.tgz#6542debd9080ad517ec5048fb454efe9e4d4aaae"
   dependencies:
-    camelcase "^4.1.0"
     cliui "^3.2.0"
     decamelize "^1.1.1"
+    find-up "^2.1.0"
     get-caller-file "^1.0.1"
     os-locale "^2.0.0"
-    read-pkg-up "^2.0.0"
     require-directory "^2.1.1"
     require-main-filename "^1.0.1"
     set-blocking "^2.0.0"
     string-width "^2.0.0"
     which-module "^2.0.0"
     y18n "^3.2.1"
-    yargs-parser "^7.0.0"
+    yargs-parser "^8.0.0"
 
 yargs@~3.10.0:
   version "3.10.0"