From 49537b7cc72e3d88b98268531ba7c490b50a5012 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Wed, 13 Dec 2017 23:16:53 +0100
Subject: [PATCH 01/80] Revert "[fix] finalize the revert of the branch'
 better_cpu'"

This reverts commit bbf2b95
---
 app/modules/prover/lib/blockProver.ts | 10 +++--
 app/modules/prover/lib/engine.ts      | 29 +++++++-------
 app/modules/prover/lib/powCluster.ts  |  1 -
 app/modules/prover/lib/proof.ts       | 55 +++++++++++++--------------
 4 files changed, 46 insertions(+), 49 deletions(-)

diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts
index 9cd215d98..ccafd0717 100644
--- a/app/modules/prover/lib/blockProver.ts
+++ b/app/modules/prover/lib/blockProver.ts
@@ -44,6 +44,9 @@ export class WorkerFarm {
     })
   }
 
+  get nbWorkers() {
+    return this.theEngine.getNbWorkers()
+  }
 
   changeCPU(cpu:any) {
     return this.theEngine.setConf({ cpu })
@@ -175,7 +178,6 @@ export class BlockProver {
       const start = Date.now();
       let result = await powFarm.askNewProof({
         newPoW: {
-          turnDuration: os.arch().match(/arm/) ? CommonConstants.POW_TURN_DURATION_ARM : CommonConstants.POW_TURN_DURATION_PC,
           conf: {
             cpu: this.conf.cpu,
             prefix: this.conf.prefix,
@@ -194,10 +196,10 @@ export class BlockProver {
         throw 'Proof-of-work computation canceled because block received';
       } else {
         const proof = result.block;
-        const testsCount = result.testsCount;
+        const testsCount = result.testsCount * powFarm.nbWorkers
         const duration = (Date.now() - start);
-        const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2);
-        this.logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond);
+        const testsPerSecond = testsCount / (duration / 1000)
+        this.logger.info('Done: #%s, %s in %ss (~%s tests, ~%s tests/s, using %s cores, CPU %s%)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond.toFixed(2), powFarm.nbWorkers, Math.floor(100*this.conf.cpu))
         this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros);
         return BlockDTO.fromJSONObject(proof)
       }
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index cc83f4682..30585270a 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -1,4 +1,3 @@
-import {ProverConstants} from "./constants"
 import {Master as PowCluster} from "./powCluster"
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 
@@ -25,22 +24,25 @@ export class PowEngine {
     this.id = this.cluster.clusterId
   }
 
+  getNbWorkers() {
+    return this.cluster.nbWorkers
+  }
+
   forceInit() {
     return this.cluster.initCluster()
   }
 
   async prove(stuff:any) {
-
-    if (this.cluster.hasProofPending) {
-      await this.cluster.cancelWork()
-    }
-
-    const cpus = os.cpus()
-
-    if (os.arch().match(/arm/) || cpus[0].model.match(/Atom/)) {
-      stuff.newPoW.conf.nbCores /= 2; // Make sure that only once each physical core is used (for Hyperthreading).
-    }
-    return await this.cluster.proveByWorkers(stuff)
+        if (this.cluster.hasProofPending) {
+          await this.cluster.cancelWork()
+        }
+    
+        const cpus = os.cpus()
+    
+        if (os.arch().match(/arm/) || cpus[0].model.match(/Atom/)) {
+          stuff.newPoW.conf.nbCores /= 2; // Make sure that only once each physical core is used (for Hyperthreading).
+        }
+        return await this.cluster.proveByWorkers(stuff)
   }
 
   cancel() {
@@ -48,9 +50,6 @@ export class PowEngine {
   }
 
   setConf(value:any) {
-    if (os.arch().match(/arm/) && value.cpu !== undefined) {
-      value.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2
-    }
     return this.cluster.changeConf(value)
   }
 
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 4d4820777..140c887a5 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -195,7 +195,6 @@ export class Master {
             highMark: stuff.newPoW.highMark,
             pair: _.clone(stuff.newPoW.pair),
             forcedTime: stuff.newPoW.forcedTime,
-            turnDuration: stuff.newPoW.turnDuration,
             conf: {
               medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks,
               avgGenTime: stuff.newPoW.conf.avgGenTime,
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 9b15c0be5..02927aac8 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -6,15 +6,11 @@ import {ProverConstants} from "./constants"
 import {KeyGen} from "../../../lib/common-libs/crypto/keyring"
 import {dos2unix} from "../../../lib/common-libs/dos2unix"
 import {rawer} from "../../../lib/common-libs/index"
+import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler"
 
 const moment = require('moment');
 const querablep = require('querablep');
 
-const PAUSES_PER_TURN = 5;
-
-// This value can be changed
-let TURN_DURATION_IN_MILLISEC = 100;
-
 let computing = querablep(Promise.resolve(null));
 let askedStop = false;
 
@@ -90,7 +86,6 @@ function beginNewProofOfWork(stuff:any) {
       prefix *= 100 * ProverConstants.NONCE_RANGE
     }
     const highMark = stuff.highMark;
-    const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC
     let sigFunc = null;
     if (signatureFunc && lastSecret === pair.sec) {
       sigFunc = signatureFunc;
@@ -108,13 +103,17 @@ function beginNewProofOfWork(stuff:any) {
 
     let testsCount = 0;
     let found = false;
-    let score = 0;
     let turn = 0;
+    const profiler = new ProcessCpuProfiler(100)
+    let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
+    // We limit the number of tests according to CPU usage
+    let testsPerRound = 1
+    let turnDuration = 20 // We initially goes quickly to the max speed = 50 reevaluations per second (1000 / 20)
 
     while (!found && !askedStop) {
 
       /*****************
-       * A TURN
+       * A TURN ~ 100ms
        ****************/
 
       await Promise.race([
@@ -125,26 +124,9 @@ function beginNewProofOfWork(stuff:any) {
         // II. Process the turn's PoW
         (async () => {
 
-          /*****************
-           * A TURN OF POW ~= 100ms by default
-           * --------------------
-           *
-           * The concept of "turn" is required to limit the CPU usage.
-           * We need a time reference to have the speed = nb tests / period of time.
-           * Here we have:
-           *
-           *   - speed = testsCount / turn
-           *
-           * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the
-           * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set.
-           ****************/
-
             // Prove
           let i = 0;
           const thisTurn = turn;
-          const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn
-          // We limit the number of tests according to CPU usage
-          const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000
 
           // Time is updated regularly during the proof
           block.time = getBlockTime(block, conf, forcedTime)
@@ -196,7 +178,7 @@ function beginNewProofOfWork(stuff:any) {
             if (!found && !askedStop) {
               i++;
               testsCount++;
-              if (i % pausePeriod === 0) {
+              if (i % testsPerRound === 0) {
                 await countDown(0); // Very low pause, just the time to process eventual end of the turn
               }
             }
@@ -208,12 +190,24 @@ function beginNewProofOfWork(stuff:any) {
           if (!found) {
 
             // CPU speed recording
-            if (turn > 0 && !score) {
-              score = testsCount;
+            if (turn > 0) {
+              const oldTestsPerRound = testsPerRound
+              cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
+              if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
+                let powVariationFactor
+                // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses
+                if (currentCPU > cpuUsage) {
+                  powVariationFactor = 1.01
+                  testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor))
+                } else {
+                  powVariationFactor = 0.99
+                  testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
+                }
+              }
             }
 
             /*****************
-             * UNLOAD CPU CHARGE
+             * UNLOAD CPU CHARGE FOR THIS TURN
              ****************/
             // We wait for a maximum time of `turnDuration`.
             // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
@@ -226,6 +220,9 @@ function beginNewProofOfWork(stuff:any) {
 
       // Next turn
       turn++
+
+      turnDuration += 1
+      turnDuration = Math.min(turnDuration, 1000) // Max 1 second per turn
     }
 
     /*****************
-- 
GitLab


From 7ac480f765a0a661d9c2c10d8c759e8aec98880f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Thu, 14 Dec 2017 22:51:20 +0100
Subject: [PATCH 02/80] [fix] #1234 PoW workers were not cancelling their
 current proof when asked

---
 app/modules/prover/lib/constants.ts           |  1 +
 app/modules/prover/lib/powCluster.ts          |  9 ++++-
 app/modules/prover/lib/proof.ts               | 11 +++--
 .../{pow-1-cluster.js => pow-1-cluster.ts}    | 40 ++++++++++++++++---
 4 files changed, 50 insertions(+), 11 deletions(-)
 rename test/fast/prover/{pow-1-cluster.js => pow-1-cluster.ts} (64%)

diff --git a/app/modules/prover/lib/constants.ts b/app/modules/prover/lib/constants.ts
index 0a454d38f..bb0cfcf31 100644
--- a/app/modules/prover/lib/constants.ts
+++ b/app/modules/prover/lib/constants.ts
@@ -13,6 +13,7 @@ export const ProverConstants = {
   NONCE_RANGE: 1000 * 1000 * 1000 * 100,
 
   POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64,
+  POW_NB_PAUSES_PER_ROUND: 10,
 
   // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds.
   POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 140c887a5..f14f9b67e 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -15,6 +15,8 @@ let clusterId = 0
  */
 export class Master {
 
+  nbCancels = 0
+
   clusterId:number
   currentPromise:any|null = null
   slaves:any[] = []
@@ -54,6 +56,8 @@ export class Master {
       this.currentPromise.extras.resolve(message.answer)
       // Stop the slaves' current work
       this.cancelWork()
+    } else if (message.canceled) {
+      this.nbCancels++
     }
     // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message)
   }
@@ -130,7 +134,7 @@ export class Master {
   }
 
   cancelWork() {
-    this.logger.info(`Cancelling the work on PoW cluster`)
+    this.logger.info(`Cancelling the work on PoW cluster of %s slaves`, this.slaves.length)
     this.slaves.forEach(s => {
       s.worker.send({
         command: 'cancel'
@@ -189,7 +193,8 @@ export class Master {
           uuid,
           command: 'newPoW',
           value: {
-            block: stuff.newPoW.block,
+            initialTestsPerRound: stuff.initialTestsPerRound,
+            maxDuration: stuff.maxDuration,block: stuff.newPoW.block,
             nonceBeginning: s.nonceBeginning,
             zeros: stuff.newPoW.zeros,
             highMark: stuff.newPoW.highMark,
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 02927aac8..caadc5cf6 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -74,6 +74,7 @@ function beginNewProofOfWork(stuff:any) {
      ****************/
 
     let nonce = 0;
+    const maxDuration = stuff.maxDuration || 1000
     const conf = stuff.conf;
     const block = stuff.block;
     const nonceBeginning = stuff.nonceBeginning;
@@ -101,13 +102,14 @@ function beginNewProofOfWork(stuff:any) {
      * GO!
      ****************/
 
+    let pausePeriod = 1;
     let testsCount = 0;
     let found = false;
     let turn = 0;
     const profiler = new ProcessCpuProfiler(100)
     let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
     // We limit the number of tests according to CPU usage
-    let testsPerRound = 1
+    let testsPerRound = stuff.initialTestsPerRound || 1
     let turnDuration = 20 // We initially goes quickly to the max speed = 50 reevaluations per second (1000 / 20)
 
     while (!found && !askedStop) {
@@ -178,7 +180,7 @@ function beginNewProofOfWork(stuff:any) {
             if (!found && !askedStop) {
               i++;
               testsCount++;
-              if (i % testsPerRound === 0) {
+              if (i % pausePeriod === 0) {
                 await countDown(0); // Very low pause, just the time to process eventual end of the turn
               }
             }
@@ -191,7 +193,6 @@ function beginNewProofOfWork(stuff:any) {
 
             // CPU speed recording
             if (turn > 0) {
-              const oldTestsPerRound = testsPerRound
               cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
               if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
                 let powVariationFactor
@@ -203,6 +204,7 @@ function beginNewProofOfWork(stuff:any) {
                   powVariationFactor = 0.99
                   testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
                 }
+                pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND)
               }
             }
 
@@ -222,7 +224,7 @@ function beginNewProofOfWork(stuff:any) {
       turn++
 
       turnDuration += 1
-      turnDuration = Math.min(turnDuration, 1000) // Max 1 second per turn
+      turnDuration = Math.min(turnDuration, maxDuration) // Max 1 second per turn
     }
 
     /*****************
@@ -236,6 +238,7 @@ function beginNewProofOfWork(stuff:any) {
 
       // PoW stopped
       askedStop = false;
+      pSend({ canceled: true })
       return null
 
     } else {
diff --git a/test/fast/prover/pow-1-cluster.js b/test/fast/prover/pow-1-cluster.ts
similarity index 64%
rename from test/fast/prover/pow-1-cluster.js
rename to test/fast/prover/pow-1-cluster.ts
index 96d58c12b..a225f880d 100644
--- a/test/fast/prover/pow-1-cluster.js
+++ b/test/fast/prover/pow-1-cluster.ts
@@ -1,16 +1,15 @@
-"use strict";
+import {Master} from "../../../app/modules/prover/lib/powCluster"
 
 const co = require('co')
-const should = require('should')
-const PowCluster = require('../../../app/modules/prover/lib/powCluster').Master
+require('should')
 const logger = require('../../../app/lib/logger').NewLogger()
 
-let master
+let master:Master
 
 describe('PoW Cluster', () => {
 
   before(() => {
-    master = new PowCluster(1, logger)
+    master = new Master(1, logger)
   })
 
   it('should have an empty cluster if no PoW was asked', () => {
@@ -73,4 +72,35 @@ describe('PoW Cluster', () => {
     delay.should.be.below(50)
   }))
 
+  it('should be able to stop all the cores on cancel', async () => {
+    master.proveByWorkers({
+      initialTestsPerRound: 100,
+      maxDuration: 1000,
+      newPoW: {
+        block: {
+          number: 0
+        },
+        zeros: 10,
+        highMark: 'F',
+        conf: {
+          medianTimeBlocks: 1,
+          avgGenTime: 100,
+          cpu: 0.8,
+          prefix: '8',
+          nbCores: 1
+        },
+        pair: {
+          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+        }
+      }
+    })
+    await new Promise(res => {
+      master.onInfoMessage = () => res()
+    })
+    await master.cancelWork()
+    await new Promise(res => setTimeout(res, 100))
+    master.nbCancels.should.equal(1)
+  })
+
 });
-- 
GitLab


From f7c36eff1b8795f27f2c316865a6a88e960f3532 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 10:43:06 +0100
Subject: [PATCH 03/80] [fix] WS2P client testing was sometimes randomly
 failing

---
 app/modules/ws2p/lib/WS2PCluster.ts | 8 ++++----
 app/modules/ws2p/lib/WS2PServer.ts  | 5 ++---
 2 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/app/modules/ws2p/lib/WS2PCluster.ts b/app/modules/ws2p/lib/WS2PCluster.ts
index bf5ac43a5..3502138a3 100644
--- a/app/modules/ws2p/lib/WS2PCluster.ts
+++ b/app/modules/ws2p/lib/WS2PCluster.ts
@@ -1,4 +1,4 @@
-import { DEFAULT_ENCODING } from 'crypto';
+import {DEFAULT_ENCODING} from 'crypto';
 import {WS2PServer} from "./WS2PServer"
 import {Server} from "../../../../server"
 import {WS2PClient} from "./WS2PClient"
@@ -8,7 +8,7 @@ import {CrawlerConstants} from "../../crawler/lib/constants"
 import {WS2PBlockPuller} from "./WS2PBlockPuller"
 import {WS2PDocpoolPuller} from "./WS2PDocpoolPuller"
 import {WS2PConstants} from "./constants"
-import { PeerDTO, WS2PEndpoint } from '../../../lib/dto/PeerDTO';
+import {PeerDTO, WS2PEndpoint} from '../../../lib/dto/PeerDTO';
 import {GlobalFifoPromise} from "../../../service/GlobalFifoPromise"
 import {OtherConstants} from "../../../lib/other_constants"
 import {Key, verify} from "../../../lib/common-libs/crypto/keyring"
@@ -681,11 +681,11 @@ export class WS2PCluster {
       let uuids = Object.keys(this.ws2pClients)
       uuids = _.shuffle(uuids)
       let lowPriorityConnectionUUID:string = uuids[0]
-      let minPriorityLevel = this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
+      let minPriorityLevel = await this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys)
       for (const uuid of uuids) {
         const client = this.ws2pClients[uuid]
           if (uuid !== lowPriorityConnectionUUID) {
-            let uuidPriorityLevel = this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
+            let uuidPriorityLevel = await this.keyPriorityLevel(client.connection.pubkey, preferedKeys)
             if (uuidPriorityLevel < minPriorityLevel) {
               lowPriorityConnectionUUID = uuid
               minPriorityLevel = uuidPriorityLevel
diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts
index 002d0cf82..ce0849301 100644
--- a/app/modules/ws2p/lib/WS2PServer.ts
+++ b/app/modules/ws2p/lib/WS2PServer.ts
@@ -7,7 +7,6 @@ import {WS2PConstants} from "./constants"
 import {WS2PMessageHandler} from "./impl/WS2PMessageHandler"
 import {WS2PStreamer} from "./WS2PStreamer"
 import {WS2PSingleWriteStream} from "./WS2PSingleWriteStream"
-import { WS2PCluster } from './WS2PCluster';
 
 const WebSocketServer = require('ws').Server
 
@@ -159,10 +158,10 @@ export class WS2PServer extends events.EventEmitter {
 
   async removeLowPriorityConnection(privilegedKeys:string[]) {
     let lowPriorityConnection:WS2PConnection = this.connections[0]
-    let minPriorityLevel = this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
+    let minPriorityLevel = await this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys)
     for (const c of this.connections) {
       if (c !== lowPriorityConnection) {
-        let cPriorityLevel = this.keyPriorityLevel(c.pubkey, privilegedKeys)
+        let cPriorityLevel = await this.keyPriorityLevel(c.pubkey, privilegedKeys)
         if (cPriorityLevel < minPriorityLevel) {
           lowPriorityConnection = c
           minPriorityLevel = cPriorityLevel
-- 
GitLab


From 11a843f49be8cad89afa1a8bff7efef992216086 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 15:29:47 +0100
Subject: [PATCH 04/80] [enh] Refactoring: RouterDependency

---
 app/modules/router.ts                     | 6 ++----
 index.ts                                  | 4 ++--
 test/integration/network-update.js        | 2 +-
 test/integration/peer-outdated.js         | 2 +-
 test/integration/peerings.js              | 2 +-
 test/integration/peers-same-pubkey.js     | 2 +-
 test/integration/start_generate_blocks.js | 2 +-
 test/integration/tools/toolbox.ts         | 7 ++++---
 8 files changed, 13 insertions(+), 14 deletions(-)

diff --git a/app/modules/router.ts b/app/modules/router.ts
index d6484f05b..9c7f2b14d 100644
--- a/app/modules/router.ts
+++ b/app/modules/router.ts
@@ -5,9 +5,7 @@ import * as stream from "stream"
 import {Multicaster} from "../lib/streams/multicaster"
 import {RouterStream} from "../lib/streams/router"
 
-const constants = require('../lib/constants');
-
-module.exports = {
+export const RouterDependency = {
   duniter: {
     service: {
       output: (server:Server, conf:ConfDTO, logger:any) => new Router(server)
@@ -26,7 +24,7 @@ module.exports = {
  * Service which triggers the server's peering generation (actualization of the Peer document).
  * @constructor
  */
-class Router extends stream.Transform {
+export class Router extends stream.Transform {
 
   theRouter:any
   theMulticaster:Multicaster = new Multicaster()
diff --git a/index.ts b/index.ts
index ecf7be9d6..8eb07ea0d 100644
--- a/index.ts
+++ b/index.ts
@@ -9,6 +9,7 @@ import {BmaDependency} from "./app/modules/bma/index"
 import {WS2PDependency} from "./app/modules/ws2p/index"
 import {ProverConstants} from "./app/modules/prover/lib/constants"
 import { ProxiesConf } from './app/lib/proxy';
+import {RouterDependency} from "./app/modules/router"
 
 const path = require('path');
 const _ = require('underscore');
@@ -25,7 +26,6 @@ const reapplyDependency   = require('./app/modules/reapply');
 const revertDependency    = require('./app/modules/revert');
 const daemonDependency    = require('./app/modules/daemon');
 const pSignalDependency   = require('./app/modules/peersignal');
-const routerDependency    = require('./app/modules/router');
 const pluginDependency    = require('./app/modules/plugin');
 
 class Stacks {
@@ -102,7 +102,7 @@ const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([
   { name: 'duniter-revert',    required: revertDependency },
   { name: 'duniter-daemon',    required: daemonDependency },
   { name: 'duniter-psignal',   required: pSignalDependency },
-  { name: 'duniter-router',    required: routerDependency },
+  { name: 'duniter-router',    required: RouterDependency },
   { name: 'duniter-plugin',    required: pluginDependency },
   { name: 'duniter-prover',    required: ProverDependency },
   { name: 'duniter-keypair',   required: KeypairDependency },
diff --git a/test/integration/network-update.js b/test/integration/network-update.js
index 44c7ce929..7da00b7cf 100644
--- a/test/integration/network-update.js
+++ b/test/integration/network-update.js
@@ -60,7 +60,7 @@ describe("Network updating", function() {
       yield [s1, s2].reduce((p, server) => co(function*() {
         yield p;
         yield server.initDalBmaConnections()
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
       }), Promise.resolve());
 
       // Server 1
diff --git a/test/integration/peer-outdated.js b/test/integration/peer-outdated.js
index 3f2cb0e1f..1855c0431 100644
--- a/test/integration/peer-outdated.js
+++ b/test/integration/peer-outdated.js
@@ -42,7 +42,7 @@ describe("Peer document expiry", function() {
     yield [s1, s2].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/peerings.js b/test/integration/peerings.js
index 4b227f631..5fc5d49bf 100644
--- a/test/integration/peerings.js
+++ b/test/integration/peerings.js
@@ -93,7 +93,7 @@ describe("Network", function() {
               return bmaAPI.openConnections()
                 .then(() => {
                   server.bma = bmaAPI;
-                  require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+                  require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
                 });
             });
         });
diff --git a/test/integration/peers-same-pubkey.js b/test/integration/peers-same-pubkey.js
index 41c4b9c19..6375127aa 100644
--- a/test/integration/peers-same-pubkey.js
+++ b/test/integration/peers-same-pubkey.js
@@ -36,7 +36,7 @@ describe("Peer document", function() {
     yield [s1, s2, s3].reduce((p, server) => co(function*() {
       yield p;
       yield server.initDalBmaConnections()
-      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+      require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
     }), Promise.resolve());
 
     // Server 1
diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js
index 3c79c4834..a8d813cb9 100644
--- a/test/integration/start_generate_blocks.js
+++ b/test/integration/start_generate_blocks.js
@@ -76,7 +76,7 @@ describe("Generation", function() {
         yield server.initWithDAL();
         server.bma = yield bma(server);
         yield server.bma.openConnections();
-        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        require('../../app/modules/router').RouterDependency.duniter.methods.routeToNetwork(server);
         yield server.PeeringService.generateSelfPeer(server.conf);
         const prover = require('../../app/modules/prover').ProverDependency.duniter.methods.prover(server);
         server.startBlockComputation = () => prover.startService();
diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts
index ea81eb794..af71bd3ae 100644
--- a/test/integration/tools/toolbox.ts
+++ b/test/integration/tools/toolbox.ts
@@ -23,6 +23,7 @@ import {WS2PCluster} from "../../../app/modules/ws2p/lib/WS2PCluster"
 import {WS2PServer} from "../../../app/modules/ws2p/lib/WS2PServer"
 import {WS2PServerMessageHandler} from "../../../app/modules/ws2p/lib/interface/WS2PServerMessageHandler"
 import {TestUser} from "./TestUser"
+import {RouterDependency} from "../../../app/modules/router"
 
 const assert      = require('assert');
 const _           = require('underscore');
@@ -100,8 +101,8 @@ export const simpleNetworkOf2NodesAnd2Users = async (options:any) => {
   await tac.join();
 
   // Each server forwards to each other
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s1);
-  require('../../../app/modules/router').duniter.methods.routeToNetwork(s2);
+  RouterDependency.duniter.methods.routeToNetwork(s1._server)
+  RouterDependency.duniter.methods.routeToNetwork(s2._server)
 
   return { s1, s2, cat, tac };
 }
@@ -601,7 +602,7 @@ export class TestingServer {
     const bmaAPI = await bma(this.server);
     await bmaAPI.openConnections();
     this.bma = bmaAPI;
-    require('../../../app/modules/router').duniter.methods.routeToNetwork(this.server);
+    RouterDependency.duniter.methods.routeToNetwork(this.server)
     // Extra: for /wot/requirements URL
     require('../../../app/modules/prover').ProverDependency.duniter.methods.hookServer(this.server);
   }
-- 
GitLab


From 616084b0230f1882ace9a248cf0ee1fcdefb29b1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 15:31:53 +0100
Subject: [PATCH 05/80] [enh] tests: remove logging of a block

---
 test/integration/http_api.js | 1 -
 1 file changed, 1 deletion(-)

diff --git a/test/integration/http_api.js b/test/integration/http_api.js
index 83cd3ccb2..9454b78d6 100644
--- a/test/integration/http_api.js
+++ b/test/integration/http_api.js
@@ -336,7 +336,6 @@ function expectJSON(promise, json) {
 
 function postBlock(server2) {
   return function(block) {
-    console.log(typeof block == 'string' ? block : block.getRawSigned())
     return post(server2, '/blockchain/block')({
       block: typeof block == 'string' ? block : block.getRawSigned()
     })
-- 
GitLab


From 4aa66888fa9b56a0f4a0cb6376e5e48e6e958d1e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 15:32:41 +0100
Subject: [PATCH 06/80] [enh] tests: remove logging 'SIGTERM'

---
 app/modules/prover/lib/powCluster.ts | 1 -
 1 file changed, 1 deletion(-)

diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index f14f9b67e..528a34cdd 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -233,7 +233,6 @@ if (cluster.isMaster) {
 } else {
 
   process.on("SIGTERM", function() {
-    logger.info(`SIGTERM received, closing worker ${process.pid}`);
     process.exit(0)
   });
 
-- 
GitLab


From 6a7858673df4d40840e4bb8543a1d815c5ba212b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 15:36:30 +0100
Subject: [PATCH 07/80] [fix] tests: remove EventEmitter leaks

---
 app/modules/prover/lib/blockProver.ts     |   2 +-
 app/modules/prover/lib/permanentProver.ts |   2 +
 app/modules/prover/lib/powCluster.ts      |  62 +++++---
 app/modules/prover/lib/proof.ts           |   4 +
 index.ts                                  |  66 +++++---
 test/fast/prover/pow-1-cluster.ts         |   4 +
 test/fast/prover/pow-2-engine.js          |   3 +
 test/integration/branches_switch.js       |   3 +
 test/integration/branches_switch.ts       |   4 +
 test/integration/continuous-proof.js      |  10 +-
 test/integration/forwarding.js            | 184 ----------------------
 test/integration/forwarding.ts            | 136 ++++++++++++++++
 12 files changed, 238 insertions(+), 242 deletions(-)
 delete mode 100644 test/integration/forwarding.js
 create mode 100644 test/integration/forwarding.ts

diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts
index ccafd0717..32d5699ec 100644
--- a/app/modules/prover/lib/blockProver.ts
+++ b/app/modules/prover/lib/blockProver.ts
@@ -73,7 +73,7 @@ export class WorkerFarm {
   }
 
   shutDownEngine() {
-    this.theEngine.shutDown()
+    return this.theEngine.shutDown()
   }
 
   /**
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index 636a68b98..7ef75a358 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -213,6 +213,8 @@ export class PermanentProver {
     await this.prover.cancel();
     // If we were waiting, stop it and process the continuous generation
     this.blockchainChangedResolver && this.blockchainChangedResolver();
+    const farm = await this.prover.getWorker()
+    await farm.shutDownEngine()
   }
 
   private checkTrialIsNotTooHigh(trial:number, current:DBBlock, selfPubkey:string) {
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 528a34cdd..cd9c7c239 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -3,12 +3,11 @@ import {ProverConstants} from "./constants"
 
 const _ = require('underscore')
 const nuuid = require('node-uuid');
-const moment = require('moment');
 const cluster = require('cluster')
 const querablep = require('querablep')
-const logger = require('../../../lib/logger').NewLogger()
 
 let clusterId = 0
+cluster.setMaxListeners(3)
 
 /**
  * Cluster controller, handles the messages between the main program and the PoW cluster.
@@ -25,6 +24,9 @@ export class Master {
   logger:any
   onInfoCallback:any
   workersOnline:Promise<any>[]
+  private exitHandler: (worker: any, code: any, signal: any) => void
+  private onlineHandler: (worker: any) => void
+  private messageHandler: (worker: any, msg: any) => void
 
   constructor(private nbCores:number, logger:any) {
     this.clusterId = clusterId++
@@ -32,6 +34,29 @@ export class Master {
     this.onInfoMessage = (message:any) => {
       this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`)
     }
+
+    this.exitHandler = (worker:any, code:any, signal:any) => {
+      this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`)
+    }
+
+    this.onlineHandler = (worker:any) => {
+      // We just listen to the workers of this Master
+      if (this.slavesMap[worker.id]) {
+        this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`)
+        this.slavesMap[worker.id].online.extras.resolve()
+        worker.send({
+          command: 'conf',
+          value: this.conf
+        })
+      }
+    }
+
+    this.messageHandler = (worker:any, msg:any) => {
+      // Message for this cluster
+      if (this.slavesMap[worker.id]) {
+        this.onWorkerMessage(worker, msg)
+      }
+    }
   }
 
   get nbWorkers() {
@@ -62,6 +87,10 @@ export class Master {
     // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message)
   }
 
+  /*****************
+   * CLUSTER METHODS
+   ****************/
+
   initCluster() {
     // Setup master
     cluster.setupMaster({
@@ -93,28 +122,9 @@ export class Master {
       return this.slavesMap[worker.id]
     })
 
-    cluster.on('exit', (worker:any, code:any, signal:any) => {
-      this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`)
-    })
-
-    cluster.on('online', (worker:any) => {
-      // We just listen to the workers of this Master
-      if (this.slavesMap[worker.id]) {
-        this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`)
-        this.slavesMap[worker.id].online.extras.resolve()
-        worker.send({
-          command: 'conf',
-          value: this.conf
-        })
-      }
-    })
-
-    cluster.on('message', (worker:any, msg:any) => {
-      // Message for this cluster
-      if (this.slavesMap[worker.id]) {
-        this.onWorkerMessage(worker, msg)
-      }
-    })
+    cluster.on('exit', this.exitHandler)
+    cluster.on('online', this.onlineHandler)
+    cluster.on('message', this.messageHandler)
 
     this.workersOnline = this.slaves.map((s:any) => s.online)
     return Promise.all(this.workersOnline)
@@ -165,7 +175,11 @@ export class Master {
       await Promise.all(this.slaves.map(async (s:any) => {
         s.worker.kill()
       }))
+      cluster.removeListener('exit', this.exitHandler)
+      cluster.removeListener('online', this.onlineHandler)
+      cluster.removeListener('message', this.messageHandler)
     }
+    this.slaves = []
   }
 
   proveByWorkers(stuff:any) {
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index caadc5cf6..407c7a965 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -28,6 +28,10 @@ process.on('uncaughtException', (err:any) => {
   }
 });
 
+process.on('unhandledRejection', () => {
+  process.exit()
+})
+
 process.on('message', async (message) => {
 
   switch (message.command) {
diff --git a/index.ts b/index.ts
index 8eb07ea0d..6ce4dab8d 100644
--- a/index.ts
+++ b/index.ts
@@ -28,6 +28,8 @@ const daemonDependency    = require('./app/modules/daemon');
 const pSignalDependency   = require('./app/modules/peersignal');
 const pluginDependency    = require('./app/modules/plugin');
 
+let sigintListening = false
+
 class Stacks {
 
   static todoOnRunDone:() => any = () => process.exit()
@@ -157,6 +159,8 @@ export interface TransformableDuniterService extends DuniterService, stream.Tran
 
 class Stack {
 
+  private injectedServices = false
+
   private cli:any
   private configLoadingCallbacks:any[]
   private configBeforeSaveCallbacks:any[]
@@ -279,10 +283,12 @@ class Stack {
     }
 
     const server = new Server(home, program.memory === true, commandLineConf(program));
+    let piped = false
 
     // If ever the process gets interrupted
     let isSaving = false;
-    process.on('SIGINT', async () => {
+    if (!sigintListening) {
+      process.on('SIGINT', async () => {
         if (!isSaving) {
           isSaving = true;
           // Save DB
@@ -294,7 +300,9 @@ class Stack {
             process.exit(3);
           }
         }
-    });
+      })
+      sigintListening = true
+    }
 
     // Config or Data reset hooks
     server.resetDataHook = async () => {
@@ -366,26 +374,30 @@ class Stack {
        * Service injection
        * -----------------
        */
-      for (const def of this.definitions) {
-        if (def.service) {
-          // To feed data coming from some I/O (network, disk, other module, ...)
-          if (def.service.input) {
-            this.streams.input.push(def.service.input(server, conf, logger));
-          }
-          // To handle data this has been submitted by INPUT stream
-          if (def.service.process) {
-            this.streams.process.push(def.service.process(server, conf, logger));
-          }
-          // To handle data this has been validated by PROCESS stream
-          if (def.service.output) {
-            this.streams.output.push(def.service.output(server, conf, logger));
-          }
-          // Special service which does not stream anything particular (ex.: piloting the `server` object)
-          if (def.service.neutral) {
-            this.streams.neutral.push(def.service.neutral(server, conf, logger));
+      if (!this.injectedServices) {
+        this.injectedServices = true
+        for (const def of this.definitions) {
+          if (def.service) {
+            // To feed data coming from some I/O (network, disk, other module, ...)
+            if (def.service.input) {
+              this.streams.input.push(def.service.input(server, conf, logger));
+            }
+            // To handle data this has been submitted by INPUT stream
+            if (def.service.process) {
+              this.streams.process.push(def.service.process(server, conf, logger));
+            }
+            // To handle data this has been validated by PROCESS stream
+            if (def.service.output) {
+              this.streams.output.push(def.service.output(server, conf, logger));
+            }
+            // Special service which does not stream anything particular (ex.: piloting the `server` object)
+            if (def.service.neutral) {
+              this.streams.neutral.push(def.service.neutral(server, conf, logger));
+            }
           }
         }
       }
+      piped = true
       // All inputs write to global INPUT stream
       for (const module of this.streams.input) module.pipe(this.INPUT);
       // All processes read from global INPUT stream
@@ -408,13 +420,6 @@ class Stack {
           const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral);
           // Any streaming module must implement a `stopService` method
           await Promise.all(modules.map((module:DuniterService) => module.stopService()))
-          // // Stop reading inputs
-          // for (const module of streams.input) module.unpipe();
-          // Stop reading from global INPUT
-          // INPUT.unpipe();
-          // for (const module of streams.process) module.unpipe();
-          // // Stop reading from global PROCESS
-          // PROCESS.unpipe();
         },
 
         this);
@@ -422,6 +427,15 @@ class Stack {
     } catch (e) {
       server.disconnect();
       throw e;
+    } finally {
+      if (piped) {
+        // Unpipe everything, as the command is done
+        for (const module of this.streams.input) module.unpipe()
+        for (const module of this.streams.process) module.unpipe()
+        for (const module of this.streams.output) module.unpipe()
+        this.INPUT.unpipe()
+        this.PROCESS.unpipe()
+      }
     }
   }
 
diff --git a/test/fast/prover/pow-1-cluster.ts b/test/fast/prover/pow-1-cluster.ts
index a225f880d..a2e76947e 100644
--- a/test/fast/prover/pow-1-cluster.ts
+++ b/test/fast/prover/pow-1-cluster.ts
@@ -12,6 +12,10 @@ describe('PoW Cluster', () => {
     master = new Master(1, logger)
   })
 
+  after(() => {
+    return master.shutDownWorkers()
+  })
+
   it('should have an empty cluster if no PoW was asked', () => {
     master.nbWorkers.should.equal(0)
   })
diff --git a/test/fast/prover/pow-2-engine.js b/test/fast/prover/pow-2-engine.js
index 8238438d0..743744ba5 100644
--- a/test/fast/prover/pow-2-engine.js
+++ b/test/fast/prover/pow-2-engine.js
@@ -10,6 +10,7 @@ describe('PoW Engine', () => {
   it('should be configurable', () => co(function*(){
     const e1 = new PowEngine({ nbCores: 1 }, logger);
     (yield e1.setConf({ cpu: 0.2, prefix: '34' })).should.deepEqual({ cpu: 0.2, prefix: '34' });
+    yield e1.shutDown()
   }));
 
   it('should be able to make a proof', () => co(function*(){
@@ -52,6 +53,7 @@ describe('PoW Engine', () => {
         pow: '009A52E6E2E4EA7DE950A2DA673114FA55B070EBE350D75FF0C62C6AAE9A37E5'
       }
     });
+    yield e1.shutDown()
   }));
 
   it('should be able to stop a proof', () => co(function*(){
@@ -85,5 +87,6 @@ describe('PoW Engine', () => {
     yield e1.cancel()
     // const proof = yield proofPromise;
     // should.not.exist(proof);
+    yield e1.shutDown()
   }));
 });
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
index 353ef8113..d3c7eb1aa 100644
--- a/test/integration/branches_switch.js
+++ b/test/integration/branches_switch.js
@@ -11,6 +11,7 @@ const rp = require('request-promise');
 const httpTest = require('./tools/http');
 const commit = require('./tools/commit');
 const sync = require('./tools/sync');
+const cluster = require('cluster');
 const shutDownEngine = require('./tools/shutDownEngine');
 const expectJSON = httpTest.expectJSON;
 const MEMORY_MODE = true;
@@ -25,6 +26,7 @@ const commonConf = {
 let s1, s2, cat, toc;
 describe("Switch", function () {
     before(() => co(function* () {
+        cluster.setMaxListeners(6);
         s1 = duniter('/bb11', MEMORY_MODE, _.extend({
             switchOnHeadAdvance: 0,
             port: '7788',
@@ -78,6 +80,7 @@ describe("Switch", function () {
         // S1 should have switched to the other branch
     }));
     after(() => {
+        cluster.setMaxListeners(3);
         return Promise.all([
             shutDownEngine(s1),
             shutDownEngine(s2)
diff --git a/test/integration/branches_switch.ts b/test/integration/branches_switch.ts
index 2eb7fae68..ecce9de72 100644
--- a/test/integration/branches_switch.ts
+++ b/test/integration/branches_switch.ts
@@ -11,6 +11,7 @@ const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
 const sync      = require('./tools/sync');
+const cluster   = require('cluster')
 const shutDownEngine  = require('./tools/shutDownEngine');
 
 const expectJSON     = httpTest.expectJSON;
@@ -31,6 +32,8 @@ describe("Switch", function() {
 
   before(() => co(function *() {
 
+    cluster.setMaxListeners(6)
+
     s1 = duniter(
       '/bb11',
       MEMORY_MODE,
@@ -97,6 +100,7 @@ describe("Switch", function() {
   }));
 
   after(() => {
+    cluster.setMaxListeners(3)
     return Promise.all([
       shutDownEngine(s1),
       shutDownEngine(s2)
diff --git a/test/integration/continuous-proof.js b/test/integration/continuous-proof.js
index 289d5b694..157477a80 100644
--- a/test/integration/continuous-proof.js
+++ b/test/integration/continuous-proof.js
@@ -37,6 +37,7 @@ describe("Continous proof-of-work", function() {
     yield i1.join();
     yield i2.join();
     yield s1.commit();
+    yield s1.closeCluster();
   }));
 
   it('should automatically stop waiting if nothing happens', () => co(function*() {
@@ -104,7 +105,7 @@ describe("Continous proof-of-work", function() {
     s2.conf.cpu = 1.0;
     s2.startBlockComputation();
     yield s2.until('block', 15);
-    s2.stopBlockComputation();
+    yield s2.stopBlockComputation();
     yield [
       require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s3),
       new Promise(res => {
@@ -121,11 +122,6 @@ describe("Continous proof-of-work", function() {
     const current = yield s3.get('/blockchain/current')
     yield s3.stopBlockComputation();
     current.number.should.be.aboveOrEqual(14)
+    yield s1.closeCluster()
   }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
 });
diff --git a/test/integration/forwarding.js b/test/integration/forwarding.js
deleted file mode 100644
index 993247afc..000000000
--- a/test/integration/forwarding.js
+++ /dev/null
@@ -1,184 +0,0 @@
-"use strict";
-const should = require('should');
-const assert = require('assert');
-const async  = require('async');
-const _      = require('underscore');
-const co     = require('co');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const jspckg = require('../../package');
-const constants = require('../../app/lib/constants');
-
-require('../../app/modules/bma').BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
-
-if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
-  require('../../app/lib/logger').NewLogger().mute();
-}
-
-describe("Forwarding", function() {
-
-  describe("Nodes", function() {
-
-    const common = { currency: 'bb', nobma: false, bmaWithCrawler:true, ws2p: { upnp: false }, ipv4: '127.0.0.1', remoteipv4: '127.0.0.1', rootoffset: 0, sigQty: 1 };
-
-    const node1 = node('db_1', _({ upnp: false, httplogs: false, port: 9600, remoteport: 9600, pair: { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'} }).extend(common));
-    const node2 = node('db_2', _({ upnp: false, httplogs: false, port: 9601, remoteport: 9601, pair: { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'} }).extend(common));
-
-    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
-    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
-    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
-    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
-
-    before(() => co(function*(){
-      yield [node1, node2].map((theNode) => theNode.startTesting());
-      yield new Promise(function(resolve, reject){
-        async.waterfall([
-          function(next) {
-            node2.peering(next);
-          },
-          function(peer, next) {
-            node1.submitPeer(peer, function(err) {
-              next(err);
-            });
-          },
-          function(next) {
-            node1.peering(next);
-          },
-          function(peer, next) {
-            node2.submitPeer(peer, next);
-          }
-        ], function(err) {
-          err ? reject(err) : resolve();
-        });
-      });
-      yield [
-        node2.until('identity', 4),
-        node2.until('certification', 2),
-        node2.until('block', 1),
-        co(function *() {
-
-          // Self certifications
-          yield cat.createIdentity();
-          yield tac.createIdentity();
-          yield tic.createIdentity();
-          yield toc.createIdentity();
-          // Certifications
-          yield cat.cert(tac);
-          yield tac.cert(cat);
-          yield cat.join();
-          yield tac.join();
-          yield node1.commitP();
-        })
-      ];
-      yield [
-        node2.until('revocation', 1),
-        co(function *() {
-          yield cat.revoke();
-        })
-      ];
-    }));
-
-    describe("Testing technical API", function(){
-
-      it('Node1 should be up and running', node1.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-
-      it('Node2 should be up and running', node2.summary(function(summary, done){
-        should.exists(summary);
-        should.exists(summary.duniter);
-        should.exists(summary.duniter.software);
-        should.exists(summary.duniter.version);
-        assert.equal(summary.duniter.software, "duniter");
-        assert.equal(summary.duniter.version, jspckg.version);
-        done();
-      }));
-    });
-
-    describe('Node 1', doTests(node1));
-    describe('Node 2', doTests(node2));
-
-  });
-});
-
-function doTests(theNode) {
-
-  return function(){
-
-    describe("user cat", function(){
-
-      it('should give only 1 result', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('cat', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          should.exists(res.results[0].signed[0].isMember);
-          should.exists(res.results[0].signed[0].wasMember);
-          assert.equal(res.results[0].signed[0].isMember, true);
-          assert.equal(res.results[0].signed[0].wasMember, true);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    describe("user tac", function(){
-
-      it('should give only 1 result', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].uids[0].others.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-
-      it('should have sent 1 signature', theNode.lookup('tac', function(res, done){
-        try {
-          should.exists(res);
-          assert.equal(res.results[0].signed.length, 1);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }));
-    });
-
-    it('toc should give only 1 result', theNode.lookup('toc', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-
-    it('tic should give only 1 results', theNode.lookup('tic', function(res, done){
-      should.not.exists(res);
-      done();
-    }));
-  };
-}
diff --git a/test/integration/forwarding.ts b/test/integration/forwarding.ts
new file mode 100644
index 000000000..8adbba5ee
--- /dev/null
+++ b/test/integration/forwarding.ts
@@ -0,0 +1,136 @@
+import {NewLogger} from "../../app/lib/logger"
+import {BmaDependency} from "../../app/modules/bma/index"
+import {TestUser} from "./tools/TestUser"
+import {simpleTestingConf, simpleTestingServer, TestingServer} from "./tools/toolbox"
+import {RouterDependency} from "../../app/modules/router"
+
+require('should');
+const assert = require('assert');
+const jspckg = require('../../package');
+const constants = require('../../app/lib/constants');
+
+BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter
+
+if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
+  NewLogger().mute()
+}
+
+describe("Forwarding", function() {
+
+  describe("Nodes", function() {
+
+    const now = 1500000000
+    const conf1 = simpleTestingConf(now, { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'})
+    const conf2 = simpleTestingConf(now, { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'})
+
+    const node1 = simpleTestingServer(conf1)
+    const node2 = simpleTestingServer(conf2)
+
+    const cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
+    const tac = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
+    const tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node1);
+    const toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node1);
+
+    before(async () => {
+      await node1.initDalBmaConnections()
+      await node2.initDalBmaConnections()
+      await node1.sharePeeringWith(node2)
+      await node2.sharePeeringWith(node1)
+      RouterDependency.duniter.methods.routeToNetwork(node1._server)
+      RouterDependency.duniter.methods.routeToNetwork(node2._server)
+      await Promise.all([
+        node2.until('identity', 4),
+        node2.until('certification', 2),
+        node2.until('block', 1),
+        (async () => {
+
+          // Self certifications
+          await cat.createIdentity();
+          await tac.createIdentity();
+          await tic.createIdentity();
+          await toc.createIdentity();
+          // Certifications
+          await cat.cert(tac);
+          await tac.cert(cat);
+          await cat.join();
+          await tac.join();
+          await node1.commit({ time: now })
+        })()
+      ])
+      await Promise.all([
+        node2.until('revocation', 1),
+        cat.revoke()
+      ])
+    })
+
+    describe("Testing technical API", function(){
+
+      it('Node1 should be up and running', () => node1.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+
+      it('Node2 should be up and running', () => node2.expectThat('/node/summary', (summary:any) => {
+        should.exists(summary);
+        should.exists(summary.duniter);
+        should.exists(summary.duniter.software);
+        should.exists(summary.duniter.version);
+        assert.equal(summary.duniter.software, "duniter");
+        assert.equal(summary.duniter.version, jspckg.version);
+      }))
+    });
+
+    describe('Node 1', doTests(node1));
+    describe('Node 2', doTests(node2));
+
+  });
+});
+
+function doTests(theNode:TestingServer) {
+
+  return () => {
+
+    describe("user cat", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }));
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/cat', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+        should.exists(res.results[0].signed[0].isMember);
+        should.exists(res.results[0].signed[0].wasMember);
+        assert.equal(res.results[0].signed[0].isMember, true);
+        assert.equal(res.results[0].signed[0].wasMember, true);
+      }));
+    });
+
+    describe("user tac", () => {
+
+      it('should give only 1 result', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results.length, 1);
+      }))
+
+      it('should have 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].uids[0].others.length, 1);
+      }))
+
+      it('should have sent 1 signature', () => theNode.expectThat('/wot/lookup/tac', (res:any) => {
+        should.exists(res);
+        assert.equal(res.results[0].signed.length, 1);
+      }))
+    })
+
+    it('toc should give no result', () => theNode.expectError('/wot/lookup/toc', 404, 'No matching identity'))
+
+    it('tic should give no results', () => theNode.expectError('/wot/lookup/tic', 404, 'No matching identity'))
+  }
+}
-- 
GitLab


From c978338229d39cfaee5c54059e032bed4738bdb0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 16:00:12 +0100
Subject: [PATCH 08/80] [enh] Untrack some generated test files

---
 .eslintignore                       |   2 +
 .gitignore                          |   6 +
 test/integration/branches.js        |  59 --------
 test/integration/branches2.js       | 214 ----------------------------
 test/integration/branches_switch.js | 102 -------------
 5 files changed, 8 insertions(+), 375 deletions(-)
 delete mode 100644 test/integration/branches.js
 delete mode 100644 test/integration/branches2.js
 delete mode 100644 test/integration/branches_switch.js

diff --git a/.eslintignore b/.eslintignore
index d25a2f246..052737334 100644
--- a/.eslintignore
+++ b/.eslintignore
@@ -43,5 +43,7 @@ app/modules/bma/lib/entity/*.js
 app/modules/bma/lib/controllers/*.js
 app/modules/crawler/*.js
 app/modules/crawler/lib/*.js
+app/ProcessCpuProfiler.js
+app/lib/common/package.js
 test/*.js
 test/**/*.js
\ No newline at end of file
diff --git a/.gitignore b/.gitignore
index 0fdf9f4d6..9b656c465 100644
--- a/.gitignore
+++ b/.gitignore
@@ -56,6 +56,9 @@ test/integration/tools/TestUser.js*
 test/integration/tools/TestUser.d.ts
 test/integration/documents-currency.js*
 test/integration/documents-currency.d.ts
+test/integration/forwarding.js
+test/integration/branches_switch.js
+test/integration/branches2.js
 test/fast/modules/crawler/block_pulling.js*
 test/fast/modules/crawler/block_pulling.d.ts
 test/fast/fork*.js*
@@ -66,3 +69,6 @@ test/fast/modules/ws2p/*.js*
 test/fast/modules/ws2p/*.d.ts
 test/fast/modules/common/grammar.js*
 test/fast/modules/common/grammar.d.ts
+test/fast/prover/pow-1-cluster.d.ts
+test/fast/prover/pow-1-cluster.js
+test/fast/prover/pow-1-cluster.js.map
diff --git a/test/integration/branches.js b/test/integration/branches.js
deleted file mode 100644
index dadf77a17..000000000
--- a/test/integration/branches.js
+++ /dev/null
@@ -1,59 +0,0 @@
-"use strict";
-
-const _         = require('underscore');
-const co        = require('co');
-const should    = require('should');
-const duniter   = require('../../index');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const rp        = require('request-promise');
-const httpTest  = require('./tools/http');
-const shutDownEngine  = require('./tools/shutDownEngine');
-
-const expectAnswer   = httpTest.expectAnswer;
-
-const MEMORY_MODE = true;
-const commonConf = {
-  ipv4: '127.0.0.1',
-  currency: 'bb',
-  httpLogs: true,
-  forksize: 3,
-  sigQty: 1
-};
-
-let s1
-
-describe("Branches", () => co(function*() {
-
-  before(() => co(function*() {
-
-    s1 = duniter(
-      '/bb1',
-      MEMORY_MODE,
-      _.extend({
-        port: '7778',
-        pair: {
-          pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-          sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-        }
-      }, commonConf));
-
-    const server = yield s1.initWithDAL();
-    const bmapi = yield bma(server);
-    yield bmapi.openConnections();
-  }));
-
-  after(() => {
-    return shutDownEngine(s1)
-  })
-
-  describe("Server 1 /blockchain", function() {
-
-    it('should have a 3 blocks fork window size', function() {
-      return expectAnswer(rp('http://127.0.0.1:7778/node/summary', { json: true }), function(res) {
-        res.should.have.property('duniter').property('software').equal('duniter');
-        res.should.have.property('duniter').property('version').equal('1.6.14');
-        res.should.have.property('duniter').property('forkWindowSize').equal(3);
-      });
-    });
-  });
-}));
diff --git a/test/integration/branches2.js b/test/integration/branches2.js
deleted file mode 100644
index 18d055579..000000000
--- a/test/integration/branches2.js
+++ /dev/null
@@ -1,214 +0,0 @@
-"use strict";
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
-        step((generator = generator.apply(thisArg, _arguments || [])).next());
-    });
-};
-Object.defineProperty(exports, "__esModule", { value: true });
-const other_constants_1 = require("../../app/lib/other_constants");
-const logger_1 = require("../../app/lib/logger");
-const index_1 = require("../../app/modules/bma/index");
-const index_2 = require("../../app/modules/crawler/index");
-const toolbox_1 = require("./tools/toolbox");
-const TestUser_1 = require("./tools/TestUser");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = index_1.BmaDependency.duniter.methods.bma;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const expectHttpCode = httpTest.expectHttpCode;
-if (other_constants_1.OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS) {
-    logger_1.NewLogger().mute();
-}
-// Trace these errors
-process.on('unhandledRejection', (reason) => {
-    console.error('Unhandled rejection: ' + reason);
-    console.error(reason);
-});
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 10,
-    switchOnHeadAdvance: 6,
-    avgGenTime: 30 * 60,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-const now = Math.round(new Date().getTime() / 1000);
-describe("SelfFork", function () {
-    before(() => co(function* () {
-        s1 = duniter('/bb4', MEMORY_MODE, _.extend({
-            port: '7781',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            }
-        }, commonConf));
-        s2 = duniter('/bb5', MEMORY_MODE, _.extend({
-            port: '7782',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser_1.TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser_1.TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        const commitS1 = commit(s1);
-        const commitS2 = commit(s2, {
-            time: now + 37180
-        });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_1.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        // Server 1
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commitS1({
-            time: now
-        });
-        yield commitS1();
-        yield commitS1();
-        yield commitS1();
-        // Server 2
-        yield sync(0, 2, s1, s2);
-        yield toolbox_1.waitToHaveBlock(s2, 2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield commitS2();
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield Promise.all([
-            toolbox_1.waitForkResolution(s1, 9),
-            index_2.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey)
-        ]);
-    }));
-    after(() => {
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), {
-                number: 0,
-                issuersCount: 0,
-                issuersFrame: 1,
-                issuersFrameVar: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), {
-                number: 1,
-                issuersCount: 1,
-                issuersFrame: 1,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/2 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), {
-                number: 2,
-                issuersCount: 1,
-                issuersFrame: 2,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/3 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), {
-                number: 3,
-                issuersCount: 1,
-                issuersFrame: 3,
-                issuersFrameVar: 3
-            });
-        });
-        it('/block/4 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), {
-                number: 4,
-                issuersCount: 2,
-                issuersFrame: 4,
-                issuersFrameVar: 7
-            });
-        });
-        it('/block/5 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), {
-                number: 5,
-                issuersCount: 2,
-                issuersFrame: 5,
-                issuersFrameVar: 6
-            });
-        });
-        it('/block/6 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), {
-                number: 6,
-                issuersCount: 2,
-                issuersFrame: 6,
-                issuersFrameVar: 5
-            });
-        });
-        it('/block/7 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), {
-                number: 7,
-                issuersCount: 2,
-                issuersFrame: 7,
-                issuersFrameVar: 4
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 2 branch', () => __awaiter(this, void 0, void 0, function* () {
-            const branches = yield s1.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-    describe("Server 2 /blockchain", function () {
-        it('/block/0 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), {
-                number: 0
-            });
-        });
-        it('/block/1 should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), {
-                number: 1
-            });
-        });
-        it('/block/88 should not exist', function () {
-            return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88'));
-        });
-        it('/current should exist', function () {
-            return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), {
-                number: 9
-            });
-        });
-        it('should have 1 branch', () => co(function* () {
-            const branches = yield s2.BlockchainService.branches();
-            branches.should.have.length(1);
-        }));
-    });
-});
-//# sourceMappingURL=branches2.js.map
\ No newline at end of file
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
deleted file mode 100644
index d3c7eb1aa..000000000
--- a/test/integration/branches_switch.js
+++ /dev/null
@@ -1,102 +0,0 @@
-"use strict";
-Object.defineProperty(exports, "__esModule", { value: true });
-const index_1 = require("../../app/modules/crawler/index");
-const index_2 = require("../../app/modules/bma/index");
-const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const TestUser = require('./tools/TestUser').TestUser;
-const rp = require('request-promise');
-const httpTest = require('./tools/http');
-const commit = require('./tools/commit');
-const sync = require('./tools/sync');
-const cluster = require('cluster');
-const shutDownEngine = require('./tools/shutDownEngine');
-const expectJSON = httpTest.expectJSON;
-const MEMORY_MODE = true;
-const commonConf = {
-    ipv4: '127.0.0.1',
-    currency: 'bb',
-    httpLogs: true,
-    forksize: 30,
-    avgGenTime: 1,
-    sigQty: 1
-};
-let s1, s2, cat, toc;
-describe("Switch", function () {
-    before(() => co(function* () {
-        cluster.setMaxListeners(6);
-        s1 = duniter('/bb11', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7788',
-            pair: {
-                pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-                sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-            },
-            rootoffset: 10,
-            sigQty: 1, dt: 1, ud0: 120
-        }, commonConf));
-        s2 = duniter('/bb12', MEMORY_MODE, _.extend({
-            switchOnHeadAdvance: 0,
-            port: '7789',
-            pair: {
-                pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-                sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-            }
-        }, commonConf));
-        cat = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 });
-        toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 });
-        yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-        s1.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s1.conf));
-        s2.addEndpointsDefinitions(() => index_2.BmaDependency.duniter.methods.getMainEndpoint(s2.conf));
-        yield cat.createIdentity();
-        yield toc.createIdentity();
-        yield toc.cert(cat);
-        yield cat.cert(toc);
-        yield cat.join();
-        yield toc.join();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield sync(0, 2, s1, s2);
-        let s2p = yield s2.PeeringService.peer();
-        yield commit(s1)();
-        yield commit(s1)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        yield commit(s2)();
-        // So we now have:
-        // S1 01234
-        // S2   `3456789
-        yield s1.writePeer(s2p);
-        // Forking S1 from S2
-        yield index_1.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey);
-        // S1 should have switched to the other branch
-    }));
-    after(() => {
-        cluster.setMaxListeners(3);
-        return Promise.all([
-            shutDownEngine(s1),
-            shutDownEngine(s2)
-        ]);
-    });
-    describe("Server 1 /blockchain", function () {
-        it('/block/8 should exist on S1', function () {
-            return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-        it('/block/8 should exist on S2', function () {
-            return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), {
-                number: 8
-            });
-        });
-    });
-});
-//# sourceMappingURL=branches_switch.js.map
\ No newline at end of file
-- 
GitLab


From 7a566dd2016f0da47b66b72fd68680950a026f82 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 16:02:24 +0100
Subject: [PATCH 09/80] [fix] tests: remove unhandledRejection EventEmitter
 leaks

---
 index.ts | 12 ++++++------
 1 file changed, 6 insertions(+), 6 deletions(-)

diff --git a/index.ts b/index.ts
index 6ce4dab8d..e7e6a32e0 100644
--- a/index.ts
+++ b/index.ts
@@ -30,6 +30,12 @@ const pluginDependency    = require('./app/modules/plugin');
 
 let sigintListening = false
 
+// Trace errors
+process.on('unhandledRejection', (reason) => {
+  logger.error('Unhandled rejection: ' + reason);
+  logger.error(reason);
+});
+
 class Stacks {
 
   static todoOnRunDone:() => any = () => process.exit()
@@ -441,12 +447,6 @@ class Stack {
 
   executeStack(argv:string[]) {
 
-    // Trace these errors
-    process.on('unhandledRejection', (reason) => {
-      logger.error('Unhandled rejection: ' + reason);
-      logger.error(reason);
-    });
-
     // Executes the command
     return this.cli.execute(argv);
   }
-- 
GitLab


From 6f01a44935ffaa5f5d699051cc7007b07ff44098 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 17 Dec 2017 16:53:32 +0100
Subject: [PATCH 10/80] [fix] #1234 Remove ARM specific code

---
 app/modules/prover/lib/engine.ts | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index 30585270a..2537c5921 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -33,16 +33,10 @@ export class PowEngine {
   }
 
   async prove(stuff:any) {
-        if (this.cluster.hasProofPending) {
-          await this.cluster.cancelWork()
-        }
-    
-        const cpus = os.cpus()
-    
-        if (os.arch().match(/arm/) || cpus[0].model.match(/Atom/)) {
-          stuff.newPoW.conf.nbCores /= 2; // Make sure that only once each physical core is used (for Hyperthreading).
-        }
-        return await this.cluster.proveByWorkers(stuff)
+    if (this.cluster.hasProofPending) {
+      await this.cluster.cancelWork()
+    }
+    return await this.cluster.proveByWorkers(stuff)
   }
 
   cancel() {
-- 
GitLab


From 5d5dbb194226fc8c545ecf5ae2c38848824f1599 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sun, 17 Dec 2017 16:00:47 +0000
Subject: [PATCH 11/80] try to exec github sync in redshift runner

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ddd9044a7..ec3727046 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -11,7 +11,7 @@ push_to_github:
     variables:
         GIT_STRATEGY: none
     tags:
-        - github
+        - redshift
     before_script:
         - ''
     script:
@@ -31,7 +31,7 @@ enforce_readme:
     variables:
         GIT_STRATEGY: none
     tags:
-        - github
+        - redshift
     before_script:
         - ''
     script:
-- 
GitLab


From 3bb0aa73b2db5e96d5c2417c2e2466a374025bfe Mon Sep 17 00:00:00 2001
From: cgeek <cem.moreau@gmail.com>
Date: Sat, 23 Dec 2017 19:31:49 +0100
Subject: [PATCH 12/80] [fix] #1234 Synchronize workers on each PoW task

---
 app/modules/prover/lib/PowWorker.ts       |  95 +++++
 app/modules/prover/lib/engine.ts          |   4 +-
 app/modules/prover/lib/permanentProver.ts |   2 +-
 app/modules/prover/lib/powCluster.ts      | 159 ++++----
 app/modules/prover/lib/proof.ts           | 467 +++++++++++-----------
 5 files changed, 404 insertions(+), 323 deletions(-)
 create mode 100644 app/modules/prover/lib/PowWorker.ts

diff --git a/app/modules/prover/lib/PowWorker.ts b/app/modules/prover/lib/PowWorker.ts
new file mode 100644
index 000000000..fd225941a
--- /dev/null
+++ b/app/modules/prover/lib/PowWorker.ts
@@ -0,0 +1,95 @@
+import {Querable} from "./permanentProver"
+
+const querablep = require('querablep')
+
+/*********
+ *
+ * PoW worker
+ * ----------
+ *
+ * Its model is super simple: we ask him to find a proof, and we can wait for it.
+ * Eventually, we can tell him to cancel his proof, which makes it answer `null` as proof value.
+ *
+ * The worker also provides two properties:
+ *
+ * - `worker.online`: a promise which is resolved when the worker gets « online Â» for the first time
+ * - `worker.exit`: a promise which is resolved when the worker exits (which occurs when the worker is being closed or killed)
+ *
+ ********/
+
+export class PowWorker {
+
+  private onlinePromise:Promise<void>
+  private onlineResolver:()=>void
+
+  private exitPromise:Promise<void>
+  private exitResolver:()=>void
+
+  private proofPromise:Querable<{ message: { answer:any }}|null>
+  private proofResolver:(proof:{ message: { answer:any }}|null)=>void
+
+  private messageHandler:((worker:any, msg:any)=>void)
+
+  constructor(
+    private nodejsWorker:any,
+    private onPowMessage:(message:any)=>void,
+    private onlineHandler:()=>void,
+    private exitHandler:(code:any, signal:any)=>void) {
+
+    // Handle "online" promise
+    this.onlinePromise = new Promise(res => this.onlineResolver = res)
+    nodejsWorker.on('online', () => {
+      this.onlineHandler()
+      this.onlineResolver()
+    })
+
+    // Handle "exit" promise
+    this.exitPromise = new Promise(res => this.exitResolver = res)
+    nodejsWorker.on('exit', (code:any, signal:any) => {
+      this.exitHandler(code, signal)
+      this.exitResolver()
+    })
+
+    nodejsWorker.on('message', (message:any) => {
+      if (message) {
+        this.onPowMessage(message)
+      }
+      if (this.proofPromise && message.uuid && !this.proofPromise.isResolved() && this.proofResolver) {
+        const result:{ message: { answer:any }}|null = message ? { message } : null
+        this.proofResolver(result)
+      }
+    })
+  }
+
+  get online() {
+    return this.onlinePromise
+  }
+
+  get exited() {
+    return this.exitPromise
+  }
+
+  get pid() {
+    return this.nodejsWorker.process.pid
+  }
+
+  askProof(commandMessage:{ uuid:string, command:string, value:any }) {
+    this.proofPromise = querablep(new Promise<{ message: { answer:any }}|null>(res => this.proofResolver = res))
+    this.nodejsWorker.send(commandMessage)
+    return this.proofPromise
+  }
+
+  sendConf(confMessage:{ command:string, value:any }) {
+    this.nodejsWorker.send(confMessage)
+  }
+
+  sendCancel() {
+    this.nodejsWorker.send({
+      command: 'cancel'
+    })
+  }
+
+  kill() {
+    this.nodejsWorker.kill()
+  }
+}
\ No newline at end of file
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index 2537c5921..6ab1ca645 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -33,9 +33,7 @@ export class PowEngine {
   }
 
   async prove(stuff:any) {
-    if (this.cluster.hasProofPending) {
-      await this.cluster.cancelWork()
-    }
+    await this.cluster.cancelWork()
     return await this.cluster.proveByWorkers(stuff)
   }
 
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index 7ef75a358..d5c4fefed 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -9,7 +9,7 @@ import {Server} from "../../../../server"
 
 const querablep = require('querablep');
 
-interface Querable<T> extends Promise<T> {
+export interface Querable<T> extends Promise<T> {
   isFulfilled(): boolean
   isResolved(): boolean
   isRejected(): boolean
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index cd9c7c239..d0c64c4e4 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -1,5 +1,7 @@
 import {ConfDTO} from "../../../lib/dto/ConfDTO"
 import {ProverConstants} from "./constants"
+import {createPowWorker} from "./proof"
+import {PowWorker} from "./PowWorker"
 
 const _ = require('underscore')
 const nuuid = require('node-uuid');
@@ -9,6 +11,13 @@ const querablep = require('querablep')
 let clusterId = 0
 cluster.setMaxListeners(3)
 
+export interface SlaveWorker {
+  worker:PowWorker,
+  index:number,
+  online:Promise<void>,
+  nonceBeginning:number
+}
+
 /**
  * Cluster controller, handles the messages between the main program and the PoW cluster.
  */
@@ -18,15 +27,14 @@ export class Master {
 
   clusterId:number
   currentPromise:any|null = null
-  slaves:any[] = []
-  slavesMap:any = {}
+  slaves:SlaveWorker[] = []
+  slavesMap:{
+    [k:number]: SlaveWorker|null
+  } = {}
   conf:any = {}
   logger:any
   onInfoCallback:any
   workersOnline:Promise<any>[]
-  private exitHandler: (worker: any, code: any, signal: any) => void
-  private onlineHandler: (worker: any) => void
-  private messageHandler: (worker: any, msg: any) => void
 
   constructor(private nbCores:number, logger:any) {
     this.clusterId = clusterId++
@@ -34,53 +42,23 @@ export class Master {
     this.onInfoMessage = (message:any) => {
       this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`)
     }
-
-    this.exitHandler = (worker:any, code:any, signal:any) => {
-      this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`)
-    }
-
-    this.onlineHandler = (worker:any) => {
-      // We just listen to the workers of this Master
-      if (this.slavesMap[worker.id]) {
-        this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`)
-        this.slavesMap[worker.id].online.extras.resolve()
-        worker.send({
-          command: 'conf',
-          value: this.conf
-        })
-      }
-    }
-
-    this.messageHandler = (worker:any, msg:any) => {
-      // Message for this cluster
-      if (this.slavesMap[worker.id]) {
-        this.onWorkerMessage(worker, msg)
-      }
-    }
   }
 
   get nbWorkers() {
     return this.slaves.length
   }
 
-  get hasProofPending() {
-    return !!this.currentPromise
-  }
-
   set onInfoMessage(callback:any) {
     this.onInfoCallback = callback
   }
 
-  onWorkerMessage(worker:any, message:any) {
+  onWorkerMessage(workerIndex:number, message:any) {
     // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`)
-    if (message.pow && message.pow.pow) {
+    if (message && message.pow) {
       this.onInfoCallback && this.onInfoCallback(message)
     }
-    if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) {
-      this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`)
-      this.currentPromise.extras.resolve(message.answer)
-      // Stop the slaves' current work
-      this.cancelWork()
+    if (this.currentPromise && message.uuid && !this.currentPromise.isResolved() && message.answer) {
+      this.logger.info(`ENGINE c#${this.clusterId}#${workerIndex} HAS FOUND A PROOF #${message.answer.pow.pow}`)
     } else if (message.canceled) {
       this.nbCancels++
     }
@@ -94,13 +72,26 @@ export class Master {
   initCluster() {
     // Setup master
     cluster.setupMaster({
-      exec: __filename
+      exec: __filename,
+      execArgv: [] // Do not try to debug forks
     })
 
     this.slaves = Array.from({ length: this.nbCores }).map((value, index) => {
-      const worker = cluster.fork()
-      this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`)
-      this.slavesMap[worker.id] = {
+      const nodejsWorker = cluster.fork()
+      const worker = new PowWorker(nodejsWorker, message => {
+        this.onWorkerMessage(index, message)
+      }, () => {
+        this.logger.info(`[online] worker c#${this.clusterId}#w#${index}`)
+        worker.sendConf({
+          command: 'conf',
+          value: this.conf
+        })
+      }, (code:any, signal:any) => {
+        this.logger.info(`worker ${worker.pid} died with code ${code} and signal ${signal}`)
+      })
+
+      this.logger.info(`Creating worker c#${this.clusterId}#w#${nodejsWorker.id}`)
+      const slave = {
 
         // The Node.js worker
         worker,
@@ -109,24 +100,16 @@ export class Master {
         index,
 
         // Worker ready
-        online: (function onlinePromise() {
-          let resolve
-          const p = querablep(new Promise(res => resolve = res))
-          p.extras = { resolve }
-          return p
-        })(),
+        online: worker.online,
 
         // Each worker has his own chunk of possible nonces
         nonceBeginning: this.nbCores === 1 ? 0 : (index + 1) * ProverConstants.NONCE_RANGE
       }
-      return this.slavesMap[worker.id]
+      this.slavesMap[nodejsWorker.id] = slave
+      return slave
     })
 
-    cluster.on('exit', this.exitHandler)
-    cluster.on('online', this.onlineHandler)
-    cluster.on('message', this.messageHandler)
-
-    this.workersOnline = this.slaves.map((s:any) => s.online)
+    this.workersOnline = this.slaves.map((s) => s.online)
     return Promise.all(this.workersOnline)
   }
 
@@ -135,7 +118,7 @@ export class Master {
     this.conf.cpu = conf.cpu || this.conf.cpu
     this.conf.prefix = this.conf.prefix || conf.prefix
     this.slaves.forEach(s => {
-      s.worker.send({
+      s.worker.sendConf({
         command: 'conf',
         value: this.conf
       })
@@ -143,41 +126,26 @@ export class Master {
     return Promise.resolve(_.clone(conf))
   }
 
-  cancelWork() {
-    this.logger.info(`Cancelling the work on PoW cluster of %s slaves`, this.slaves.length)
+  private cancelWorkersWork() {
     this.slaves.forEach(s => {
-      s.worker.send({
-        command: 'cancel'
-      })
+      s.worker.sendCancel()
     })
+  }
 
-    // Eventually force the end of current promise
-    if (this.currentPromise && !this.currentPromise.isFulfilled()) {
-      this.currentPromise.extras.resolve(null)
-    }
-
+  async cancelWork() {
+    this.cancelWorkersWork()
+    const workEnded = this.currentPromise
     // Current promise is done
     this.currentPromise = null
-
-    return Promise.resolve()
-  }
-
-  newPromise(uuid:string) {
-    let resolve
-    const p = querablep(new Promise(res => resolve = res))
-    p.extras = { resolve, uuid }
-    return p
+    return await workEnded
   }
 
   async shutDownWorkers() {
     if (this.workersOnline) {
       await Promise.all(this.workersOnline)
-      await Promise.all(this.slaves.map(async (s:any) => {
+      await Promise.all(this.slaves.map(async (s) => {
         s.worker.kill()
       }))
-      cluster.removeListener('exit', this.exitHandler)
-      cluster.removeListener('online', this.onlineHandler)
-      cluster.removeListener('message', this.messageHandler)
     }
     this.slaves = []
   }
@@ -191,9 +159,7 @@ export class Master {
 
     // Register the new proof uuid
     const uuid = nuuid.v4()
-    this.currentPromise = this.newPromise(uuid)
-
-    return (async () => {
+    this.currentPromise = querablep((async () => {
       await Promise.all(this.workersOnline)
 
       if (!this.currentPromise) {
@@ -202,8 +168,8 @@ export class Master {
       }
 
       // Start the salves' job
-      this.slaves.forEach((s:any, index) => {
-        s.worker.send({
+      const asks = this.slaves.map(async (s, index) => {
+        const proof = await s.worker.askProof({
           uuid,
           command: 'newPoW',
           value: {
@@ -222,10 +188,29 @@ export class Master {
             }
           }
         })
+        this.logger.info(`[done] worker c#${this.clusterId}#w#${index}`)
+        return {
+          workerID: index,
+          proof
+        }
       })
 
-      return await this.currentPromise
-    })()
+      // Find a proof
+      const result = await Promise.race(asks)
+      this.cancelWorkersWork()
+      // Wait for all workers to have stopped looking for a proof
+      await Promise.all(asks)
+
+      if (!result.proof || !result.proof.message.answer) {
+        this.logger.info('No engine found the proof. It was probably cancelled.')
+        return null
+      } else {
+        this.logger.info(`ENGINE c#${this.clusterId}#${result.workerID} HAS FOUND A PROOF #${result.proof.message.answer.pow.pow}`)
+        return result.proof.message.answer
+      }
+    })())
+
+    return this.currentPromise
   }
 
   static defaultLogger() {
@@ -250,5 +235,5 @@ if (cluster.isMaster) {
     process.exit(0)
   });
 
-  require('./proof')
+  createPowWorker()
 }
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 407c7a965..546bf676a 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -11,295 +11,298 @@ import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler"
 const moment = require('moment');
 const querablep = require('querablep');
 
-let computing = querablep(Promise.resolve(null));
-let askedStop = false;
+export function createPowWorker() {
 
-// By default, we do not prefix the PoW by any number
-let prefix = 0;
+  let computing = querablep(Promise.resolve(null));
+  let askedStop = false;
 
-let signatureFunc:any, lastSecret:any, currentCPU = 1;
+// By default, we do not prefix the PoW by any number
+  let prefix = 0;
 
-process.on('uncaughtException', (err:any) => {
-  console.error(err.stack || Error(err))
-  if (process.send) {
-    process.send({error: err});
-  } else {
-    throw Error('process.send() is not defined')
-  }
-});
+  let signatureFunc:any, lastSecret:any, currentCPU = 1;
 
-process.on('unhandledRejection', () => {
-  process.exit()
-})
+  process.on('uncaughtException', (err:any) => {
+    console.error(err.stack || Error(err))
+    if (process.send) {
+      process.send({error: err});
+    } else {
+      throw Error('process.send() is not defined')
+    }
+  });
 
-process.on('message', async (message) => {
+  process.on('unhandledRejection', () => {
+    process.exit()
+  })
 
-  switch (message.command) {
+  process.on('message', async (message) => {
 
-    case 'newPoW':
-      (async () => {
-        askedStop = true
+    switch (message.command) {
 
-        // Very important: do not await if the computation is already done, to keep the lock on JS engine
-        if (!computing.isFulfilled()) {
-          await computing;
-        }
+      case 'newPoW':
+        (async () => {
+          askedStop = true
 
-        const res = await beginNewProofOfWork(message.value);
-        answer(message, res);
-      })()
-      break;
+          // Very important: do not await if the computation is already done, to keep the lock on JS engine
+          if (!computing.isFulfilled()) {
+            await computing;
+          }
 
-    case 'cancel':
-      if (!computing.isFulfilled()) {
-        askedStop = true;
-      }
-      break;
+          const res = await beginNewProofOfWork(message.value);
+          answer(message, res);
+        })()
+        break;
 
-    case 'conf':
-      if (message.value.cpu !== undefined) {
-        currentCPU = message.value.cpu
-      }
-      if (message.value.prefix !== undefined) {
-        prefix = message.value.prefix
-      }
-      answer(message, { currentCPU, prefix });
-      break;
-  }
+      case 'cancel':
+        if (!computing.isFulfilled()) {
+          askedStop = true;
+        }
+        break;
 
-})
-
-function beginNewProofOfWork(stuff:any) {
-  askedStop = false;
-  computing = querablep((async () => {
-
-    /*****************
-     * PREPARE POW STUFF
-     ****************/
-
-    let nonce = 0;
-    const maxDuration = stuff.maxDuration || 1000
-    const conf = stuff.conf;
-    const block = stuff.block;
-    const nonceBeginning = stuff.nonceBeginning;
-    const nbZeros = stuff.zeros;
-    const pair = stuff.pair;
-    const forcedTime = stuff.forcedTime;
-    currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
-    prefix = parseInt(conf.prefix || prefix)
-    if (prefix && prefix < ProverConstants.NONCE_RANGE) {
-      prefix *= 100 * ProverConstants.NONCE_RANGE
-    }
-    const highMark = stuff.highMark;
-    let sigFunc = null;
-    if (signatureFunc && lastSecret === pair.sec) {
-      sigFunc = signatureFunc;
-    }
-    else {
-      lastSecret = pair.sec;
-      sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      case 'conf':
+        if (message.value.cpu !== undefined) {
+          currentCPU = message.value.cpu
+        }
+        if (message.value.prefix !== undefined) {
+          prefix = message.value.prefix
+        }
+        answer(message, { currentCPU, prefix });
+        break;
     }
-    signatureFunc = sigFunc;
-    let pow = "", sig = "", raw = "";
 
-    /*****************
-     * GO!
-     ****************/
+  })
+
+  function beginNewProofOfWork(stuff:any) {
+    askedStop = false;
+    computing = querablep((async () => {
 
-    let pausePeriod = 1;
-    let testsCount = 0;
-    let found = false;
-    let turn = 0;
-    const profiler = new ProcessCpuProfiler(100)
-    let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
-    // We limit the number of tests according to CPU usage
-    let testsPerRound = stuff.initialTestsPerRound || 1
-    let turnDuration = 20 // We initially goes quickly to the max speed = 50 reevaluations per second (1000 / 20)
+      /*****************
+       * PREPARE POW STUFF
+       ****************/
 
-    while (!found && !askedStop) {
+      let nonce = 0;
+      const maxDuration = stuff.maxDuration || 1000
+      const conf = stuff.conf;
+      const block = stuff.block;
+      const nonceBeginning = stuff.nonceBeginning;
+      const nbZeros = stuff.zeros;
+      const pair = stuff.pair;
+      const forcedTime = stuff.forcedTime;
+      currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU;
+      prefix = parseInt(conf.prefix || prefix)
+      if (prefix && prefix < ProverConstants.NONCE_RANGE) {
+        prefix *= 100 * ProverConstants.NONCE_RANGE
+      }
+      const highMark = stuff.highMark;
+      let sigFunc = null;
+      if (signatureFunc && lastSecret === pair.sec) {
+        sigFunc = signatureFunc;
+      }
+      else {
+        lastSecret = pair.sec;
+        sigFunc = (msg:string) => KeyGen(pair.pub, pair.sec).signSync(msg)
+      }
+      signatureFunc = sigFunc;
+      let pow = "", sig = "", raw = "";
 
       /*****************
-       * A TURN ~ 100ms
+       * GO!
        ****************/
 
-      await Promise.race([
+      let pausePeriod = 1;
+      let testsCount = 0;
+      let found = false;
+      let turn = 0;
+      const profiler = new ProcessCpuProfiler(100)
+      let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1)
+      // We limit the number of tests according to CPU usage
+      let testsPerRound = stuff.initialTestsPerRound || 1
+      let turnDuration = 20 // We initially go quickly to the max speed = 50 reevaluations per second (1000 / 20)
 
-        // I. Stop the turn if it exceeds `turnDuration` ms
-        countDown(turnDuration),
+      while (!found && !askedStop) {
 
-        // II. Process the turn's PoW
-        (async () => {
+        /*****************
+         * A TURN ~ 100ms
+         ****************/
 
-            // Prove
-          let i = 0;
-          const thisTurn = turn;
+        await Promise.race([
 
-          // Time is updated regularly during the proof
-          block.time = getBlockTime(block, conf, forcedTime)
-          if (block.number === 0) {
-            block.medianTime = block.time
-          }
-          block.inner_hash = getBlockInnerHash(block);
+          // I. Stop the turn if it exceeds `turnDuration` ms
+          countDown(turnDuration),
 
-          /*****************
-           * Iterations of a turn
-           ****************/
+          // II. Process the turn's PoW
+          (async () => {
 
-          while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
+            // Prove
+            let i = 0;
+            const thisTurn = turn;
 
-            // Nonce change (what makes the PoW change if the time field remains the same)
-            nonce++
+            // Time is updated regularly during the proof
+            block.time = getBlockTime(block, conf, forcedTime)
+            if (block.number === 0) {
+              block.medianTime = block.time
+            }
+            block.inner_hash = getBlockInnerHash(block);
 
             /*****************
-             * A PROOF OF WORK
+             * Iterations of a turn
              ****************/
 
-            // The final nonce is composed of 3 parts
-            block.nonce = prefix + nonceBeginning + nonce
-            raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
-            sig = dos2unix(sigFunc(raw))
-            pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
+            while(!found && i < testsPerRound && thisTurn === turn && !askedStop) {
 
-            /*****************
-             * Check the POW result
-             ****************/
+              // Nonce change (what makes the PoW change if the time field remains the same)
+              nonce++
 
-            let j = 0, charOK = true;
-            while (j < nbZeros && charOK) {
-              charOK = pow[j] === '0';
-              j++;
-            }
-            if (charOK) {
-              found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
-            }
-            if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
-              pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
-            }
+              /*****************
+               * A PROOF OF WORK
+               ****************/
 
-            /*****************
-             * - Update local vars
-             * - Allow to receive stop signal
-             ****************/
+              // The final nonce is composed of 3 parts
+              block.nonce = prefix + nonceBeginning + nonce
+              raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n")
+              sig = dos2unix(sigFunc(raw))
+              pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase()
 
-            if (!found && !askedStop) {
-              i++;
-              testsCount++;
-              if (i % pausePeriod === 0) {
-                await countDown(0); // Very low pause, just the time to process eventual end of the turn
+              /*****************
+               * Check the POW result
+               ****************/
+
+              let j = 0, charOK = true;
+              while (j < nbZeros && charOK) {
+                charOK = pow[j] === '0';
+                j++;
+              }
+              if (charOK) {
+                found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']')))
+              }
+              if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) {
+                pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
               }
-            }
-          }
 
-          /*****************
-           * Check the POW result
-           ****************/
-          if (!found) {
-
-            // CPU speed recording
-            if (turn > 0) {
-              cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
-              if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
-                let powVariationFactor
-                // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses
-                if (currentCPU > cpuUsage) {
-                  powVariationFactor = 1.01
-                  testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor))
-                } else {
-                  powVariationFactor = 0.99
-                  testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
+              /*****************
+               * - Update local vars
+               * - Allow to receive stop signal
+               ****************/
+
+              if (!found && !askedStop) {
+                i++;
+                testsCount++;
+                if (i % pausePeriod === 0) {
+                  await countDown(0); // Very low pause, just the time to process eventual end of the turn
                 }
-                pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND)
               }
             }
 
             /*****************
-             * UNLOAD CPU CHARGE FOR THIS TURN
+             * Check the POW result
              ****************/
-            // We wait for a maximum time of `turnDuration`.
-            // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
-            // just does nothing: this gives of a bit of breath to the CPU. Tthe amount of "breath" depends on the "cpu"
-            // parameter.
-            await countDown(turnDuration);
-          }
-        })()
-      ]);
+            if (!found) {
+
+              // CPU speed recording
+              if (turn > 0) {
+                cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration)
+                if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) {
+                  let powVariationFactor
+                  // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses
+                  if (currentCPU > cpuUsage) {
+                    powVariationFactor = 1.01
+                    testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor))
+                  } else {
+                    powVariationFactor = 0.99
+                    testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor))
+                  }
+                  pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND)
+                }
+              }
 
-      // Next turn
-      turn++
+              /*****************
+               * UNLOAD CPU CHARGE FOR THIS TURN
+               ****************/
+              // We wait for a maximum time of `turnDuration`.
+              // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script
+              // just does nothing: this gives a bit of breath to the CPU. The amount of "breath" depends on the "cpu"
+              // parameter.
+              await countDown(turnDuration);
+            }
+          })()
+        ]);
 
-      turnDuration += 1
-      turnDuration = Math.min(turnDuration, maxDuration) // Max 1 second per turn
-    }
+        // Next turn
+        turn++
 
-    /*****************
-     * POW IS OVER
-     * -----------
-     *
-     * We either have found a valid POW or a stop event has been detected.
-     ****************/
+        turnDuration += 1
+        turnDuration = Math.min(turnDuration, maxDuration) // Max 1 second per turn
+      }
 
-    if (askedStop) {
+      /*****************
+       * POW IS OVER
+       * -----------
+       *
+       * We either have found a valid POW or a stop event has been detected.
+       ****************/
 
-      // PoW stopped
-      askedStop = false;
-      pSend({ canceled: true })
-      return null
+      if (askedStop) {
 
-    } else {
+        // PoW stopped
+        askedStop = false;
+        pSend({ canceled: true })
+        return null
 
-      // PoW success
-      block.hash = pow
-      block.signature = sig
-      return {
-        pow: {
-          block: block,
-          testsCount: testsCount,
-          pow: pow
+      } else {
+
+        // PoW success
+        block.hash = pow
+        block.signature = sig
+        return {
+          pow: {
+            block: block,
+            testsCount: testsCount,
+            pow: pow
+          }
         }
       }
-    }
-  })())
+    })())
 
-  return computing;
-}
+    return computing;
+  }
 
-function countDown(duration:number) {
-  return new Promise((resolve) => setTimeout(resolve, duration));
-}
+  function countDown(duration:number) {
+    return new Promise((resolve) => setTimeout(resolve, duration));
+  }
 
-function getBlockInnerHash(block:DBBlock) {
-  const raw = rawer.getBlockInnerPart(block);
-  return hashf(raw)
-}
+  function getBlockInnerHash(block:DBBlock) {
+    const raw = rawer.getBlockInnerPart(block);
+    return hashf(raw)
+  }
 
-function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
-  if (forcedTime) {
-    return forcedTime;
+  function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) {
+    if (forcedTime) {
+      return forcedTime;
+    }
+    const now = moment.utc().unix();
+    const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
+    const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
+    const medianTime = block.medianTime;
+    const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
+    return Math.max(medianTime, upperBound);
   }
-  const now = moment.utc().unix();
-  const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf);
-  const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
-  const medianTime = block.medianTime;
-  const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
-  return Math.max(medianTime, upperBound);
-}
 
-function answer(message:any, theAnswer:any) {
-  return pSend({
-    uuid: message.uuid,
-    answer: theAnswer
-  })
-}
+  function answer(message:any, theAnswer:any) {
+    return pSend({
+      uuid: message.uuid,
+      answer: theAnswer
+    })
+  }
 
-function pSend(stuff:any) {
-  return new Promise(function (resolve, reject) {
-    if (process.send) {
-      process.send(stuff, function (error:any) {
-        !error && resolve();
-        error && reject();
-      })
-    } else {
-      reject('process.send() is not defined')
-    }
-  });
+  function pSend(stuff:any) {
+    return new Promise(function (resolve, reject) {
+      if (process.send) {
+        process.send(stuff, function (error:any) {
+          !error && resolve();
+          error && reject();
+        })
+      } else {
+        reject('process.send() is not defined')
+      }
+    });
+  }
 }
-- 
GitLab


From 155c98a7496e26e7fb7864bcaeeabf23cf92d89b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Thu, 28 Dec 2017 00:01:06 +0100
Subject: [PATCH 13/80] [fix] #1234 Need at least 1ms for PoW pauses

---
 app/modules/prover/lib/proof.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index 546bf676a..c08fde11e 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -187,7 +187,7 @@ export function createPowWorker() {
                 i++;
                 testsCount++;
                 if (i % pausePeriod === 0) {
-                  await countDown(0); // Very low pause, just the time to process eventual end of the turn
+                  await countDown(1); // Very low pause, just the time to process eventual end of the turn
                 }
               }
             }
-- 
GitLab


From b2bdc42aa9762cf361fce8a22544d223bc9776fd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?St=C3=A9phane=20Veyret?= <sveyret@axway.com>
Date: Tue, 19 Dec 2017 20:10:51 +0100
Subject: [PATCH 14/80] Change Vagrant with docker for building Debian

---
 .gitignore                            |   1 +
 release/arch/debian/Vagrantfile       |  72 -------------
 release/arch/debian/bootstrap.sh      |  12 ---
 release/arch/debian/build-deb.sh      | 145 ++++++++++++--------------
 release/arch/debian/user-bootstrap.sh |  12 ---
 release/scripts/build.sh              |   7 +-
 6 files changed, 70 insertions(+), 179 deletions(-)
 delete mode 100644 release/arch/debian/Vagrantfile
 delete mode 100644 release/arch/debian/bootstrap.sh
 delete mode 100644 release/arch/debian/user-bootstrap.sh

diff --git a/.gitignore b/.gitignore
index 0fdf9f4d6..29af0728e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -23,6 +23,7 @@ vagrant/duniter
 *.tar.gz
 *.log
 *.exe
+/release/arch/debian/duniter-source
 
 # vscode
 .vscode
diff --git a/release/arch/debian/Vagrantfile b/release/arch/debian/Vagrantfile
deleted file mode 100644
index da912f7fb..000000000
--- a/release/arch/debian/Vagrantfile
+++ /dev/null
@@ -1,72 +0,0 @@
-# -*- mode: ruby -*-
-# vi: set ft=ruby :
-
-# All Vagrant configuration is done below. The "2" in Vagrant.configure
-# configures the configuration version (we support older styles for
-# backwards compatibility). Please don't change it unless you know what
-# you're doing.
-Vagrant.configure("2") do |config|
-  # The most common configuration options are documented and commented below.
-  # For a complete reference, please see the online documentation at
-  # https://docs.vagrantup.com.
-
-  # Every Vagrant development environment requires a box. You can search for
-  # boxes at https://atlas.hashicorp.com/search.
-  config.vm.box = "https://s3.eu-central-1.amazonaws.com/duniter/vagrant/duniter_trusty64.box"
-  config.vm.provision :shell, path: "bootstrap.sh"
-
-  # Disable automatic box update checking. If you disable this, then
-  # boxes will only be checked for updates when the user runs
-  # `vagrant box outdated`. This is not recommended.
-  # config.vm.box_check_update = false
-
-  # Create a forwarded port mapping which allows access to a specific port
-  # within the machine from a port on the host machine. In the example below,
-  # accessing "localhost:8080" will access port 80 on the guest machine.
-  # config.vm.network "forwarded_port", guest: 80, host: 8080
-
-  # Create a private network, which allows host-only access to the machine
-  # using a specific IP.
-  # config.vm.network "private_network", ip: "192.168.33.10"
-
-  # Create a public network, which generally matched to bridged network.
-  # Bridged networks make the machine appear as another physical device on
-  # your network.
-  # config.vm.network "public_network"
-
-  # Share an additional folder to the guest VM. The first argument is
-  # the path on the host to the actual folder. The second argument is
-  # the path on the guest to mount the folder. And the optional third
-  # argument is a set of non-required options.
-  # config.vm.synced_folder "../data", "/vagrant_data"
-
-  # Provider-specific configuration so you can fine-tune various
-  # backing providers for Vagrant. These expose provider-specific options.
-  # Example for VirtualBox:
-  #
-   config.vm.provider "virtualbox" do |vb|
-     # Display the VirtualBox GUI when booting the machine
-     #vb.gui = true
-  
-     # Customize the amount of memory on the VM:
-     vb.memory = "2048"
-   end
-  #
-  # View the documentation for the provider you are using for more
-  # information on available options.
-
-  # Define a Vagrant Push strategy for pushing to Atlas. Other push strategies
-  # such as FTP and Heroku are also available. See the documentation at
-  # https://docs.vagrantup.com/v2/push/atlas.html for more information.
-  # config.push.define "atlas" do |push|
-  #   push.app = "YOUR_ATLAS_USERNAME/YOUR_APPLICATION_NAME"
-  # end
-
-  # Enable provisioning with a shell script. Additional provisioners such as
-  # Puppet, Chef, Ansible, Salt, and Docker are also available. Please see the
-  # documentation for more information about their specific syntax and use.
-  # config.vm.provision "shell", inline: <<-SHELL
-  #   apt-get update
-  #   apt-get install -y apache2
-  # SHELL
-end
diff --git a/release/arch/debian/bootstrap.sh b/release/arch/debian/bootstrap.sh
deleted file mode 100644
index 6666f97b5..000000000
--- a/release/arch/debian/bootstrap.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# Yarn
-curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | sudo apt-key add -
-echo "deb https://dl.yarnpkg.com/debian/ stable main" | sudo tee /etc/apt/sources.list.d/yarn.list
-
-# System tools
-apt-get update
-apt-get install --yes git curl build-essential yarn python-minimal zip
-
-# User installation
-sudo su vagrant -c "bash /vagrant/user-bootstrap.sh"
diff --git a/release/arch/debian/build-deb.sh b/release/arch/debian/build-deb.sh
index 528b044cb..5d4618468 100644
--- a/release/arch/debian/build-deb.sh
+++ b/release/arch/debian/build-deb.sh
@@ -1,7 +1,6 @@
 #!/bin/bash
 
 # NVM
-export NVM_DIR="$HOME/.nvm"
 [ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
 
 # Prepare
@@ -13,6 +12,7 @@ NW_VERSION=0.24.4
 NW_RELEASE="v${NW_VERSION}"
 NW="nwjs-${NW_RELEASE}-linux-x64"
 NW_GZ="${NW}.tar.gz"
+DUNITER_SRC=/dunidata/duniter-source
 
 nvm install ${NVER}
 nvm use ${NVER}
@@ -23,55 +23,37 @@ ROOT=`pwd`
 DOWNLOADS="$ROOT/downloads"
 RELEASES="$ROOT/releases"
 
-mkdir -p "$DOWNLOADS"
+mkdir -p "$DOWNLOADS" || exit 1
 
 # -----------
-# Clean sources + releases
+# Clean up
 # -----------
-rm -rf "$DOWNLOADS/duniter"
-rm -rf "$RELEASES"
-rm -rf /vagrant/*.deb
-rm -rf /vagrant/*.tar.gz
+rm -rf /dunidata/*.deb
+rm -rf /dunidata/*.tar.gz
 
 # -----------
-# Downloads
+# Sources and downloads
 # -----------
 
-cd "$DOWNLOADS"
-
-if [ ! -d "$DOWNLOADS/duniter" ]; then
-  mv /vagrant/duniter-source duniter
-  cd duniter
-  git checkout "v${DUNITER_TAG}"
-  cd ..
-fi
-
 DUNITER_DEB_VER=" $DUNITER_TAG"
 DUNITER_TAG="v$DUNITER_TAG"
 
-if [ ! -f "$DOWNLOADS/$NW_GZ" ]; then
-  wget https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ}
-  tar xvzf ${NW_GZ}
-fi
+cd "${DUNITER_SRC}"
+git checkout "${DUNITER_TAG}" || exit 1
 
-if [ ! -f "$DOWNLOADS/node-${NVER}-linux-x64.tar.gz" ]; then
-  # Download Node.js and package it with the sources
-  wget http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz
-  tar xzf node-${NVER}-linux-x64.tar.gz
-fi
+cd "$DOWNLOADS"
+curl -O https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ} || exit 1
+tar xzf ${NW_GZ} || exit 1
+# Download Node.js and package it with the sources
+curl -O http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz || exit 1
+tar xzf node-${NVER}-linux-x64.tar.gz || exit 1
 
 # -----------
 # Releases
 # -----------
 
-rm -rf "$RELEASES"
-mkdir -p "$RELEASES"
-
-cp -r "$DOWNLOADS/duniter" "$RELEASES/duniter"
-cd "$RELEASES"
-
-# NPM build
-cp -r duniter _npm
+mkdir -p "$RELEASES" || exit 1
+cp -r "${DUNITER_SRC}" "$RELEASES/duniter" || exit 1
 
 # Releases builds
 cd ${RELEASES}/duniter
@@ -84,41 +66,48 @@ rm -Rf .git
 [[ $? -eq 0 ]] && npm install duniter-ui@1.6.x
 [[ $? -eq 0 ]] && npm prune --production
 
-cp -r "$RELEASES/duniter" "$RELEASES/desktop_"
-cp -r "$RELEASES/duniter" "$RELEASES/server_"
+[[ $? -eq 0 ]] || exit 1
+
+cp -r "$RELEASES/duniter" "$RELEASES/desktop_" || exit 1
+cp -r "$RELEASES/duniter" "$RELEASES/server_" || exit 1
 
 # -------------------------------------------------
 # Build Desktop version (Nw.js is embedded)
 # -------------------------------------------------
 
-cd "$RELEASES/desktop_"
 echo "$NW_RELEASE"
 
-cd "$RELEASES/desktop_/node_modules/wotb"
-
 # FIX: bug of nw.js, we need to patch first.
 # TODO: remove this patch once a correct version of Nw.js is out (NodeJS 8 or 9 if the above modules are compliant)
-cp /vagrant/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi
+cd "$RELEASES/desktop_/node_modules/wotb"
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure \
+  || echo "This failure is expected"
+cp /dunidata/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi || exit 1
 
+cd "$RELEASES/desktop_/node_modules/wotb"
 #yarn --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/wotb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/wotb.node
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
+cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/wotb.node \
+  lib/binding/Release/node-v$ADDON_VERSION-linux-x64/wotb.node || exit 1
 cd "$RELEASES/desktop_/node_modules/naclb"
 #npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/naclb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/naclb.node
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
+cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/naclb.node \
+  lib/binding/Release/node-v$ADDON_VERSION-linux-x64/naclb.node || exit 1
 cd "$RELEASES/desktop_/node_modules/scryptb"
 #npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/scryptb.node lib/binding/Release/node-v$ADDON_VERSION-linux-x64/scryptb.node
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
+cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/scryptb.node \
+  lib/binding/Release/node-v$ADDON_VERSION-linux-x64/scryptb.node || exit 1
 cd "$RELEASES/desktop_/node_modules/sqlite3"
 #npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build
-cp lib/binding/node-webkit-$NW_RELEASE-linux-x64/node_sqlite3.node lib/binding/node-v$ADDON_VERSION-linux-x64/node_sqlite3.node
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
+cp lib/binding/node-webkit-$NW_RELEASE-linux-x64/node_sqlite3.node \
+  lib/binding/node-v$ADDON_VERSION-linux-x64/node_sqlite3.node || exit 1
 
 # Unused binaries
 cd "$RELEASES/desktop_/"
@@ -128,39 +117,39 @@ rm -rf node_modules/sqlite3/build
 #rm -rf node_modules/scryptb/build
 
 ## Install Nw.js
-mkdir -p "$RELEASES/desktop_release"
+mkdir -p "$RELEASES/desktop_release" || exit 1
 
 # -------------------------------------------------
 # Build Desktop version .tar.gz
 # -------------------------------------------------
 
-cp -r $DOWNLOADS/${NW}/* "$RELEASES/desktop_release/"
+cp -r $DOWNLOADS/${NW}/* "$RELEASES/desktop_release/" || exit 1
 # Embed Node.js with Nw.js to make Duniter modules installable
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/lib "$RELEASES/desktop_release/"
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/include "$RELEASES/desktop_release/"
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/bin "$RELEASES/desktop_release/"
+cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/lib "$RELEASES/desktop_release/" || exit 1
+cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/include "$RELEASES/desktop_release/" || exit 1
+cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/bin "$RELEASES/desktop_release/" || exit 1
 # Add some specific files for GUI
-cp ${RELEASES}/desktop_/gui/* "$RELEASES/desktop_release/"
+cp ${RELEASES}/desktop_/gui/* "$RELEASES/desktop_release/" || exit 1
 # Add Duniter sources
-cp -R $RELEASES/desktop_/* "$RELEASES/desktop_release/"
+cp -R $RELEASES/desktop_/* "$RELEASES/desktop_release/" || exit 1
 ## Insert Nw specific fields while they do not exist (1.3.3)
-sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "$RELEASES/desktop_release/package.json"
+sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "$RELEASES/desktop_release/package.json" || exit 1
 # Add links for Node.js + NPM
 cd "$RELEASES/desktop_release/bin"
-ln -s ../lib/node_modules/npm/bin/npm-cli.js ./npm -f
+ln -s ../lib/node_modules/npm/bin/npm-cli.js ./npm -f || exit 1
 cd ..
-ln -s ./bin/node node -f
-ln -s ./bin/npm npm -f
+ln -s ./bin/node node -f || exit 1
+ln -s ./bin/npm npm -f || exit 1
 #sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
 # Create a copy for TGZ binary
-cp -R "$RELEASES/desktop_release" "$RELEASES/desktop_release_tgz"
+cp -R "$RELEASES/desktop_release" "$RELEASES/desktop_release_tgz" || exit 1
 #cd "$RELEASES/desktop_release_tgz/"
 #rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
 #rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
 #rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
 #rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
 cd "$RELEASES/desktop_release_tgz"
-tar czf /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz * --exclude ".git" --exclude "coverage" --exclude "test"
+tar czf /duniter/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz * --exclude "coverage" --exclude "test" || exit 1
 
 # -------------------------------------------------
 # Build Desktop version .deb
@@ -168,11 +157,11 @@ tar czf /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz * --exclude ".g
 
 # Create .deb tree + package it
 #cp -r "$RELEASES/desktop_release/release/arch/debian/package" "$RELEASES/duniter-x64"
-cp -r "/vagrant/package" "$RELEASES/duniter-x64"
-mkdir -p "$RELEASES/duniter-x64/opt/duniter/"
+cp -r "/dunidata/package" "$RELEASES/duniter-x64" || exit 1
+mkdir -p "$RELEASES/duniter-x64/opt/duniter/" || exit 1
 chmod 755 ${RELEASES}/duniter-x64/DEBIAN/post*
 chmod 755 ${RELEASES}/duniter-x64/DEBIAN/pre*
-sed -i "s/Version:.*/Version:$DUNITER_DEB_VER/g" ${RELEASES}/duniter-x64/DEBIAN/control
+sed -i "s/Version:.*/Version:$DUNITER_DEB_VER/g" ${RELEASES}/duniter-x64/DEBIAN/control || exit 1
 cd ${RELEASES}/desktop_release/
 #rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
 #rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
@@ -182,12 +171,12 @@ cd ${RELEASES}/desktop_release/
 #rm -rf node_modules/wotb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
 #rm -rf node_modules/naclb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
 #rm -rf node_modules/scryptb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-zip -qr ${RELEASES}/duniter-x64/opt/duniter/duniter-desktop.nw *
+zip -qr ${RELEASES}/duniter-x64/opt/duniter/duniter-desktop.nw * || exit 1
 
-sed -i "s/Package: .*/Package: duniter-desktop/g" ${RELEASES}/duniter-x64/DEBIAN/control
+sed -i "s/Package: .*/Package: duniter-desktop/g" ${RELEASES}/duniter-x64/DEBIAN/control || exit 1
 cd ${RELEASES}/
-fakeroot dpkg-deb --build duniter-x64
-mv duniter-x64.deb /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.deb
+fakeroot dpkg-deb --build duniter-x64 || exit 1
+mv duniter-x64.deb /duniter/duniter-desktop-${DUNITER_TAG}-linux-x64.deb || exit 1
 
 # -------------------------------------------------
 # Build Server version (Node.js is embedded, not Nw.js)
@@ -195,16 +184,16 @@ mv duniter-x64.deb /vagrant/duniter-desktop-${DUNITER_TAG}-linux-x64.deb
 
 cd ${RELEASES}
 rm -rf duniter-server-x64
-cp -r duniter-x64 duniter-server-x64
+cp -r duniter-x64 duniter-server-x64 || exit 1
 
 # Remove Nw.js
 rm -rf duniter-server-x64/opt/duniter/duniter-desktop.nw*
 
 cd ${RELEASES}/server_
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64 node
-zip -qr ${RELEASES}/duniter-server-x64/opt/duniter/duniter-desktop.nw *
+cp -r ${DOWNLOADS}/node-${NVER}-linux-x64 node || exit 1
+zip -qr ${RELEASES}/duniter-server-x64/opt/duniter/duniter-desktop.nw * || exit 1
 cd ${RELEASES}
-sed -i "s/Package: .*/Package: duniter/g" ${RELEASES}/duniter-server-x64/DEBIAN/control
+sed -i "s/Package: .*/Package: duniter/g" ${RELEASES}/duniter-server-x64/DEBIAN/control || exit 1
 rm -rf ${RELEASES}/duniter-server-x64/usr
-fakeroot dpkg-deb --build duniter-server-x64
-mv duniter-server-x64.deb /vagrant/duniter-server-${DUNITER_TAG}-linux-x64.deb
+fakeroot dpkg-deb --build duniter-server-x64 || exit 1
+mv duniter-server-x64.deb /duniter/duniter-server-${DUNITER_TAG}-linux-x64.deb || exit 1
diff --git a/release/arch/debian/user-bootstrap.sh b/release/arch/debian/user-bootstrap.sh
deleted file mode 100644
index 38df75d12..000000000
--- a/release/arch/debian/user-bootstrap.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-# NVM
-curl -o- https://raw.githubusercontent.com/creationix/nvm/v0.33.1/install.sh | bash
-export NVM_DIR="$HOME/.nvm"
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
-
-# Node.js
-nvm install 6
-
-# node-pre-gyp
-npm install -g nw-gyp node-pre-gyp
diff --git a/release/scripts/build.sh b/release/scripts/build.sh
index 973cc7743..b20ae0e1d 100755
--- a/release/scripts/build.sh
+++ b/release/scripts/build.sh
@@ -56,16 +56,13 @@ make)
         cd ..
       fi
 
-      [[ $? -eq 0 ]] && echo ">> Starting Vagrant Ubuntu VM..."
-      [[ $? -eq 0 ]] && vagrant up
-      [[ $? -eq 0 ]] && echo ">> VM: building Duniter..."
-      [[ $? -eq 0 ]] && vagrant ssh -- "bash -s ${TAG}" < ./build-deb.sh
+      docker pull duniter/release-builder:17.12.1
+      docker run --rm -it -v ${PWD}:/dunidata duniter/release-builder:17.12.1 ${TAG}
       if [ ! $? -eq 0 ]; then
         echo ">> Something went wrong. Stopping build."
       else
         echo ">> Build success. Shutting the VM down."
       fi
-      vagrant halt
       echo ">> VM closed."
     else
       echo "Debian binaries already built. Ready for upload."
-- 
GitLab


From 883679674bbb76580c810a1a68e7c1c3fca00d75 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?St=C3=A9phane=20Veyret?= <stephane.veyret@neptura.org>
Date: Sat, 23 Dec 2017 16:34:25 +0100
Subject: [PATCH 15/80] Factorize debian building

---
 release/arch/debian/build-deb.sh | 50 +++++++++++++++++---------------
 1 file changed, 26 insertions(+), 24 deletions(-)

diff --git a/release/arch/debian/build-deb.sh b/release/arch/debian/build-deb.sh
index 5d4618468..ec8c75440 100644
--- a/release/arch/debian/build-deb.sh
+++ b/release/arch/debian/build-deb.sh
@@ -75,6 +75,27 @@ cp -r "$RELEASES/duniter" "$RELEASES/server_" || exit 1
 # Build Desktop version (Nw.js is embedded)
 # -------------------------------------------------
 
+nw_copy() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/Release/node-webkit-v${NW_VERSION}-linux-x64/${1}.node \
+		lib/binding/Release/node-v${ADDON_VERSION}-linux-x64/${1}.node || exit 1
+}
+
+nw_copy_node() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/node-webkit-v${NW_VERSION}-linux-x64/node_${1}.node \
+		lib/binding/node-v${ADDON_VERSION}-linux-x64/node_${1}.node || exit 1
+}
+
+nw_compile() {
+	[[ -z ${1} ]] && exit 1
+	cd ${1} || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} configure || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} build || exit 1
+	[[ -z ${2} ]] || ${2} ${1}
+	cd ..
+}
+
 echo "$NW_RELEASE"
 
 # FIX: bug of nw.js, we need to patch first.
@@ -84,30 +105,11 @@ node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure \
   || echo "This failure is expected"
 cp /dunidata/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi || exit 1
 
-cd "$RELEASES/desktop_/node_modules/wotb"
-#yarn --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/wotb.node \
-  lib/binding/Release/node-v$ADDON_VERSION-linux-x64/wotb.node || exit 1
-cd "$RELEASES/desktop_/node_modules/naclb"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/naclb.node \
-  lib/binding/Release/node-v$ADDON_VERSION-linux-x64/naclb.node || exit 1
-cd "$RELEASES/desktop_/node_modules/scryptb"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
-cp lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64/scryptb.node \
-  lib/binding/Release/node-v$ADDON_VERSION-linux-x64/scryptb.node || exit 1
-cd "$RELEASES/desktop_/node_modules/sqlite3"
-#npm install --build-from-source
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure || exit 1
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION build || exit 1
-cp lib/binding/node-webkit-$NW_RELEASE-linux-x64/node_sqlite3.node \
-  lib/binding/node-v$ADDON_VERSION-linux-x64/node_sqlite3.node || exit 1
+cd "$RELEASES/desktop_/node_modules/"
+nw_compile wotb nw_copy
+nw_compile naclb nw_copy
+nw_compile scryptb nw_copy
+nw_compile sqlite3 nw_copy_node
 
 # Unused binaries
 cd "$RELEASES/desktop_/"
-- 
GitLab


From 1015eba8d3ec4e3b3d0114decf7fe33511321681 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 7 Jan 2018 14:08:09 +0100
Subject: [PATCH 16/80] [fix] #1246 Concurrent identities: keep only the most
 signed one

---
 app/modules/prover/lib/blockGenerator.ts | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts
index a941bbaeb..c41dfb242 100644
--- a/app/modules/prover/lib/blockGenerator.ts
+++ b/app/modules/prover/lib/blockGenerator.ts
@@ -279,7 +279,9 @@ export class BlockGenerator {
         const currentMembership = await this.dal.mindexDAL.getReducedMS(ms.issuer);
         const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
         if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) {
-          preJoinData[join.identity.pubkey] = join;
+          if (!preJoinData[join.identity.pubkey] || preJoinData[join.identity.pubkey].certs.length < join.certs.length) {
+            preJoinData[join.identity.pubkey] = join;
+          }
         }
       } catch (err) {
         if (err && !err.uerr) {
-- 
GitLab


From de53778fee8e089e7f52ffcb728dbbbc8da64d86 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Sun, 7 Jan 2018 14:13:21 +0100
Subject: [PATCH 17/80] [fix] #1247 Travis CI builds were broken

---
 .travis.yml  | 2 +-
 appveyor.yml | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/.travis.yml b/.travis.yml
index 32368217c..a3f3862e0 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -2,7 +2,7 @@
 language: node_js
 
 node_js:
-  - 6.11.1
+  - 8.9.2
 env:
   - CXX=g++-4.8
 addons:
diff --git a/appveyor.yml b/appveyor.yml
index 8fbf7baf6..1a271d5e4 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -1,6 +1,6 @@
 environment:
   matrix:
-    - nodejs_version: "6.9.2"
+    - nodejs_version: "8.9.2"
       ADDON_VERSION: "48"
 
 platform:
-- 
GitLab


From 3d36d961ab72b18327869c6fb67cb3150462e44a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sun, 7 Jan 2018 18:38:23 +0000
Subject: [PATCH 18/80] update BUILDER_TAG

---
 release/scripts/build.sh | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/release/scripts/build.sh b/release/scripts/build.sh
index b20ae0e1d..cb375c692 100755
--- a/release/scripts/build.sh
+++ b/release/scripts/build.sh
@@ -1,5 +1,7 @@
 #!/bin/bash
 
+BUILDER_TAG="v0.0.6"
+
 TAG="$3"
 ORIGIN="$4"
 IS_LOCAL_TAG=0
@@ -56,8 +58,8 @@ make)
         cd ..
       fi
 
-      docker pull duniter/release-builder:17.12.1
-      docker run --rm -it -v ${PWD}:/dunidata duniter/release-builder:17.12.1 ${TAG}
+      docker pull duniter/release-builder:${BUILDER_TAG}
+      docker run --rm -it -v ${PWD}:/dunidata duniter/release-builder:${BUILDER_TAG} ${TAG}
       if [ ! $? -eq 0 ]; then
         echo ">> Something went wrong. Stopping build."
       else
-- 
GitLab


From 080f32dda9058697035a0724f1741e0f40831bd1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Mon, 8 Jan 2018 21:13:40 +0100
Subject: [PATCH 19/80] [fix] #1248 Could not have chained txs in a block

---
 app/lib/indexer.ts | 44 ++++++++++++++++++++++----------------------
 doc/Protocol.md    | 22 ++++++++++++++++++----
 2 files changed, 40 insertions(+), 26 deletions(-)

diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts
index ddb1cfa99..b5f1c47be 100644
--- a/app/lib/indexer.ts
+++ b/app/lib/indexer.ts
@@ -865,21 +865,12 @@ export class Indexer {
       }
     }))
 
-    // BR_G46
-    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      const reducable = await dal.sindexDAL.sqlFind({
-        identifier: ENTRY.identifier,
-        pos: ENTRY.pos,
-        amount: ENTRY.amount,
-        base: ENTRY.base
-      });
-      ENTRY.conditions = reduce(reducable).conditions; // We valuate the input conditions, so we can map these records to a same account
-      ENTRY.available = reduce(reducable).consumed === false;
-    }))
-
-    // BR_G47
-    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      let source = _.filter(sindex, (src:SindexEntry) => src.identifier == ENTRY.identifier && src.pos == ENTRY.pos && src.conditions && src.op === constants.IDX_CREATE)[0];
+    const getInputLocalFirstOrFallbackGlobally = async (sindex:SindexEntry[], ENTRY:SindexEntry) => {
+      let source = _.filter(sindex, (src:SindexEntry) =>
+        src.identifier == ENTRY.identifier
+        && src.pos == ENTRY.pos
+        && src.conditions
+        && src.op === constants.IDX_CREATE)[0];
       if (!source) {
         const reducable = await dal.sindexDAL.sqlFind({
           identifier: ENTRY.identifier,
@@ -887,20 +878,29 @@ export class Indexer {
           amount: ENTRY.amount,
           base: ENTRY.base
         });
-        source = reduce(reducable);
+        source = reduce(reducable)
       }
+      return source
+    }
+
+    // BR_G46
+    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
+      ENTRY.conditions = source.conditions; // We valuate the input conditions, so we can map these records to a same account
+      ENTRY.available = source.consumed === false;
+    }))
+
+    // BR_G47
+    await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
       ENTRY.conditions = source.conditions;
       ENTRY.isLocked = !txSourceUnlock(ENTRY, source, HEAD);
     }))
 
     // BR_G48
     await Promise.all(_.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => {
-      ENTRY.isTimeLocked = ENTRY.written_time - reduce(await dal.sindexDAL.sqlFind({
-          identifier: ENTRY.identifier,
-          pos: ENTRY.pos,
-          amount: ENTRY.amount,
-          base: ENTRY.base
-        })).written_time < ENTRY.locktime;
+      const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY)
+      ENTRY.isTimeLocked = ENTRY.written_time - source.written_time < ENTRY.locktime;
     }))
 
     return HEAD;
diff --git a/doc/Protocol.md b/doc/Protocol.md
index b657553f4..bc424f2d7 100644
--- a/doc/Protocol.md
+++ b/doc/Protocol.md
@@ -2244,7 +2244,7 @@ Else:
 
 ####### BR_G102 - ENTRY.age
 
-For each ENTRY in local IINDEX where `op = 'UPDATE'`:
+For each ENTRY in local SINDEX where `op = 'UPDATE'`:
 
     REF_BLOCK = HEAD~<HEAD~1.number + 1 - NUMBER(ENTRY.hash)>[hash=HASH(ENTRY.created_on)]
     
@@ -2266,17 +2266,31 @@ EndIf
 
 For each `LOCAL_SINDEX[op='UPDATE'] as ENTRY`:
 
-    INPUT = REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base])
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
     ENTRY.conditions = INPUT.conditions
     ENTRY.available = INPUT.consumed == false
 
 ####### BR_G47 - ENTRY.isLocked
 
-    ENTRY.isLocked = TX_SOURCE_UNLOCK(REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).conditions, ENTRY)
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
+    ENTRY.isLocked = TX_SOURCE_UNLOCK(INPUT.conditions, ENTRY)
     
 ####### BR_G48 - ENTRY.isTimeLocked
 
-    ENTRY.isTimeLocked = ENTRY.written_time - REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).written_time < ENTRY.locktime
+    INPUT_ENTRIES = LOCAL_SINDEX[op='CREATE',identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    If COUNT(INPUT_ENTRIES) == 0 Then
+        INPUT_ENTRIES = GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]
+    EndIf
+    INPUT = REDUCE(INPUT_ENTRIES)
+    ENTRY.isTimeLocked = ENTRY.written_time - INPUT.written_time < ENTRY.locktime
 
 ##### Rules
 
-- 
GitLab


From 0e96b4a60aa5b5b1c127851f87f10933fd2431dd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?C=C3=A9dric=20Moreau?= <cem.moreau@gmail.com>
Date: Mon, 8 Jan 2018 21:23:01 +0100
Subject: [PATCH 20/80] =?UTF-8?q?[fix]=C2=A0#1242=20Set=20the=20depth=20li?=
 =?UTF-8?q?mit=20for=20a=20chain=20of=20transactions=20to=205?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .gitignore                                    |   4 +
 app/lib/common-libs/constants.ts              |   2 +
 app/lib/rules/index.ts                        |   2 +
 app/lib/rules/local_rules.ts                  |  78 ++++++++++---
 app/modules/prover/lib/blockGenerator.ts      |  14 +--
 app/service/BlockchainService.ts              |   3 +-
 app/service/TransactionsService.ts            |   2 +-
 doc/Protocol.md                               |  35 ++++++
 .../protocol-local-rule-chained-tx-depth.ts   |  45 ++++++++
 test/integration/transactions-chaining.js     |  92 ---------------
 test/integration/transactions-chaining.ts     | 109 ++++++++++++++++++
 11 files changed, 267 insertions(+), 119 deletions(-)
 create mode 100644 test/fast/protocol-local-rule-chained-tx-depth.ts
 delete mode 100644 test/integration/transactions-chaining.js
 create mode 100644 test/integration/transactions-chaining.ts

diff --git a/.gitignore b/.gitignore
index 9b656c465..1a8aef207 100644
--- a/.gitignore
+++ b/.gitignore
@@ -59,6 +59,7 @@ test/integration/documents-currency.d.ts
 test/integration/forwarding.js
 test/integration/branches_switch.js
 test/integration/branches2.js
+test/integration/transactions-chaining.js
 test/fast/modules/crawler/block_pulling.js*
 test/fast/modules/crawler/block_pulling.d.ts
 test/fast/fork*.js*
@@ -72,3 +73,6 @@ test/fast/modules/common/grammar.d.ts
 test/fast/prover/pow-1-cluster.d.ts
 test/fast/prover/pow-1-cluster.js
 test/fast/prover/pow-1-cluster.js.map
+test/fast/protocol-local-rule-chained-tx-depth.js
+test/fast/protocol-local-rule-chained-tx-depth.js.map
+test/fast/protocol-local-rule-chained-tx-depth.d.ts
diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts
index b34cb6278..e1be3f666 100644
--- a/app/lib/common-libs/constants.ts
+++ b/app/lib/common-libs/constants.ts
@@ -285,6 +285,8 @@ export const CommonConstants = {
     BLOCK: find("Block: (" + INTEGER + "-" + FINGERPRINT + ")"),
     SPECIAL_BLOCK
   },
+
+  BLOCK_MAX_TX_CHAINING_DEPTH: 5
 }
 
 function exact (regexpContent:string) {
diff --git a/app/lib/rules/index.ts b/app/lib/rules/index.ts
index 44ca3d253..28d13899b 100644
--- a/app/lib/rules/index.ts
+++ b/app/lib/rules/index.ts
@@ -33,6 +33,7 @@ export const ALIAS = {
     await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block);
     await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block);
     await LOCAL_RULES_FUNCTIONS.checkTxSignature(block);
+    await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index);
   },
 
   ALL_LOCAL_BUT_POW_AND_SIGNATURE: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => {
@@ -60,6 +61,7 @@ export const ALIAS = {
     await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block);
     await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block);
     await LOCAL_RULES_FUNCTIONS.checkTxSignature(block);
+    await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index);
   }
 }
 
diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts
index 41d19f163..5a6081124 100644
--- a/app/lib/rules/local_rules.ts
+++ b/app/lib/rules/local_rules.ts
@@ -379,28 +379,70 @@ export const LOCAL_RULES_FUNCTIONS = {
       }
     }
     return true;
+  },
+
+  checkMaxTransactionChainingDepth: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => {
+    const sindex = Indexer.sindex(index)
+    const max = getMaxTransactionDepth(sindex)
+    //
+    const allowedMax = block.medianTime > 1517443200 ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 1
+    if (max > allowedMax) {
+      throw "The maximum transaction chaining length per block is " + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH
+    }
+    return true
+  }
+}
+
+export interface SindexShortEntry {
+  op:string,
+  identifier:string,
+  pos:number,
+  tx:string|null
+}
+
+function getMaxTransactionDepth(sindex:SindexShortEntry[]) {
+  const ids = _.uniq(_.pluck(sindex, 'tx'))
+  let maxTxChainingDepth = 0
+  for (let id of ids) {
+    maxTxChainingDepth = Math.max(maxTxChainingDepth, getTransactionDepth(id, sindex, 0))
   }
+  return maxTxChainingDepth
+}
+
+function getTransactionDepth(txHash:string, sindex:SindexShortEntry[], localDepth = 0) {
+  const inputs = _.filter(sindex, (s:SindexShortEntry) => s.op === 'UPDATE' && s.tx === txHash)
+  let depth = localDepth
+  for (let input of inputs) {
+    const consumedOutput = _.findWhere(sindex, { op: 'CREATE', identifier: input.identifier, pos: input.pos })
+    if (consumedOutput) {
+      if (localDepth < 5) {
+        const subTxDepth = getTransactionDepth(consumedOutput.tx, sindex, localDepth + 1)
+        depth = Math.max(depth, subTxDepth)
+      } else {
+        depth++
+      }
+    }
+  }
+  return depth
 }
 
 function checkSingleMembershipSignature(ms:any) {
   return verify(ms.getRaw(), ms.signature, ms.issuer);
 }
 
-function checkBunchOfTransactions(transactions:TransactionDTO[], done:any = undefined){
-  const block:any = { transactions };
+function checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, options?:{ dontCareAboutChaining?:boolean }){
+  const block:any = { transactions, identities: [], joiners: [], actives: [], leavers: [], revoked: [], excluded: [], certifications: [] };
+  const index = Indexer.localIndex(block, conf)
   return (async () => {
-    try {
-      let local_rule = LOCAL_RULES_FUNCTIONS;
-      await local_rule.checkTxLen(block);
-      await local_rule.checkTxIssuers(block);
-      await local_rule.checkTxSources(block);
-      await local_rule.checkTxRecipients(block);
-      await local_rule.checkTxAmounts(block);
-      await local_rule.checkTxSignature(block);
-      done && done();
-    } catch (err) {
-      if (done) return done(err);
-      throw err;
+    let local_rule = LOCAL_RULES_FUNCTIONS;
+    await local_rule.checkTxLen(block);
+    await local_rule.checkTxIssuers(block);
+    await local_rule.checkTxSources(block);
+    await local_rule.checkTxRecipients(block);
+    await local_rule.checkTxAmounts(block);
+    await local_rule.checkTxSignature(block);
+    if (!options || !options.dontCareAboutChaining) {
+      await local_rule.checkMaxTransactionChainingDepth(block, conf, index);
     }
   })()
 }
@@ -411,9 +453,13 @@ export const LOCAL_RULES_HELPERS = {
 
   checkSingleMembershipSignature: checkSingleMembershipSignature,
 
-  checkBunchOfTransactions: checkBunchOfTransactions,
+  checkBunchOfTransactions,
+
+  getTransactionDepth,
+
+  getMaxTransactionDepth,
 
-  checkSingleTransactionLocally: (tx:any, done:any = undefined) => checkBunchOfTransactions([tx], done),
+  checkSingleTransactionLocally: (tx:any, conf:ConfDTO) => checkBunchOfTransactions([tx], conf),
 
   checkTxAmountsValidity: (tx:TransactionDTO) => {
     const inputs = tx.inputsAsObjects()
diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts
index c41dfb242..39d9cb45a 100644
--- a/app/modules/prover/lib/blockGenerator.ts
+++ b/app/modules/prover/lib/blockGenerator.ts
@@ -65,7 +65,7 @@ export class BlockGenerator {
     const wereExcludeds = await this.dal.getRevokedPubkeys();
     const newCertsFromWoT = await generator.findNewCertsFromWoT(current);
     const newcomersLeavers = await this.findNewcomersAndLeavers(current, (joinersData:any) => generator.filterJoiners(joinersData));
-    const transactions = await this.findTransactions(current);
+    const transactions = await this.findTransactions(current, manualValues);
     const joinData = newcomersLeavers[2];
     const leaveData = newcomersLeavers[3];
     const newCertsFromNewcomers = newcomersLeavers[4];
@@ -104,7 +104,8 @@ export class BlockGenerator {
     return [cur, newWoTMembers, finalJoinData, leavers, updates];
   }
 
-  private async findTransactions(current:DBBlock) {
+  private async findTransactions(current:DBBlock, options:{ dontCareAboutChaining?:boolean }) {
+    const ALSO_CHECK_PENDING_TXS = true
     const versionMin = current ? Math.min(CommonConstants.LAST_VERSION_FOR_TX, current.version) : CommonConstants.DOCUMENTS_VERSION;
     const txs = await this.dal.getTransactionsPending(versionMin);
     const transactions = [];
@@ -113,14 +114,9 @@ export class BlockGenerator {
       obj.currency = this.conf.currency
       const tx = TransactionDTO.fromJSONObject(obj);
       try {
-        await new Promise((resolve, reject) => {
-          LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), (err:any, res:any) => {
-            if (err) return reject(err)
-            return resolve(res)
-          })
-        })
+        await LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), this.conf, options)
         const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
-        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal);
+        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal, ALSO_CHECK_PENDING_TXS);
         await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
         transactions.push(tx);
         passingTxs.push(tx);
diff --git a/app/service/BlockchainService.ts b/app/service/BlockchainService.ts
index ba6520483..e8148d81d 100644
--- a/app/service/BlockchainService.ts
+++ b/app/service/BlockchainService.ts
@@ -217,8 +217,9 @@ export class BlockchainService extends FIFOService {
         } catch (e) {
           this.logger.error(e)
           added = false
+          const theError = e && (e.message || e)
           this.push({
-            blockResolutionError: e && e.message
+            blockResolutionError: theError
           })
         }
         i++
diff --git a/app/service/TransactionsService.ts b/app/service/TransactionsService.ts
index e6ddf1263..3068c1b71 100644
--- a/app/service/TransactionsService.ts
+++ b/app/service/TransactionsService.ts
@@ -41,7 +41,7 @@ export class TransactionService extends FIFOService {
         // Start checks...
         const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
         const dto = TransactionDTO.fromJSONObject(tx)
-        await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto)
+        await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto, this.conf)
         await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
         await GLOBAL_RULES_HELPERS.checkSingleTransaction(dto, nextBlockWithFakeTimeVariation, this.conf, this.dal, CHECK_PENDING_TRANSACTIONS);
         const server_pubkey = this.conf.pair && this.conf.pair.pub;
diff --git a/doc/Protocol.md b/doc/Protocol.md
index bc424f2d7..68a5300a4 100644
--- a/doc/Protocol.md
+++ b/doc/Protocol.md
@@ -1549,6 +1549,40 @@ TRUE
 > Functionally: we cannot create nor lose money through transactions. We can only transfer coins we own.
 > Functionally: also, we cannot convert a superiod unit base into a lower one.
 
+##### Transactions chaining max depth
+
+    FUNCTION `getTransactionDepth(txHash, LOCAL_DEPTH)`:
+
+        INPUTS = LOCAL_SINDEX[op='UPDATE',tx=txHash]
+        DEPTH = LOCAL_DEPTH
+
+        FOR EACH `INPUT` OF `INPUTS`
+            CONSUMED = LOCAL_SINDEX[op='CREATE',identifier=INPUT.identifier,pos=INPUT.pos]
+            IF (CONSUMED != NULL)
+                IF (LOCAL_DEPTH < 5)
+                    DEPTH = MAX(DEPTH, getTransactionDepth(CONSUMED.tx, LOCAL_DEPTH +1)
+                ELSE
+                    DEPTH++
+                END_IF
+            END_IF
+        END_FOR
+
+        RETURN DEPTH
+
+    END_FUNCTION
+
+Then:
+
+    maxTxChainingDepth = 0
+
+For each `TX_HASH` of `UNIQ(PICK(LOCAL_SINDEX, 'tx))`:
+
+    maxTxChainingDepth = MAX(maxTxChainingDepth, getTransactionDepth(TX_HASH, 0))
+
+Rule:
+
+    maxTxChainingDepth <= 5
+
 #### Global
 
 Global validation verifies the coherence of a locally-validated block, in the context of the whole blockchain, including the block.
@@ -1580,6 +1614,7 @@ Function references:
 > If values count is even, the median is computed over the 2 centered values by an arithmetical median on them, *NOT* rounded.
 
 * *UNIQ* returns a list of the unique values in a list of values
+* *PICK* returns a list of the values by picking a particular property on each record
 * *INTEGER_PART* return the integer part of a number
 * *FIRST* return the first element in a list of values matching the given condition
 * *REDUCE* merges a set of elements into a single one, by extending the non-null properties from each record into the resulting record.
diff --git a/test/fast/protocol-local-rule-chained-tx-depth.ts b/test/fast/protocol-local-rule-chained-tx-depth.ts
new file mode 100644
index 000000000..a13f0fb3c
--- /dev/null
+++ b/test/fast/protocol-local-rule-chained-tx-depth.ts
@@ -0,0 +1,45 @@
+import {LOCAL_RULES_HELPERS} from "../../app/lib/rules/local_rules"
+
+const _ = require('underscore')
+const assert = require('assert')
+
+describe("Protocol BR_G110 - chained tx depth", () => {
+
+  const sindex = [
+    { tx: 'A', op: 'UPDATE', identifier: 'UD1', pos: 0 },
+    { tx: 'A', op: 'CREATE', identifier: 'TXA', pos: 0 },
+    { tx: 'B', op: 'UPDATE', identifier: 'TXA', pos: 0 },
+    { tx: 'B', op: 'CREATE', identifier: 'TXB', pos: 0 },
+    { tx: 'C', op: 'UPDATE', identifier: 'TXB', pos: 0 },
+    { tx: 'C', op: 'CREATE', identifier: 'TXC', pos: 0 },
+    { tx: 'D', op: 'UPDATE', identifier: 'TXC', pos: 0 },
+    { tx: 'D', op: 'CREATE', identifier: 'TXD', pos: 0 },
+    { tx: 'E', op: 'UPDATE', identifier: 'TXD', pos: 0 },
+    { tx: 'E', op: 'CREATE', identifier: 'TXE', pos: 0 },
+    { tx: 'F', op: 'UPDATE', identifier: 'TXE', pos: 0 },
+    { tx: 'F', op: 'CREATE', identifier: 'TXF', pos: 0 },
+    { tx: 'G', op: 'UPDATE', identifier: 'TXF', pos: 0 },
+    { tx: 'G', op: 'CREATE', identifier: 'TXG', pos: 0 },
+    { tx: 'H', op: 'UPDATE', identifier: 'TXG', pos: 0 },
+    { tx: 'H', op: 'CREATE', identifier: 'TXH', pos: 0 },
+  ]
+
+  it('should detected normal depth', () => {
+    assert.equal(0, LOCAL_RULES_HELPERS.getTransactionDepth('A', sindex))
+    assert.equal(1, LOCAL_RULES_HELPERS.getTransactionDepth('B', sindex))
+    assert.equal(2, LOCAL_RULES_HELPERS.getTransactionDepth('C', sindex))
+    assert.equal(3, LOCAL_RULES_HELPERS.getTransactionDepth('D', sindex))
+    assert.equal(4, LOCAL_RULES_HELPERS.getTransactionDepth('E', sindex))
+    assert.equal(5, LOCAL_RULES_HELPERS.getTransactionDepth('F', sindex))
+    assert.equal(6, LOCAL_RULES_HELPERS.getTransactionDepth('G', sindex))
+  })
+
+  it('should detected max the depth to 6', () => {
+    assert.equal(6, LOCAL_RULES_HELPERS.getTransactionDepth('H', sindex))
+  })
+
+  it('should find the max depth globally', () => {
+    assert.equal(6, LOCAL_RULES_HELPERS.getMaxTransactionDepth(sindex))
+  })
+})
+
diff --git a/test/integration/transactions-chaining.js b/test/integration/transactions-chaining.js
deleted file mode 100644
index 66a02c1ca..000000000
--- a/test/integration/transactions-chaining.js
+++ /dev/null
@@ -1,92 +0,0 @@
-"use strict";
-
-const co = require('co');
-const _ = require('underscore');
-const should = require('should');
-const assert = require('assert');
-const constants = require('../../app/lib/constants');
-const bma       = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const CommonConstants = require('../../app/lib/common-libs/constants').CommonConstants
-const toolbox   = require('./tools/toolbox');
-const node   = require('./tools/node');
-const TestUser = require('./tools/TestUser').TestUser
-const unit   = require('./tools/unit');
-const http   = require('./tools/http');
-
-describe("Transaction chaining", function() {
-
-  const now = 1456644632;
-
-  let s1, tic, toc
-
-  before(() => co(function*() {
-
-    s1 = toolbox.server({
-      pair: {
-        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
-        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
-      },
-      dt: 3600,
-      udTime0: now + 3600,
-      ud0: 1200,
-      c: 0.1
-    });
-
-    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
-    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
-
-    yield s1.initDalBmaConnections();
-    yield tic.createIdentity();
-    yield toc.createIdentity();
-    yield tic.cert(toc);
-    yield toc.cert(tic);
-    yield tic.join();
-    yield toc.join();
-    yield s1.commit({ time: now });
-    yield s1.commit({ time: now + 7210 });
-    yield s1.commit({ time: now + 7210 });
-  }));
-
-  after(() => {
-    return Promise.all([
-      s1.closeCluster()
-    ])
-  })
-
-  describe("Sources", function(){
-
-    it('it should exist block#2 with UD of 1200', () => s1.expect('/blockchain/block/2', (block) => {
-      should.exists(block);
-      assert.equal(block.number, 2);
-      assert.equal(block.dividend, 1200);
-    }));
-  });
-
-  describe("Chaining", function(){
-
-    it('with SIG and XHX', () => co(function *() {
-      // Current state
-      let current = yield s1.get('/blockchain/current');
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      let tx1 = yield toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
-      let tx2 = yield toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
-        comment: 'also take the remaining 160 units',
-        blockstamp: [current.number, current.hash].join('-'),
-        theseOutputsStart: 1
-      });
-      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
-      CommonConstants.TRANSACTION_MAX_TRIES = 2;
-      yield unit.shouldNotFail(toc.sendTX(tx1));
-      yield unit.shouldNotFail(toc.sendTX(tx2));
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1);
-      yield s1.commit({ time: now + 7210 }); // TX1 commited
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // The 160 remaining units
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2); // The UD + 1040 units sent by toc
-      yield s1.commit({ time: now + 7210 }); // TX2 commited
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
-      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
-    }));
-  });
-});
diff --git a/test/integration/transactions-chaining.ts b/test/integration/transactions-chaining.ts
new file mode 100644
index 000000000..cbcbddf8f
--- /dev/null
+++ b/test/integration/transactions-chaining.ts
@@ -0,0 +1,109 @@
+import {CommonConstants} from "../../app/lib/common-libs/constants"
+import {TestUser} from "./tools/TestUser"
+import {TestingServer} from "./tools/toolbox"
+import {NewLogger} from "../../app/lib/logger"
+
+const should = require('should');
+const assert = require('assert');
+const toolbox   = require('./tools/toolbox');
+const unit   = require('./tools/unit');
+
+describe("Transaction chaining", () => {
+
+  const now = 1456644632;
+
+  let s1:TestingServer, tic:TestUser, toc:TestUser
+
+  before(async () => {
+
+    s1 = toolbox.server({
+      pair: {
+        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
+        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
+      },
+      dt: 3600,
+      udTime0: now + 3600,
+      ud0: 1200,
+      c: 0.1
+    });
+
+    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
+    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
+
+    await s1.initDalBmaConnections();
+    await tic.createIdentity();
+    await toc.createIdentity();
+    await tic.cert(toc);
+    await toc.cert(tic);
+    await tic.join();
+    await toc.join();
+    await s1.commit({ time: now });
+    await s1.commit({ time: now + 7210 });
+    await s1.commit({ time: now + 7210 });
+  })
+
+  after(() => {
+    return s1.closeCluster()
+  })
+
+  describe("Sources", () => {
+
+    it('it should exist block#2 with UD of 1200', () => s1.expect('/blockchain/block/2', (block: { number:number, dividend:number }) => {
+      should.exists(block);
+      assert.equal(block.number, 2);
+      assert.equal(block.dividend, 1200);
+    }))
+  })
+
+  describe("Chaining", () => {
+
+    it('with SIG and XHX', async () => {
+      // Current state
+      let current = await s1.get('/blockchain/current');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      let tx1 = await toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
+      let tx2 = await toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
+        comment: 'also take the remaining 160 units',
+        blockstamp: [current.number, current.hash].join('-'),
+        theseOutputsStart: 1
+      });
+      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
+      CommonConstants.TRANSACTION_MAX_TRIES = 2;
+      await unit.shouldNotFail(toc.sendTX(tx1));
+      await unit.shouldNotFail(toc.sendTX(tx2));
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1);
+      await s1.commit({ time: now + 7210 }); // TX1 + TX2 commited
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
+      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
+    })
+
+    it('should refuse a block with more than 5 chained tx in it', async () => {
+      // Current state
+      let current = await s1.get('/blockchain/current');
+      const blockstamp = [current.number, current.hash].join('-');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
+      // Ping-pong of 1200 units
+      let tx1 = await tic.prepareITX(1200, toc, "PING-PONG TX1");
+      let tx2 = await toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX2" });
+      let tx3 = await tic.prepareUTX(tx2, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX3" });
+      let tx4 = await toc.prepareUTX(tx3, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX4" });
+      let tx5 = await tic.prepareUTX(tx4, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX5" });
+      let tx6 = await toc.prepareUTX(tx5, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + tic.pub + ')' }], { blockstamp, comment: "PING-PONG TX6" });
+      let tx7 = await tic.prepareUTX(tx6, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { blockstamp, comment: "PING-PONG TX7" });
+      const tmp = CommonConstants.TRANSACTION_MAX_TRIES;
+      CommonConstants.TRANSACTION_MAX_TRIES = 2;
+      await unit.shouldNotFail(toc.sendTX(tx1));
+      await unit.shouldNotFail(toc.sendTX(tx2));
+      await unit.shouldNotFail(toc.sendTX(tx3));
+      await unit.shouldNotFail(toc.sendTX(tx4));
+      await unit.shouldNotFail(toc.sendTX(tx5));
+      await unit.shouldNotFail(toc.sendTX(tx6));
+      await unit.shouldNotFail(toc.sendTX(tx7));
+      await s1.commitWaitError({ dontCareAboutChaining: true }, 'The maximum transaction chaining length per block is 5')
+      CommonConstants.TRANSACTION_MAX_TRIES = tmp;
+    })
+  });
+});
-- 
GitLab


From 983cbc8ee755ea2c7bd70e4233c816d9a407410d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?St=C3=A9phane=20Veyret?= <sveyret@axway.com>
Date: Tue, 9 Jan 2018 16:26:20 +0100
Subject: [PATCH 21/80] Adapt docker to gitlab-ci

---
 .gitignore                                    |   2 +-
 .gitlab-ci.yml                                |  45 +++-
 release/arch/debian/build-deb.sh              | 201 ------------------
 .../arch/{debian => linux}/0.24.4_common.gypi |   0
 release/arch/linux/build-lin.sh               | 196 +++++++++++++++++
 .../{ => linux}/debian/package/DEBIAN/control |   0
 .../debian/package/DEBIAN/postinst            |   6 +-
 .../{ => linux}/debian/package/DEBIAN/prerm   |   0
 .../usr/share/applications/duniter.desktop    |   0
 release/scripts/build.sh                      |  27 ++-
 10 files changed, 251 insertions(+), 226 deletions(-)
 delete mode 100644 release/arch/debian/build-deb.sh
 rename release/arch/{debian => linux}/0.24.4_common.gypi (100%)
 create mode 100644 release/arch/linux/build-lin.sh
 rename release/arch/{ => linux}/debian/package/DEBIAN/control (100%)
 rename release/arch/{ => linux}/debian/package/DEBIAN/postinst (88%)
 rename release/arch/{ => linux}/debian/package/DEBIAN/prerm (100%)
 rename release/{arch/debian/package => contrib/desktop}/usr/share/applications/duniter.desktop (100%)

diff --git a/.gitignore b/.gitignore
index 27a53505a..5fb1642c5 100644
--- a/.gitignore
+++ b/.gitignore
@@ -19,11 +19,11 @@ vagrant/*.log
 vagrant/duniter
 
 # Releases
+/work
 *.deb
 *.tar.gz
 *.log
 *.exe
-/release/arch/debian/duniter-source
 
 # vscode
 .vscode
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ec3727046..0f1fa048e 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,10 +1,7 @@
 stages:
-    - github-sync
-    - test
-
-before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
+  - github-sync
+  - build
+  - test
 
 push_to_github:
     stage: github-sync
@@ -12,8 +9,6 @@ push_to_github:
         GIT_STRATEGY: none
     tags:
         - redshift
-    before_script:
-        - ''
     script:
         - rm -rf ./*
         - rm -rf .git
@@ -32,8 +27,6 @@ enforce_readme:
         GIT_STRATEGY: none
     tags:
         - redshift
-    before_script:
-        - ''
     script:
       - rm -rf ./*
       - rm -rf .git
@@ -47,10 +40,42 @@ enforce_readme:
       - git commit -am "Enforce github readme"
       - git push origin master
 
+build_linux:test:
+  stage: build
+  image: duniter/release-builder:v1.0.0
+  tags:
+    - nodejs
+    - nwjs
+  script:
+    - bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
+  artifacts:
+    paths:
+      - work/bin/
+    expire_in: 8h
+  except:
+    - tags
+
+build_linux:deploy:
+  stage: build
+  image: duniter/release-builder:v1.0.0
+  tags:
+    - nodejs
+    - nwjs
+  script:
+    - bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
+  artifacts:
+    paths:
+      - work/bin/
+  only:
+    - tags
+
 test:
   stage: test
   tags:
     - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
   script:
     - yarn
     - yarn test
diff --git a/release/arch/debian/build-deb.sh b/release/arch/debian/build-deb.sh
deleted file mode 100644
index ec8c75440..000000000
--- a/release/arch/debian/build-deb.sh
+++ /dev/null
@@ -1,201 +0,0 @@
-#!/bin/bash
-
-# NVM
-[ -s "$NVM_DIR/nvm.sh" ] && \. "$NVM_DIR/nvm.sh"  # This loads nvm
-
-# Prepare
-NODE_VERSION=8.9.1
-NVER="v$NODE_VERSION"
-DUNITER_TAG=$1
-ADDON_VERSION=57
-NW_VERSION=0.24.4
-NW_RELEASE="v${NW_VERSION}"
-NW="nwjs-${NW_RELEASE}-linux-x64"
-NW_GZ="${NW}.tar.gz"
-DUNITER_SRC=/dunidata/duniter-source
-
-nvm install ${NVER}
-nvm use ${NVER}
-npm install -g node-pre-gyp
-npm install -g nw-gyp
-# Folders
-ROOT=`pwd`
-DOWNLOADS="$ROOT/downloads"
-RELEASES="$ROOT/releases"
-
-mkdir -p "$DOWNLOADS" || exit 1
-
-# -----------
-# Clean up
-# -----------
-rm -rf /dunidata/*.deb
-rm -rf /dunidata/*.tar.gz
-
-# -----------
-# Sources and downloads
-# -----------
-
-DUNITER_DEB_VER=" $DUNITER_TAG"
-DUNITER_TAG="v$DUNITER_TAG"
-
-cd "${DUNITER_SRC}"
-git checkout "${DUNITER_TAG}" || exit 1
-
-cd "$DOWNLOADS"
-curl -O https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ} || exit 1
-tar xzf ${NW_GZ} || exit 1
-# Download Node.js and package it with the sources
-curl -O http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz || exit 1
-tar xzf node-${NVER}-linux-x64.tar.gz || exit 1
-
-# -----------
-# Releases
-# -----------
-
-mkdir -p "$RELEASES" || exit 1
-cp -r "${DUNITER_SRC}" "$RELEASES/duniter" || exit 1
-
-# Releases builds
-cd ${RELEASES}/duniter
-# Remove git files
-rm -Rf .git
-[[ $? -eq 0 ]] && echo ">> VM: building modules..."
-[[ $? -eq 0 ]] && npm install
-
-# Duniter UI
-[[ $? -eq 0 ]] && npm install duniter-ui@1.6.x
-[[ $? -eq 0 ]] && npm prune --production
-
-[[ $? -eq 0 ]] || exit 1
-
-cp -r "$RELEASES/duniter" "$RELEASES/desktop_" || exit 1
-cp -r "$RELEASES/duniter" "$RELEASES/server_" || exit 1
-
-# -------------------------------------------------
-# Build Desktop version (Nw.js is embedded)
-# -------------------------------------------------
-
-nw_copy() {
-	[[ -z ${1} ]] && exit 1
-	cp lib/binding/Release/node-webkit-v${NW_VERSION}-linux-x64/${1}.node \
-		lib/binding/Release/node-v${ADDON_VERSION}-linux-x64/${1}.node || exit 1
-}
-
-nw_copy_node() {
-	[[ -z ${1} ]] && exit 1
-	cp lib/binding/node-webkit-v${NW_VERSION}-linux-x64/node_${1}.node \
-		lib/binding/node-v${ADDON_VERSION}-linux-x64/node_${1}.node || exit 1
-}
-
-nw_compile() {
-	[[ -z ${1} ]] && exit 1
-	cd ${1} || exit 1
-	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} configure || exit 1
-	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} build || exit 1
-	[[ -z ${2} ]] || ${2} ${1}
-	cd ..
-}
-
-echo "$NW_RELEASE"
-
-# FIX: bug of nw.js, we need to patch first.
-# TODO: remove this patch once a correct version of Nw.js is out (NodeJS 8 or 9 if the above modules are compliant)
-cd "$RELEASES/desktop_/node_modules/wotb"
-node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure \
-  || echo "This failure is expected"
-cp /dunidata/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi || exit 1
-
-cd "$RELEASES/desktop_/node_modules/"
-nw_compile wotb nw_copy
-nw_compile naclb nw_copy
-nw_compile scryptb nw_copy
-nw_compile sqlite3 nw_copy_node
-
-# Unused binaries
-cd "$RELEASES/desktop_/"
-rm -rf node_modules/sqlite3/build
-#rm -rf node_modules/naclb/build
-#rm -rf node_modules/wotb/build
-#rm -rf node_modules/scryptb/build
-
-## Install Nw.js
-mkdir -p "$RELEASES/desktop_release" || exit 1
-
-# -------------------------------------------------
-# Build Desktop version .tar.gz
-# -------------------------------------------------
-
-cp -r $DOWNLOADS/${NW}/* "$RELEASES/desktop_release/" || exit 1
-# Embed Node.js with Nw.js to make Duniter modules installable
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/lib "$RELEASES/desktop_release/" || exit 1
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/include "$RELEASES/desktop_release/" || exit 1
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64/bin "$RELEASES/desktop_release/" || exit 1
-# Add some specific files for GUI
-cp ${RELEASES}/desktop_/gui/* "$RELEASES/desktop_release/" || exit 1
-# Add Duniter sources
-cp -R $RELEASES/desktop_/* "$RELEASES/desktop_release/" || exit 1
-## Insert Nw specific fields while they do not exist (1.3.3)
-sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "$RELEASES/desktop_release/package.json" || exit 1
-# Add links for Node.js + NPM
-cd "$RELEASES/desktop_release/bin"
-ln -s ../lib/node_modules/npm/bin/npm-cli.js ./npm -f || exit 1
-cd ..
-ln -s ./bin/node node -f || exit 1
-ln -s ./bin/npm npm -f || exit 1
-#sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
-# Create a copy for TGZ binary
-cp -R "$RELEASES/desktop_release" "$RELEASES/desktop_release_tgz" || exit 1
-#cd "$RELEASES/desktop_release_tgz/"
-#rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-cd "$RELEASES/desktop_release_tgz"
-tar czf /duniter/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz * --exclude "coverage" --exclude "test" || exit 1
-
-# -------------------------------------------------
-# Build Desktop version .deb
-# -------------------------------------------------
-
-# Create .deb tree + package it
-#cp -r "$RELEASES/desktop_release/release/arch/debian/package" "$RELEASES/duniter-x64"
-cp -r "/dunidata/package" "$RELEASES/duniter-x64" || exit 1
-mkdir -p "$RELEASES/duniter-x64/opt/duniter/" || exit 1
-chmod 755 ${RELEASES}/duniter-x64/DEBIAN/post*
-chmod 755 ${RELEASES}/duniter-x64/DEBIAN/pre*
-sed -i "s/Version:.*/Version:$DUNITER_DEB_VER/g" ${RELEASES}/duniter-x64/DEBIAN/control || exit 1
-cd ${RELEASES}/desktop_release/
-#rm -rf node_modules/sqlite3/lib/binding/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-webkit-$NW_RELEASE-linux-x64
-#rm -rf node_modules/sqlite3/lib/binding/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/wotb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/naclb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-#rm -rf node_modules/scryptb/lib/binding/Release/node-v$ADDON_VERSION-linux-x64
-zip -qr ${RELEASES}/duniter-x64/opt/duniter/duniter-desktop.nw * || exit 1
-
-sed -i "s/Package: .*/Package: duniter-desktop/g" ${RELEASES}/duniter-x64/DEBIAN/control || exit 1
-cd ${RELEASES}/
-fakeroot dpkg-deb --build duniter-x64 || exit 1
-mv duniter-x64.deb /duniter/duniter-desktop-${DUNITER_TAG}-linux-x64.deb || exit 1
-
-# -------------------------------------------------
-# Build Server version (Node.js is embedded, not Nw.js)
-# -------------------------------------------------
-
-cd ${RELEASES}
-rm -rf duniter-server-x64
-cp -r duniter-x64 duniter-server-x64 || exit 1
-
-# Remove Nw.js
-rm -rf duniter-server-x64/opt/duniter/duniter-desktop.nw*
-
-cd ${RELEASES}/server_
-cp -r ${DOWNLOADS}/node-${NVER}-linux-x64 node || exit 1
-zip -qr ${RELEASES}/duniter-server-x64/opt/duniter/duniter-desktop.nw * || exit 1
-cd ${RELEASES}
-sed -i "s/Package: .*/Package: duniter/g" ${RELEASES}/duniter-server-x64/DEBIAN/control || exit 1
-rm -rf ${RELEASES}/duniter-server-x64/usr
-fakeroot dpkg-deb --build duniter-server-x64 || exit 1
-mv duniter-server-x64.deb /duniter/duniter-server-${DUNITER_TAG}-linux-x64.deb || exit 1
diff --git a/release/arch/debian/0.24.4_common.gypi b/release/arch/linux/0.24.4_common.gypi
similarity index 100%
rename from release/arch/debian/0.24.4_common.gypi
rename to release/arch/linux/0.24.4_common.gypi
diff --git a/release/arch/linux/build-lin.sh b/release/arch/linux/build-lin.sh
new file mode 100644
index 000000000..e01deaa4f
--- /dev/null
+++ b/release/arch/linux/build-lin.sh
@@ -0,0 +1,196 @@
+#!/bin/bash
+
+if [[ -z "${1}" ]]; then
+	echo "Fatal: no version given to build script"
+	exit 1
+fi
+if [[ -s "$NVM_DIR/nvm.sh" ]]; then
+	source "$NVM_DIR/nvm.sh"
+else
+	echo "Fatal: could not load nvm"
+	exit 1
+fi
+
+# -----------
+# Prepare
+# -----------
+
+NODE_VERSION=8.9.1
+NVER="v${NODE_VERSION}"
+DUNITER_TAG="v${1}"
+DUNITER_DEB_VER=" ${1}"
+ADDON_VERSION=57
+NW_VERSION=0.24.4
+NW_RELEASE="v${NW_VERSION}"
+NW="nwjs-${NW_RELEASE}-linux-x64"
+NW_GZ="${NW}.tar.gz"
+DUNITER_UI_VER="1.6.x"
+
+nvm install ${NVER} || exit 1
+nvm use ${NVER} || exit 1
+npm install -g node-pre-gyp || exit 1
+npm install -g nw-gyp || exit 1
+
+# -----------
+# Folders
+# -----------
+
+ROOT="${PWD}"
+WORK_NAME=work
+WORK="${ROOT}/${WORK_NAME}"
+DOWNLOADS="${WORK}/downloads"
+RELEASES="${WORK}/releases"
+BIN="${WORK}/bin"
+
+mkdir -p "${DOWNLOADS}" "${RELEASES}" "${BIN}" || exit 1
+rm -rf "${BIN}/"*.{deb,tar.gz} # Clean up
+
+# -----------
+# Downloads
+# -----------
+
+cd "${DOWNLOADS}"
+curl -O https://dl.nwjs.io/${NW_RELEASE}/${NW_GZ} || exit 1
+tar xzf ${NW_GZ} || exit 1
+rm ${NW_GZ}
+curl -O http://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz || exit 1
+tar xzf node-${NVER}-linux-x64.tar.gz || exit 1
+rm node-${NVER}-linux-x64.tar.gz
+
+# -----------
+# Releases
+# -----------
+
+# Prepare sources
+mkdir -p "${RELEASES}/duniter" || exit 1
+cp -r $(find "${ROOT}" -mindepth 1 -maxdepth 1 ! -name "${WORK_NAME}") "${RELEASES}/duniter" || exit 1
+cd "${RELEASES}/duniter"
+rm -Rf .gitignore .git || exit 1 # Remove git files
+
+# Build
+echo ">> VM: building modules..."
+npm install || exit 1
+
+# Duniter UI
+npm install "duniter-ui@${DUNITER_UI_VER}" || exit 1
+npm prune --production || exit 1
+
+rm -rf release coverage test # Non production folders
+cp -r "${RELEASES}/duniter" "${RELEASES}/desktop_" || exit 1
+cp -r "${RELEASES}/duniter" "${RELEASES}/server_" || exit 1
+
+# -------------------------------------
+# Build Desktop version against nw.js
+# -------------------------------------
+
+nw_copy() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/Release/node-webkit-v${NW_VERSION}-linux-x64/${1}.node \
+		lib/binding/Release/node-v${ADDON_VERSION}-linux-x64/${1}.node || exit 1
+}
+
+nw_copy_node() {
+	[[ -z ${1} ]] && exit 1
+	cp lib/binding/node-webkit-v${NW_VERSION}-linux-x64/node_${1}.node \
+		lib/binding/node-v${ADDON_VERSION}-linux-x64/node_${1}.node || exit 1
+}
+
+nw_compile() {
+	[[ -z ${1} ]] && exit 1
+	cd ${1} || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} configure || exit 1
+	node-pre-gyp --runtime=node-webkit --target=${NW_VERSION} build || exit 1
+	[[ -z ${2} ]] || ${2} ${1}
+	cd ..
+}
+
+echo "${NW_RELEASE}"
+
+# FIX: bug of nw.js, we need to patch first.
+# TODO: remove this patch once a correct version of Nw.js is out (NodeJS 8 or 9 if the above modules are compliant)
+cd "${RELEASES}/desktop_/node_modules/wotb"
+node-pre-gyp --runtime=node-webkit --target=$NW_VERSION configure \
+  || echo "This failure is expected"
+cp ${ROOT}/release/arch/linux/0.24.4_common.gypi ~/.nw-gyp/0.24.4/common.gypi || exit 1
+
+cd "${RELEASES}/desktop_/node_modules/"
+nw_compile wotb nw_copy
+nw_compile naclb nw_copy
+nw_compile scryptb nw_copy
+nw_compile sqlite3 nw_copy_node
+
+# Unused binaries
+cd "${RELEASES}/desktop_/"
+rm -rf node_modules/sqlite3/build
+
+# --------------------------------
+# Embed nw.js in desktop version
+# --------------------------------
+
+# Install Nw.js
+mkdir -p "${RELEASES}/desktop_release" || exit 1
+cp -r "${DOWNLOADS}/${NW}/"* "${RELEASES}/desktop_release/" || exit 1
+# Embed Node.js with Nw.js to make Duniter modules installable
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/lib" "${RELEASES}/desktop_release/" || exit 1
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/include" "${RELEASES}/desktop_release/" || exit 1
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64/bin" "${RELEASES}/desktop_release/" || exit 1
+# Add some specific files for GUI
+cp "${RELEASES}/desktop_/gui/"* "${RELEASES}/desktop_release/" || exit 1
+# Add Duniter sources
+cp -R "${RELEASES}/desktop_/"* "${RELEASES}/desktop_release/" || exit 1
+# Insert Nw specific fields while they do not exist (1.3.3)
+sed -i "s/\"main\": \"index.js\",/\"main\": \"index.html\",/" "${RELEASES}/desktop_release/package.json" || exit 1
+# Add links for Node.js + NPM
+cd "${RELEASES}/desktop_release/bin"
+ln -s "../lib/node_modules/npm/bin/npm-cli.js" "./npm" -f || exit 1
+cd ..
+ln -s "./bin/node" "node" -f || exit 1
+ln -s "./bin/npm" "npm" -f || exit 1
+#sed -i "s/\"node-main\": \"\.\.\/sources\/bin\/duniter\",/\"node-main\": \".\/bin\/duniter\",/" "$RELEASES/desktop_release/package.json"
+rm -rf "${RELEASES}/desktop_"
+mv "${RELEASES}/desktop_release" "${RELEASES}/desktop_"
+
+# ---------------------------------
+# Embed node.js in server version
+# ---------------------------------
+
+cp -r "${DOWNLOADS}/node-${NVER}-linux-x64" "${RELEASES}/server_/node" || exit 1
+
+# ---------------
+# Build .tar.gz
+# ---------------
+
+cd "${RELEASES}/desktop_"
+tar czf "${BIN}/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz" * || exit 1
+
+# -----------------------
+# Build Debian packages
+# -----------------------
+
+# Parameters
+# 1: either "server" or "desktop".
+# 2: package name for Debian.
+build_deb_pack() {
+	rm -rf "${RELEASES}/duniter-x64"
+	mkdir "${RELEASES}/duniter-x64" || exit 1
+	cp -r "${ROOT}/release/arch/linux/debian/package/"* "${RELEASES}/duniter-x64" || exit 1
+	if [[ "${1}" == "desktop" ]]; then
+		cp -r "${ROOT}/release/contrib/desktop/"* "${RELEASES}/duniter-x64" || exit 1
+	fi
+	mkdir -p "${RELEASES}/duniter-x64/opt/duniter/" || exit 1
+	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"post* || exit 1
+	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"pre* || exit 1
+	sed -i "s/Version:.*/Version:${DUNITER_DEB_VER}/g" "${RELEASES}/duniter-x64/DEBIAN/control" || exit 1
+
+	cd "${RELEASES}/${1}_/"
+	zip -qr "${RELEASES}/duniter-x64/opt/duniter/duniter.zip" * || exit 1
+
+	sed -i "s/Package: .*/Package: ${2}/g" "${RELEASES}/duniter-x64/DEBIAN/control" || exit 1
+
+	cd "${RELEASES}"
+	fakeroot dpkg-deb --build duniter-x64 || exit 1
+	mv duniter-x64.deb "${BIN}/duniter-${1}-${DUNITER_TAG}-linux-x64.deb" || exit 1
+}
+
+build_deb_pack desktop duniter-desktop
+build_deb_pack server duniter
diff --git a/release/arch/debian/package/DEBIAN/control b/release/arch/linux/debian/package/DEBIAN/control
similarity index 100%
rename from release/arch/debian/package/DEBIAN/control
rename to release/arch/linux/debian/package/DEBIAN/control
diff --git a/release/arch/debian/package/DEBIAN/postinst b/release/arch/linux/debian/package/DEBIAN/postinst
similarity index 88%
rename from release/arch/debian/package/DEBIAN/postinst
rename to release/arch/linux/debian/package/DEBIAN/postinst
index 8938ddb32..dcf677ed0 100755
--- a/release/arch/debian/package/DEBIAN/postinst
+++ b/release/arch/linux/debian/package/DEBIAN/postinst
@@ -5,9 +5,9 @@ DUN_SOURCES=$DUN_ROOT/
 mkdir -p $DUN_SOURCES
 
 # Duniter sources extraction
-if [[ -f $DUN_ROOT/duniter-desktop.nw ]]; then
-  unzip -q -d $DUN_SOURCES/ $DUN_ROOT/duniter-desktop.nw
-  rm -rf $DUN_ROOT/duniter-desktop.nw
+if [[ -f $DUN_ROOT/duniter.zip ]]; then
+  unzip -q -d $DUN_SOURCES/ $DUN_ROOT/duniter.zip
+  rm -rf $DUN_ROOT/duniter.zip
 fi
 
 # Duniter-Desktop
diff --git a/release/arch/debian/package/DEBIAN/prerm b/release/arch/linux/debian/package/DEBIAN/prerm
similarity index 100%
rename from release/arch/debian/package/DEBIAN/prerm
rename to release/arch/linux/debian/package/DEBIAN/prerm
diff --git a/release/arch/debian/package/usr/share/applications/duniter.desktop b/release/contrib/desktop/usr/share/applications/duniter.desktop
similarity index 100%
rename from release/arch/debian/package/usr/share/applications/duniter.desktop
rename to release/contrib/desktop/usr/share/applications/duniter.desktop
diff --git a/release/scripts/build.sh b/release/scripts/build.sh
index cb375c692..612bbbe3b 100755
--- a/release/scripts/build.sh
+++ b/release/scripts/build.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-BUILDER_TAG="v0.0.6"
+BUILDER_TAG="v1.0.0"
 
 TAG="$3"
 ORIGIN="$4"
@@ -43,31 +43,36 @@ make)
       echo ">> Build success."
     fi
     ;;
-  deb)
-    cd release/arch/debian
+  lin)
+    cd release/arch/linux
     if [[ ! -f "duniter-desktop-$TAG-linux-x64.deb" ]]; then
 
       #### PREPARE SOURCE CODE ####
-      rm -rf duniter-source
       # Clone from remote
       echo ">> VM: Cloning sources from ${ORIGIN}..."
       git clone "${ORIGIN}" duniter-source
-      if [ ${IS_LOCAL_TAG} -eq 1 ]; then
-        cd duniter-source
-        ./release/new_version.sh "$TAG"
-        cd ..
-      fi
+      cd duniter-source
+      [[ ${IS_LOCAL_TAG} -eq 1 ]] && ./release/new_version.sh "${TAG}"
+      git checkout "v${TAG}"
+      cd ..
 
       docker pull duniter/release-builder:${BUILDER_TAG}
-      docker run --rm -it -v ${PWD}:/dunidata duniter/release-builder:${BUILDER_TAG} ${TAG}
+cat <<EOF |
+      cd /builds/duniter-source
+      bash "release/arch/linux/build-lin.sh" "${TAG}" || exit 1
+      exit 0
+EOF
+      docker run --rm -i -v ${PWD}/duniter-source:/builds/duniter-source duniter/release-builder:${BUILDER_TAG}
       if [ ! $? -eq 0 ]; then
         echo ">> Something went wrong. Stopping build."
       else
+        mv duniter-source/work/bin/* .
         echo ">> Build success. Shutting the VM down."
       fi
+      rm -rf duniter-source
       echo ">> VM closed."
     else
-      echo "Debian binaries already built. Ready for upload."
+      echo "Linux binaries already built. Ready for upload."
     fi
     ;;
   win)
-- 
GitLab


From d3d6de27042a05fd5f26710ec7f9bbbefe0b1326 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 15:50:17 +0000
Subject: [PATCH 22/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 0f1fa048e..a867cf961 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -44,8 +44,7 @@ build_linux:test:
   stage: build
   image: duniter/release-builder:v1.0.0
   tags:
-    - nodejs
-    - nwjs
+    - redshift-duniter-builder
   script:
     - bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
   artifacts:
@@ -59,8 +58,7 @@ build_linux:deploy:
   stage: build
   image: duniter/release-builder:v1.0.0
   tags:
-    - nodejs
-    - nwjs
+    - redshift-duniter-builder
   script:
     - bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
   artifacts:
-- 
GitLab


From 9072651b7bad06dec7c7fa7e3ba007e69e5a4276 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 17:25:16 +0000
Subject: [PATCH 23/80] upgrade BUILDER_TAG to v1.0.1

---
 release/scripts/build.sh | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/release/scripts/build.sh b/release/scripts/build.sh
index 612bbbe3b..6a0ae2940 100755
--- a/release/scripts/build.sh
+++ b/release/scripts/build.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-BUILDER_TAG="v1.0.0"
+BUILDER_TAG="v1.0.1"
 
 TAG="$3"
 ORIGIN="$4"
-- 
GitLab


From e4af1721609b2b46ee7bdf0b88793e0701c7e588 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 18:36:43 +0000
Subject: [PATCH 24/80] Upgrade builder image to v1.0.1

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index a867cf961..8e2b49506 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -42,7 +42,7 @@ enforce_readme:
 
 build_linux:test:
   stage: build
-  image: duniter/release-builder:v1.0.0
+  image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
   script:
@@ -56,7 +56,7 @@ build_linux:test:
 
 build_linux:deploy:
   stage: build
-  image: duniter/release-builder:v1.0.0
+  image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
   script:
-- 
GitLab


From 560bf7e6a932a9f3dc8aac400fe89a9e03de421c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 18:40:50 +0000
Subject: [PATCH 25/80] remove enforce-readme job for tests

---
 .gitlab-ci.yml | 19 -------------------
 1 file changed, 19 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8e2b49506..189d749fa 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -21,25 +21,6 @@ push_to_github:
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
 
-enforce_readme:
-    stage: github-sync
-    variables:
-        GIT_STRATEGY: none
-    tags:
-        - redshift
-    script:
-      - rm -rf ./*
-      - rm -rf .git
-      - git clone $GITHUB_URL_AND_KEY .
-      - git config --global user.email "contact@duniter.org"
-      - git config --global user.name "Duniter"
-      - git checkout master
-      - cat .github/github_disclaimer.md > README.md.new
-      - cat README.md >> README.md.new
-      - mv README.md.new README.md
-      - git commit -am "Enforce github readme"
-      - git push origin master
-
 build_linux:test:
   stage: build
   image: duniter/release-builder:v1.0.1
-- 
GitLab


From 9ff581eed0d7c864754c0a5f565f6244c48f5f00 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 20:08:19 +0000
Subject: [PATCH 26/80] add release-message job & move tests before releases &
 create build job

---
 .gitlab-ci.yml | 48 ++++++++++++++++++++++++++++++++++++------------
 1 file changed, 36 insertions(+), 12 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 189d749fa..ebba64453 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -2,6 +2,8 @@ stages:
   - github-sync
   - build
   - test
+  - release
+  - release-message
 
 push_to_github:
     stage: github-sync
@@ -20,9 +22,30 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
-
-build_linux:test:
+        
+build:
   stage: build
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
+
+test:
+  stage: test
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
+    - yarn test
+
+release_linux:test:
+  stage: release
   image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
@@ -35,8 +58,8 @@ build_linux:test:
   except:
     - tags
 
-build_linux:deploy:
-  stage: build
+release_linux:deploy:
+  stage: release
   image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
@@ -47,14 +70,15 @@ build_linux:deploy:
       - work/bin/
   only:
     - tags
-
-test:
-  stage: test
+    
+enforce-message:
+  stage: release-message
   tags:
     - redshift
-  before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
+  variables:
+    JOB_ARTIFACTS: 'build'
+    EXPECTED_ARTIFACTS: '["duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
   script:
-    - yarn
-    - yarn test
+    - python3 .gitlab/releaser.py
+  only:
+    - tags
-- 
GitLab


From f2012455c891d3654c530d37fadc669e672b223d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 20:10:32 +0000
Subject: [PATCH 27/80] Add releaser.py

---
 .gitlab/releaser.py | 154 ++++++++++++++++++++++++++++++++++++++++++++
 1 file changed, 154 insertions(+)
 create mode 100644 .gitlab/releaser.py

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
new file mode 100644
index 000000000..97dd7e948
--- /dev/null
+++ b/.gitlab/releaser.py
@@ -0,0 +1,154 @@
+#!/usr/bin/python3
+'''
+This module is meant to overload the release note in gitlab for the current project.
+Expects to find in environment following variables:
+  - CI_PROJECT_URL - Automatically set by gitlab-ci
+  - CI_COMMIT_TAG - Automatically set by gitlab-ci
+  - CI_PROJECT_ID - Automatically set by gitlab-ci
+  - CI_COMMIT_TAG - Automatically set by gitlab-ci
+  - RELEASER_TOKEN - Token used by technical user
+  - JOB_ARTIFACTS - String containing job name containing all artifacts, to set manually
+  - EXPECTED_ARTIFACTS - List containing all artifacts generated to set manually
+'''
+
+import math
+import urllib.request
+import urllib.error
+import json
+import os
+import jinja2
+
+def convert_size(size_bytes):
+    '''Print proper size'''
+    if size_bytes == 0:
+        return '0B'
+    size_name = ('B', 'KB', 'MB', 'GB', 'TB', 'PB', 'EB', 'ZB', 'YB')
+    i = int(math.floor(math.log(size_bytes, 1024)))
+    power = math.pow(1024, i)
+    size = round(size_bytes / power, 2)
+    return '%s %s' % (size, size_name[i])
+
+def get_current_message():
+    '''Get current release message'''
+    releaser_token = os.environ['RELEASER_TOKEN']
+    ci_project_id = os.environ['CI_PROJECT_ID']
+    ci_commit_tag = os.environ['CI_COMMIT_TAG']
+    tag_url = 'https://git.duniter.org/api/v4/projects/'
+    tag_url += ci_project_id
+    tag_url += '/repository/tags/'
+    tag_url += ci_commit_tag
+    request = urllib.request.Request(tag_url)
+    request.add_header('Private-Token', releaser_token)
+    response = urllib.request.urlopen(request)
+    data = json.load(response)
+    if data['release'] is None:
+        return False, ''
+    else:
+        return True, data['release']['description'].split('# Downloads')[0]
+
+def build_artifact_url(artifact, source):
+    '''Given an artifact name, builds the url to download it'''
+    job_artifacts = os.environ['JOB_ARTIFACTS']
+    ci_project_url = os.environ['CI_PROJECT_URL']
+    ci_commit_tag = os.environ['CI_COMMIT_TAG']
+    if source:
+        source_url = ci_project_url
+        source_url += '/repository/'
+        source_url += ci_commit_tag
+        source_url += '/archive.'
+        source_url += artifact
+        return source_url
+    else:
+        artifact_url = ci_project_url
+        artifact_url += '/-/jobs/artifacts/'
+        artifact_url += ci_commit_tag
+        artifact_url += '/raw/'
+        artifact_url += artifact
+        artifact_url += '?job='
+        artifact_url += job_artifacts
+        return artifact_url
+
+def get_artifact_weight(location):
+    '''Retrieve size of artifacts'''
+    size = os.path.getsize(location)
+    return convert_size(int(size))
+
+
+def build_compiled_message(current_message):
+    '''Create a new release message using the release template'''
+
+    expected_artifacts = os.environ['EXPECTED_ARTIFACTS']
+    try:
+        expected_artifacts = json.loads(expected_artifacts)
+    except json.decoder.JSONDecodeError:
+        print('CRITICAL EXPECTED_ARTIFACTS environment variable JSON probably malformed')
+        print('CRITICAL Correct : \'["test_linux.txt","test_windows.txt"]\' ')
+        print('CRITICAL Not Correct: "[\'test_linux.txt\',\'test_windows.txt\']" ')
+        exit(1)
+    artifacts_list = []
+    for artifact in expected_artifacts:
+        artifact_dict = {
+            'name': artifact,
+            'url': build_artifact_url(artifact, False),
+            'size': get_artifact_weight(artifact),
+            'icon': ':package:'
+        }
+        artifacts_list.append(artifact_dict)
+    expected_sources = ['tar.gz', 'zip']
+    for source in expected_sources:
+        source_url = build_artifact_url(source, True)
+        artifact_dict = {
+            'name': 'Source code ' + source,
+            'url': source_url,
+            'size': get_artifact_weight(source_url),
+            'icon': ':compression:'
+        }
+        artifacts_list.append(artifact_dict)
+
+    j2_env = jinja2.Environment(
+        loader=jinja2.FileSystemLoader(
+            os.path.dirname(os.path.abspath(__file__))
+            ),
+        trim_blocks=True
+        )
+    # pylint: disable=maybe-no-member
+    template = j2_env.get_template('release_template.md')
+    return template.render(
+        current_message=current_message,
+        artifacts=artifacts_list
+    )
+
+
+def send_compiled_message(exists_release, compiled_message):
+    '''Send to gitlab new message'''
+    releaser_token = os.environ['RELEASER_TOKEN']
+    ci_project_id = os.environ['CI_PROJECT_ID']
+    ci_commit_tag = os.environ['CI_COMMIT_TAG']
+    release_url = 'https://git.duniter.org/api/v4/projects/'
+    release_url += ci_project_id
+    release_url += '/repository/tags/'
+    release_url += ci_commit_tag
+    release_url += '/release'
+    if exists_release:
+        # We need to send a PUT request
+        method = 'PUT'
+    else:
+        # We need to send a POST request
+        method = 'POST'
+    send_data = {
+        'tag_name':ci_commit_tag,
+        'description':compiled_message
+        }
+    send_data_serialized = json.dumps(send_data).encode('utf-8')
+    request = urllib.request.Request(release_url, data=send_data_serialized, method=method)
+    request.add_header('Private-Token', releaser_token)
+    request.add_header('Content-Type', 'application/json')
+    urllib.request.urlopen(request)
+
+def main():
+    '''Execute main scenario'''
+    exists_release, current_message = get_current_message()
+    compiled_message = build_compiled_message(current_message)
+    send_compiled_message(exists_release, compiled_message)
+    print('Artifacts uploaded successfully')
+main()
-- 
GitLab


From 3f0160f7b3bdd6be8e09b571140ed0839af61c64 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 20:12:22 +0000
Subject: [PATCH 28/80] Add release template

---
 .gitlab/release_template.md | 9 +++++++++
 1 file changed, 9 insertions(+)
 create mode 100644 .gitlab/release_template.md

diff --git a/.gitlab/release_template.md b/.gitlab/release_template.md
new file mode 100644
index 000000000..99b23ff46
--- /dev/null
+++ b/.gitlab/release_template.md
@@ -0,0 +1,9 @@
+{{current_message}}
+
+# Downloads
+{% for artifact in artifacts %}
+***
+[{{artifact.icon}} {{artifact.name}}]({{artifact.url}})  
+_{{artifact.size}}_
+***
+{% endfor %}
-- 
GitLab


From d8aa8657252547fa411dfdefd25f4b43dfbf97d9 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 20:50:59 +0000
Subject: [PATCH 29/80] delay the chaining trigger to 1st Mar

---
 app/lib/rules/local_rules.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts
index 5a6081124..1ce1ebd17 100644
--- a/app/lib/rules/local_rules.ts
+++ b/app/lib/rules/local_rules.ts
@@ -385,7 +385,7 @@ export const LOCAL_RULES_FUNCTIONS = {
     const sindex = Indexer.sindex(index)
     const max = getMaxTransactionDepth(sindex)
     //
-    const allowedMax = block.medianTime > 1517443200 ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 1
+    const allowedMax = block.medianTime > 1519862400 ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 1
     if (max > allowedMax) {
       throw "The maximum transaction chaining length per block is " + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH
     }
-- 
GitLab


From a421e579319b66b5e078ba3600385512da03b8b2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Wed, 10 Jan 2018 20:58:38 +0000
Subject: [PATCH 30/80] Update releaser.py

---
 .gitlab/releaser.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 97dd7e948..4a43dafea 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -94,7 +94,7 @@ def build_compiled_message(current_message):
             'icon': ':package:'
         }
         artifacts_list.append(artifact_dict)
-    expected_sources = ['tar.gz', 'zip']
+    expected_sources = ['tar.gz', 'deb']
     for source in expected_sources:
         source_url = build_artifact_url(source, True)
         artifact_dict = {
-- 
GitLab


From 8fb9b5329bd16535bd9f825cfd92801ae083c82b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Thu, 11 Jan 2018 12:38:14 +0000
Subject: [PATCH 31/80] manual releases jobs

---
 .gitlab-ci.yml | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index ebba64453..d9d899bc1 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -55,8 +55,7 @@ release_linux:test:
     paths:
       - work/bin/
     expire_in: 8h
-  except:
-    - tags
+  when: manual
 
 release_linux:deploy:
   stage: release
@@ -68,8 +67,9 @@ release_linux:deploy:
   artifacts:
     paths:
       - work/bin/
+  when: manual
   only:
-    - tags
+  - master
     
 enforce-message:
   stage: release-message
@@ -80,5 +80,6 @@ enforce-message:
     EXPECTED_ARTIFACTS: '["duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
   script:
     - python3 .gitlab/releaser.py
+  when: manual
   only:
-    - tags
+  - master
-- 
GitLab


From 3ca885ea90f7aaed2b31a05e037d5383ee72161f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Thu, 11 Jan 2018 13:30:12 +0000
Subject: [PATCH 32/80] temporary changes to test the release-message job

---
 .gitlab-ci.yml | 26 +++-----------------------
 1 file changed, 3 insertions(+), 23 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index d9d899bc1..14ebd9e23 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,5 @@
 stages:
   - github-sync
-  - build
-  - test
   - release
   - release-message
 
@@ -22,27 +20,6 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
-        
-build:
-  stage: build
-  tags:
-    - redshift
-  before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
-  script:
-    - yarn
-
-test:
-  stage: test
-  tags:
-    - redshift
-  before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
-  script:
-    - yarn
-    - yarn test
 
 release_linux:test:
   stage: release
@@ -56,6 +33,9 @@ release_linux:test:
       - work/bin/
     expire_in: 8h
   when: manual
+  except:
+    - tags
+  
 
 release_linux:deploy:
   stage: release
-- 
GitLab


From 29e313e6b2a763ae177089f9487571db500cb77f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Thu, 11 Jan 2018 13:35:31 +0000
Subject: [PATCH 33/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 14ebd9e23..b3390a77d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -49,6 +49,7 @@ release_linux:deploy:
       - work/bin/
   when: manual
   only:
+  - tags
   - master
     
 enforce-message:
@@ -62,4 +63,5 @@ enforce-message:
     - python3 .gitlab/releaser.py
   when: manual
   only:
+  - tags
   - master
-- 
GitLab


From aeb004c3d381912094f21954eaac4bac9fd6cfa7 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Thu, 11 Jan 2018 13:50:44 +0000
Subject: [PATCH 34/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b3390a77d..718dd6777 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -57,7 +57,7 @@ enforce-message:
   tags:
     - redshift
   variables:
-    JOB_ARTIFACTS: 'build'
+    JOB_ARTIFACTS: 'release'
     EXPECTED_ARTIFACTS: '["duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
   script:
     - python3 .gitlab/releaser.py
-- 
GitLab


From 61fa2161d36faffba21b9b6b7fe5d5d7ee34eb4a Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Thu, 11 Jan 2018 14:28:34 +0000
Subject: [PATCH 35/80] use py3 image for release-message job

---
 .gitlab-ci.yml | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 718dd6777..e38cc684c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -54,8 +54,7 @@ release_linux:deploy:
     
 enforce-message:
   stage: release-message
-  tags:
-    - redshift
+  image: tensorflow/tensorflow:latest-py3
   variables:
     JOB_ARTIFACTS: 'release'
     EXPECTED_ARTIFACTS: '["duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
-- 
GitLab


From c813a6c230cb3436a91e4c1728b6295f37a2fc4d Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Thu, 11 Jan 2018 15:20:47 +0000
Subject: [PATCH 36/80] restore build and tests jobs

---
 .gitlab-ci.yml | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e38cc684c..21aa1a7dc 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,5 +1,7 @@
 stages:
   - github-sync
+  - build
+  - test
   - release
   - release-message
 
@@ -20,6 +22,27 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
+        
+build:
+  stage: build
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
+
+test:
+  stage: test
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
+    - yarn test
 
 release_linux:test:
   stage: release
-- 
GitLab


From 40bdc0ada356f64872c352d1958ea7863c24d9dd Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?St=C3=A9phane=20Veyret?= <sveyret@axway.com>
Date: Fri, 12 Jan 2018 13:23:32 +0100
Subject: [PATCH 37/80] Change contrib directory to extra

The contrib name (taken from other open-source projects) seems a bit confusing. Extra will probably be clearer.
The Debian data were moved under the linux directory, but are also used for ARM. As these are files used to build the package (and not vital to the functioning of duniter),
they have their place in extra.
The Debian data are used by many scripts which were not updated after the debian folder move.
---
 release/arch/arm/build-arm.sh                          |  2 +-
 release/arch/linux/build-lin.sh                        |  4 ++--
 .../linux => extra}/debian/package/DEBIAN/control      |  0
 .../linux => extra}/debian/package/DEBIAN/postinst     |  0
 .../{arch/linux => extra}/debian/package/DEBIAN/prerm  |  0
 .../desktop/usr/share/applications/duniter.desktop     |  0
 release/new_prerelease.sh                              | 10 +++++-----
 release/new_version.sh                                 |  4 ++--
 8 files changed, 10 insertions(+), 10 deletions(-)
 rename release/{arch/linux => extra}/debian/package/DEBIAN/control (100%)
 rename release/{arch/linux => extra}/debian/package/DEBIAN/postinst (100%)
 rename release/{arch/linux => extra}/debian/package/DEBIAN/prerm (100%)
 rename release/{contrib => extra}/desktop/usr/share/applications/duniter.desktop (100%)

diff --git a/release/arch/arm/build-arm.sh b/release/arch/arm/build-arm.sh
index 68d0bcf45..fc0891fd3 100755
--- a/release/arch/arm/build-arm.sh
+++ b/release/arch/arm/build-arm.sh
@@ -86,7 +86,7 @@ mkdir -p duniter_release
 cp -R ${SRC}/* duniter_release/
 
 # Creating DEB packaging
-mv duniter_release/release/arch/debian/package duniter-${ARCH}
+mv duniter_release/release/extra/debian/package duniter-${ARCH}
 mkdir -p duniter-${ARCH}/opt/duniter/
 chmod 755 duniter-${ARCH}/DEBIAN/post*
 chmod 755 duniter-${ARCH}/DEBIAN/pre*
diff --git a/release/arch/linux/build-lin.sh b/release/arch/linux/build-lin.sh
index e01deaa4f..6abf5c16f 100644
--- a/release/arch/linux/build-lin.sh
+++ b/release/arch/linux/build-lin.sh
@@ -173,9 +173,9 @@ tar czf "${BIN}/duniter-desktop-${DUNITER_TAG}-linux-x64.tar.gz" * || exit 1
 build_deb_pack() {
 	rm -rf "${RELEASES}/duniter-x64"
 	mkdir "${RELEASES}/duniter-x64" || exit 1
-	cp -r "${ROOT}/release/arch/linux/debian/package/"* "${RELEASES}/duniter-x64" || exit 1
+	cp -r "${ROOT}/release/extra/debian/package/"* "${RELEASES}/duniter-x64" || exit 1
 	if [[ "${1}" == "desktop" ]]; then
-		cp -r "${ROOT}/release/contrib/desktop/"* "${RELEASES}/duniter-x64" || exit 1
+		cp -r "${ROOT}/release/extra/desktop/"* "${RELEASES}/duniter-x64" || exit 1
 	fi
 	mkdir -p "${RELEASES}/duniter-x64/opt/duniter/" || exit 1
 	chmod 755 "${RELEASES}/duniter-x64/DEBIAN/"post* || exit 1
diff --git a/release/arch/linux/debian/package/DEBIAN/control b/release/extra/debian/package/DEBIAN/control
similarity index 100%
rename from release/arch/linux/debian/package/DEBIAN/control
rename to release/extra/debian/package/DEBIAN/control
diff --git a/release/arch/linux/debian/package/DEBIAN/postinst b/release/extra/debian/package/DEBIAN/postinst
similarity index 100%
rename from release/arch/linux/debian/package/DEBIAN/postinst
rename to release/extra/debian/package/DEBIAN/postinst
diff --git a/release/arch/linux/debian/package/DEBIAN/prerm b/release/extra/debian/package/DEBIAN/prerm
similarity index 100%
rename from release/arch/linux/debian/package/DEBIAN/prerm
rename to release/extra/debian/package/DEBIAN/prerm
diff --git a/release/contrib/desktop/usr/share/applications/duniter.desktop b/release/extra/desktop/usr/share/applications/duniter.desktop
similarity index 100%
rename from release/contrib/desktop/usr/share/applications/duniter.desktop
rename to release/extra/desktop/usr/share/applications/duniter.desktop
diff --git a/release/new_prerelease.sh b/release/new_prerelease.sh
index bd5d78caa..1a3a0a520 100755
--- a/release/new_prerelease.sh
+++ b/release/new_prerelease.sh
@@ -42,13 +42,13 @@ for asset in $EXPECTED_ASSETS; do
 
     echo "Missing asset: $asset"
 
-    # Debian
+    # Linux
     if [[ $asset == *"linux-x64.deb" ]] || [[ $asset == *"linux-x64.tar.gz" ]]; then
       if [[ $ARCH == "x86_64" ]]; then
-        echo "Starting Debian build..."
-        ./release/scripts/build.sh make deb $TAG
-        DEB_PATH="$PWD/release/arch/debian/$asset"
-        node ./release/scripts/upload-release.js $TOKEN $TAG $DEB_PATH
+        echo "Starting Linux build..."
+        ./release/scripts/build.sh make lin $TAG
+        LIN_PATH="$PWD/release/arch/linux/$asset"
+        node ./release/scripts/upload-release.js $TOKEN $TAG $LIN_PATH
       else
         echo "This computer cannot build this asset, required architecture is 'x86_64'. Skipping."
       fi
diff --git a/release/new_version.sh b/release/new_version.sh
index fbbda1bdc..6195860cb 100755
--- a/release/new_version.sh
+++ b/release/new_version.sh
@@ -8,7 +8,7 @@ if [[ $1 =~ ^[0-9]+.[0-9]+.[0-9]+((a|b)[0-9]+)?$ ]]; then
   echo "Changing to version: $1"
   # Change the version in package.json and test file
   sed -i "s/version\": .*/version\": \"$1\",/g" package.json
-  sed -i "s/Version: .*/Version: $1/g" release/arch/debian/package/DEBIAN/control
+  sed -i "s/Version: .*/Version: $1/g" release/extra/debian/package/DEBIAN/control
   sed -i "s/version').equal('.*/version').equal('$1');/g" test/integration/branches.js
   sed -i "s/ release: .*/ release: v$1/g" appveyor.yml
 
@@ -21,7 +21,7 @@ if [[ $1 =~ ^[0-9]+.[0-9]+.[0-9]+((a|b)[0-9]+)?$ ]]; then
 
   # Commit
   git reset HEAD
-  git add package.json test/integration/branches.js gui/index.html release/arch/debian/package/DEBIAN/control release/arch/windows/duniter.iss
+  git add package.json test/integration/branches.js gui/index.html release/extra/debian/package/DEBIAN/control release/arch/windows/duniter.iss
   git commit -m "v$1"
   git tag "v$1"
 else
-- 
GitLab


From 2472c075affd1745c53f041ad7b2799bfb30ca5c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Fri, 12 Jan 2018 18:38:05 +0000
Subject: [PATCH 38/80] Update releaser.py

---
 .gitlab/releaser.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 4a43dafea..37cdf8398 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -38,6 +38,7 @@ def get_current_message():
     tag_url += '/repository/tags/'
     tag_url += ci_commit_tag
     request = urllib.request.Request(tag_url)
+    print(tag_url)
     request.add_header('Private-Token', releaser_token)
     response = urllib.request.urlopen(request)
     data = json.load(response)
-- 
GitLab


From 23920065f5c5b52f243af0e67844dd1f9c35b5b5 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sat, 13 Jan 2018 15:56:57 +0000
Subject: [PATCH 39/80] debug : print api response

---
 .gitlab/releaser.py | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 37cdf8398..ed7d375e9 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -40,6 +40,7 @@ def get_current_message():
     request = urllib.request.Request(tag_url)
     print(tag_url)
     request.add_header('Private-Token', releaser_token)
+    print(response)
     response = urllib.request.urlopen(request)
     data = json.load(response)
     if data['release'] is None:
-- 
GitLab


From b8a93a1741ae532e9fd69ec163410d9df77a2b32 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sat, 13 Jan 2018 16:40:10 +0000
Subject: [PATCH 40/80] Update releaser.py

---
 .gitlab/releaser.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index ed7d375e9..0f777a4c1 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -40,8 +40,8 @@ def get_current_message():
     request = urllib.request.Request(tag_url)
     print(tag_url)
     request.add_header('Private-Token', releaser_token)
-    print(response)
     response = urllib.request.urlopen(request)
+    print(response)
     data = json.load(response)
     if data['release'] is None:
         return False, ''
-- 
GitLab


From 3dd22198a8debfe981f04c767bac191c5e0dcd20 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sat, 13 Jan 2018 16:40:39 +0000
Subject: [PATCH 41/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 21 ---------------------
 1 file changed, 21 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 21aa1a7dc..66d1bc031 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -22,27 +22,6 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
-        
-build:
-  stage: build
-  tags:
-    - redshift
-  before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
-  script:
-    - yarn
-
-test:
-  stage: test
-  tags:
-    - redshift
-  before_script:
-    - export NVM_DIR="$HOME/.nvm"
-    - . "$NVM_DIR/nvm.sh"
-  script:
-    - yarn
-    - yarn test
 
 release_linux:test:
   stage: release
-- 
GitLab


From b59018bbf240618bf4590db730c4c905d006c159 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sun, 14 Jan 2018 16:17:20 +0000
Subject: [PATCH 42/80] Update releaser.py

---
 .gitlab/releaser.py | 4 ----
 1 file changed, 4 deletions(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 0f777a4c1..72627c960 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -30,7 +30,6 @@ def convert_size(size_bytes):
 
 def get_current_message():
     '''Get current release message'''
-    releaser_token = os.environ['RELEASER_TOKEN']
     ci_project_id = os.environ['CI_PROJECT_ID']
     ci_commit_tag = os.environ['CI_COMMIT_TAG']
     tag_url = 'https://git.duniter.org/api/v4/projects/'
@@ -38,10 +37,7 @@ def get_current_message():
     tag_url += '/repository/tags/'
     tag_url += ci_commit_tag
     request = urllib.request.Request(tag_url)
-    print(tag_url)
-    request.add_header('Private-Token', releaser_token)
     response = urllib.request.urlopen(request)
-    print(response)
     data = json.load(response)
     if data['release'] is None:
         return False, ''
-- 
GitLab


From 1a033fb1408883d6d9acdcb6e3cf3b4f2235d513 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sun, 14 Jan 2018 16:41:58 +0000
Subject: [PATCH 43/80] upgrade release-builder tag

---
 .gitlab-ci.yml | 8 +++++---
 1 file changed, 5 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 66d1bc031..bd8593c1b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -25,7 +25,7 @@ push_to_github:
 
 release_linux:test:
   stage: release
-  image: duniter/release-builder:v1.0.1
+  image: duniter/release-builder:v1.0.3
   tags:
     - redshift-duniter-builder
   script:
@@ -41,7 +41,7 @@ release_linux:test:
 
 release_linux:deploy:
   stage: release
-  image: duniter/release-builder:v1.0.1
+  image: duniter/release-builder:v1.0.3
   tags:
     - redshift-duniter-builder
   script:
@@ -56,7 +56,9 @@ release_linux:deploy:
     
 enforce-message:
   stage: release-message
-  image: tensorflow/tensorflow:latest-py3
+  image: duniter/release-builder:v1.0.3
+  tags:
+    - redshift-duniter-builder
   variables:
     JOB_ARTIFACTS: 'release'
     EXPECTED_ARTIFACTS: '["duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
-- 
GitLab


From 25ebcb305b64669b9fbcd3ae4e31238b298b5b1c Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Sun, 14 Jan 2018 16:53:48 +0000
Subject: [PATCH 44/80] build in image release-builder:v1.0.1

---
 .gitlab-ci.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index bd8593c1b..535a0686d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -25,7 +25,7 @@ push_to_github:
 
 release_linux:test:
   stage: release
-  image: duniter/release-builder:v1.0.3
+  image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
   script:
@@ -41,7 +41,7 @@ release_linux:test:
 
 release_linux:deploy:
   stage: release
-  image: duniter/release-builder:v1.0.3
+  image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
   script:
-- 
GitLab


From d62753d5523efebc292012ea203d14285084b72b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Mon, 15 Jan 2018 14:07:19 +0000
Subject: [PATCH 45/80] Add test.py

---
 .gitlab/test.py | 12 ++++++++++++
 1 file changed, 12 insertions(+)
 create mode 100644 .gitlab/test.py

diff --git a/.gitlab/test.py b/.gitlab/test.py
new file mode 100644
index 000000000..0b44babec
--- /dev/null
+++ b/.gitlab/test.py
@@ -0,0 +1,12 @@
+#!/usr/bin/env python3
+
+import urllib.request
+
+url = "https://g1-test.duniter.org/network/peers"
+
+request = urllib.request.Request(url)
+response = urllib.request.urlopen(request)
+
+print('status: %s' % response.status)
+print('headers:', response.headers)
+print('body:' + response.read().decode('utf-8'))
\ No newline at end of file
-- 
GitLab


From 8459aded54d93bf8493df5b62775a5789e38652b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Mon, 15 Jan 2018 14:09:39 +0000
Subject: [PATCH 46/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 11 +++++++++--
 1 file changed, 9 insertions(+), 2 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 535a0686d..2cb730662 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,6 @@
 stages:
   - github-sync
-  - build
-  - test
+  - debug
   - release
   - release-message
 
@@ -22,6 +21,14 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
+        
+debug:
+  stage: debug
+  image: tensorflow/tensorflow:latest-py3
+  tags:
+    - redshift-duniter-builder
+  script:
+    - python3 .gitlab/test.py
 
 release_linux:test:
   stage: release
-- 
GitLab


From 28e7ed6b069375cb67ce2ac8f944f84b45800578 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Mon, 15 Jan 2018 14:21:33 +0000
Subject: [PATCH 47/80] releaser.py : add debug print

---
 .gitlab/releaser.py | 11 ++++++++++-
 1 file changed, 10 insertions(+), 1 deletion(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 72627c960..154c46be4 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -38,6 +38,11 @@ def get_current_message():
     tag_url += ci_commit_tag
     request = urllib.request.Request(tag_url)
     response = urllib.request.urlopen(request)
+    '''debug'''
+    print('status: %s' % response.status)
+    print('headers:', response.headers)
+    print('body:' + response.read().decode('utf-8'))
+
     data = json.load(response)
     if data['release'] is None:
         return False, ''
@@ -141,7 +146,11 @@ def send_compiled_message(exists_release, compiled_message):
     request = urllib.request.Request(release_url, data=send_data_serialized, method=method)
     request.add_header('Private-Token', releaser_token)
     request.add_header('Content-Type', 'application/json')
-    urllib.request.urlopen(request)
+    response = urllib.request.urlopen(request)
+    '''debug'''
+    print('status: %s' % response.status)
+    print('headers:', response.headers)
+    print('body:' + response.read().decode('utf-8'))
 
 def main():
     '''Execute main scenario'''
-- 
GitLab


From 0f8d1ec682c1d6c69ff17cef251506c37c355da1 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Mon, 15 Jan 2018 14:31:59 +0000
Subject: [PATCH 48/80] restore py3 image

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2cb730662..7180a8e3d 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -63,7 +63,7 @@ release_linux:deploy:
     
 enforce-message:
   stage: release-message
-  image: duniter/release-builder:v1.0.3
+  image: tensorflow/tensorflow:latest-py3
   tags:
     - redshift-duniter-builder
   variables:
-- 
GitLab


From 611b70c1d83aa52ab9b2e6e09cfa7e4d2e76edb0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=C3=89lo=C3=AFs?= <elois@ifee.fr>
Date: Mon, 15 Jan 2018 14:59:13 +0000
Subject: [PATCH 49/80] releaser.py : del decode('utf-8')

---
 .gitlab/releaser.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 154c46be4..1a80a6f02 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -41,7 +41,7 @@ def get_current_message():
     '''debug'''
     print('status: %s' % response.status)
     print('headers:', response.headers)
-    print('body:' + response.read().decode('utf-8'))
+    print('body:' + response.read())
 
     data = json.load(response)
     if data['release'] is None:
@@ -150,7 +150,7 @@ def send_compiled_message(exists_release, compiled_message):
     '''debug'''
     print('status: %s' % response.status)
     print('headers:', response.headers)
-    print('body:' + response.read().decode('utf-8'))
+    print('body:' + response.read())
 
 def main():
     '''Execute main scenario'''
-- 
GitLab


From 9f9d6e6dfac2feefac99f715d7129931cda91b57 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 15:10:54 +0000
Subject: [PATCH 50/80] Delete test.py

---
 .gitlab/test.py | 12 ------------
 1 file changed, 12 deletions(-)
 delete mode 100644 .gitlab/test.py

diff --git a/.gitlab/test.py b/.gitlab/test.py
deleted file mode 100644
index 0b44babec..000000000
--- a/.gitlab/test.py
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/usr/bin/env python3
-
-import urllib.request
-
-url = "https://g1-test.duniter.org/network/peers"
-
-request = urllib.request.Request(url)
-response = urllib.request.urlopen(request)
-
-print('status: %s' % response.status)
-print('headers:', response.headers)
-print('body:' + response.read().decode('utf-8'))
\ No newline at end of file
-- 
GitLab


From 135cd13dd31ba0c959076caf0cd962822ab4051e Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 15:16:35 +0000
Subject: [PATCH 51/80] del debug job

---
 .gitlab-ci.yml | 9 ---------
 1 file changed, 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7180a8e3d..9c61c0595 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,6 +1,5 @@
 stages:
   - github-sync
-  - debug
   - release
   - release-message
 
@@ -21,14 +20,6 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
-        
-debug:
-  stage: debug
-  image: tensorflow/tensorflow:latest-py3
-  tags:
-    - redshift-duniter-builder
-  script:
-    - python3 .gitlab/test.py
 
 release_linux:test:
   stage: release
-- 
GitLab


From aff9d34db6a87eba859a87ed58e762867c23d18b Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 15:37:22 +0000
Subject: [PATCH 52/80] remove debug code

---
 .gitlab/releaser.py | 8 --------
 1 file changed, 8 deletions(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 1a80a6f02..84e94c521 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -38,10 +38,6 @@ def get_current_message():
     tag_url += ci_commit_tag
     request = urllib.request.Request(tag_url)
     response = urllib.request.urlopen(request)
-    '''debug'''
-    print('status: %s' % response.status)
-    print('headers:', response.headers)
-    print('body:' + response.read())
 
     data = json.load(response)
     if data['release'] is None:
@@ -147,10 +143,6 @@ def send_compiled_message(exists_release, compiled_message):
     request.add_header('Private-Token', releaser_token)
     request.add_header('Content-Type', 'application/json')
     response = urllib.request.urlopen(request)
-    '''debug'''
-    print('status: %s' % response.status)
-    print('headers:', response.headers)
-    print('body:' + response.read())
 
 def main():
     '''Execute main scenario'''
-- 
GitLab


From f0281624618b320e38163262f9cd8b0eee4fb43a Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 15:58:13 +0000
Subject: [PATCH 53/80] decode api response

---
 .gitlab/releaser.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 84e94c521..e66737936 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -39,7 +39,7 @@ def get_current_message():
     request = urllib.request.Request(tag_url)
     response = urllib.request.urlopen(request)
 
-    data = json.load(response)
+    data = json.load(response.read().decode())
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From b99199e8ba0136d0af9deaf5209dc88f4b6b0491 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 16:27:20 +0000
Subject: [PATCH 54/80] all artifacts expire if not kept manually

---
 .gitlab-ci.yml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9c61c0595..8d21f62ff 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -47,6 +47,7 @@ release_linux:deploy:
   artifacts:
     paths:
       - work/bin/
+    expire_in: 8h
   when: manual
   only:
   - tags
-- 
GitLab


From ad2835e9ae942e868f87aba75b774768349df61e Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 16:28:24 +0000
Subject: [PATCH 55/80] [fix] py3 error : 'str' object has no attribute 'read'

---
 .gitlab/releaser.py | 3 +--
 1 file changed, 1 insertion(+), 2 deletions(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index e66737936..2c40b0e9f 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -38,8 +38,7 @@ def get_current_message():
     tag_url += ci_commit_tag
     request = urllib.request.Request(tag_url)
     response = urllib.request.urlopen(request)
-
-    data = json.load(response.read().decode())
+    data = json.load(response.decode())
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From 737a9286e55a84abd9dcb0634d6bd36cb6e2bdfd Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 16:57:51 +0000
Subject: [PATCH 56/80] Add new file

---
 .gitlab/test.py | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)
 create mode 100644 .gitlab/test.py

diff --git a/.gitlab/test.py b/.gitlab/test.py
new file mode 100644
index 000000000..cb81bb442
--- /dev/null
+++ b/.gitlab/test.py
@@ -0,0 +1,17 @@
+#!/usr/bin/env python3
+
+import urllib.request
+
+def get_current_message():
+    '''Get current release message'''
+    url = "https://git.duniter.org/api/v4/projects/47/repository/tags/0.99.18"
+    request = urllib.request.Request(url)
+    response = urllib.request.urlopen(request)
+    print('status: %s' % response.status)
+    print('headers:', response.headers)
+    print('body:' + response.read().decode('utf-8'))
+    data = json.load(response.decode())
+    if data['release'] is None:
+        return False, ''
+    else:
+        return True, data['release']['description'].split('# Downloads')[0]
-- 
GitLab


From 2ba133783f6e30f655f7d2e155c151e79f94eb4a Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 16:59:36 +0000
Subject: [PATCH 57/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 9 +++++++++
 1 file changed, 9 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 8d21f62ff..2478c5e35 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,16 @@
 stages:
+  - debug
   - github-sync
   - release
   - release-message
+  
+debug:
+    stage: debug
+    image: tensorflow/tensorflow:latest-py3
+    tags:
+      - redshift-duniter-builder
+    script:
+      - python3 .gitlab/test.py
 
 push_to_github:
     stage: github-sync
-- 
GitLab


From 678fb554df396775c7addfc2596aad77740b4348 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 17:02:26 +0000
Subject: [PATCH 58/80] Update test.py

---
 .gitlab/test.py | 11 +++++++++++
 1 file changed, 11 insertions(+)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index cb81bb442..42970572b 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -1,6 +1,11 @@
 #!/usr/bin/env python3
 
+import math
 import urllib.request
+import urllib.error
+import json
+import os
+import jinja2
 
 def get_current_message():
     '''Get current release message'''
@@ -15,3 +20,9 @@ def get_current_message():
         return False, ''
     else:
         return True, data['release']['description'].split('# Downloads')[0]
+
+def main():
+    '''Execute main scenario'''
+    exists_release, current_message = get_current_message()
+    print('end')
+main()
-- 
GitLab


From c79042ad8657b5aa59eb6518655de80d53db9ed6 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 17:06:14 +0000
Subject: [PATCH 59/80] Update test.py

---
 .gitlab/test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index 42970572b..895d6f12e 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -15,7 +15,7 @@ def get_current_message():
     print('status: %s' % response.status)
     print('headers:', response.headers)
     print('body:' + response.read().decode('utf-8'))
-    data = json.load(response.decode())
+    data = json.load(response.read().decode())
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From 9b97a8acfdb52242c03cccdbf3bd2df29c9e6217 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 17:11:51 +0000
Subject: [PATCH 60/80] Update test.py

---
 .gitlab/test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index 895d6f12e..5bc682472 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -15,7 +15,7 @@ def get_current_message():
     print('status: %s' % response.status)
     print('headers:', response.headers)
     print('body:' + response.read().decode('utf-8'))
-    data = json.load(response.read().decode())
+    data = json.load(response)
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From eadd4a29c342b98308d542238e03cf7aa99984b1 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 18:01:20 +0000
Subject: [PATCH 61/80] Update test.py

---
 .gitlab/test.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index 5bc682472..c882dc40c 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -14,7 +14,6 @@ def get_current_message():
     response = urllib.request.urlopen(request)
     print('status: %s' % response.status)
     print('headers:', response.headers)
-    print('body:' + response.read().decode('utf-8'))
     data = json.load(response)
     if data['release'] is None:
         return False, ''
-- 
GitLab


From f4a700129176e7dfdea4b81dc91d5612b9a189fa Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 18:07:54 +0000
Subject: [PATCH 62/80] extract body of api response

---
 .gitlab/test.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index c882dc40c..02113ca0a 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -14,7 +14,8 @@ def get_current_message():
     response = urllib.request.urlopen(request)
     print('status: %s' % response.status)
     print('headers:', response.headers)
-    data = json.load(response)
+    response_body = response.read()
+    data = json.load(response_body)
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From 431b014b37c3be2b7c7e03aa4424e1e851cbb1a2 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 18:19:26 +0000
Subject: [PATCH 63/80] Update test.py

---
 .gitlab/test.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index 02113ca0a..d8517e741 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -14,8 +14,8 @@ def get_current_message():
     response = urllib.request.urlopen(request)
     print('status: %s' % response.status)
     print('headers:', response.headers)
-    response_body = response.read()
-    data = json.load(response_body)
+    response_data = response.read().decode()
+    data = json.load(response_data)
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From 93149a80950a4c99aa3545d7da6768bbed91f355 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 18:20:58 +0000
Subject: [PATCH 64/80] Update test.py

---
 .gitlab/test.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/test.py b/.gitlab/test.py
index d8517e741..bf1429ffa 100644
--- a/.gitlab/test.py
+++ b/.gitlab/test.py
@@ -15,7 +15,7 @@ def get_current_message():
     print('status: %s' % response.status)
     print('headers:', response.headers)
     response_data = response.read().decode()
-    data = json.load(response_data)
+    data = json.loads(response_data)
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From b8936e2e8788de6d62ac17f9f2218429bf02ee8f Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 18:22:44 +0000
Subject: [PATCH 65/80] Update releaser.py

---
 .gitlab/releaser.py | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 2c40b0e9f..2da7acd22 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -38,7 +38,8 @@ def get_current_message():
     tag_url += ci_commit_tag
     request = urllib.request.Request(tag_url)
     response = urllib.request.urlopen(request)
-    data = json.load(response.decode())
+    response_data = response.read().decode()
+    data = json.loads(response_data)
     if data['release'] is None:
         return False, ''
     else:
-- 
GitLab


From ec96180a4cc504d4bf2672a17a55dd0cdc696495 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 19:00:37 +0000
Subject: [PATCH 66/80] explain the path to each artifacts

---
 .gitlab-ci.yml | 22 ++++++++++------------
 1 file changed, 10 insertions(+), 12 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 2478c5e35..b1087be5b 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -3,14 +3,6 @@ stages:
   - github-sync
   - release
   - release-message
-  
-debug:
-    stage: debug
-    image: tensorflow/tensorflow:latest-py3
-    tags:
-      - redshift-duniter-builder
-    script:
-      - python3 .gitlab/test.py
 
 push_to_github:
     stage: github-sync
@@ -35,18 +27,22 @@ release_linux:test:
   image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
+  variables:
+    TEST_TAG: "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
   script:
-    - bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
+    - bash "release/arch/linux/build-lin.sh" "${TEST_TAG}"
   artifacts:
     paths:
-      - work/bin/
+      - work/bin/duniter-desktop-"${TEST_TAG}"-linux-x64.deb
+      - work/bin/duniter-desktop-"${TEST_TAG}"-linux-x64.tar.gz
+      - work/bin/duniter-server-"${TEST_TAG}"-linux-x64.deb
     expire_in: 8h
   when: manual
   except:
     - tags
   
 
-release_linux:deploy:
+release_linux:deploy:desktop:
   stage: release
   image: duniter/release-builder:v1.0.1
   tags:
@@ -55,7 +51,9 @@ release_linux:deploy:
     - bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
   artifacts:
     paths:
-      - work/bin/
+      - work/bin/duniter-desktop-"${CI_COMMIT_TAG}"-linux-x64.deb
+      - work/bin/duniter-desktop-"${CI_COMMIT_TAG}"-linux-x64.tar.gz
+      - work/bin/duniter-server-"${CI_COMMIT_TAG}"-linux-x64.deb
     expire_in: 8h
   when: manual
   only:
-- 
GitLab


From 734206cd4961e6de09d35c15d6815a08356d2f76 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 19:00:56 +0000
Subject: [PATCH 67/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 1 -
 1 file changed, 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index b1087be5b..35b9e2974 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,5 +1,4 @@
 stages:
-  - debug
   - github-sync
   - release
   - release-message
-- 
GitLab


From d172dd31041305813885675780559c50d8be8cef Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 19:21:22 +0000
Subject: [PATCH 68/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 35b9e2974..4aeac48b8 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,14 +27,14 @@ release_linux:test:
   tags:
     - redshift-duniter-builder
   variables:
-    TEST_TAG: "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
+    TEST_TAG: '$(date +%Y%m%d).$(date +%H%M).$(date +%S)'
   script:
     - bash "release/arch/linux/build-lin.sh" "${TEST_TAG}"
   artifacts:
     paths:
-      - work/bin/duniter-desktop-"${TEST_TAG}"-linux-x64.deb
-      - work/bin/duniter-desktop-"${TEST_TAG}"-linux-x64.tar.gz
-      - work/bin/duniter-server-"${TEST_TAG}"-linux-x64.deb
+      - work/bin/duniter-desktop-v"${TEST_TAG}"-linux-x64.deb
+      - work/bin/duniter-desktop-v"${TEST_TAG}"-linux-x64.tar.gz
+      - work/bin/duniter-server-v"${TEST_TAG}"-linux-x64.deb
     expire_in: 8h
   when: manual
   except:
-- 
GitLab


From 772c8b4eaff8cfa6941a7849e47b3da2d4386aed Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 19:38:00 +0000
Subject: [PATCH 69/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 12 +++++++-----
 1 file changed, 7 insertions(+), 5 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 4aeac48b8..af3b38d61 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -27,14 +27,16 @@ release_linux:test:
   tags:
     - redshift-duniter-builder
   variables:
-    TEST_TAG: '$(date +%Y%m%d).$(date +%H%M).$(date +%S)'
+    DAY: $(date +%Y%m%d)
+    HOUR: $(date +%H%M)
+    SEC: $(date +%S)
   script:
-    - bash "release/arch/linux/build-lin.sh" "${TEST_TAG}"
+    - bash "release/arch/linux/build-lin.sh" "${DAY}.${HOUR}.${SEC}"
   artifacts:
     paths:
-      - work/bin/duniter-desktop-v"${TEST_TAG}"-linux-x64.deb
-      - work/bin/duniter-desktop-v"${TEST_TAG}"-linux-x64.tar.gz
-      - work/bin/duniter-server-v"${TEST_TAG}"-linux-x64.deb
+      - work/bin/duniter-desktop-v"${DAY}.${HOUR}.${SEC}"-linux-x64.deb
+      - work/bin/duniter-desktop-v"${DAY}.${HOUR}.${SEC}"-linux-x64.tar.gz
+      - work/bin/duniter-server-v"${DAY}.${HOUR}.${SEC}"-linux-x64.deb
     expire_in: 8h
   when: manual
   except:
-- 
GitLab


From 36eff08a370c35b568cb29288ab2ca2c9281bf74 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 19:49:01 +0000
Subject: [PATCH 70/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 6 ++----
 1 file changed, 2 insertions(+), 4 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index af3b38d61..e9281fff2 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -31,12 +31,10 @@ release_linux:test:
     HOUR: $(date +%H%M)
     SEC: $(date +%S)
   script:
-    - bash "release/arch/linux/build-lin.sh" "${DAY}.${HOUR}.${SEC}"
+    - bash "release/arch/linux/build-lin.sh" "$(date +%Y%m%d).$(date +%H%M).$(date +%S)"
   artifacts:
     paths:
-      - work/bin/duniter-desktop-v"${DAY}.${HOUR}.${SEC}"-linux-x64.deb
-      - work/bin/duniter-desktop-v"${DAY}.${HOUR}.${SEC}"-linux-x64.tar.gz
-      - work/bin/duniter-server-v"${DAY}.${HOUR}.${SEC}"-linux-x64.deb
+      - work/bin/
     expire_in: 8h
   when: manual
   except:
-- 
GitLab


From d0a7b4e853987d364e6319ead6d628544e06e7af Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Mon, 15 Jan 2018 20:02:57 +0000
Subject: [PATCH 71/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index e9281fff2..45b2c0885 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -50,9 +50,9 @@ release_linux:deploy:desktop:
     - bash "release/arch/linux/build-lin.sh" "${CI_COMMIT_TAG#v}"
   artifacts:
     paths:
-      - work/bin/duniter-desktop-"${CI_COMMIT_TAG}"-linux-x64.deb
-      - work/bin/duniter-desktop-"${CI_COMMIT_TAG}"-linux-x64.tar.gz
-      - work/bin/duniter-server-"${CI_COMMIT_TAG}"-linux-x64.deb
+      - work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb
+      - work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz
+      - work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb
     expire_in: 8h
   when: manual
   only:
-- 
GitLab


From a4b7333f8bd7d0ed861c153235c836e9360da10c Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Wed, 17 Jan 2018 21:30:41 +0000
Subject: [PATCH 72/80] [fix] add full path to EXPECTED_ARTIFACTS

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 45b2c0885..17c2c4334 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -66,7 +66,7 @@ enforce-message:
     - redshift-duniter-builder
   variables:
     JOB_ARTIFACTS: 'release'
-    EXPECTED_ARTIFACTS: '["duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
+    EXPECTED_ARTIFACTS: '["work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
   script:
     - python3 .gitlab/releaser.py
   when: manual
-- 
GitLab


From 08174adddc5179f5f3076e01819323fc3e521ef7 Mon Sep 17 00:00:00 2001
From: Vincent Texier <vit@free.fr>
Date: Thu, 18 Jan 2018 11:32:36 +0100
Subject: [PATCH 73/80] Add NodeJS version in starting logs

---
 app/modules/daemon.ts | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts
index 1ea72f5a6..584920f3c 100644
--- a/app/modules/daemon.ts
+++ b/app/modules/daemon.ts
@@ -95,6 +95,9 @@ module.exports = {
 
         logger.info(">> Server starting...");
 
+        // Log NodeJS version
+        logger.info('NodeJS version: ' + process.version);
+
         await server.checkConfig();
         // Add signing & public key functions to PeeringService
         logger.info('Node version: ' + server.version);
-- 
GitLab


From 7628fa75c93e7fdd2202c1d76a5e8f935aa62fe8 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 15:29:41 +0000
Subject: [PATCH 74/80] releases page : test without sources

---
 .gitlab/releaser.py | 10 ----------
 1 file changed, 10 deletions(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index 2da7acd22..ad2ec95f7 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -93,16 +93,6 @@ def build_compiled_message(current_message):
             'icon': ':package:'
         }
         artifacts_list.append(artifact_dict)
-    expected_sources = ['tar.gz', 'deb']
-    for source in expected_sources:
-        source_url = build_artifact_url(source, True)
-        artifact_dict = {
-            'name': 'Source code ' + source,
-            'url': source_url,
-            'size': get_artifact_weight(source_url),
-            'icon': ':compression:'
-        }
-        artifacts_list.append(artifact_dict)
 
     j2_env = jinja2.Environment(
         loader=jinja2.FileSystemLoader(
-- 
GitLab


From 97604cf4a927f569670b658bee12f381c87fb18e Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 16:33:18 +0000
Subject: [PATCH 75/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 17 ++++++++---------
 1 file changed, 8 insertions(+), 9 deletions(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 17c2c4334..5d45193d9 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,8 +1,7 @@
 stages:
   - github-sync
-  - release
-  - release-message
-
+  - releases
+  - releases-page
 push_to_github:
     stage: github-sync
     variables:
@@ -21,8 +20,8 @@ push_to_github:
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
 
-release_linux:test:
-  stage: release
+releases:test:
+  stage: releases
   image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
@@ -41,8 +40,8 @@ release_linux:test:
     - tags
   
 
-release_linux:deploy:desktop:
-  stage: release
+releases:
+  stage: releases
   image: duniter/release-builder:v1.0.1
   tags:
     - redshift-duniter-builder
@@ -60,12 +59,12 @@ release_linux:deploy:desktop:
   - master
     
 enforce-message:
-  stage: release-message
+  stage: releases-message
   image: tensorflow/tensorflow:latest-py3
   tags:
     - redshift-duniter-builder
   variables:
-    JOB_ARTIFACTS: 'release'
+    JOB_ARTIFACTS: 'releases'
     EXPECTED_ARTIFACTS: '["work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.deb","work/bin/duniter-desktop-${CI_COMMIT_TAG}-linux-x64.tar.gz","work/bin/duniter-server-${CI_COMMIT_TAG}-linux-x64.deb"]'
   script:
     - python3 .gitlab/releaser.py
-- 
GitLab


From 4ee3aec004478328d549cf743bb362fec70cd2d1 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 16:34:41 +0000
Subject: [PATCH 76/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 5d45193d9..32a9a3493 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -58,7 +58,7 @@ releases:
   - tags
   - master
     
-enforce-message:
+releases-message:
   stage: releases-message
   image: tensorflow/tensorflow:latest-py3
   tags:
-- 
GitLab


From 798de6bf9d643ab263fef65ebc395bb367553c43 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 16:35:10 +0000
Subject: [PATCH 77/80] Update .gitlab-ci.yml

---
 .gitlab-ci.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 32a9a3493..9a0190c44 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -59,7 +59,7 @@ releases:
   - master
     
 releases-message:
-  stage: releases-message
+  stage: releases-page
   image: tensorflow/tensorflow:latest-py3
   tags:
     - redshift-duniter-builder
-- 
GitLab


From 624e1f323a69ab8cf0d14e0e111d847f8ab455ef Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 17:00:29 +0000
Subject: [PATCH 78/80] Delete test.py

---
 .gitlab/test.py | 28 ----------------------------
 1 file changed, 28 deletions(-)
 delete mode 100644 .gitlab/test.py

diff --git a/.gitlab/test.py b/.gitlab/test.py
deleted file mode 100644
index bf1429ffa..000000000
--- a/.gitlab/test.py
+++ /dev/null
@@ -1,28 +0,0 @@
-#!/usr/bin/env python3
-
-import math
-import urllib.request
-import urllib.error
-import json
-import os
-import jinja2
-
-def get_current_message():
-    '''Get current release message'''
-    url = "https://git.duniter.org/api/v4/projects/47/repository/tags/0.99.18"
-    request = urllib.request.Request(url)
-    response = urllib.request.urlopen(request)
-    print('status: %s' % response.status)
-    print('headers:', response.headers)
-    response_data = response.read().decode()
-    data = json.loads(response_data)
-    if data['release'] is None:
-        return False, ''
-    else:
-        return True, data['release']['description'].split('# Downloads')[0]
-
-def main():
-    '''Execute main scenario'''
-    exists_release, current_message = get_current_message()
-    print('end')
-main()
-- 
GitLab


From ab0fa2f81239788ce73968248be4b0eb860f2013 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 17:07:59 +0000
Subject: [PATCH 79/80] releases page : try to hide path in release name

---
 .gitlab/releaser.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.gitlab/releaser.py b/.gitlab/releaser.py
index ad2ec95f7..833027fd0 100644
--- a/.gitlab/releaser.py
+++ b/.gitlab/releaser.py
@@ -87,7 +87,7 @@ def build_compiled_message(current_message):
     artifacts_list = []
     for artifact in expected_artifacts:
         artifact_dict = {
-            'name': artifact,
+            'name': artifact.split('/')[-1],
             'url': build_artifact_url(artifact, False),
             'size': get_artifact_weight(artifact),
             'icon': ':package:'
-- 
GitLab


From 6fc858b23a6f39b60f657c38ce33d71e55424119 Mon Sep 17 00:00:00 2001
From: releaser-duniter <admin@duniter.org>
Date: Thu, 18 Jan 2018 17:32:16 +0000
Subject: [PATCH 80/80] restore build and test jobs

---
 .gitlab-ci.yml | 23 +++++++++++++++++++++++
 1 file changed, 23 insertions(+)

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 9a0190c44..c53f53871 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,5 +1,7 @@
 stages:
   - github-sync
+  - build
+  - test
   - releases
   - releases-page
 push_to_github:
@@ -19,6 +21,27 @@ push_to_github:
         - bash -c "cat packed-refs | grep -v 'refs/pull' > packed-refs-new; echo 'Removed pull refs.'"
         - mv packed-refs-new packed-refs
         - bash -c "git push --force --mirror github 2>&1 | grep -v duniter-gitlab; echo $?"
+        
+build:
+  stage: build
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
+
+test:
+  stage: test
+  tags:
+    - redshift
+  before_script:
+    - export NVM_DIR="$HOME/.nvm"
+    - . "$NVM_DIR/nvm.sh"
+  script:
+    - yarn
+    - yarn test
 
 releases:test:
   stage: releases
-- 
GitLab