diff --git a/.eslintrc b/.eslintrc
index 5c8edccf218aed1f6f1447f25cf4885ad35962d7..9bab7d318bd79afd99277f8e5333f1da6b71a798 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -22,6 +22,7 @@
     "no-trailing-spaces": 0,
     "no-unused-expressions": 0,
     "comma-spacing": 0,
+    "semi": 0,
 
     "quotes": [0, "double"],
     "linebreak-style": [1,"unix"],
@@ -37,7 +38,6 @@
     "no-unused-vars": [1],
     "space-infix-ops": [1],
     "handle-callback-err": [1],
-    "semi": [1,"always"],
     "no-extra-semi": [1]
   },
   "env": {
diff --git a/app/cli.js b/app/cli.js
index 39d3c065914caf264761c3787ef4f33246b8d96e..e0c32c2d429dff54fb5133f2546a4d75e1f16002 100644
--- a/app/cli.js
+++ b/app/cli.js
@@ -1,870 +1,103 @@
 "use strict";
 
 const co = require('co');
-const logger = require('../app/lib/logger')('cli');
-const async = require('async');
-const Q = require('q');
-const _ = require('underscore');
-const program = require('commander');
-const contacter = require('../app/lib/contacter');
-const directory = require('../app/lib/system/directory');
-const wizard = require('../app/lib/wizard');
-const multicaster = require('../app/lib/streams/multicaster');
-const keyring = require('../app/lib/crypto/keyring');
-const base58 = require('../app/lib/crypto/base58');
+const Command = require('commander').Command;
 const pjson = require('../package.json');
 const duniter = require('../index');
-const Peer = require('../app/lib/entity/peer');
-const Block = require('../app/lib/entity/block');
-const constants = require('../app/lib/constants');
 
-let currentCommand = Promise.resolve(true);
+module.exports = () => {
 
-let onResolve, onReject, onService, closeCommand = () => Promise.resolve(true);
+  const options = [];
+  const commands = [];
 
-module.exports = (programArgs) => {
-
-  currentCommand = new Promise((resolve, reject) => {
-    onResolve = resolve;
-    onReject = reject;
-  });
-  
   return {
 
-    // Some external event can trigger the program closing function
-    closeCommand: () => closeCommand(),
-
-    // To execute the provided command
-    execute: (onServiceCallback) => co(function*() {
-
-      onService = onServiceCallback;
-      program.parse(programArgs);
-
-      if (programArgs.length <= 2) {
-        onReject('No command given.');
-      }
-
-      const res = yield currentCommand;
-      if (closeCommand) {
-        yield closeCommand();
-      }
-      return res;
-    })
-  };
-};
-
-function subCommand(promiseFunc) {
-  return function() {
-    let args = Array.prototype.slice.call(arguments, 0);
-    return co(function*() {
-      try {
-        let result = yield promiseFunc.apply(null, args);
-        onResolve(result);
-      } catch (e) {
-        if (e && e.uerr) {
-          onReject(e.uerr.message);
-        } else {
-          onReject(e);
-        }
-      }
-    })
-  };
-}
-
-const ERASE_IF_ALREADY_RECORDED = true;
-const NO_LOGS = true;
-
-program
-  .version(pjson.version)
-  .usage('<command> [options]')
-
-  .option('--home <path>', 'Path to Duniter HOME (defaults to "$HOME/.config/duniter").')
-  .option('-d, --mdb <name>', 'Database name (defaults to "duniter_default").')
-
-  .option('--autoconf', 'With `config` and `init` commands, will guess the best network and key options witout asking for confirmation')
-  .option('--ipv4 <address>', 'IPv4 interface to listen for requests')
-  .option('--ipv6 <address>', 'IPv6 interface to listen for requests')
-  .option('--remoteh <host>', 'Remote interface others may use to contact this node')
-  .option('--remote4 <host>', 'Remote interface for IPv4 access')
-  .option('--remote6 <host>', 'Remote interface for IPv6 access')
-  .option('-p, --port <port>', 'Port to listen for requests', parseInt)
-  .option('--remotep <port>', 'Remote port others may use to contact this node')
-  .option('--upnp', 'Use UPnP to open remote port')
-  .option('--noupnp', 'Do not use UPnP to open remote port')
-  .option('--addep <endpoint>', 'With `config` command, add given endpoint to the list of endpoints of this node')
-  .option('--remep <endpoint>', 'With `config` command, remove given endpoint to the list of endpoints of this node')
-
-  .option('--salt <salt>', 'Key salt to generate this key\'s secret key')
-  .option('--passwd <password>', 'Password to generate this key\'s secret key')
-  .option('--participate <Y|N>', 'Participate to writing the blockchain')
-  .option('--cpu <percent>', 'Percent of CPU usage for proof-of-work computation', parsePercent)
-
-  .option('-c, --currency <name>', 'Name of the currency managed by this node.')
-  .option('--sigPeriod <timestamp>', 'Minimum delay between 2 certifications of a same issuer, in seconds.')
-  .option('--sigStock <count>', 'Maximum quantity of valid certifications per member.')
-  .option('--sigWindow <duration>', 'Maximum age of a non-written certification.')
-  .option('--idtyWindow <duration>', 'Maximum age of a non-written certification.')
-  .option('--sigValidity <timestamp>', 'Validity duration of a certification, in seconds.')
-  .option('--msValidity <timestamp>', 'Validity duration of a memberships, in seconds.')
-  .option('--sigQty <number>', 'Minimum number of required certifications to be a member/stay as a member')
-  .option('--medtblocks <number>', 'medianTimeBlocks parameter of UCP')
-  .option('--avgGenTime <number>', 'avgGenTime parameter of UCP')
-  .option('--dtdiffeval <number>', 'dtDiffEval parameter of UCP')
-  .option('--powZeroMin <number>', 'Minimum number of leading zeros for a proof-of-work')
-  .option('--powPeriod <number>', 'Number of blocks to wait to decrease proof-of-work difficulty by one')
-  .option('--powDelay <number>', 'Number of seconds to wait before starting the computation of next block')
-  .option('--growth <number>', 'Universal Dividend %growth. Aka. \'c\' parameter in RTM', parsePercent)
-  .option('--ud0 <number>', 'Universal Dividend initial value')
-  .option('--dt <number>', 'Number of seconds between two UD')
-  .option('--rootoffset <number>', 'Allow to give a time offset for first block (offset in the past)')
-  .option('--show', 'With gen-next or gen-root commands, displays the generated block')
-
-  .option('--nointeractive', 'Disable interactive sync UI')
-  .option('--nocautious', 'Do not check blocks validity during sync')
-  .option('--cautious', 'Check blocks validity during sync (overrides --nocautious option)')
-  .option('--nopeers', 'Do not retrieve peers during sync')
-  .option('--nostdout', 'Disable stdout printing for `export-bc` command')
-  .option('--noshuffle', 'Disable peers shuffling for `sync` command')
-
-  .option('--timeout <milliseconds>', 'Timeout to use when contacting peers', parseInt)
-  .option('--httplogs', 'Enable HTTP logs')
-  .option('--nohttplogs', 'Disable HTTP logs')
-  .option('--isolate', 'Avoid the node to send peering or status informations to the network')
-  .option('--check', 'With gen-next: just check validity of generated block')
-  .option('--forksize <size>', 'Maximum size of fork window', parseInt)
-  .option('--memory', 'Memory mode')
-;
-
-program
-  .command('start')
-  .description('Start Duniter node daemon.')
-  .action(subCommand(service((server, conf) => new Promise((resolve, reject) => {
-    co(function*() {
-        try {
-          const bma = require('./lib/streams/bma');
-
-          logger.info(">> NODE STARTING");
+    addOption: (optFormat, optDesc, optParser) => options.push({ optFormat, optDesc, optParser }),
 
-          // Public http interface
-          let bmapi = yield bma(server, null, conf.httplogs);
+    addCommand: (command, executionCallback) => commands.push({ command, executionCallback }),
 
-          // Routing documents
-          server.routing();
-
-          // Services
-          yield server.startServices();
-          yield bmapi.openConnections();
-
-          logger.info('>> Server ready!');
+    // To execute the provided command
+    execute: (programArgs) => co(function*() {
 
-        } catch (e) {
-          reject(e);
-        }
-    });
-  }))));
+      const program = new Command();
 
-program
-  .command('stop')
-  .description('Stop Duniter node daemon.')
-  .action(subCommand(needsToBeLaunchedByScript));
+      // Callback for command success
+      let onResolve;
 
-program
-  .command('restart')
-  .description('Restart Duniter node daemon.')
-  .action(subCommand(needsToBeLaunchedByScript));
+      // Callback for command rejection
+      let onReject = () => Promise.reject(Error("Uninitialized rejection throw"));
 
-program
-  .command('wizard [step]')
-  .description('Launch the configuration wizard.')
-  .action(subCommand(function (step) {
-    // Only show message "Saved"
-    return connect(function (step, server, conf) {
-      return new Promise((resolve, reject) => {
-        async.series([
-          function (next) {
-            startWizard(step, server, conf, next);
-          }
-        ], (err) => {
-          if (err) return reject(err);
-          resolve();
-        });
+      // Command execution promise
+      const currentCommand = new Promise((resolve, reject) => {
+        onResolve = resolve;
+        onReject = reject;
       });
-    })(step, null);
-  }));
 
-program
-  .command('sync [host] [port] [to]')
-  .description('Synchronize blockchain from a remote Duniter node')
-  .action(subCommand(service(function (host, port, to, server, conf) {
-    if (!host) {
-      throw 'Host is required.';
-    }
-    if (!port) {
-      throw 'Port is required.';
-    }
-    return co(function *() {
-      let cautious;
-      if (program.nocautious) {
-        cautious = false;
-      }
-      if (program.cautious) {
-        cautious = true;
-      }
-      yield server.synchronize(host, port, parseInt(to), 0, !program.nointeractive, cautious, program.nopeers, program.noshuffle);
-      if (server) {
-        yield server.disconnect();
-      }
-    });
-  })));
+      program
+        .version(pjson.version)
+        .usage('<command> [options]')
 
-program
-  .command('peer [host] [port]')
-  .description('Exchange peerings with another node')
-  .action(subCommand(service(function (host, port, server) {
-    return co(function *() {
-      try {
-        logger.info('Fetching peering record at %s:%s...', host, port);
-        let peering = yield contacter.statics.fetchPeer(host, port);
-        logger.info('Apply peering ...');
-        yield server.PeeringService.submitP(peering, ERASE_IF_ALREADY_RECORDED, !program.nocautious);
-        logger.info('Applied');
-        let selfPeer = yield server.dal.getPeer(server.PeeringService.pubkey);
-        if (!selfPeer) {
-          yield Q.nfcall(server.PeeringService.generateSelfPeer, server.conf, 0);
-          selfPeer = yield server.dal.getPeer(server.PeeringService.pubkey);
-        }
-        logger.info('Send self peering ...');
-        var caster = multicaster();
-        yield caster.sendPeering(Peer.statics.peerize(peering), Peer.statics.peerize(selfPeer));
-        logger.info('Sent.');
-        yield server.disconnect();
-      } catch(e) {
-        logger.error(e.code || e.message || e);
-        throw Error("Exiting");
-      }
-    });
-  })));
+        .option('--home <path>', 'Path to Duniter HOME (defaults to "$HOME/.config/duniter").')
+        .option('-d, --mdb <name>', 'Database name (defaults to "duniter_default").')
 
-program
-  .command('dump [what]')
-  .description('Diverse dumps of the inner data')
-  .action(subCommand(connect(makeDump, true)));
+        .option('--autoconf', 'With `config` and `init` commands, will guess the best network and key options without asking for confirmation')
+        .option('--addep <endpoint>', 'With `config` command, add given endpoint to the list of endpoints of this node')
+        .option('--remep <endpoint>', 'With `config` command, remove given endpoint from the list of endpoints of this node')
 
-program
-  .command('revert [count]')
-  .description('Revert (undo + remove) the top [count] blocks from the blockchain. EXPERIMENTAL')
-  .action(subCommand(service(function (count, server) {
-    return co(function *() {
-      try {
-        for (let i = 0; i < count; i++) {
-          yield server.revert();
-        }
-      } catch (err) {
-        logger.error('Error during revert:', err);
-      }
-      // Save DB
-      yield server.disconnect();
-    });
-  })));
+        .option('--cpu <percent>', 'Percent of CPU usage for proof-of-work computation', parsePercent)
 
-program
-  .command('revert-to [number]')
-  .description('Revert (undo + remove) top blockchain blocks until block #[number] is reached. EXPERIMENTAL')
-  .action(subCommand(service(function (number, server) {
-    return co(function *() {
-      try {
-        yield server.revertTo(number);
-      } catch (err) {
-        logger.error('Error during revert:', err);
-      }
-      // Save DB
-      if (server) {
-        yield server.disconnect();
-      }
-    });
-  })));
+        .option('-c, --currency <name>', 'Name of the currency managed by this node.')
 
-program
-  .command('reapply-to [number]')
-  .description('Reapply reverted blocks until block #[number] is reached. EXPERIMENTAL')
-  .action(subCommand(service(function (number, server) {
-    return co(function *() {
-      try {
-        yield server.reapplyTo(number);
-      } catch (err) {
-        logger.error('Error during reapply:', err);
-      }
-      // Save DB
-      if (server) {
-        yield server.disconnect();
-      }
-    });
-  })));
+        .option('--nostdout', 'Disable stdout printing for `export-bc` command')
+        .option('--noshuffle', 'Disable peers shuffling for `sync` command')
 
-program
-  .command('gen-next [host] [port] [difficulty]')
-  .description('Tries to generate the next block of the blockchain')
-  .action(subCommand(service(generateAndSend("generateNext"))));
+        .option('--timeout <milliseconds>', 'Timeout to use when contacting peers', parseInt)
+        .option('--httplogs', 'Enable HTTP logs')
+        .option('--nohttplogs', 'Disable HTTP logs')
+        .option('--isolate', 'Prevent the node from sending peering or status information to the network')
+        .option('--forksize <size>', 'Maximum size of fork window', parseInt)
+        .option('--memory', 'Memory mode')
+      ;
 
-program
-  .command('gen-root [host] [port] [difficulty]')
-  .description('Tries to generate root block, with choice of root members')
-    .action(subCommand(service(function (host, port, difficulty, server, conf) {
-      if (!host) {
-        throw 'Host is required.';
-      }
-      if (!port) {
-        throw 'Port is required.';
+      for (const opt of options) {
+        program
+          .option(opt.optFormat, opt.optDesc, opt.optParser);
       }
-      if (!difficulty) {
-        throw 'Difficulty is required.';
-      }
-      return generateAndSend("generateManualRoot")(host, port, difficulty, server, conf);
-    })));
 
-function generateAndSend(generationMethod) {
-  return function (host, port, difficulty, server, conf) {
-    return new Promise((resolve, reject) => {
-      async.waterfall([
-        function (next) {
-          var method = eval('server.BlockchainService.' + generationMethod);
-          co(function*(){
-            try {
-              const block = yield method();
-              next(null, block);
-            } catch(e) {
-              next(e);
-            }
-          });
-        },
-        function (block, next) {
-          if (program.check) {
-            block.time = block.medianTime;
-            program.show && console.log(block.getRawSigned());
-            co(function*(){
+      for (const cmd of commands) {
+        program
+          .command(cmd.command.name)
+          .description(cmd.command.desc)
+          .action(function() {
+            const args = Array.from(arguments);
+            return co(function*() {
               try {
-                yield server.doCheckBlock(block);
-                logger.info('Acceptable block');
-                next();
+                const resOfExecution = yield cmd.executionCallback.apply(null, [program].concat(args));
+                onResolve(resOfExecution);
               } catch (e) {
-                next(e);
+                onReject(e);
               }
             });
-          }
-          else {
-            logger.debug('Block to be sent: %s', block.quickDescription());
-            var wiz = wizard(server);
-            async.waterfall([
-              function (next) {
-                if (!conf.salt && !conf.passwd)
-                  wiz.configKey(conf, next);
-                else
-                  next();
-              },
-              function (next) {
-                // Extract key pair
-                co(function*(){
-                  try {
-                    const pair = yield keyring.scryptKeyPair(conf.salt, conf.passwd);
-                    next(null, pair);
-                  } catch(e) {
-                    next(e);
-                  }
-                });
-              },
-              function (pair, next) {
-                proveAndSend(server, block, pair.publicKey, parseInt(difficulty), host, parseInt(port), next);
-              }
-            ], next);
-          }
-        }
-      ], (err, data) => {
-        err && reject(err);
-        !err && resolve(data);
-      });
-    });
-  };
-}
-
-function proveAndSend(server, block, issuer, difficulty, host, port, done) {
-  var BlockchainService = server.BlockchainService;
-  async.waterfall([
-    function (next) {
-      block.issuer = issuer;
-      program.show && console.log(block.getRawSigned());
-      co(function*(){
-        try {
-          const proven = yield BlockchainService.prove(block, difficulty);
-          next(null, proven);
-        } catch(e) {
-          next(e);
-        }
-      });
-    },
-    function (block, next) {
-      var peer = new Peer({
-        endpoints: [['BASIC_MERKLED_API', host, port].join(' ')]
-      });
-      program.show && console.log(block.getRawSigned());
-      logger.info('Posted block ' + block.quickDescription());
-      co(function*(){
-        try {
-          yield multicaster(server.conf).sendBlock(peer, block);
-          next();
-        } catch(e) {
-          next(e);
-        }
-      });
-    }
-  ], done);
-}
-
-program
-  .command('export-bc [upto]')
-  .description('Exports the whole blockchain as JSON array, up to [upto] block number (excluded).')
-  .action(subCommand(service(function (upto, server) {
-    return co(function *() {
-      try {
-        let CHUNK_SIZE = 500;
-        let jsoned = [];
-        let current = yield server.dal.getCurrentBlockOrNull();
-        let lastNumber = current ? current.number + 1 : -1;
-        if (upto !== undefined && upto.match(/\d+/)) {
-          lastNumber = Math.min(parseInt(upto), lastNumber);
-        }
-        let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
-        let chunks = [];
-        // Max-size chunks
-        for (let i = 0, len = chunksCount; i < len; i++) {
-          chunks.push({start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1});
-        }
-        // A last chunk
-        if (lastNumber > chunksCount * CHUNK_SIZE) {
-          chunks.push({start: chunksCount * CHUNK_SIZE, to: lastNumber});
-        }
-        for (const chunk of chunks) {
-          let blocks = yield server.dal.getBlocksBetween(chunk.start, chunk.to);
-          blocks.forEach(function (block) {
-            jsoned.push(_(new Block(block).json()).omit('raw'));
           });
-        }
-        if (!program.nostdout) {
-          console.log(JSON.stringify(jsoned, null, "  "));
-        }
-        yield server.disconnect();
-        return jsoned;
-      } catch(err) {
-          logger.warn(err.message || err);
-          yield server.disconnect();
-      }
-    });
-  }, NO_LOGS)));
-
-program
-  .command('check-config')
-  .description('Checks the node\'s configuration')
-  .action(subCommand(service(function (server) {
-    return server.checkConfig()
-      .then(function () {
-        logger.warn('Configuration seems correct.');
-      })
-  })));
-
-program
-  .command('config')
-  .description('Register configuration in database')
-  .action(subCommand(connect(configure)));
-
-program
-  .command('reset [config|data|peers|tx|stats|all]')
-  .description('Reset configuration, data, peers, transactions or everything in the database')
-  .action(subCommand((type) => {
-    let init = ['data', 'all'].indexOf(type) !== -1 ? server : connect;
-    return init(function (server) {
-      if (!~['config', 'data', 'peers', 'stats', 'all'].indexOf(type)) {
-        throw constants.ERRORS.CLI_CALLERR_RESET;
       }
-      return co(function*() {
-        try {
-          if (type == 'data') {
-            yield server.resetData();
-            logger.warn('Data successfully reseted.');
-          }
-          if (type == 'peers') {
-            yield server.resetPeers();
-            logger.warn('Peers successfully reseted.');
-          }
-          if (type == 'stats') {
-            yield server.resetStats();
-            logger.warn('Stats successfully reseted.');
-          }
-          if (type == 'config') {
-            yield server.resetConf();
-            logger.warn('Configuration successfully reseted.');
-          }
-          if (type == 'all') {
-            yield server.resetAll();
-            logger.warn('Data & Configuration successfully reseted.');
-          }
-        } catch (e) {
-          logger.error(e);
-        }
-      });
-    }, type != 'peers')(type);
-  }));
-
-function startWizard(step, server, conf, done) {
-  var wiz = wizard(server);
-  var task = {
-    'currency': wiz.configCurrency,
-    'basic': wiz.configBasic,
-    'pow': wiz.configPoW,
-    'network': wiz.configNetwork,
-    'network-reconfigure': wiz.configNetworkReconfigure,
-    'key': wiz.configKey,
-    'ucp': wiz.configUCP
-  };
-  var wizDo = task[step] || wiz.configAll;
-  async.waterfall([
-    function (next) {
-      wizDo(conf, next);
-    },
-    function (next) {
-      co(function*(){
-        try {
-          yield server.dal.saveConf(conf);
-          logger.debug("Configuration saved.");
-          next();
-        } catch(e) {
-          next(e);
-        }
-      });
-    },
-    function (next) {
-      // Check config
-      service(function (key, server, conf) {
-        next();
-      })(null, null);
-    }
-  ], done);
-}
-
-function makeDump(what, server, conf) {
-  return co(function *() {
-    try {
-      server.dal.wotb.showWoT();
-    } catch (e) {
-      logger.error(e);
-    }
-    server.disconnect();
-    throw Error("Exiting");
-  });
-}
-
-function commandLineConf(conf) {
-
-  conf = conf || {};
-  conf.sync = conf.sync || {};
-  var cli = {
-    currency: program.currency,
-    cpu: program.cpu,
-    server: {
-      port: program.port,
-      ipv4address: program.ipv4,
-      ipv6address: program.ipv6,
-      salt: program.salt,
-      passwd: program.passwd,
-      remote: {
-        host: program.remoteh,
-        ipv4: program.remote4,
-        ipv6: program.remote6,
-        port: program.remotep
-      }
-    },
-    db: {
-      mport: program.mport,
-      mdb: program.mdb,
-      home: program.home
-    },
-    net: {
-      upnp: program.upnp,
-      noupnp: program.noupnp
-    },
-    logs: {
-      http: program.httplogs,
-      nohttp: program.nohttplogs
-    },
-    endpoints: [],
-    rmEndpoints: [],
-    ucp: {
-      rootoffset: program.rootoffset,
-      sigPeriod: program.sigPeriod,
-      sigStock: program.sigStock,
-      sigWindow: program.sigWindow,
-      idtyWindow: program.idtyWindow,
-      msWindow: program.msWindow,
-      sigValidity: program.sigValidity,
-      sigQty: program.sigQty,
-      msValidity: program.msValidity,
-      powZeroMin: program.powZeroMin,
-      powPeriod: program.powPeriod,
-      powDelay: program.powDelay,
-      participate: program.participate,
-      ud0: program.ud0,
-      c: program.growth,
-      dt: program.dt,
-      incDateMin: program.incDateMin,
-      medtblocks: program.medtblocks,
-      dtdiffeval: program.dtdiffeval,
-      avgGenTime: program.avgGenTime
-    },
-    isolate: program.isolate,
-    forksize: program.forksize,
-    nofork: program.nofork,
-    timeout: program.timeout
-  };
-
-  // Update conf
-  if (cli.currency)                         conf.currency = cli.currency;
-  if (cli.server.ipv4address)               conf.ipv4 = cli.server.ipv4address;
-  if (cli.server.ipv6address)               conf.ipv6 = cli.server.ipv6address;
-  if (cli.server.port)                      conf.port = cli.server.port;
-  if (cli.server.salt)                      conf.salt = cli.server.salt;
-  if (cli.server.passwd != undefined)       conf.passwd = cli.server.passwd;
-  if (cli.server.remote.host != undefined)  conf.remotehost = cli.server.remote.host;
-  if (cli.server.remote.ipv4 != undefined)  conf.remoteipv4 = cli.server.remote.ipv4;
-  if (cli.server.remote.ipv6 != undefined)  conf.remoteipv6 = cli.server.remote.ipv6;
-  if (cli.server.remote.port != undefined)  conf.remoteport = cli.server.remote.port;
-  if (cli.ucp.rootoffset)                   conf.rootoffset = cli.ucp.rootoffset;
-  if (cli.ucp.sigPeriod)                    conf.sigPeriod = cli.ucp.sigPeriod;
-  if (cli.ucp.sigStock)                     conf.sigStock = cli.ucp.sigStock;
-  if (cli.ucp.sigWindow)                    conf.sigWindow = cli.ucp.sigWindow;
-  if (cli.ucp.idtyWindow)                   conf.idtyWindow = cli.ucp.idtyWindow;
-  if (cli.ucp.msWindow)                     conf.msWindow = cli.ucp.msWindow;
-  if (cli.ucp.sigValidity)                  conf.sigValidity = cli.ucp.sigValidity;
-  if (cli.ucp.msValidity)                   conf.msValidity = cli.ucp.msValidity;
-  if (cli.ucp.sigQty)                       conf.sigQty = cli.ucp.sigQty;
-  if (cli.ucp.msValidity)                   conf.msValidity = cli.ucp.msValidity;
-  if (cli.ucp.powZeroMin)                   conf.powZeroMin = cli.ucp.powZeroMin;
-  if (cli.ucp.powPeriod)                    conf.powPeriod = cli.ucp.powPeriod;
-  if (cli.ucp.powDelay)                     conf.powDelay = cli.ucp.powDelay;
-  if (cli.ucp.participate)                  conf.participate = cli.ucp.participate == 'Y';
-  if (cli.ucp.dt)                           conf.dt = cli.ucp.dt;
-  if (cli.ucp.c)                            conf.c = cli.ucp.c;
-  if (cli.ucp.ud0)                          conf.ud0 = cli.ucp.ud0;
-  if (cli.ucp.incDateMin)                   conf.incDateMin = cli.ucp.incDateMin;
-  if (cli.ucp.medtblocks)                   conf.medianTimeBlocks = cli.ucp.medtblocks;
-  if (cli.ucp.avgGenTime)                   conf.avgGenTime = cli.ucp.avgGenTime;
-  if (cli.ucp.dtdiffeval)                   conf.dtDiffEval = cli.ucp.dtdiffeval;
-  if (cli.net.upnp)                         conf.upnp = true;
-  if (cli.net.noupnp)                       conf.upnp = false;
-  if (cli.cpu)                              conf.cpu = Math.max(0.01, Math.min(1.0, cli.cpu));
-  if (cli.logs.http)                        conf.httplogs = true;
-  if (cli.logs.nohttp)                      conf.httplogs = false;
-  if (cli.db.mport)                         conf.mport = cli.db.mport;
-  if (cli.db.home)                          conf.home = cli.db.home;
-  if (cli.db.mdb)                           conf.mdb = cli.db.mdb;
-  if (cli.isolate)                          conf.isolate = cli.isolate;
-  if (cli.timeout)                          conf.timeout = cli.timeout;
-  if (cli.forksize != null)                 conf.forksize = cli.forksize;
-
-  // Specific internal settings
-  conf.createNext = true;
-  return _(conf).extend({routing: true});
-}
-
-function connect(callback, useDefaultConf) {
-  return function () {
-    var cbArgs = arguments;
-    var dbName = program.mdb || "duniter_default";
-    var dbHome = program.home;
-
-    var server = duniter({home: dbHome, name: dbName}, commandLineConf());
-
-    // If ever the process gets interrupted
-    let isSaving = false;
-    closeCommand = () => co(function*() {
-      if (!isSaving) {
-        isSaving = true;
-        // Save DB
-        return server.disconnect();
-      }
-    });
-
-    // Initialize server (db connection, ...)
-    return server.plugFileSystem(useDefaultConf)
-      .then(() => server.loadConf())
-      .then(function () {
-        try {
-          cbArgs.length--;
-          cbArgs[cbArgs.length++] = server;
-          cbArgs[cbArgs.length++] = server.conf;
-          return callback.apply(this, cbArgs);
-        } catch(e) {
-          server.disconnect();
-          throw e;
-	}
-      });
-  };
-}
-
-/**
- * Super basic server with only its home path set
- * @param callback
- * @param useDefaultConf
- * @returns {Function}
- */
-function server(callback, useDefaultConf) {
-  return function () {
-    var cbArgs = arguments;
-    var dbName = program.mdb || "duniter_default";
-    var dbHome = program.home;
-
-    var server = duniter({home: dbHome, name: dbName}, commandLineConf());
-
-    cbArgs.length--;
-    cbArgs[cbArgs.length++] = server;
-    cbArgs[cbArgs.length++] = server.conf;
-    return callback.apply(this, cbArgs);
-  };
-}
 
-function service(callback, nologs) {
-
-  return function () {
-
-    if (nologs) {
-      // Disable logs
-      require('../app/lib/logger')().mute();
-    }
-
-    var cbArgs = arguments;
-    var dbName = program.mdb;
-    var dbHome = program.home;
-
-    // Add log files for this instance
-    logger.addHomeLogs(directory.getHome(dbName, dbHome));
-
-    var server = duniter({home: dbHome, name: dbName, memory: program.memory}, commandLineConf());
-
-    // If ever the process gets interrupted
-    let isSaving = false;
-    closeCommand = () => co(function*() {
-      if (!isSaving) {
-        isSaving = true;
-        // Save DB
-        return server.disconnect();
-      }
-    });
+      program
+        .on('*', function (cmd) {
+          console.log("Unknown command '%s'. Try --help for a listing of commands & options.", cmd);
+          onResolve();
+        });
 
-    const that = this;
+      program.parse(programArgs);
 
-    // Initialize server (db connection, ...)
-    return co(function*() {
-      try {
-        yield server.initWithDAL();
-        yield configure(server, server.conf || {});
-        yield server.loadConf();
-        cbArgs.length--;
-        cbArgs[cbArgs.length++] = server;
-        cbArgs[cbArgs.length++] = server.conf;
-        cbArgs[cbArgs.length++] = program;
-        onService && onService(server);
-        return callback.apply(that, cbArgs);
-      } catch (e) {
-        server.disconnect();
-        throw e;
+      if (programArgs.length <= 2) {
+        onReject('No command given.');
       }
-    });
+      return currentCommand;
+    })
   };
-}
+};
 
 function parsePercent(s) {
-  var f = parseFloat(s);
+  const f = parseFloat(s);
   return isNaN(f) ? 0 : f;
 }
-
-program
-  .on('*', function (cmd) {
-    console.log("Unknown command '%s'. Try --help for a listing of commands & options.", cmd);
-    throw Error("Exiting");
-  });
-
-module.exports.addCommand = (command, requirements, promiseCallback) => {
-  program
-    .command(command.name)
-    .description(command.desc)
-    .action(subCommand(service(promiseCallback)));
-};
-
-module.exports.addOption = (optFormat, optDesc, optParser) => {
-  program
-    .option(optFormat, optDesc, optParser);
-};
-
-function needsToBeLaunchedByScript() {
-    logger.error('This command must not be launched directly, using duniter.sh script');
-    return Promise.resolve();
-}
-
-function configure(server, conf) {
-  return co(function *() {
-    if (typeof server == "string" || typeof conf == "string") {
-      throw constants.ERRORS.CLI_CALLERR_CONFIG;
-    }
-    let wiz = wizard();
-    // UPnP override
-    if (program.noupnp === true) {
-      conf.upnp = false;
-    }
-    if (program.upnp === true) {
-      conf.upnp = true;
-    }
-    // Network autoconf
-    const autoconfNet = program.autoconf
-      || !(conf.ipv4 || conf.ipv6)
-      || !(conf.remoteipv4 || conf.remoteipv6 || conf.remotehost)
-      || !(conf.port && conf.remoteport);
-    if (autoconfNet) {
-      yield Q.nbind(wiz.networkReconfiguration, wiz)(conf, autoconfNet, program.noupnp);
-    }
-    const hasSaltPasswdKey = conf.salt && conf.passwd;
-    const hasKeyPair = conf.pair && conf.pair.pub && conf.pair.sec;
-    const autoconfKey = program.autoconf || (!hasSaltPasswdKey && !hasKeyPair);
-    if (autoconfKey) {
-      yield Q.nbind(wiz.keyReconfigure, wiz)(conf, autoconfKey);
-    }
-    // Try to add an endpoint if provided
-    if (program.addep) {
-      if (conf.endpoints.indexOf(program.addep) === -1) {
-        conf.endpoints.push(program.addep);
-      }
-      // Remove it from "to be removed" list
-      const indexInRemove = conf.rmEndpoints.indexOf(program.addep);
-      if (indexInRemove !== -1) {
-        conf.rmEndpoints.splice(indexInRemove, 1);
-      }
-    }
-    // Try to remove an endpoint if provided
-    if (program.remep) {
-      if (conf.rmEndpoints.indexOf(program.remep) === -1) {
-        conf.rmEndpoints.push(program.remep);
-      }
-      // Remove it from "to be added" list
-      const indexInToAdd = conf.endpoints.indexOf(program.remep);
-      if (indexInToAdd !== -1) {
-        conf.endpoints.splice(indexInToAdd, 1);
-      }
-    }
-    return server.dal.saveConf(conf)
-      .then(function () {
-        try {
-          logger.debug("Configuration saved.");
-          return conf;
-        } catch (e) {
-          logger.error("Configuration could not be saved: " + e);
-          throw Error(e);
-        }
-      });
-  });
-}
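A minimal usage sketch of the refactored `app/cli.js` module above. The `--myopt` option, the `hello` command and the consumer's require path are hypothetical; they only illustrate the `addOption`, `addCommand` and `execute` entry points exposed by the new module:

    // Hypothetical consumer of the new CLI module (not part of this diff)
    const cli = require('./app/cli')();

    // Options and commands are now registered from the outside
    cli.addOption('--myopt <value>', 'Illustrative option', parseInt);
    cli.addCommand(
      { name: 'hello', desc: 'Illustrative command' },
      // executionCallback receives the commander program followed by the action arguments
      (program) => Promise.resolve(program.myopt)
    );

    // Parses process arguments and resolves with the matched command's result
    cli.execute(process.argv)
      .then((res) => console.log(res))
      .catch((err) => console.error(err));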
diff --git a/app/controllers/abstract.js b/app/controllers/abstract.js
deleted file mode 100644
index 977232e298f28cac748d2a19cb50ba03f9959176..0000000000000000000000000000000000000000
--- a/app/controllers/abstract.js
+++ /dev/null
@@ -1,22 +0,0 @@
-
-"use strict";
-const co = require('co');
-const dos2unix = require('../lib/system/dos2unix');
-
-module.exports = function AbstractController (server) {
-
-  const logger = require('../lib/logger')('abstractController');
-
-  this.pushEntity = (req, rawer, type) => co(function *() {
-    let rawDocument = rawer(req);
-    rawDocument = dos2unix(rawDocument);
-    const written = yield server.writeRaw(rawDocument, type);
-    try {
-      return written.json();
-    } catch (e) {
-      logger.error('Written:', written);
-      logger.error(e);
-      throw e;
-    }
-  });
-};
diff --git a/app/controllers/blockchain.js b/app/controllers/blockchain.js
deleted file mode 100644
index e0a3e4a37bd6491e528c4118ff6b7435c4b5c4c7..0000000000000000000000000000000000000000
--- a/app/controllers/blockchain.js
+++ /dev/null
@@ -1,149 +0,0 @@
-"use strict";
-
-const co               = require('co');
-const _                = require('underscore');
-const rules            = require('../lib/rules');
-const constants        = require('../lib/constants');
-const http2raw         = require('../lib/helpers/http2raw');
-const Membership       = require('../lib/entity/membership');
-const AbstractController = require('./abstract');
-
-module.exports = function (server) {
-  return new BlockchainBinding(server);
-};
-
-function BlockchainBinding (server) {
-
-  AbstractController.call(this, server);
-
-  const conf = server.conf;
-
-  // Services
-  const ParametersService = server.ParametersService;
-  const BlockchainService = server.BlockchainService;
-  const IdentityService   = server.IdentityService;
-
-  // Models
-  const Block      = require('../lib/entity/block');
-  const Stat       = require('../lib/entity/stat');
-
-  this.parseMembership = (req) => this.pushEntity(req, http2raw.membership, constants.ENTITY_MEMBERSHIP);
-
-  this.parseBlock = (req) => this.pushEntity(req, http2raw.block, constants.ENTITY_BLOCK);
-
-  this.parameters = () => server.dal.getParameters();
-
-  this.with = {
-
-    newcomers: getStat('newcomers'),
-    certs:     getStat('certs'),
-    joiners:   getStat('joiners'),
-    actives:   getStat('actives'),
-    leavers:   getStat('leavers'),
-    revoked:  getStat('revoked'),
-    excluded:  getStat('excluded'),
-    ud:        getStat('ud'),
-    tx:        getStat('tx')
-  };
-
-  function getStat (statName) {
-    return () => co(function *() {
-      let stat = yield server.dal.getStat(statName);
-      return { result: new Stat(stat).json() };
-    });
-  }
-
-  this.promoted = (req) => co(function *() {
-    const number = yield ParametersService.getNumberP(req);
-    const promoted = yield BlockchainService.promoted(number);
-    return new Block(promoted).json();
-  });
-
-  this.blocks = (req) => co(function *() {
-    const params = ParametersService.getCountAndFrom(req);
-    const count = parseInt(params.count);
-    const from = parseInt(params.from);
-    let blocks = yield BlockchainService.blocksBetween(from, count);
-    blocks = blocks.map((b) => (new Block(b).json()));
-    return blocks;
-  });
-
-  this.current = () => co(function *() {
-    const current = yield server.dal.getCurrentBlockOrNull();
-    if (!current) throw constants.ERRORS.NO_CURRENT_BLOCK;
-    return new Block(current).json();
-  });
-
-  this.hardship = (req) => co(function *() {
-    let nextBlockNumber = 0;
-    const search = yield ParametersService.getSearchP(req);
-    const idty = yield IdentityService.findMemberWithoutMemberships(search);
-    if (!idty) {
-      throw constants.ERRORS.NO_MATCHING_IDENTITY;
-    }
-    if (!idty.member) {
-      throw constants.ERRORS.NOT_A_MEMBER;
-    }
-    const current = yield BlockchainService.current();
-    if (current) {
-      nextBlockNumber = current ? current.number + 1 : 0;
-    }
-    const difficulty = yield server.getBcContext().getIssuerPersonalizedDifficulty(idty.pubkey);
-    return {
-      "block": nextBlockNumber,
-      "level": difficulty
-    };
-  });
-
-  this.difficulties = () => co(function *() {
-    const current = yield server.dal.getCurrentBlockOrNull();
-    const number = (current && current.number) || 0;
-    const issuers = yield server.dal.getUniqueIssuersBetween(number - 1 - current.issuersFrame, number - 1);
-    const difficulties = [];
-    for (const issuer of issuers) {
-      const member = yield server.dal.getWrittenIdtyByPubkey(issuer);
-      const difficulty = yield server.getBcContext().getIssuerPersonalizedDifficulty(member.pubkey);
-      difficulties.push({
-        uid: member.uid,
-        level: difficulty
-      });
-    }
-    return {
-      "block": number + 1,
-      "levels": _.sortBy(difficulties, (diff) => diff.level)
-    };
-  });
-
-  this.memberships = (req) => co(function *() {
-    const search = yield ParametersService.getSearchP(req);
-    const idty = yield IdentityService.findMember(search);
-    const json = {
-      pubkey: idty.pubkey,
-      uid: idty.uid,
-      sigDate: idty.buid,
-      memberships: []
-    };
-    json.memberships = idty.memberships.map((msObj) => {
-      const ms = new Membership(msObj);
-      return {
-        version: ms.version,
-        currency: conf.currency,
-        membership: ms.membership,
-        blockNumber: parseInt(ms.blockNumber),
-        blockHash: ms.blockHash,
-        written: (!ms.written_number && ms.written_number !== 0) ? null : ms.written_number
-      };
-    });
-    json.memberships = _.sortBy(json.memberships, 'blockNumber');
-    json.memberships.reverse();
-    return json;
-  });
-
-  this.branches = () => co(function *() {
-    const branches = yield BlockchainService.branches();
-    const blocks = branches.map((b) => new Block(b).json());
-    return {
-      blocks: blocks
-    };
-  });
-}
diff --git a/app/controllers/network.js b/app/controllers/network.js
deleted file mode 100644
index 328e084dfa28ae80a21d81e9ce5c65f654938e54..0000000000000000000000000000000000000000
--- a/app/controllers/network.js
+++ /dev/null
@@ -1,72 +0,0 @@
-"use strict";
-const _                = require('underscore');
-const co               = require('co');
-const Q                = require('q');
-const http2raw         = require('../lib/helpers/http2raw');
-const constants        = require('../lib/constants');
-const Peer             = require('../lib/entity/peer');
-const AbstractController = require('./abstract');
-
-module.exports = function (server) {
-  return new NetworkBinding(server);
-};
-
-function NetworkBinding (server) {
-
-  AbstractController.call(this, server);
-
-  // Services
-  const MerkleService     = server.MerkleService;
-  const PeeringService    = server.PeeringService;
-
-  this.cert = PeeringService.cert;
-
-  this.peer = () => co(function *() {
-    const p = yield PeeringService.peer();
-    if (!p) {
-      throw constants.ERRORS.SELF_PEER_NOT_FOUND;
-    }
-    return p.json();
-  });
-
-  this.peersGet = (req) => co(function *() {
-    let merkle = yield server.dal.merkleForPeers();
-    return yield MerkleService.processForURL(req, merkle, (hashes) => {
-      return co(function *() {
-        try {
-          let peers = yield server.dal.findPeersWhoseHashIsIn(hashes);
-          const map = {};
-          peers.forEach((peer) => {
-            map[peer.hash] = Peer.statics.peerize(peer).json();
-          });
-          if (peers.length == 0) {
-            throw constants.ERRORS.PEER_NOT_FOUND;
-          }
-          return map;
-        } catch (e) {
-          throw e;
-        }
-      });
-    });
-  });
-
-  this.peersPost = (req) => this.pushEntity(req, http2raw.peer, constants.ENTITY_PEER);
-
-  this.peers = () => co(function *() {
-    let peers = yield server.dal.listAllPeers();
-    return {
-      peers: peers.map((p) => {
-        return _.pick(p,
-          'version',
-          'currency',
-          'status',
-          'first_down',
-          'last_try',
-          'pubkey',
-          'block',
-          'signature',
-          'endpoints');
-      })
-    };
-  });
-}
diff --git a/app/controllers/node.js b/app/controllers/node.js
deleted file mode 100644
index 4ebc3aba40f65896c3ab91498c3301a4709d0dc4..0000000000000000000000000000000000000000
--- a/app/controllers/node.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-
-const co = require('co');
-
-module.exports = function (server) {
-  return new NodeBinding(server);
-};
-
-function NodeBinding (server) {
-
-  this.summary = () => {
-    return {
-      "duniter": {
-        "software": "duniter",
-        "version": server.version,
-        "forkWindowSize": server.conf.forksize
-      }
-    };
-  };
-
-  this.sandboxes = () => co(function*() {
-    return {
-      identities: yield sandboxIt(server.dal.idtyDAL.sandbox),
-      certifications: yield sandboxIt(server.dal.certDAL.sandbox),
-      memberships: yield sandboxIt(server.dal.msDAL.sandbox),
-      transactions: yield sandboxIt(server.dal.txsDAL.sandbox)
-    };
-  });
-}
-
-function sandboxIt(sandbox) {
-  return co(function*() {
-    return {
-      size: sandbox.maxSize,
-      free: yield sandbox.getSandboxRoom()
-    };
-  });
-}
diff --git a/app/controllers/transactions.js b/app/controllers/transactions.js
deleted file mode 100644
index ed2e19ed0074cd18732574341545c0ac53981f00..0000000000000000000000000000000000000000
--- a/app/controllers/transactions.js
+++ /dev/null
@@ -1,110 +0,0 @@
-"use strict";
-const co               = require('co');
-const _                = require('underscore');
-const http2raw         = require('../lib/helpers/http2raw');
-const Transaction      = require('../lib/entity/transaction');
-const constants        = require('../lib/constants');
-const AbstractController = require('./abstract');
-
-module.exports = function (server) {
-  return new TransactionBinding(server);
-};
-
-function TransactionBinding(server) {
-
-  AbstractController.call(this, server);
-
-  const conf = server.conf;
-
-  // Services
-  const ParametersService = server.ParametersService;
-
-  // Models
-  const Source = require('../lib/entity/source');
-
-  this.parseTransaction = (req) => this.pushEntity(req, http2raw.transaction, constants.ENTITY_TRANSACTION);
-
-  this.getSources = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    const sources = yield server.dal.getAvailableSourcesByPubkey(pubkey);
-    const result = {
-      "currency": conf.currency,
-      "pubkey": pubkey,
-      "sources": []
-    };
-    sources.forEach(function (src) {
-      result.sources.push(new Source(src).json());
-    });
-    return result;
-  });
-
-  this.getHistory = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    return getFilteredHistory(pubkey, (results) => results);
-  });
-
-  this.getHistoryBetweenBlocks = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    const from = yield ParametersService.getFromP(req);
-    const to = yield ParametersService.getToP(req);
-    return getFilteredHistory(pubkey, (res) => {
-      const histo = res.history;
-      histo.sent =     _.filter(histo.sent, function(tx){ return tx && tx.block_number >= from && tx.block_number <= to; });
-      histo.received = _.filter(histo.received, function(tx){ return tx && tx.block_number >= from && tx.block_number <= to; });
-      _.extend(histo, { sending: [], receiving: [] });
-      return res;
-    });
-  });
-
-  this.getHistoryBetweenTimes = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    const from = yield ParametersService.getFromP(req);
-    const to = yield ParametersService.getToP(req);
-    return getFilteredHistory(pubkey, (res) => {
-      const histo = res.history;
-      histo.sent =     _.filter(histo.sent, function(tx){ return tx && tx.time >= from && tx.time <= to; });
-      histo.received = _.filter(histo.received, function(tx){ return tx && tx.time >= from && tx.time <= to; });
-      _.extend(histo, { sending: [], receiving: [] });
-      return res;
-    });
-  });
-
-  this.getPendingForPubkey = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    return getFilteredHistory(pubkey, function(res) {
-      const histo = res.history;
-      _.extend(histo, { sent: [], received: [] });
-      return res;
-    });
-  });
-
-  this.getPending = (req) => co(function *() {
-    const pending = yield server.dal.getTransactionsPending();
-    const res = {
-      "currency": conf.currency,
-      "pending": pending
-    };
-    pending.map(function(tx, index) {
-      pending[index] = _.omit(new Transaction(tx).json(), 'currency', 'raw');
-    });
-    return res;
-  });
-
-  const getFilteredHistory = (pubkey, filter) => co(function*() {
-      let history = yield server.dal.getTransactionsHistory(pubkey);
-      let result = {
-        "currency": conf.currency,
-        "pubkey": pubkey,
-        "history": history
-      };
-      _.keys(history).map((key) => {
-        history[key].map((tx, index) => {
-          history[key][index] = _.omit(new Transaction(tx).json(), 'currency', 'raw');
-          _.extend(history[key][index], {block_number: tx && tx.block_number, time: tx && tx.time});
-        });
-      });
-      return filter(result);
-  });
-
-  return this;
-}
diff --git a/app/controllers/uds.js b/app/controllers/uds.js
deleted file mode 100644
index 6afeabace3c9ad74cb98e6715fc3210ec573229b..0000000000000000000000000000000000000000
--- a/app/controllers/uds.js
+++ /dev/null
@@ -1,64 +0,0 @@
-"use strict";
-const co = require('co');
-const Q = require('q');
-const _ = require('underscore');
-
-module.exports = function (server) {
-  return new UDBinding(server);
-};
-
-function UDBinding(server) {
-
-  const conf = server.conf;
-
-  // Services
-  const ParametersService = server.ParametersService;
-
-  // Models
-  const Source = require('../lib/entity/source');
-
-  this.getHistory = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    return getUDSources(pubkey, (results) => results);
-  });
-
-  this.getHistoryBetweenBlocks = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    const from = yield ParametersService.getFromP(req);
-    const to = yield ParametersService.getToP(req);
-    return getUDSources(pubkey, (results) => {
-      results.history.history = _.filter(results.history.history, function(ud){ return ud.block_number >= from && ud.block_number <= to; });
-      return results;
-    });
-  });
-
-  this.getHistoryBetweenTimes = (req) => co(function *() {
-    const pubkey = yield ParametersService.getPubkeyP(req);
-    const from = yield ParametersService.getFromP(req);
-    const to = yield ParametersService.getToP(req);
-    return getUDSources(pubkey, (results) => {
-      results.history.history = _.filter(results.history.history, function(ud){ return ud.time >= from && ud.time <= to; });
-      return results;
-    });
-  });
-
-  function getUDSources(pubkey, filter) {
-    return co(function *() {
-      const history = yield server.dal.getUDHistory(pubkey);
-      const result = {
-        "currency": conf.currency,
-        "pubkey": pubkey,
-        "history": history
-      };
-      _.keys(history).map((key) => {
-        history[key].map((src, index) => {
-          history[key][index] = _.omit(new Source(src).UDjson(), 'currency', 'raw');
-          _.extend(history[key][index], { block_number: src && src.block_number, time: src && src.time });
-        });
-      });
-      return filter(result);
-    });
-  }
-  
-  return this;
-}
diff --git a/app/controllers/wot.js b/app/controllers/wot.js
deleted file mode 100644
index fb8979a7a641e66dadedc14a8d6cc06e14d5c434..0000000000000000000000000000000000000000
--- a/app/controllers/wot.js
+++ /dev/null
@@ -1,244 +0,0 @@
-"use strict";
-const co = require('co');
-const _        = require('underscore');
-const http2raw = require('../lib/helpers/http2raw');
-const constants = require('../lib/constants');
-const AbstractController = require('./abstract');
-const logger   = require('../lib/logger')();
-
-module.exports = function (server) {
-  return new WOTBinding(server);
-};
-
-function WOTBinding (server) {
-
-  AbstractController.call(this, server);
-
-  const ParametersService = server.ParametersService;
-  const IdentityService   = server.IdentityService;
-  const BlockchainService   = server.BlockchainService;
-
-  const Identity = require('../lib/entity/identity');
-
-  this.lookup = (req) => co(function *() {
-    // Get the search parameter from HTTP query
-    const search = yield ParametersService.getSearchP(req);
-    // Make the research
-    const identities = yield IdentityService.searchIdentities(search);
-    // Entitify each result
-    identities.forEach((idty, index) => identities[index] = new Identity(idty));
-    // Prepare some data to avoid displaying expired certifications
-    for (const idty of identities) {
-      const certs = yield server.dal.certsToTarget(idty.pubkey, idty.getTargetHash());
-      const validCerts = [];
-      for (const cert of certs) {
-        const member = yield IdentityService.getWrittenByPubkey(cert.from);
-        if (member) {
-          cert.uids = [member.uid];
-          cert.isMember = member.member;
-          cert.wasMember = member.wasMember;
-        } else {
-          const potentials = yield IdentityService.getPendingFromPubkey(cert.from);
-          cert.uids = _(potentials).pluck('uid');
-          cert.isMember = false;
-          cert.wasMember = false;
-        }
-        validCerts.push(cert);
-      }
-      idty.certs = validCerts;
-      const signed = yield server.dal.certsFrom(idty.pubkey);
-      const validSigned = [];
-      for (let j = 0; j < signed.length; j++) {
-        const cert = _.clone(signed[j]);
-        cert.idty = yield server.dal.getIdentityByHashOrNull(cert.target);
-        if (cert.idty) {
-          validSigned.push(cert);
-        } else {
-          logger.debug('A certification to an unknown identity was found (%s => %s)', cert.from, cert.to);
-        }
-      }
-      idty.signed = validSigned;
-    }
-    if (identities.length == 0) {
-      throw constants.ERRORS.NO_MATCHING_IDENTITY;
-    }
-    const resultsByPubkey = {};
-    identities.forEach((identity) => {
-      const jsoned = identity.json();
-      if (!resultsByPubkey[jsoned.pubkey]) {
-        // Create the first matching identity with this pubkey in the map
-        resultsByPubkey[jsoned.pubkey] = jsoned;
-      } else {
-        // Merge the identity with the existing(s)
-        const existing = resultsByPubkey[jsoned.pubkey];
-        // We add the UID of the identity to the list of already added UIDs
-        existing.uids = existing.uids.concat(jsoned.uids);
-        // We do not merge the `signed`: every identity with the same pubkey has the same `signed` because it the *pubkey* which signs, not the identity
-      }
-    });
-    return {
-      partial: false,
-      results: Object.values(resultsByPubkey)
-    };
-  });
-
-  this.members = () => co(function *() {
-    const identities = yield server.dal.getMembers();
-    const json = {
-      results: []
-    };
-    identities.forEach((identity) => json.results.push({ pubkey: identity.pubkey, uid: identity.uid }));
-    return json;
-  });
-
-  this.certifiersOf = (req) => co(function *() {
-    const search = yield ParametersService.getSearchP(req);
-    const idty = yield IdentityService.findMemberWithoutMemberships(search);
-    const certs = yield server.dal.certsToTarget(idty.pubkey, idty.getTargetHash());
-    idty.certs = [];
-    for (const cert of certs) {
-      const certifier = yield server.dal.getWrittenIdtyByPubkey(cert.from);
-      if (certifier) {
-        cert.uid = certifier.uid;
-        cert.isMember = certifier.member;
-        cert.sigDate = certifier.buid;
-        cert.wasMember = true; // As we checked if(certified)
-        if (!cert.cert_time) {
-          // TODO: would be more efficient to save medianTime on certification reception
-          let certBlock = yield server.dal.getBlock(cert.block_number);
-          cert.cert_time = {
-            block: certBlock.number,
-            medianTime: certBlock.medianTime
-          };
-        }
-        idty.certs.push(cert);
-      }
-    }
-    const json = {
-      pubkey: idty.pubkey,
-      uid: idty.uid,
-      sigDate: idty.buid,
-      isMember: idty.member,
-      certifications: []
-    };
-    idty.certs.forEach(function(cert){
-      json.certifications.push({
-        pubkey: cert.from,
-        uid: cert.uid,
-        isMember: cert.isMember,
-        wasMember: cert.wasMember,
-        cert_time: cert.cert_time,
-        sigDate: cert.sigDate,
-        written: cert.linked ? {
-          number: cert.written_block,
-          hash: cert.written_hash
-        } : null,
-        signature: cert.sig
-      });
-    });
-    return json;
-  });
-
-  this.requirements = (req) => co(function *() {
-    const search = yield ParametersService.getSearchP(req);
-    const identities = yield IdentityService.searchIdentities(search);
-    const all = yield BlockchainService.requirementsOfIdentities(identities);
-    if (!all || !all.length) {
-      throw constants.ERRORS.NO_IDTY_MATCHING_PUB_OR_UID;
-    }
-    return {
-      identities: all
-    };
-  });
-
-  this.certifiedBy = (req) => co(function *() {
-    const search = yield ParametersService.getSearchP(req);
-    const idty = yield IdentityService.findMemberWithoutMemberships(search);
-    const certs = yield server.dal.certsFrom(idty.pubkey);
-    idty.certs = [];
-    for (const cert of certs) {
-      const certified = yield server.dal.getWrittenIdtyByPubkey(cert.to);
-      if (certified) {
-        cert.uid = certified.uid;
-        cert.isMember = certified.member;
-        cert.sigDate = certified.buid;
-        cert.wasMember = true; // As we checked if(certified)
-        if (!cert.cert_time) {
-          // TODO: would be more efficient to save medianTime on certification reception (note: now partially done with INDEX)
-          let certBlock = yield server.dal.getBlock(cert.block_number);
-          cert.cert_time = {
-            block: certBlock.number,
-            medianTime: certBlock.medianTime
-          };
-        }
-        idty.certs.push(cert);
-      }
-    }
-    const json = {
-      pubkey: idty.pubkey,
-      uid: idty.uid,
-      sigDate: idty.buid,
-      isMember: idty.member,
-      certifications: []
-    };
-    idty.certs.forEach((cert) => json.certifications.push({
-        pubkey: cert.to,
-        uid: cert.uid,
-        isMember: cert.isMember,
-        wasMember: cert.wasMember,
-        cert_time: cert.cert_time,
-        sigDate: cert.sigDate,
-        written: cert.linked ? {
-          number: cert.written_block,
-          hash: cert.written_hash
-        } : null,
-        signature: cert.sig
-      })
-    );
-    return json;
-  });
-
-  this.identityOf = (req) => co(function *() {
-    let search = yield ParametersService.getSearchP(req);
-    let idty = yield IdentityService.findMemberWithoutMemberships(search);
-    if (!idty) {
-      throw 'Identity not found';
-    }
-    if (!idty.member) {
-      throw 'Not a member';
-    }
-    return {
-      pubkey: idty.pubkey,
-      uid: idty.uid,
-      sigDate: idty.buid
-    };
-  });
-
-  this.add = (req) => this.pushEntity(req, http2raw.identity, constants.ENTITY_IDENTITY);
-
-  this.certify = (req) => this.pushEntity(req, http2raw.certification, constants.ENTITY_CERTIFICATION);
-
-  this.revoke = (req) => this.pushEntity(req, http2raw.revocation, constants.ENTITY_REVOCATION);
-
-  this.pendingMemberships = (req) => co(function*() {
-    const memberships = yield server.dal.findNewcomers();
-    const json = {
-      memberships: []
-    };
-    json.memberships = memberships.map((ms) => {
-      return {
-        pubkey: ms.issuer,
-        uid: ms.userid,
-        version: ms.version,
-        currency: server.conf.currency,
-        membership: ms.membership,
-        blockNumber: parseInt(ms.blockNumber),
-        blockHash: ms.blockHash,
-        written: (!ms.written_number && ms.written_number !== 0) ? null : ms.written_number
-      };
-    });
-    json.memberships = _.sortBy(json.memberships, 'blockNumber');
-    json.memberships.reverse();
-    return json;
-  });
-}
diff --git a/app/lib/cfs.js b/app/lib/cfs.js
index a6eab9a600bd88aa0e537da9e677eaf3e2f32155..4d8a55804e809c4277d8a54938d9c16f0b74da31 100644
--- a/app/lib/cfs.js
+++ b/app/lib/cfs.js
@@ -1,6 +1,5 @@
 "use strict";
 
-const Q = require('q');
 const _ = require('underscore');
 const co = require('co');
 const path = require('path');
@@ -24,7 +23,7 @@ function CFSCore(rootPath, qfs, parent) {
 
   /**
    * Creates the deletion folder before effective deletion.
-   * @returns {*|any|Q.Promise<void>} Promise of creation.
+   * @returns {*|any|Promise<void>} Promise of creation.
    */
   const createDeletionFolder = () => deletionFolderPromise || (deletionFolderPromise = that.makeTree('.deleted'));
 
@@ -197,7 +196,7 @@ function CFSCore(rootPath, qfs, parent) {
       return JSON.parse(data);
     } catch(err) {
       if (data && err.message.match(/^Unexpected token {/)) {
-        // TODO: this is a bug thrown during Unit Tests with MEMORY_MODE true...
+        // This is a bug that occurs during Unit Tests with MEMORY_MODE true...
         return JSON.parse(data.match(/^(.*)}{.*/)[1] + '}');
       } else if (err.message.match(/^Unexpected end of input/)) {
         // Could not read, return empty object
diff --git a/app/lib/computation/blockGenerator.js b/app/lib/computation/blockGenerator.js
deleted file mode 100644
index 4fad108d3b309101ee7397cbc530158e34b51da1..0000000000000000000000000000000000000000
--- a/app/lib/computation/blockGenerator.js
+++ /dev/null
@@ -1,735 +0,0 @@
-"use strict";
-const _               = require('underscore');
-const co              = require('co');
-const Q               = require('q');
-const moment          = require('moment');
-const inquirer        = require('inquirer');
-const indexer         = require('../dup/indexer');
-const rawer           = require('../ucp/rawer');
-const hashf           = require('../ucp/hashf');
-const constants       = require('../constants');
-const base58          = require('../crypto/base58');
-const rules           = require('../rules/index');
-const keyring          = require('../crypto/keyring');
-const Identity        = require('../entity/identity');
-const Certification   = require('../entity/certification');
-const Membership      = require('../entity/membership');
-const Block           = require('../entity/block');
-const Transaction     = require('../entity/transaction');
-
-module.exports = (mainContext, prover) => {
-  return new BlockGenerator(mainContext, prover);
-};
-
-function BlockGenerator(mainContext, prover) {
-
-  const that = this;
-  let conf, dal, keyPair, selfPubkey, logger;
-
-  this.setConfDAL = (newConf, newDAL, newKeyPair) => {
-    dal = newDAL;
-    conf = newConf;
-    keyPair = newKeyPair;
-    selfPubkey = newKeyPair.publicKey;
-    logger = require('../logger')(dal.profile);
-  };
-
-  this.nextBlock = (manualValues) => generateNextBlock(new NextBlockGenerator(mainContext, conf, dal), manualValues);
-
-  this.manualRoot = () => co(function *() {
-    let current = yield dal.getCurrentBlockOrNull();
-    if (current) {
-      throw 'Cannot generate root block: it already exists.';
-    }
-    return generateNextBlock(new ManualRootGenerator());
-  });
-
-  this.makeNextBlock = (block, trial, manualValues) => co(function *() {
-    const unsignedBlock = block || (yield that.nextBlock(manualValues));
-    const trialLevel = trial || (yield mainContext.getIssuerPersonalizedDifficulty(selfPubkey));
-    return prover.prove(unsignedBlock, trialLevel, (manualValues && manualValues.time) || null);
-  });
-
-  /**
-   * Generate next block, gathering both updates & newcomers
-   */
-  const generateNextBlock = (generator, manualValues) => co(function *() {
-    const current = yield dal.getCurrentBlockOrNull();
-    const revocations = yield dal.getRevocatingMembers();
-    const exclusions = yield dal.getToBeKickedPubkeys();
-    const newCertsFromWoT = yield generator.findNewCertsFromWoT(current);
-    const newcomersLeavers = yield findNewcomersAndLeavers(current, generator.filterJoiners);
-    const transactions = yield findTransactions(current);
-    const joinData = newcomersLeavers[2];
-    const leaveData = newcomersLeavers[3];
-    const newCertsFromNewcomers = newcomersLeavers[4];
-    const certifiersOfNewcomers = _.uniq(_.keys(joinData).reduce((certifiers, newcomer) => {
-      return certifiers.concat(_.pluck(joinData[newcomer].certs, 'from'));
-    }, []));
-    const certifiers = [].concat(certifiersOfNewcomers);
-    // Merges updates
-    _(newCertsFromWoT).keys().forEach(function(certified){
-      newCertsFromWoT[certified] = newCertsFromWoT[certified].filter((cert) => {
-        // Must not certify a newcomer, since it would mean multiple certifications at the same time from one member
-        const isCertifier = certifiers.indexOf(cert.from) != -1;
-        if (!isCertifier) {
-          certifiers.push(cert.from);
-        }
-        return !isCertifier;
-      });
-    });
-    _(newCertsFromNewcomers).keys().forEach((certified) => {
-      newCertsFromWoT[certified] = (newCertsFromWoT[certified] || []).concat(newCertsFromNewcomers[certified]);
-    });
-    // Revocations
-    // Create the block
-    return createBlock(current, joinData, leaveData, newCertsFromWoT, revocations, exclusions, transactions, manualValues);
-  });
-
-  const findNewcomersAndLeavers  = (current, filteringFunc) => co(function*() {
-    const newcomers = yield findNewcomers(current, filteringFunc);
-    const leavers = yield findLeavers(current);
-
-    const cur = newcomers.current;
-    const newWoTMembers = newcomers.newWotMembers;
-    const finalJoinData = newcomers.finalJoinData;
-    const updates = newcomers.updates;
-
-    return [cur, newWoTMembers, finalJoinData, leavers, updates];
-  });
-
-  const findTransactions = (current) => co(function*() {
-    const versionMin = current ? Math.min(constants.LAST_VERSION_FOR_TX, current.version) : constants.DOCUMENTS_VERSION;
-    const txs = yield dal.getTransactionsPending(versionMin);
-    const transactions = [];
-    const passingTxs = [];
-    for (const obj of txs) {
-      const tx = new Transaction(obj, conf.currency);
-      const extractedTX = tx.getTransaction();
-      try {
-        yield Q.nbind(rules.HELPERS.checkBunchOfTransactions, rules, passingTxs.concat(extractedTX));
-        const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 };
-        yield rules.HELPERS.checkSingleTransaction(extractedTX, nextBlockWithFakeTimeVariation, conf, dal);
-        yield rules.HELPERS.checkTxBlockStamp(extractedTX, dal);
-        transactions.push(tx);
-        passingTxs.push(extractedTX);
-        logger.info('Transaction %s added to block', tx.hash);
-      } catch (err) {
-        logger.error(err);
-        const currentNumber = (current && current.number) || 0;
-        const blockstamp = extractedTX.blockstamp || (currentNumber + '-');
-        const txBlockNumber = parseInt(blockstamp.split('-')[0]);
-        // 10 blocks before removing the transaction
-        if (currentNumber - txBlockNumber + 1 >= constants.TRANSACTION_MAX_TRIES) {
-          yield dal.removeTxByHash(extractedTX.hash);
-        }
-      }
-    }
-    return transactions;
-  });
-
-  const findLeavers = (current) => co(function*() {
-    const leaveData = {};
-    const memberships = yield dal.findLeavers();
-    const leavers = [];
-    memberships.forEach((ms) => leavers.push(ms.issuer));
-    for (const ms of memberships) {
-      const leave = { identity: null, ms: ms, key: null, idHash: '' };
-      leave.idHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase();
-      let block;
-      if (current) {
-        block = yield dal.getBlock(ms.number);
-      }
-      else {
-        block = {};
-      }
-      const identity = yield dal.getIdentityByHashOrNull(leave.idHash);
-      const currentMembership = yield dal.mindexDAL.getReducedMS(ms.issuer);
-      const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
-      if (identity && block && currentMSN < leave.ms.number && identity.member) {
-        // MS + matching cert are found
-        leave.identity = identity;
-        leaveData[identity.pubkey] = leave;
-      }
-    }
-    return leaveData;
-  });
-
-  const findNewcomers = (current, filteringFunc) => co(function*() {
-    const updates = {};
-    const preJoinData = yield getPreJoinData(current);
-    const joinData = yield filteringFunc(preJoinData);
-    const members = yield dal.getMembers();
-    const wotMembers = _.pluck(members, 'pubkey');
-    // Checking step
-    const newcomers = _(joinData).keys();
-    const nextBlockNumber = current ? current.number + 1 : 0;
-    try {
-      const realNewcomers = yield iteratedChecking(newcomers, (someNewcomers) => co(function*() {
-        const nextBlock = {
-          number: nextBlockNumber,
-          joiners: someNewcomers,
-          identities: _.filter(newcomers.map((pub) => joinData[pub].identity), { wasMember: false }).map((idty) => idty.pubkey)
-        };
-        const newLinks = yield computeNewLinks(nextBlockNumber, someNewcomers, joinData, updates);
-        yield checkWoTConstraints(nextBlock, newLinks, current);
-      }));
-      const newLinks = yield computeNewLinks(nextBlockNumber, realNewcomers, joinData, updates);
-      const newWoT = wotMembers.concat(realNewcomers);
-      const finalJoinData = {};
-      realNewcomers.forEach((newcomer) => {
-        // Only keep membership of selected newcomers
-        finalJoinData[newcomer] = joinData[newcomer];
-        // Only keep certifications from final members
-        const keptCerts = [];
-        joinData[newcomer].certs.forEach((cert) => {
-          const issuer = cert.from;
-          if (~newWoT.indexOf(issuer) && ~newLinks[cert.to].indexOf(issuer)) {
-            keptCerts.push(cert);
-          }
-        });
-        joinData[newcomer].certs = keptCerts;
-      });
-      return {
-        current: current,
-        newWotMembers: wotMembers.concat(realNewcomers),
-        finalJoinData: finalJoinData,
-        updates: updates
-      }
-    } catch(err) {
-      logger.error(err);
-      throw err;
-    }
-  });
-
-  const checkWoTConstraints = (block, newLinks, current) => co(function*() {
-    if (block.number < 0) {
-      throw 'Cannot compute WoT constraint for negative block number';
-    }
-    const newcomers = block.joiners.map((inlineMS) => inlineMS.split(':')[0]);
-    const realNewcomers = block.identities;
-    for (const newcomer of newcomers) {
-      if (block.number > 0) {
-        try {
-          // Will throw an error if not enough links
-          yield mainContext.checkHaveEnoughLinks(newcomer, newLinks);
-          // This one does not throw but returns a boolean
-          const isOut = yield rules.HELPERS.isOver3Hops(newcomer, newLinks, realNewcomers, current, conf, dal);
-          if (isOut) {
-            throw 'Key ' + newcomer + ' is not recognized by the WoT for this block';
-          }
-        } catch (e) {
-          logger.debug(e);
-          throw e;
-        }
-      }
-    }
-  });
-
-  const iteratedChecking = (newcomers, checkWoTForNewcomers) => co(function*() {
-    const passingNewcomers = [];
-    let hadError = false;
-    for (const newcomer of newcomers) {
-      try {
-        yield checkWoTForNewcomers(passingNewcomers.concat(newcomer));
-        passingNewcomers.push(newcomer);
-      } catch (err) {
-        hadError = hadError || err;
-      }
-    }
-    if (hadError) {
-      return yield iteratedChecking(passingNewcomers, checkWoTForNewcomers);
-    } else {
-      return passingNewcomers;
-    }
-  });
-
-  const getPreJoinData = (current) => co(function*() {
-    const preJoinData = {};
-    const memberships = yield dal.findNewcomers();
-    const joiners = [];
-    memberships.forEach((ms) =>joiners.push(ms.issuer));
-    for (const ms of memberships) {
-      try {
-        if (ms.block != constants.BLOCK.SPECIAL_BLOCK) {
-          let msBasedBlock = yield dal.getBlockByBlockstampOrNull(ms.block);
-          if (!msBasedBlock) {
-            throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK;
-          }
-          let age = current.medianTime - msBasedBlock.medianTime;
-          if (age > conf.msWindow) {
-            throw constants.ERRORS.TOO_OLD_MEMBERSHIP;
-          }
-        }
-        const idtyHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase();
-        const join = yield that.getSinglePreJoinData(current, idtyHash, joiners);
-        join.ms = ms;
-        const currentMembership = yield dal.mindexDAL.getReducedMS(ms.issuer);
-        const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
-        if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) {
-          preJoinData[join.identity.pubkey] = join;
-        }
-      } catch (err) {
-        if (err && !err.uerr) {
-          logger.warn(err);
-        }
-      }
-    }
-    return preJoinData;
-  });
-
-  const computeNewLinks = (forBlock, theNewcomers, joinData, updates) => co(function *() {
-    let newCerts = yield that.computeNewCerts(forBlock, theNewcomers, joinData);
-    return that.newCertsToLinks(newCerts, updates);
-  });
-
-  this.newCertsToLinks = (newCerts, updates) => {
-    let newLinks = {};
-    _.mapObject(newCerts, function(certs, pubkey) {
-      newLinks[pubkey] = _.pluck(certs, 'from');
-    });
-    _.mapObject(updates, function(certs, pubkey) {
-      newLinks[pubkey] = (newLinks[pubkey] || []).concat(_.pluck(certs, 'pubkey'));
-    });
-    return newLinks;
-  };
-
-  this.computeNewCerts = (forBlock, theNewcomers, joinData) => co(function *() {
-    const newCerts = {}, certifiers = [];
-    const certsByKey = _.mapObject(joinData, function(val){ return val.certs; });
-    for (const newcomer of theNewcomers) {
-      // New array of certifiers
-      newCerts[newcomer] = newCerts[newcomer] || [];
-      // Check whether each certification of the block is from a valid newcomer/member
-      for (const cert of certsByKey[newcomer]) {
-        const isAlreadyCertifying = certifiers.indexOf(cert.from) !== -1;
-        if (!(isAlreadyCertifying && forBlock > 0)) {
-          if (~theNewcomers.indexOf(cert.from)) {
-            // Newcomer to newcomer => valid link
-            newCerts[newcomer].push(cert);
-            certifiers.push(cert.from);
-          } else {
-            let isMember = yield dal.isMember(cert.from);
-            // Member to newcomer => valid link
-            if (isMember) {
-              newCerts[newcomer].push(cert);
-              certifiers.push(cert.from);
-            }
-          }
-        }
-      }
-    }
-    return newCerts;
-  });
-
-  this.getSinglePreJoinData = (current, idHash, joiners) => co(function *() {
-    const identity = yield dal.getIdentityByHashOrNull(idHash);
-    let foundCerts = [];
-    const vHEAD_1 = yield mainContext.getvHEAD_1();
-    if (!identity) {
-      throw 'Identity with hash \'' + idHash + '\' not found';
-    }
-    if (current && identity.buid == constants.BLOCK.SPECIAL_BLOCK && !identity.wasMember) {
-      throw constants.ERRORS.TOO_OLD_IDENTITY;
-    }
-    else if (!identity.wasMember && identity.buid != constants.BLOCK.SPECIAL_BLOCK) {
-      const idtyBasedBlock = yield dal.getBlock(identity.buid);
-      const age = current.medianTime - idtyBasedBlock.medianTime;
-      if (age > conf.idtyWindow) {
-        throw constants.ERRORS.TOO_OLD_IDENTITY;
-      }
-    }
-    const idty = new Identity(identity);
-    idty.currency = conf.currency;
-    const createIdentity = idty.rawWithoutSig();
-    const verified = keyring.verify(createIdentity, idty.sig, idty.pubkey);
-    if (!verified) {
-      throw constants.ERRORS.IDENTITY_WRONGLY_SIGNED;
-    }
-    const isIdentityLeaving = yield dal.isLeaving(idty.pubkey);
-    if (!isIdentityLeaving) {
-      if (!current) {
-        // Look for certifications from initial joiners
-        // TODO: check if this is still working
-        const certs = yield dal.certsNotLinkedToTarget(idHash);
-        foundCerts = _.filter(certs, function(cert){
-          // Add 'joiners && ': special case when block#0 not written AND not joiner yet (avoid undefined error)
-          return joiners && ~joiners.indexOf(cert.from);
-        });
-      } else {
-        // Look for certifications from WoT members
-        let certs = yield dal.certsNotLinkedToTarget(idHash);
-        const certifiers = [];
-        for (const cert of certs) {
-          try {
-            const basedBlock = yield dal.getBlock(cert.block_number);
-            if (!basedBlock) {
-              throw 'Unknown timestamp block for identity';
-            }
-            if (current) {
-              const age = current.medianTime - basedBlock.medianTime;
-              if (age > conf.sigWindow || age > conf.sigValidity) {
-                throw 'Too old certification';
-              }
-            }
-            // Is there already a link that is not yet replayable?
-            let exists = yield dal.existsNonReplayableLink(cert.from, cert.to);
-            if (exists) {
-              throw 'It already exists a similar certification written, which is not replayable yet';
-            }
-            // Is there already a link that is not yet chainable?
-            exists = yield dal.existsNonChainableLink(cert.from, vHEAD_1, conf.sigStock);
-            if (exists) {
-              throw 'It already exists a certification written which is not chainable yet';
-            }
-            const isMember = yield dal.isMember(cert.from);
-            const doubleSignature = ~certifiers.indexOf(cert.from) ? true : false;
-            if (isMember && !doubleSignature) {
-              const isValid = yield rules.HELPERS.checkCertificationIsValidForBlock(cert, { number: current.number + 1, currency: current.currency }, identity, conf, dal);
-              if (isValid) {
-                certifiers.push(cert.from);
-                foundCerts.push(cert);
-              }
-            }
-          } catch (e) {
-            logger.warn(e.stack || e.message || e);
-            // Go on
-          }
-        }
-      }
-    }
-    return {
-      identity: identity,
-      key: null,
-      idHash: idHash,
-      certs: foundCerts
-    };
-  });
-
-  const createBlock = (current, joinData, leaveData, updates, revocations, exclusions, transactions, manualValues) => {
-    return co(function *() {
-
-      const vHEAD = yield mainContext.getvHeadCopy();
-      const vHEAD_1 = yield mainContext.getvHEAD_1();
-      const maxLenOfBlock = indexer.DUP_HELPERS.getMaxBlockSize(vHEAD);
-      let blockLen = 0;
-      // Revocations have an impact on exclusions
-      revocations.forEach((idty) => exclusions.push(idty.pubkey));
-      // Prevent writing joins/updates for excluded members
-      exclusions = _.uniq(exclusions);
-      exclusions.forEach((excluded) => {
-        delete updates[excluded];
-        delete joinData[excluded];
-        delete leaveData[excluded];
-      });
-      _(leaveData).keys().forEach((leaver) => {
-        delete updates[leaver];
-        delete joinData[leaver];
-      });
-      const block = new Block();
-      block.number = current ? current.number + 1 : 0;
-      // Compute the new MedianTime
-      if (block.number == 0) {
-        block.medianTime = moment.utc().unix() - conf.rootoffset;
-      }
-      else {
-        block.medianTime = vHEAD.medianTime;
-      }
-      // Choose the version
-      block.version = (manualValues && manualValues.version) || (yield rules.HELPERS.getMaxPossibleVersionNumber(current, block));
-      block.currency = current ? current.currency : conf.currency;
-      block.nonce = 0;
-      block.parameters = block.number > 0 ? '' : [
-        conf.c, conf.dt, conf.ud0,
-        conf.sigPeriod, conf.sigStock, conf.sigWindow, conf.sigValidity,
-        conf.sigQty, conf.idtyWindow, conf.msWindow, conf.xpercent, conf.msValidity,
-        conf.stepMax, conf.medianTimeBlocks, conf.avgGenTime, conf.dtDiffEval,
-        (conf.percentRot == 1 ? "1.0" : conf.percentRot)
-      ].join(':');
-      block.previousHash = current ? current.hash : "";
-      block.previousIssuer = current ? current.issuer : "";
-      if (selfPubkey)
-        block.issuer = selfPubkey;
-      // Members merkle
-      const joiners = _(joinData).keys();
-      const previousCount = current ? current.membersCount : 0;
-      if (joiners.length == 0 && !current) {
-        throw constants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS;
-      }
-
-      // Kicked people
-      block.excluded = exclusions;
-
-      /*****
-       * Priority 1: keep the WoT sane
-       */
-      // Certifications from the WoT, to the WoT
-      _(updates).keys().forEach((certifiedMember) => {
-        const certs = updates[certifiedMember] || [];
-        certs.forEach((cert) => {
-          if (blockLen < maxLenOfBlock) {
-            block.certifications.push(new Certification(cert).inline());
-            blockLen++;
-          }
-        });
-      });
-      // Renewed
-      joiners.forEach((joiner) => {
-        const data = joinData[joiner];
-        // Renew only for current members
-        if (data.identity.member) {
-          if (blockLen < maxLenOfBlock) {
-            block.actives.push(new Membership(data.ms).inline());
-            blockLen++;
-          }
-        }
-      });
-      // Leavers
-      const leavers = _(leaveData).keys();
-      leavers.forEach((leaver) => {
-        const data = leaveData[leaver];
-        // Leave only for current members
-        if (data.identity.member) {
-          if (blockLen < maxLenOfBlock) {
-            block.leavers.push(new Membership(data.ms).inline());
-            blockLen++;
-          }
-        }
-      });
-
-      /*****
-       * Priority 2: revoked identities
-       */
-      revocations.forEach((idty) => {
-        if (blockLen < maxLenOfBlock) {
-          block.revoked.push([idty.pubkey, idty.revocation_sig].join(':'));
-          blockLen++;
-        }
-      });
-
-      /*****
-       * Priority 3: newcomers/renewcomers
-       */
-      let countOfCertsToNewcomers = 0;
-      // Newcomers
-      // Newcomers + returning members
-      joiners.forEach((joiner) => {
-        const data = joinData[joiner];
-        // Identities only for never-have-been members
-        if (!data.identity.member && !data.identity.wasMember) {
-          block.identities.push(new Identity(data.identity).inline());
-        }
-        // Join only for non-members
-        if (!data.identity.member) {
-          block.joiners.push(new Membership(data.ms).inline());
-        }
-      });
-      block.identities = _.sortBy(block.identities, (line) => {
-        const sp = line.split(':');
-        return sp[2] + sp[3];
-      });
-
-      // Certifications from the WoT, to newcomers
-      joiners.forEach((joiner) => {
-        const data = joinData[joiner] || [];
-        data.certs.forEach((cert) => {
-          countOfCertsToNewcomers++;
-          block.certifications.push(new Certification(cert).inline());
-        });
-      });
-
-      // If the block exceeds the maximum length, revert newcomers/renewcomers
-      if (Block.statics.getLen(block) > maxLenOfBlock) {
-        for (let i = 0; i < block.identities.length; i++) {
-          block.identities.pop();
-          block.joiners.pop();
-        }
-        for (let i = 0; i < countOfCertsToNewcomers; i++) {
-          block.certifications.pop();
-        }
-      }
-
-      // Final number of members
-      block.membersCount = previousCount + block.joiners.length - block.excluded.length;
-
-      vHEAD.membersCount = block.membersCount;
-
-      /*****
-       * Priority 4: transactions
-       */
-      block.transactions = [];
-      blockLen = Block.statics.getLen(block);
-      if (blockLen < maxLenOfBlock) {
-        transactions.forEach((tx) => {
-          const txLen = Transaction.statics.getLen(tx);
-          if (txLen <= constants.MAXIMUM_LEN_OF_COMPACT_TX && blockLen + txLen <= maxLenOfBlock && tx.version == constants.TRANSACTION_VERSION) {
-            block.transactions.push({ raw: tx.compact() });
-          }
-          blockLen += txLen;
-        });
-      }
-
-      /**
-       * Finally handle the Universal Dividend
-       */
-      block.powMin = vHEAD.powMin;
-
-      // BR_G13
-      indexer.prepareDividend(vHEAD, vHEAD_1, conf);
-
-      // BR_G14
-      indexer.prepareUnitBase(vHEAD, vHEAD_1, conf);
-
-      // Universal Dividend
-      if (vHEAD.new_dividend) {
-        block.dividend = vHEAD.dividend;
-        block.unitbase = vHEAD.unitBase;
-      } else {
-        block.unitbase = block.number == 0 ? 0 : current.unitbase;
-      }
-      // Rotation
-      block.issuersCount = vHEAD.issuersCount;
-      block.issuersFrame = vHEAD.issuersFrame;
-      block.issuersFrameVar = vHEAD.issuersFrameVar;
-      // InnerHash
-      block.time = block.medianTime;
-      block.inner_hash = hashf(rawer.getBlockInnerPart(block)).toUpperCase();
-      if (manualValues) {
-        _.extend(block, _.omit(manualValues, 'time'));
-      }
-      return block;
-    });
-  }
-}
-
-/**
- * Class to implement strategy of automatic selection of incoming data for next block.
- * @constructor
- */
-function NextBlockGenerator(mainContext, conf, dal) {
-
-  const logger = require('../logger')(dal.profile);
-
-  this.findNewCertsFromWoT = (current) => co(function *() {
-    const updates = {};
-    const updatesToFrom = {};
-    const certs = yield dal.certsFindNew();
-    const vHEAD_1 = yield mainContext.getvHEAD_1();
-    for (const cert of certs) {
-      const targetIdty = yield dal.getIdentityByHashOrNull(cert.target);
-      // The identity must be known
-      if (targetIdty) {
-        const certSig = cert.sig;
-        // Do not rely on certification block UID, prefer using the known hash of the block by its given number
-        const targetBlock = yield dal.getBlock(cert.block_number);
-        // Check if writable
-        let duration = current && targetBlock ? current.medianTime - parseInt(targetBlock.medianTime) : 0;
-        if (targetBlock && duration <= conf.sigWindow) {
-          cert.sig = '';
-          cert.currency = conf.currency;
-          cert.issuer = cert.from;
-          cert.idty_issuer = targetIdty.pubkey;
-          cert.idty_uid = targetIdty.uid;
-          cert.idty_buid = targetIdty.buid;
-          cert.idty_sig = targetIdty.sig;
-          cert.buid = current ? [cert.block_number, targetBlock.hash].join('-') : constants.BLOCK.SPECIAL_BLOCK;
-          const rawCert = Certification.statics.fromJSON(cert).getRaw();
-          if (keyring.verify(rawCert, certSig, cert.from)) {
-            cert.sig = certSig;
-            let exists = false;
-            if (current) {
-              // Is there already a link that is not yet replayable?
-              exists = yield dal.existsNonReplayableLink(cert.from, cert.to);
-            }
-            if (!exists) {
-              // Is there already a link that is not yet chainable?
-              // No chainability block means absolutely nobody can issue certifications yet
-              exists = yield dal.existsNonChainableLink(cert.from, vHEAD_1, conf.sigStock);
-              if (!exists) {
-                // There is NOT already a similar written certification which is not yet replayable
-                // Signatory must be a member
-                const isSignatoryAMember = yield dal.isMember(cert.from);
-                const isCertifiedANonLeavingMember = isSignatoryAMember && (yield dal.isMemberAndNonLeaver(cert.to));
-                // Certified must be a member and non-leaver
-                if (isSignatoryAMember && isCertifiedANonLeavingMember) {
-                  updatesToFrom[cert.to] = updatesToFrom[cert.to] || [];
-                  updates[cert.to] = updates[cert.to] || [];
-                  if (updatesToFrom[cert.to].indexOf(cert.from) == -1) {
-                    updates[cert.to].push(cert);
-                    updatesToFrom[cert.to].push(cert.from);
-                  }
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-    return updates;
-  });
-
-  this.filterJoiners = (preJoinData) => co(function*() {
-    const filtered = {};
-    const filterings = [];
-    const filter = (pubkey) => co(function*() {
-      try {
-        // No manual filtering, takes all BUT already used UID or pubkey
-        let exists = yield rules.HELPERS.checkExistsUserID(preJoinData[pubkey].identity.uid, dal);
-        if (exists && !preJoinData[pubkey].identity.wasMember) {
-          throw 'UID already taken';
-        }
-        exists = yield rules.HELPERS.checkExistsPubkey(pubkey, dal);
-        if (exists && !preJoinData[pubkey].identity.wasMember) {
-          throw 'Pubkey already taken';
-        }
-        filtered[pubkey] = preJoinData[pubkey];
-      }
-      catch (err) {
-        logger.warn(err);
-      }
-    });
-    _.keys(preJoinData).forEach( (joinPubkey) => filterings.push(filter(joinPubkey)));
-    yield filterings;
-    return filtered;
-  });
-}
-
-/**
- * Class to implement strategy of manual selection of root members for root block.
- * @constructor
- */
-function ManualRootGenerator() {
-
-  this.findNewCertsFromWoT = () => Q({});
-
-  this.filterJoiners = (preJoinData) => co(function*() {
-    const filtered = {};
-    const newcomers = _(preJoinData).keys();
-    const uids = [];
-    newcomers.forEach((newcomer) => uids.push(preJoinData[newcomer].ms.userid));
-
-    if (newcomers.length > 0) {
-      return new Promise((resolve, reject) => {
-        inquirer.prompt([{
-              type: "checkbox",
-              name: "uids",
-              message: "Newcomers to add",
-              choices: uids,
-              default: uids[0]
-            }],
-            (answers) => {
-              newcomers.forEach((newcomer) => {
-                if (~answers.uids.indexOf(preJoinData[newcomer].ms.userid))
-                  filtered[newcomer] = preJoinData[newcomer];
-              });
-              if (answers.uids.length == 0)
-                reject('No newcomer selected');
-              else
-                resolve(filtered);
-            });
-      });
-    } else {
-      throw 'No newcomer found';
-    }
-  });
-}
diff --git a/app/lib/computation/blockProver.js b/app/lib/computation/blockProver.js
deleted file mode 100644
index cd02fa0772575fd35d54713ce174d324bca260be..0000000000000000000000000000000000000000
--- a/app/lib/computation/blockProver.js
+++ /dev/null
@@ -1,253 +0,0 @@
-"use strict";
-const co              = require('co');
-const _               = require('underscore');
-const constants       = require('../constants');
-const base58          = require('../crypto/base58');
-const engine          = require('../pow/engine');
-const path            = require('path');
-const Block           = require('../entity/block');
-const querablep       = require('../querablep');
-
-const POW_FOUND = true;
-const POW_NOT_FOUND_YET = false;
-
-module.exports = (server) => new BlockGenerator(server);
-
-function BlockGenerator(notifier) {
-
-  let conf, pair, logger, wait = null, waitResolve;
-
-  let workerFarmPromise;
-
-  function getWorker() {
-    return (workerFarmPromise || (workerFarmPromise = co(function*() {
-      return new WorkerFarm();
-    })));
-  }
-
-  this.setConfDAL = (newConf, newDAL, newPair) => {
-    conf = newConf;
-    pair = newPair;
-    logger = require('../logger')('prover');
-  };
-
-  const debug = process.execArgv.toString().indexOf('--debug') !== -1;
-  if(debug) {
-    // Clear exec arguments so child processes do not inherit the --debug flag.
-    process.execArgv = [];
-  }
-
-  this.waitForNewAsking = () => wait = new Promise((resolve) => {
-    waitResolve = resolve;
-  });
-
-  this.cancel = (gottenBlock) => co(function*() {
-    // If no farm was instantiated, there is nothing to do yet
-    if (workerFarmPromise) {
-      let farm = yield getWorker();
-      if (farm.isComputing() && !farm.isStopping()) {
-        yield farm.stopPoW(gottenBlock);
-      }
-      if (waitResolve) {
-        waitResolve();
-        waitResolve = null;
-        wait = null;
-      }
-    }
-  });
-
-  this.prove = function (block, difficulty, forcedTime) {
-
-    if (waitResolve) {
-      waitResolve();
-      waitResolve = null;
-      wait = null;
-    }
-
-    const remainder = difficulty % 16;
-    const nbZeros = (difficulty - remainder) / 16;
-    const highMark = constants.PROOF_OF_WORK.UPPER_BOUND[remainder];
-
-    return co(function*() {
-
-      let powFarm = yield getWorker();
-
-      if (block.number == 0) {
-        // On initial block, difficulty is the one given manually
-        block.powMin = difficulty;
-      }
-
-      // Start
-      powFarm.setOnAlmostPoW(function(pow, matches, block, found) {
-        powEvent(found, pow);
-        if (matches && matches[1].length >= constants.PROOF_OF_WORK.MINIMAL_TO_SHOW_IN_LOGS) {
-          logger.info('Matched %s zeros %s with Nonce = %s for block#%s by %s', matches[1].length, pow, block.nonce, block.number, block.issuer.slice(0,6));
-        }
-      });
-
-      block.nonce = 0;
-      logger.info('Generating proof-of-work with %s leading zeros followed by [0-' + highMark + ']... (CPU usage set to %s%) for block#%s', nbZeros, (conf.cpu * 100).toFixed(0), block.number, block.issuer.slice(0,6));
-      const start = Date.now();
-      let result = yield powFarm.askNewProof({
-        newPoW: { conf: conf, block: block, zeros: nbZeros, highMark: highMark, forcedTime: forcedTime,
-          pair: pair.json()
-        }
-      });
-      if (!result) {
-        logger.info('GIVEN proof-of-work for block#%s with %s leading zeros followed by [0-' + highMark + ']! stop PoW for %s', block.number, nbZeros, pair.publicKey.slice(0,6));
-        throw 'Proof-of-work computation canceled because block received';
-      } else {
-        const proof = result.block;
-        const testsCount = result.testsCount;
-        const duration = (Date.now() - start);
-        const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2);
-        logger.info('Done: %s in %ss (%s tests, ~%s tests/s)', proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond);
-        logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros);
-        return new Block(proof);
-      }
-    });
-  };
-
-  this.changeCPU = (cpu) => co(function*() {
-    const farm = yield getWorker();
-    yield farm.changeCPU(cpu);
-  });
-
-  this.changePoWPrefix = (prefix) => co(function*() {
-    const farm = yield getWorker();
-    yield farm.changePoWPrefix(prefix);
-  });
-
-  function powEvent(found, hash) {
-    notifier && notifier.push({ pow: { found, hash } });
-  }
-
-  function WorkerFarm() {
-    // Create
-    const that = this;
-    // We use as many cores as available, but not more than CORES_MAXIMUM_USE_IN_PARALLEL
-    const cores = require('os').cpus().slice(0, constants.CORES_MAXIMUM_USE_IN_PARALLEL);
-    // Each worker has its own chunk of possible nonces
-    const workers = cores.map((c, index) => new Worker((index + 1), (index + 1) * constants.NONCE_RANGE, pair.publicKey));
-
-    let powPromise = null;
-    let stopPromise = null;
-
-    this.changeCPU = (cpu) => Promise.all(workers.map((worker) => worker.changeConf({ cpu })));
-
-    this.changePoWPrefix = (prefix) => Promise.all(workers.map((worker) => worker.changeConf({ prefix })));
-
-    this.isComputing = () => powPromise !== null && !powPromise.isResolved();
-
-    this.isStopping = () => stopPromise !== null && !stopPromise.isResolved();
-
-    const whenReady = () => Promise.all(workers.map((worker) => worker.whenReady()));
-
-    this.stopPoW = (gottenBlock) => {
-      stopPromise = querablep(Promise.all(workers.map((worker) => worker.stopPoW(gottenBlock))));
-      return stopPromise;
-    };
-
-    this.askNewProof = (stuff) => co(function*() {
-      yield whenReady();
-      // Starts a new race to find the PoW
-      const races = workers.map((worker) => querablep(worker.askNewProof(_.clone(stuff))));
-      powPromise = querablep(Promise.race(races));
-      // Wait for the PoW to be either found or canceled
-      let res = yield powPromise;
-      if (res) {
-        let engineNumber = races.reduce((doneIndex, obj, index) => {
-          if (doneIndex !== null) return doneIndex;
-          if (races[index].isResolved()) {
-            return index + 1;
-          }
-          return null;
-        }, null);
-        logger.info('ENGINE #%s HAS FOUND A PROOF', engineNumber);
-      }
-      // Ask for stopping the other engines
-      that.stopPoW();
-      // But also give the answer in the **same time**, without waiting for effective stop of the engines
-      return (res && res.pow);
-    });
-
-    this.setOnAlmostPoW = (onPoW) => workers.map((worker) => worker.setOnAlmostPoW(onPoW));
-  }
-
-  function Worker(id, nonceBeginning) {
-
-    const theEngine = engine();
-
-    let onAlmostPoW, prefix = 0;
-
-    const checkPoWandNotify = (hash, block, found) => {
-      const matches = hash.match(/^(0{2,})[^0]/);
-      if (matches && onAlmostPoW) {
-        onAlmostPoW(hash, matches, block, found);
-      }
-    };
-
-    this.whenReady = () => this.stopPoW();
-
-    this.changeConf = (conf) => co(function*() {
-      if (conf.prefix) {
-        prefix = conf.prefix;
-      }
-      logger.info('Changing conf to: %s on engine#%s', JSON.stringify(conf), id);
-      theEngine.setValue('conf', conf );
-    });
-
-    /**
-     * Stops the engine's PoW computation if one is in progress
-     */
-    this.stopPoW = () => {
-      logger.info('Stop proof-of-work worker #%s', id);
-      return theEngine.cancel();
-    };
-
-    /**
-     * Starts a new computation of PoW
-     * @param stuff The necessary data for computing the PoW
-     */
-    this.askNewProof = (stuff) => co(function*() {
-      // Keep track of PoW advancement
-      theEngine.setOnInfoMessage((message) => {
-        if (message.error) {
-          logger.error('Error in engine#%s:', id, message.error);
-        } else if (message.pow) {
-          // A message about the PoW
-          const msg = message.pow;
-          if (!msg.canceled) {
-            checkPoWandNotify(msg.pow, msg.block, POW_NOT_FOUND_YET);
-          }
-        }
-      });
-      // Keep the engine alive as long as it hasn't found the proof OR hasn't been canceled
-      let interval = setInterval(() => {
-        return theEngine.status();
-      }, constants.ENGINE_IDLE_INTERVAL);
-      // Starts the PoW
-      const res = yield theEngine.prove(
-        stuff.newPoW.block,
-        nonceBeginning,
-        stuff.newPoW.zeros,
-        stuff.newPoW.highMark,
-        stuff.newPoW.pair,
-        stuff.newPoW.forcedTime,
-        stuff.newPoW.conf.medianTimeBlocks,
-        stuff.newPoW.conf.avgGenTime,
-        stuff.newPoW.conf.cpu,
-        prefix
-      );
-      clearInterval(interval);
-      if (res) {
-        checkPoWandNotify(res.pow.pow, res.pow.block, POW_FOUND);
-      }
-      return res;
-    });
-
-    this.setOnAlmostPoW = function(onPoW) {
-      onAlmostPoW = onPoW;
-    };
-  }
-}
diff --git a/app/lib/computation/blockchainContext.js b/app/lib/computation/blockchainContext.js
index dbc66ec86dd47b8f208de9376888eda05ee766de..54d31fb873cae1776382d1420e9378cbdbcbdfdc 100644
--- a/app/lib/computation/blockchainContext.js
+++ b/app/lib/computation/blockchainContext.js
@@ -3,8 +3,6 @@ const _               = require('underscore');
 const co              = require('co');
 const Q               = require('q');
 const indexer         = require('../dup/indexer');
-const hashf           = require('../ucp/hashf');
-const rawer           = require('../ucp/rawer');
 const constants       = require('../constants');
 const rules           = require('../rules/index');
 const Identity        = require('../entity/identity');
@@ -263,9 +261,7 @@ function BlockchainContext() {
     }
     const block = forks[0];
     yield that.checkBlock(block, constants.WITH_SIGNATURES_AND_POW);
-    const res = yield that.addBlock(block);
     logger.debug('Applied block #%s', block.number);
-    // return res;
   });
 
   this.revertBlock = (block) => co(function *() {
@@ -383,7 +379,7 @@ function BlockchainContext() {
     // Delete eventually present transactions
     yield that.deleteTransactions(block);
 
-    yield dal.trimSandboxes(block, conf);
+    yield dal.trimSandboxes(block);
 
     return block;
   });
@@ -455,7 +451,7 @@ function BlockchainContext() {
         dal.wotb.setEnabled(false, idty.wotb_id);
       }
       // Undo newcomers
-      for (const identity of block.identities) {
+      for (let i = 0; i < block.identities.length; i++) {
         // Does not matter which one it really was, we pop the last X identities
         dal.wotb.removeNode();
       }
diff --git a/app/lib/computation/permanentProver.js b/app/lib/computation/permanentProver.js
deleted file mode 100644
index 536c5153663ac519cc46f001ced8d89cb3d6ebc1..0000000000000000000000000000000000000000
--- a/app/lib/computation/permanentProver.js
+++ /dev/null
@@ -1,184 +0,0 @@
-"use strict";
-
-const co        = require('co');
-const constants = require('../constants');
-const rules     = require('../rules');
-const parsers   = require('../streams/parsers');
-
-module.exports = (server) => new PermanentProver(server);
-
-function PermanentProver(server) {
-
-  const logger = require('../logger')('permprover');
-  const that = this;
-
-  let onBlockCallback = null,
-      blockchainChangedResolver = null,
-      powPromise = null,
-      promiseOfWaitingBetween2BlocksOfOurs = null,
-      lastComputedBlock = null;
-
-  // Promises triggering the proving loop
-  let resolveContinuePromise = null;
-  let continuePromise = new Promise((resolve) => resolveContinuePromise = resolve);
-
-  this.isPoWWaiting = () => !powPromise;
-
-  this.allowedToStart = () => resolveContinuePromise(true);
-
-  this.loops = 0;
-
-  /******************
-   * Main proof loop
-   *****************/
-  co(function*() {
-    while (yield continuePromise) {
-      try {
-        const waitingRaces = [];
-
-        // By default, we do not make a new proof
-        let doProof = false;
-
-        try {
-          const selfPubkey = server.keyPair.publicKey;
-          const dal = server.dal;
-          const conf = server.conf;
-          if (!conf.participate) {
-            throw 'This node is configured for not participating to compute blocks, but this message is showing up. Weird.';
-          }
-          if (!selfPubkey) {
-            throw 'No self pubkey found.';
-          }
-          let block, current;
-          const isMember = yield dal.isMember(selfPubkey);
-          if (!isMember) {
-            throw 'Local node is not a member. Waiting to be a member before computing a block.';
-          }
-          current = yield dal.getCurrentBlockOrNull();
-          if (!current) {
-            throw 'Waiting for a root block before computing new blocks';
-          }
-          const trial = yield server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey);
-          checkTrialIsNotTooHigh(trial, current, selfPubkey);
-          const lastIssuedByUs = current.issuer == selfPubkey;
-          const pullingPromise = server.PeeringService.pullingPromise();
-          if (pullingPromise && !pullingPromise.isFulfilled()) {
-            logger.warn('Waiting for the end of pulling...');
-            yield pullingPromise;
-            logger.warn('Pulling done. Continue proof-of-work loop.');
-          }
-          if (lastIssuedByUs && !promiseOfWaitingBetween2BlocksOfOurs) {
-            promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => setTimeout(resolve, conf.powDelay));
-            logger.warn('Waiting ' + conf.powDelay + 'ms before starting to compute next block...');
-          } else {
-            // We have waited enough
-            promiseOfWaitingBetween2BlocksOfOurs = null;
-            // But under some conditions, we can make one
-            doProof = true;
-          }
-        } catch (e) {
-          logger.warn(e);
-        }
-
-        if (doProof) {
-
-          /*******************
-           * COMPUTING A BLOCK
-           ******************/
-          if (!onBlockCallback) {
-            throw Error('No callback has been provided to handle newly found proofs');
-          }
-
-          yield Promise.race([
-
-            // We still listen at eventual blockchain change
-            co(function*() {
-              // If the blockchain changes
-              yield new Promise((resolve) => blockchainChangedResolver = resolve);
-              // Then cancel the generation
-              yield server.BlockchainService.prover.cancel();
-            }),
-
-            // The generation
-            co(function*() {
-              try {
-                const current = yield server.dal.getCurrentBlockOrNull();
-                const selfPubkey = server.keyPair.publicKey;
-                const block2 = yield server.BlockchainService.generateNext();
-                const trial2 = yield server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey);
-                checkTrialIsNotTooHigh(trial2, current, selfPubkey);
-                lastComputedBlock = yield server.BlockchainService.makeNextBlock(block2, trial2);
-                yield onBlockCallback(lastComputedBlock);
-              } catch (e) {
-                logger.warn('The proof-of-work generation was canceled: %s', (e && e.message) || e || 'unknown reason');
-              }
-            })
-          ]);
-        } else {
-
-          /*******************
-           * OR WAITING PHASE
-           ******************/
-          if (promiseOfWaitingBetween2BlocksOfOurs) {
-            waitingRaces.push(promiseOfWaitingBetween2BlocksOfOurs);
-          }
-
-          let raceDone = false;
-
-          yield Promise.race(waitingRaces.concat([
-
-            // The blockchain has changed! We or someone else found a proof, we must make a new one
-            new Promise((resolve) => blockchainChangedResolver = () => {
-              logger.warn('Blockchain changed!');
-              resolve();
-            }),
-
-            // Security: if nothing happens for a while, trigger the whole process again
-            new Promise((resolve) => setTimeout(() => {
-              if (!raceDone) {
-                logger.warn('Security trigger: proof-of-work process seems stuck');
-                resolve();
-              }
-            }, constants.POW_SECURITY_RETRY_DELAY))
-          ]));
-
-          raceDone = true;
-        }
-      } catch (e) {
-        logger.warn(e);
-      }
-
-      that.loops++;
-      // Informative variable
-      logger.trace('PoW loops = %s', that.loops);
-    }
-  });
-
-  this.blockchainChanged = (gottenBlock) => co(function*() {
-    if (!gottenBlock || !lastComputedBlock || gottenBlock.hash !== lastComputedBlock.hash) {
-      // Cancel any processing proof
-      yield server.BlockchainService.prover.cancel(gottenBlock);
-      // If we were waiting, stop it and process the continuous generation
-      blockchainChangedResolver && blockchainChangedResolver();
-    }
-  });
-
-  this.stopEveryting = () => co(function*() {
-    // First: avoid continuing the main loop
-    continuePromise = new Promise((resolve) => resolveContinuePromise = resolve);
-    // Second: stop any started proof
-    yield server.BlockchainService.prover.cancel();
-    // If we were waiting, stop it and process the continuous generation
-    blockchainChangedResolver && blockchainChangedResolver();
-  });
-
-  this.onBlockComputed = (callback) => onBlockCallback = callback;
-
-  function checkTrialIsNotTooHigh(trial, current, selfPubkey) {
-    if (trial > (current.powMin + constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP)) {
-      logger.debug('Trial = %s, powMin = %s, pubkey = %s', trial, current.powMin, selfPubkey.slice(0, 6));
-      throw 'Too high difficulty: waiting for other members to write next block';
-    }
-  }
-}
-
diff --git a/app/lib/constants.js b/app/lib/constants.js
index b2f8138be925245f9940722bde3bd24d08905f5b..e709036809764947105cf1b5a40a33eb6314b3d8 100644
--- a/app/lib/constants.js
+++ b/app/lib/constants.js
@@ -32,14 +32,6 @@ const MAXIMUM_LEN_OF_COMPACT_TX = 100;
 
 module.exports = {
 
-  ENTITY_TRANSACTION: 'transaction',
-  ENTITY_BLOCK: 'block',
-  ENTITY_MEMBERSHIP: 'membership',
-  ENTITY_PEER: 'peer',
-  ENTITY_IDENTITY: 'identity',
-  ENTITY_CERTIFICATION: 'certification',
-  ENTITY_REVOCATION: 'revocation',
-
   ERROR: {
 
     PEER: {
@@ -59,7 +51,6 @@ module.exports = {
     SIGNATURE_DOES_NOT_MATCH:             { httpCode: 400, uerr: { ucode: 1003, message: "Signature does not match" }},
     ALREADY_UP_TO_DATE:                   { httpCode: 400, uerr: { ucode: 1004, message: "Already up-to-date" }},
     WRONG_DOCUMENT:                       { httpCode: 400, uerr: { ucode: 1005, message: "Document has unkown fields or wrong line ending format" }},
-    HTTP_LIMITATION:                      { httpCode: 503, uerr: { ucode: 1006, message: "This URI has reached its maximum usage quota. Please retry later." }},
     SANDBOX_FOR_IDENTITY_IS_FULL:         { httpCode: 503, uerr: { ucode: 1007, message: "The identities' sandbox is full. Please retry with another document or retry later." }},
     SANDBOX_FOR_CERT_IS_FULL:             { httpCode: 503, uerr: { ucode: 1008, message: "The certifications' sandbox is full. Please retry with another document or retry later." }},
     SANDBOX_FOR_MEMERSHIP_IS_FULL:        { httpCode: 503, uerr: { ucode: 1009, message: "The memberships' sandbox is full. Please retry with another document or retry later." }},
@@ -69,31 +60,16 @@ module.exports = {
     CLI_CALLERR_RESET:                    { httpCode: 503, uerr: { ucode: 1013, message: "Bad command: usage is `reset config`, `reset data`, `reset peers`, `reset stats` or `reset all`" }},
     CLI_CALLERR_CONFIG:                   { httpCode: 503, uerr: { ucode: 1014, message: "Bad command: usage is `config`." }},
 
-    HTTP_PARAM_PUBKEY_REQUIRED:           { httpCode: 400, uerr: { ucode: 1101, message: "Parameter `pubkey` is required" }},
-    HTTP_PARAM_IDENTITY_REQUIRED:         { httpCode: 400, uerr: { ucode: 1102, message: "Parameter `identity` is required" }},
-    HTTP_PARAM_PEER_REQUIRED:             { httpCode: 400, uerr: { ucode: 1103, message: "Requires a peer" }},
-    HTTP_PARAM_BLOCK_REQUIRED:            { httpCode: 400, uerr: { ucode: 1104, message: "Requires a block" }},
-    HTTP_PARAM_MEMBERSHIP_REQUIRED:       { httpCode: 400, uerr: { ucode: 1105, message: "Requires a membership" }},
-    HTTP_PARAM_TX_REQUIRED:               { httpCode: 400, uerr: { ucode: 1106, message: "Requires a transaction" }},
-    HTTP_PARAM_SIG_REQUIRED:              { httpCode: 400, uerr: { ucode: 1107, message: "Parameter `sig` is required" }},
-    HTTP_PARAM_CERT_REQUIRED:             { httpCode: 400, uerr: { ucode: 1108, message: "Parameter `cert` is required" }},
-    HTTP_PARAM_REVOCATION_REQUIRED:       { httpCode: 400, uerr: { ucode: 1109, message: "Parameter `revocation` is required" }},
-    HTTP_PARAM_CONF_REQUIRED:             { httpCode: 400, uerr: { ucode: 1110, message: "Parameter `conf` is required" }},
-    HTTP_PARAM_CPU_REQUIRED:              { httpCode: 400, uerr: { ucode: 1111, message: "Parameter `cpu` is required" }},
-
     // Business errors
     NO_MATCHING_IDENTITY:                 { httpCode: 404, uerr: { ucode: 2001, message: "No matching identity" }},
     UID_ALREADY_USED:                     { httpCode: 400, uerr: { ucode: 2002, message: "UID already used in the blockchain" }},
     PUBKEY_ALREADY_USED:                  { httpCode: 400, uerr: { ucode: 2003, message: "Pubkey already used in the blockchain" }},
     NO_MEMBER_MATCHING_PUB_OR_UID:        { httpCode: 404, uerr: { ucode: 2004, message: "No member matching this pubkey or uid" }},
-    SELF_PEER_NOT_FOUND:                  { httpCode: 404, uerr: { ucode: 2005, message: "Self peering was not found" }},
     WRONG_SIGNATURE_MEMBERSHIP:           { httpCode: 400, uerr: { ucode: 2006, message: "wrong signature for membership" }},
     ALREADY_RECEIVED_MEMBERSHIP:          { httpCode: 400, uerr: { ucode: 2007, message: "Already received membership" }},
     MEMBERSHIP_A_NON_MEMBER_CANNOT_LEAVE: { httpCode: 400, uerr: { ucode: 2008, message: "A non-member cannot leave" }},
     NOT_A_MEMBER:                         { httpCode: 400, uerr: { ucode: 2009, message: "Not a member" }},
-    NO_CURRENT_BLOCK:                     { httpCode: 404, uerr: { ucode: 2010, message: "No current block" }},
     BLOCK_NOT_FOUND:                      { httpCode: 404, uerr: { ucode: 2011, message: "Block not found" }},
-    PEER_NOT_FOUND:                       { httpCode: 404, uerr: { ucode: 2012, message: "Peer not found" }},
     WRONG_UNLOCKER:                       { httpCode: 400, uerr: { ucode: 2013, message: "Wrong unlocker in transaction" }},
     LOCKTIME_PREVENT:                     { httpCode: 400, uerr: { ucode: 2014, message: "Locktime not elapsed yet" }},
     SOURCE_ALREADY_CONSUMED:              { httpCode: 400, uerr: { ucode: 2015, message: "Source already consumed" }},
@@ -123,9 +99,6 @@ module.exports = {
   IPV4_REGEXP: IPV4_REGEXP,
   IPV6_REGEXP: IPV6_REGEXP,
 
-  SALT: exact(".+"),
-  PASSWORD: exact(".*"),
-
   INTEGER: /^\d+$/,
   FINGERPRINT: exact(FINGERPRINT),
   TIMESTAMP: exact(TIMESTAMP),
@@ -146,7 +119,7 @@ module.exports = {
   TRANSACTION_VERSION: 10,
 
   REVOCATION_FACTOR: 2, // This is protocol fixed value
-  NB_DIGITS_UD: 6,      // This is protocol fixed value
+  NB_DIGITS_UD: 4,      // This is protocol fixed value
   FIRST_UNIT_BASE: 0,
 
   TRANSACTION_EXPIRY_DELAY: 3600 * 24 * 7,
@@ -249,30 +222,17 @@ module.exports = {
     MAX_NON_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS: 6,
     MAX_NON_MEMBERS_TO_FORWARD_TO: 4,
     MAX_MEMBERS_TO_FORWARD_TO: 6,
-    COUNT_FOR_ENOUGH_PEERS: 4,
     MAX_CONCURRENT_POST: 3,
     DEFAULT_TIMEOUT: 10 * 1000, // 10 seconds
-    SYNC_LONG_TIMEOUT: 30 * 1000, // 30 seconds
-    DEFAULT_PORT: 8999,
-    PORT: {
-      START: 15000
-    },
-    UPNP: {
-      INTERVAL: 300,
-      TTL: 600
-    },
     SYNC: {
       MAX: 20
     },
     STATUS_INTERVAL: {
       UPDATE: 2, // Every X blocks
       MAX: 20 // MAX Y blocks
-    },
-    SYNC_PEERS_INTERVAL: 3, // Every 3 block average generation time
-    TEST_PEERS_INTERVAL: 10 // In seconds
+    }
   },
   PROOF_OF_WORK: {
-    MINIMAL_TO_SHOW: 2,
     MINIMAL_TO_SHOW_IN_LOGS: 3,
     EVALUATION: 1000,
     UPPER_BOUND: [
@@ -295,17 +255,6 @@ module.exports = {
     ]
   },
 
-  DURATIONS: {
-    TEN_SECONDS: 10,
-    A_MINUTE: 60,
-    TEN_MINUTES: 600,
-    AN_HOUR: 3600,
-    A_DAY: 3600 * 24,
-    A_WEEK: 3600 * 24 * 7,
-    A_MONTH: (3600 * 24 * 365.25) / 12
-  },
-
-  DEFAULT_CPU: 0.6,
   DEFAULT_CURRENCY_NAME: "no_currency",
 
   CONTRACT: {
@@ -336,15 +285,13 @@ module.exports = {
   },
 
   BRANCHES: {
-    DEFAULT_WINDOW_SIZE: 100,
-    SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES: 30
+    DEFAULT_WINDOW_SIZE: 100
   },
 
   INVALIDATE_CORE_CACHE: true,
   WITH_SIGNATURES_AND_POW: true,
 
   NO_FORK_ALLOWED: false,
-  FORK_ALLOWED: true,
 
   SAFE_FACTOR: 3,
   BLOCKS_COLLECT_THRESHOLD: 30, // Blocks to collect from memory and persist
@@ -357,21 +304,17 @@ module.exports = {
   SANDBOX_SIZE_MEMBERSHIPS: 200,
 
   MAXIMUM_LEN_OF_COMPACT_TX: MAXIMUM_LEN_OF_COMPACT_TX,
-  MAX_NUMBER_OF_PEERS_FOR_PULLING: 4,
 
   CURRENT_BLOCK_CACHE_DURATION: 10 * 1000, // 10 seconds
   CORES_MAXIMUM_USE_IN_PARALLEL: 8, // For more cores, we need to use a better PoW synchronization algorithm
 
   ENGINE_IDLE_INTERVAL: 5000,
 
-  // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds.
-  POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000,
-
   POW_DIFFICULTY_RANGE_RATIO: 1.189,
 
   TRANSACTION_MAX_TRIES: 10,
-  NONCE_RANGE: 1000 * 1000 * 1000 * 100,
-  POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64,
+
+  ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT: 100,
 
   // INDEXES
   M_INDEX: 'MINDEX',
@@ -379,10 +322,7 @@ module.exports = {
   S_INDEX: 'SINDEX',
   C_INDEX: 'CINDEX',
   IDX_CREATE: 'CREATE',
-  IDX_UPDATE: 'UPDATE',
-
-  PULLING_MINIMAL_DELAY: 20,
-  PULLING_INTERVAL_TARGET: 240
+  IDX_UPDATE: 'UPDATE'
 };
 
 function exact (regexpContent) {
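Note on the constants changes above: NB_DIGITS_UD drops from 6 to 4, which makes the unit-base rebasing rule BR_G14 (patched further down in this diff, in app/lib/dup/indexer.js) fire as soon as the dividend reaches 10^4, and the new ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT feeds the new BR_G106 small-account garbage collection. A minimal sketch of the rebasing effect under the patched rule (standalone illustration, not code from this patch):

  const NB_DIGITS_UD = 4; // value introduced above
  function prepareUnitBase(HEAD) {
    // Mirrors the patched BR_G14: once the dividend reaches 10^NB_DIGITS_UD,
    // it is divided by 10 and the unit base is incremented.
    if (HEAD.dividend >= Math.pow(10, NB_DIGITS_UD)) {
      HEAD.dividend = Math.ceil(HEAD.dividend / 10);
      HEAD.unitBase = HEAD.unitBase + 1;
    }
  }
  // Example: { dividend: 10000, unitBase: 0 } becomes { dividend: 1000, unitBase: 1 }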
diff --git a/app/lib/contacter.js b/app/lib/contacter.js
deleted file mode 100644
index 9187a44830e730dd7482c3feef55e9f638fcc42b..0000000000000000000000000000000000000000
--- a/app/lib/contacter.js
+++ /dev/null
@@ -1,105 +0,0 @@
-"use strict";
-
-const co = require('co');
-const rp = require('request-promise');
-const constants = require('./constants');
-const sanitize = require('./streams/sanitize');
-const dtos = require('./streams/dtos');
-
-/**
- * Created by cgeek on 16/10/16.
- */
-
-const contacter = module.exports = (host, port, opts) => new Contacter(host, port, opts);
-
-function Contacter(host, port, opts) {
-
-  opts = opts || {};
-  const options = {
-    timeout: opts.timeout || constants.NETWORK.DEFAULT_TIMEOUT
-  };
-
-  this.host = host;
-  this.port = port;
-
-  this.getSummary = () => get('/node/summary/', dtos.Summary);
-  this.getLookup = (search) => get('/wot/lookup/', dtos.Lookup, search);
-  this.getBlock = (number) => get('/blockchain/block/', dtos.Block, number);
-  this.getCurrent = () => get('/blockchain/current', dtos.Block);
-  this.getPeer = () => get('/network/peering', dtos.Peer);
-  this.getPeers = (obj) => get('/network/peering/peers', dtos.MerkleOfPeers, obj);
-  this.getSources = (pubkey) => get('/tx/sources/', dtos.Sources, pubkey);
-  this.getBlocks = (count, fromNumber) => get('/blockchain/blocks/', dtos.Blocks, [count, fromNumber].join('/'));
-  this.postPeer = (peer) => post('/network/peering/peers', dtos.Peer, { peer: peer });
-  this.processTransaction = (rawTX) => post('/tx/process', dtos.Transaction, { transaction: rawTX });
-
-  // We suppose that IPv6 is already wrapped by [], for example 'http://[::1]:80/index.html'
-  const fullyQualifiedHost = [host, port].join(':');
-
-  function get(url, dtoContract, param) {
-    if (typeof param === 'object') {
-      // Classical URL params (a=1&b=2&...)
-      param = '?' + Object.keys(param).map((k) => [k, param[k]].join('=')).join('&');
-    }
-    return co(function*() {
-      try {
-        const json = yield rp.get({
-          url: 'http://' + fullyQualifiedHost + url + (param !== undefined ? param : ''),
-          json: true,
-          timeout: options.timeout
-        });
-        // Prevent JSON injection
-        return sanitize(json, dtoContract);
-      } catch (e) {
-        throw e.error;
-      }
-    });
-  }
-
-  function post(url, dtoContract, data) {
-    return co(function*() {
-      try {
-        const json = yield rp.post({
-          url: 'http://' + fullyQualifiedHost + url,
-          body: data,
-          json: true,
-          timeout: options.timeout
-        });
-        // Prevent JSON injection
-        return sanitize(json, dtoContract);
-      } catch (e) {
-        throw e.error;
-      }
-    });
-  }
-}
-
-contacter.statics = {};
-
-contacter.statics.quickly = (host, port, opts, callbackPromise) => co(function*() {
-  const node = contacter(host, port, opts);
-  return callbackPromise(node);
-});
-
-contacter.statics.quickly2 = (peer, opts, callbackPromise) => co(function*() {
-  const Peer = require('./entity/peer');
-  const p = Peer.statics.fromJSON(peer);
-  const node = new Contacter(p.getHostPreferDNS(), p.getPort(), opts);
-  return callbackPromise(node);
-});
-
-contacter.statics.fetchPeer = (host, port, opts) => contacter.statics.quickly(host, port, opts, (node) => node.getPeer());
-
-contacter.statics.fetchBlock = (number, peer, opts) => contacter.statics.quickly2(peer, opts, (node) => node.getBlock(number));
-
-contacter.statics.isReachableFromTheInternet = (peer, opts) => co(function*() {
-  const Peer = require('./entity/peer');
-  const p = Peer.statics.fromJSON(peer);
-  const node = new Contacter(p.getHostPreferDNS(), p.getPort(), opts);
-  try {
-    yield node.getPeer();
-    return true;
-  } catch (e) {
-    return false;
-  }
-});
diff --git a/app/lib/crypto/base58.js b/app/lib/crypto/base58.js
deleted file mode 100644
index 3b88f9d489222d48bb82e9400b9ff15fe3d3bade..0000000000000000000000000000000000000000
--- a/app/lib/crypto/base58.js
+++ /dev/null
@@ -1,78 +0,0 @@
-// Base58 encoding/decoding
-// Originally written by Mike Hearn for BitcoinJ
-// Copyright (c) 2011 Google Inc
-// Ported to JavaScript by Stefan Thomas
-// Merged Buffer refactorings from base58-native by Stephen Pair
-// Copyright (c) 2013 BitPay Inc
-
-"use strict";
-
-let Base58 = {}
-
-Base58.alphabet = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
-Base58.alphabetMap = {}
-
-for(let i = 0; i < Base58.alphabet.length; i++) {
-  Base58.alphabetMap[Base58.alphabet.charAt(i)] = i
-}
-
-Base58.encode = function(buffer) {
-  if (buffer.length === 0) return ''
-
-  let i, j, digits = [0]
-  for (i = 0; i < buffer.length; i++) {
-    for (j = 0; j < digits.length; j++) digits[j] <<= 8
-    digits[digits.length - 1] += buffer[i]
-
-    let carry = 0
-    for (j = digits.length - 1; j >= 0; j--){
-      digits[j] += carry
-      carry = (digits[j] / 58) | 0
-      digits[j] %= 58
-    }
-
-    while (carry) {
-      digits.unshift(carry)
-      carry = (digits[0] / 58) | 0
-      digits[0] %= 58
-    }
-  }
-
-  // deal with leading zeros
-  for (i = 0; i < buffer.length - 1 && buffer[i] == 0; i++) digits.unshift(0)
-
-  return digits.map(function(digit) { return Base58.alphabet[digit] }).join('')
-}
-
-Base58.decode = function(string) {
-  if (string.length === 0) return (new Uint8Array())
-
-  let input = string.split('').map(function(c){
-    return Base58.alphabetMap[c]
-  })
-
-  let i, j, bytes = [0]
-  for (i = 0; i < input.length; i++) {
-    for (j = 0; j < bytes.length; j++) bytes[j] *= 58
-    bytes[bytes.length - 1] += input[i]
-
-    let carry = 0
-    for (j = bytes.length - 1; j >= 0; j--){
-      bytes[j] += carry
-      carry = bytes[j] >> 8
-      bytes[j] &= 0xff
-    }
-
-    while (carry) {
-      bytes.unshift(carry)
-      carry = bytes[0] >> 8
-      bytes[0] &= 0xff
-    }
-  }
-
-  // deal with leading zeros
-  for (i = 0; i < input.length - 1 && input[i] == 0; i++) bytes.unshift(0)
-  return (new Uint8Array(bytes))
-}
-
-module.exports = Base58;
\ No newline at end of file
diff --git a/app/lib/crypto/base64.js b/app/lib/crypto/base64.js
deleted file mode 100644
index dd9e765e1b6fbef7f0374c0a5c79c1cef60daead..0000000000000000000000000000000000000000
--- a/app/lib/crypto/base64.js
+++ /dev/null
@@ -1,103 +0,0 @@
-/* OpenPGP radix-64/base64 string encoding/decoding
- * Copyright 2005 Herbert Hanewinkel, www.haneWIN.de
- * version 1.0, check www.haneWIN.de for the latest version
- *
- * This software is provided as-is, without express or implied warranty.  
- * Permission to use, copy, modify, distribute or sell this software, with or
- * without fee, for any purpose and by any individual or organization, is hereby
- * granted, provided that the above copyright notice and this paragraph appear 
- * in all copies. Distribution as a part of an application or binary must
- * include the above copyright notice in the documentation and/or other materials
- * provided with the application or distribution.
- */
-
-"use strict";
-
-/**
- * @module encoding/base64
- */
-
-let b64s = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/';
-
-/**
- * Convert binary string to radix-64
- * @param {String} t binary string to convert
- * @returns {string} radix-64 version of input string
- * @static
- */
-function s2r(t) {
-  let a, c, n;
-  let r = '',
-    l = 0,
-    s = 0;
-  let tl = t.length;
-
-  for (n = 0; n < tl; n++) {
-    c = t.charCodeAt(n);
-    if (s === 0) {
-      r += b64s.charAt((c >> 2) & 63);
-      a = (c & 3) << 4;
-    } else if (s == 1) {
-      r += b64s.charAt((a | (c >> 4) & 15));
-      a = (c & 15) << 2;
-    } else if (s == 2) {
-      r += b64s.charAt(a | ((c >> 6) & 3));
-      l += 1;
-      if ((l % 60) === 0)
-        r += "\n";
-      r += b64s.charAt(c & 63);
-    }
-    l += 1;
-    if ((l % 60) === 0)
-      r += "\n";
-
-    s += 1;
-    if (s == 3)
-      s = 0;
-  }
-  if (s > 0) {
-    r += b64s.charAt(a);
-    l += 1;
-    if ((l % 60) === 0)
-      r += "\n";
-    r += '=';
-    l += 1;
-  }
-  if (s == 1) {
-    if ((l % 60) === 0)
-      r += "\n";
-    r += '=';
-  }
-
-  return r;
-}
-
-/**
- * Convert radix-64 to binary string
- * @param {String} t radix-64 string to convert
- * @returns {string} binary version of input string
- * @static
- */
-function r2s(t) {
-  let c, n;
-  let r = '',
-    s = 0,
-    a = 0;
-  let tl = t.length;
-
-  for (n = 0; n < tl; n++) {
-    c = b64s.indexOf(t.charAt(n));
-    if (c >= 0) {
-      if (s)
-        r += String.fromCharCode(a | (c >> (6 - s)) & 255);
-      s = (s + 2) & 7;
-      a = (c << s) & 255;
-    }
-  }
-  return r;
-}
-
-module.exports = {
-  encode: s2r,
-  decode: r2s
-};
diff --git a/app/lib/crypto/keyring.js b/app/lib/crypto/keyring.js
deleted file mode 100644
index b922c222f8348a2021ba369120506e2328babb5c..0000000000000000000000000000000000000000
--- a/app/lib/crypto/keyring.js
+++ /dev/null
@@ -1,95 +0,0 @@
-"use strict";
-const Q          = require('q');
-const co          = require('co');
-const _           = require('underscore');
-const nacl        = require('tweetnacl');
-const scrypt      = require('scryptb');
-const base58      = require('./base58');
-const rawer       = require('../ucp/rawer');
-const naclBinding = require('naclb');
-
-nacl.util = require('./nacl-util');
-
-const crypto_sign_BYTES = 64;
-const SEED_LENGTH = 32; // Length of the key
-// TODO: change key parameters
-const TEST_PARAMS = {
-  "N":4096,
-  "r":16,
-  "p":1
-};
-
-const enc = nacl.util.encodeBase64,
-    dec = nacl.util.decodeBase64;
-
-/**
- * Verify a signature against data & public key.
- * Return true of false as callback argument.
- */
-function verify(rawMsg, rawSig, rawPub) {
-  const msg = nacl.util.decodeUTF8(rawMsg);
-  const sig = nacl.util.decodeBase64(rawSig);
-  const pub = base58.decode(rawPub);
-  const m = new Uint8Array(crypto_sign_BYTES + msg.length);
-  const sm = new Uint8Array(crypto_sign_BYTES + msg.length);
-  let i;
-  for (i = 0; i < crypto_sign_BYTES; i++) sm[i] = sig[i];
-  for (i = 0; i < msg.length; i++) sm[i+crypto_sign_BYTES] = msg[i];
-
-  // Call to verification lib...
-  return naclBinding.verify(m, sm, pub);
-}
-
-function Key(pub, sec) {
-  /*****************************
-  *
-  *      GENERAL CRYPTO
-  *
-  *****************************/
-
-  this.publicKey = pub;
-  this.secretKey = sec;
-
-  const rawSec = () => base58.decode(this.secretKey);
-
-  this.json = () => { return {
-    pub: this.publicKey,
-    sec: this.secretKey
-  }};
-
-  this.sign = (msg) => Promise.resolve(this.signSync(msg));
-
-  this.signSync = (msg) => {
-    const m = nacl.util.decodeUTF8(msg);
-    const signedMsg = naclBinding.sign(m, rawSec()); // TODO: super weird
-    const sig = new Uint8Array(crypto_sign_BYTES);
-    for (let i = 0; i < sig.length; i++) {
-      sig[i] = signedMsg[i];
-    }
-    return nacl.util.encodeBase64(sig);
-  };
-}
-
-const getScryptKey = (key, salt) => co(function*() {
-  // console.log('Derivating the key...');
-  const res = yield Q.nbind(scrypt.hash, scrypt, key, TEST_PARAMS, SEED_LENGTH, salt);
-  return dec(res.toString("base64"));
-});
-
-/**
- * Generates a new keypair object from salt + password strings.
- * Returns: { publicKey: pubkeyObject, secretKey: secretkeyObject }.
- */
-const getScryptKeyPair = (salt, key) => co(function*() {
-  const keyBytes = yield getScryptKey(key, salt);
-  const pair = nacl.sign.keyPair.fromSeed(keyBytes);
-  return new Key(base58.encode(pair.publicKey),
-      base58.encode(pair.secretKey));
-});
-
-
-module.exports ={
-  scryptKeyPair: getScryptKeyPair,
-  Key: (pub, sec) => new Key(pub, sec),
-  verify: verify
-};
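Note: the crypto and serialization helpers deleted in this diff (keyring, base58, nacl-util, plus the rawer/hashf/buid modules referenced below) are not dropped outright; the later hunks switch their consumers to the duniter-common package, and peer.js now takes its contacter from duniter-crawler. A minimal sketch of the new require style used throughout the patched files:

  // As used by the patched entity and indexer files below.
  const common = require('duniter-common');
  const keyring = common.keyring;
  const rawer = common.rawer;
  const hashf = common.hashf;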
diff --git a/app/lib/crypto/nacl-util.js b/app/lib/crypto/nacl-util.js
deleted file mode 100644
index 05cebc0f408ea69106c6a1f7994976c353f936a7..0000000000000000000000000000000000000000
--- a/app/lib/crypto/nacl-util.js
+++ /dev/null
@@ -1,50 +0,0 @@
-// Written in 2014-2016 by Dmitry Chestnykh and Devi Mandiri.
-// Public domain.
-(function(root, f) {
-  'use strict';
-  if (typeof module !== 'undefined' && module.exports) module.exports = f();
-  else if (root.nacl) root.nacl.util = f();
-  else {
-    root.nacl = {};
-    root.nacl.util = f();
-  }
-}(this, function() {
-  'use strict';
-
-  let util = {};
-
-  util.decodeUTF8 = function(s) {
-    let i, d = unescape(encodeURIComponent(s)), b = new Uint8Array(d.length);
-    for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i);
-    return b;
-  };
-
-  util.encodeUTF8 = function(arr) {
-    let i, s = [];
-    for (i = 0; i < arr.length; i++) s.push(String.fromCharCode(arr[i]));
-    return decodeURIComponent(escape(s.join('')));
-  };
-
-  util.encodeBase64 = function(arr) {
-    if (typeof btoa === 'undefined' || !window) {
-      return (new Buffer(arr)).toString('base64');
-    } else {
-      let i, s = [], len = arr.length;
-      for (i = 0; i < len; i++) s.push(String.fromCharCode(arr[i]));
-      return btoa(s.join(''));
-    }
-  };
-
-  util.decodeBase64 = function(s) {
-    if (typeof atob === 'undefined' || !window) {
-      return new Uint8Array(Array.prototype.slice.call(new Buffer(s, 'base64'), 0));
-    } else {
-      let i, d = atob(s), b = new Uint8Array(d.length);
-      for (i = 0; i < d.length; i++) b[i] = d.charCodeAt(i);
-      return b;
-    }
-  };
-
-  return util;
-
-}));
diff --git a/app/lib/dal/drivers/sqlite.js b/app/lib/dal/drivers/sqlite.js
index 2aaccbb58543ed936c2e3504bfc56408487412ef..107004f80e594c35be2e767cfad9532ef33c3c80 100644
--- a/app/lib/dal/drivers/sqlite.js
+++ b/app/lib/dal/drivers/sqlite.js
@@ -19,11 +19,11 @@ function SQLiteDriver(path) {
 
   function getDB() {
     return dbPromise || (dbPromise = co(function*() {
-        logger.debug('Opening SQLite database "%s"...', path);
-        let sqlite = new sqlite3.Database(path);
-        yield new Promise((resolve) => sqlite.once('open', resolve));
-        // Database is opened and ready
-        return sqlite;
+      logger.debug('Opening SQLite database "%s"...', path);
+      let sqlite = new sqlite3.Database(path);
+      yield new Promise((resolve) => sqlite.once('open', resolve));
+      // Database is opened and ready
+      return sqlite;
     }));
   }
 
@@ -88,4 +88,4 @@ function SQLiteDriver(path) {
       }
     });
   });
-}
\ No newline at end of file
+}
diff --git a/app/lib/dal/fileDAL.js b/app/lib/dal/fileDAL.js
index aeb9a60d52059dab9139e08e7e954b3da6fb1f82..be5d314ba8e579482b203907c85e8c14a2ebc81f 100644
--- a/app/lib/dal/fileDAL.js
+++ b/app/lib/dal/fileDAL.js
@@ -3,10 +3,7 @@ const Q       = require('q');
 const co      = require('co');
 const _       = require('underscore');
 const indexer = require('../dup/indexer');
-const hashf   = require('../ucp/hashf');
-const wotb    = require('../wot');
 const logger = require('../logger')('filedal');
-const directory = require('../system/directory');
 const Configuration = require('../entity/configuration');
 const Merkle = require('../entity/merkle');
 const Transaction = require('../entity/transaction');
@@ -55,21 +52,6 @@ function FileDAL(params) {
     'peerDAL': that.peerDAL,
     'confDAL': that.confDAL,
     'statDAL': that.statDAL,
-    'ghostDAL': {
-      init: () => co(function *() {
-
-        // Create extra views (useful for stats or debug)
-        return that.blockDAL.exec('BEGIN;' +
-            'CREATE VIEW IF NOT EXISTS identities_pending AS SELECT * FROM idty WHERE NOT written;' +
-            'CREATE VIEW IF NOT EXISTS certifications_pending AS SELECT * FROM cert WHERE NOT written;' +
-            'CREATE VIEW IF NOT EXISTS transactions_pending AS SELECT * FROM txs WHERE NOT written;' +
-            'CREATE VIEW IF NOT EXISTS transactions_desc AS SELECT * FROM txs ORDER BY time DESC;' +
-            'CREATE VIEW IF NOT EXISTS forks AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE fork ORDER BY number DESC;' +
-            'CREATE VIEW IF NOT EXISTS blockchain AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE NOT fork ORDER BY number DESC;' +
-            'CREATE VIEW IF NOT EXISTS network AS select i.uid, (last_try - first_down) / 1000 as down_delay_in_sec, p.* from peer p LEFT JOIN idty i on i.pubkey = p.pubkey ORDER by down_delay_in_sec;' +
-            'COMMIT;');
-      })
-    },
     'bindexDAL': that.bindexDAL,
     'mindexDAL': that.mindexDAL,
     'iindexDAL': that.iindexDAL,
@@ -77,8 +59,6 @@ function FileDAL(params) {
     'cindexDAL': that.cindexDAL
   };
 
-  let currency = '';
-
   this.init = () => co(function *() {
     const dalNames = _.keys(that.newDals);
     for (const dalName of dalNames) {
@@ -87,7 +67,6 @@ function FileDAL(params) {
     }
     logger.debug("Upgrade database...");
     yield that.metaDAL.upgradeDatabase();
-    // TODO: remove as of v1.0
     const latestMember = yield that.iindexDAL.getLatestMember();
     if (latestMember && that.wotb.getWoTSize() > latestMember.wotb_id + 1) {
       logger.warn('Maintenance: cleaning wotb...');
@@ -243,7 +222,16 @@ function FileDAL(params) {
       const idty = yield Q(queryPromise);
       if (idty) {
         const mss = yield that.msDAL.getMembershipsOfIssuer(idty.pubkey);
-        idty.memberships = mss;
+        const mssFromMindex = yield that.mindexDAL.reducable(idty.pubkey);
+        idty.memberships = mss.concat(mssFromMindex.map((ms) => {
+          const sp = ms.created_on.split('-');
+          return {
+            membership: ms.leaving ? 'OUT' : 'IN',
+            number: sp[0],
+            fpr: sp[1],
+            written_number: parseInt(ms.written_on)
+          }
+        }));
         return idty;
       }
     } catch (err) {
@@ -304,18 +292,19 @@ function FileDAL(params) {
     const certs = yield that.certDAL.getToTarget(hash);
     const links = yield that.cindexDAL.getValidLinksTo(pub);
     let matching = certs;
-    links.map((entry) => {
+    yield links.map((entry) => co(function*() {
       entry.from = entry.issuer;
-      const co = entry.created_on.split('-');
-      const wo = entry.written_on.split('-');
-      entry.block = parseInt(co[0]);
-      entry.block_number = parseInt(co[0]);
-      entry.block_hash = co[1];
+      const wbt = entry.written_on.split('-');
+      const blockNumber = parseInt(entry.created_on); // created_on field of `c_index` does not have the full blockstamp
+      const basedBlock = yield that.getBlock(blockNumber);
+      entry.block = blockNumber;
+      entry.block_number = blockNumber;
+      entry.block_hash = basedBlock ? basedBlock.hash : null;
       entry.linked = true;
-      entry.written_block = parseInt(wo[0]);
-      entry.written_hash = wo[1];
+      entry.written_block = parseInt(wbt[0]);
+      entry.written_hash = wbt[1];
       matching.push(entry);
-    });
+    }));
     matching  = _.sortBy(matching, (c) => -c.block);
     matching.reverse();
     return matching;
@@ -329,15 +318,15 @@ function FileDAL(params) {
       const idty = yield that.getWrittenIdtyByPubkey(entry.receiver);
       entry.from = entry.issuer;
       entry.to = entry.receiver;
-      const co = entry.created_on.split('-');
-      const wo = entry.written_on.split('-');
-      entry.block = parseInt(co[0]);
-      entry.block_number = parseInt(co[0]);
-      entry.block_hash = co[1];
+      const cbt = entry.created_on.split('-');
+      const wbt = entry.written_on.split('-');
+      entry.block = parseInt(cbt[0]);
+      entry.block_number = parseInt(cbt[0]);
+      entry.block_hash = cbt[1];
       entry.target = idty.hash;
       entry.linked = true;
-      entry.written_block = parseInt(wo[0]);
-      entry.written_hash = wo[1];
+      entry.written_block = parseInt(wbt[0]);
+      entry.written_hash = wbt[1];
       matching.push(entry);
     }));
     matching  = _.sortBy(matching, (c) => -c.block);
@@ -458,7 +447,7 @@ function FileDAL(params) {
             .indexOf(p.status) !== -1).value();
   });
 
-  this.listAllPeersWithStatusNewUPWithtout = (pubkey) => co(function *() {
+  this.listAllPeersWithStatusNewUPWithtout = () => co(function *() {
     const peers = yield that.peerDAL.listAll();
     return _.chain(peers).filter((p) => p.status == 'UP').filter((p) => p.pubkey);
   });
@@ -512,7 +501,7 @@ function FileDAL(params) {
   this.saveBlock = (block) => co(function*() {
     block.wrong = false;
     yield [
-      that.saveBlockInFile(block, true),
+      that.saveBlockInFile(block),
       that.saveTxsInFiles(block.transactions, {block_number: block.number, time: block.medianTime, currency: block.currency })
     ];
   });
@@ -525,6 +514,7 @@ function FileDAL(params) {
     let cindex = indexer.cindex(index);
     const HEAD = yield indexer.completeGlobalScope(block, conf, index, that);
     sindex = sindex.concat(yield indexer.ruleIndexGenDividend(HEAD, that));
+    sindex = sindex.concat(yield indexer.ruleIndexGarbageSmallAccounts(HEAD, sindex, that));
     cindex = cindex.concat(yield indexer.ruleIndexGenCertificationExpiry(HEAD, that));
     mindex = mindex.concat(yield indexer.ruleIndexGenMembershipExpiry(HEAD, that));
     iindex = iindex.concat(yield indexer.ruleIndexGenExclusionByMembership(HEAD, mindex));
@@ -557,7 +547,7 @@ function FileDAL(params) {
     return true;
   });
 
-  this.trimSandboxes = (block, conf) => co(function*() {
+  this.trimSandboxes = (block) => co(function*() {
     yield that.certDAL.trimExpiredCerts(block.medianTime);
     yield that.msDAL.trimExpiredMemberships(block.medianTime);
     yield that.idtyDAL.trimExpiredIdentities(block.medianTime);
@@ -566,7 +556,7 @@ function FileDAL(params) {
 
   this.savePendingMembership = (ms) => that.msDAL.savePendingMembership(ms);
 
-  this.saveBlockInFile = (block, check) => co(function *() {
+  this.saveBlockInFile = (block) => co(function *() {
     yield that.writeFileOfBlock(block);
   });
 
@@ -685,16 +675,21 @@ function FileDAL(params) {
       const savedConf = yield that.confDAL.loadConf();
       conf = _(savedConf).extend(overrideConf || {});
     }
-    // TODO: Do something about the currency global variable
-    currency = conf.currency;
+    if (that.loadConfHook) {
+      yield that.loadConfHook(conf);
+    }
     return conf;
   });
 
   this.saveConf = (confToSave) => {
-    // TODO: Do something about the currency global variable
-    currency = confToSave.currency;
-    // Save the conf in file
-    return that.confDAL.saveConf(confToSave);
+    return co(function*() {
+      // Save the conf in file
+      let theConf = confToSave;
+      if (that.saveConfHook) {
+        theConf = yield that.saveConfHook(theConf);
+      }
+      return that.confDAL.saveConf(theConf);
+    });
   };
 
   /***********************
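Note: loadConf/saveConf above no longer mutate a module-level currency variable; instead they call optional loadConfHook / saveConfHook functions when present, so an external module can adjust the configuration at load or save time. A hypothetical usage sketch (the hook names come from the patch above; the option being adjusted is illustrative only):

  const co = require('co');
  // dal is a FileDAL instance
  dal.loadConfHook = (conf) => co(function*() {
    // fill in a default owned by an external module (illustrative option name)
    conf.someModuleOption = conf.someModuleOption || 42;
  });
  dal.saveConfHook = (conf) => co(function*() {
    // must return the conf object that will actually be persisted
    return conf;
  });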
diff --git a/app/lib/dal/sqliteDAL/AbstractIndex.js b/app/lib/dal/sqliteDAL/AbstractIndex.js
index 4bfce7b82b58cfec2ae2fc89414380b04904e08c..c4f00a27f836aebe46d6c4646f8b4e958ba36ac7 100644
--- a/app/lib/dal/sqliteDAL/AbstractIndex.js
+++ b/app/lib/dal/sqliteDAL/AbstractIndex.js
@@ -33,4 +33,4 @@ function AbstractIndex() {
       }
     }
   });
-}
\ No newline at end of file
+}
diff --git a/app/lib/dal/sqliteDAL/AbstractSQLite.js b/app/lib/dal/sqliteDAL/AbstractSQLite.js
index cf2ea2f65ec2ae4af071861f65d9fffe35ee15c7..2551ce2ea1a894220bdcf6dece97f946217071aa 100644
--- a/app/lib/dal/sqliteDAL/AbstractSQLite.js
+++ b/app/lib/dal/sqliteDAL/AbstractSQLite.js
@@ -284,4 +284,4 @@ function AbstractSQLite(driver) {
     }
     return row;
   }
-}
\ No newline at end of file
+}
diff --git a/app/lib/dal/sqliteDAL/BlockDAL.js b/app/lib/dal/sqliteDAL/BlockDAL.js
index 72915804168f059ce0eda8947eb3a8902c78cd6e..31eb7fc2b6313cf762c85dbf2775b495753d83d0 100644
--- a/app/lib/dal/sqliteDAL/BlockDAL.js
+++ b/app/lib/dal/sqliteDAL/BlockDAL.js
@@ -24,7 +24,7 @@ function BlockDAL(driver) {
   this.table = 'block';
   this.fields = ['fork', 'hash', 'inner_hash', 'signature', 'currency', 'issuer', 'issuersCount', 'issuersFrame', 'issuersFrameVar', 'parameters', 'previousHash', 'previousIssuer', 'version', 'membersCount', 'monetaryMass', 'UDTime', 'medianTime', 'dividend', 'unitbase', 'time', 'powMin', 'number', 'nonce', 'transactions', 'certifications', 'identities', 'joiners', 'actives', 'leavers', 'revoked', 'excluded', 'len'];
   this.arrays = ['identities','certifications','actives','revoked','excluded','leavers','joiners','transactions'];
-  this.bigintegers = ['monetaryMass','dividend'];
+  this.bigintegers = ['monetaryMass'];
   this.booleans = ['wrong'];
   this.pkFields = ['number','hash'];
 
@@ -45,7 +45,7 @@ function BlockDAL(driver) {
       'monetaryMass VARCHAR(100) DEFAULT \'0\',' +
       'UDTime DATETIME,' +
       'medianTime DATETIME NOT NULL,' +
-      'dividend VARCHAR(100) DEFAULT \'0\',' +
+      'dividend INTEGER DEFAULT \'0\',' +
       'unitbase INTEGER NULL,' +
       'time DATETIME NOT NULL,' +
       'powMin INTEGER NOT NULL,' +
diff --git a/app/lib/dal/sqliteDAL/CertDAL.js b/app/lib/dal/sqliteDAL/CertDAL.js
index 195168476162e7001f00c60bc0e05582e53b372d..a6959ed708195f794a740c767d0674541921634b 100644
--- a/app/lib/dal/sqliteDAL/CertDAL.js
+++ b/app/lib/dal/sqliteDAL/CertDAL.js
@@ -111,4 +111,4 @@ function CertDAL(driver) {
 
   this.getSandboxRoom = () => this.sandbox.getSandboxRoom();
   this.setSandboxSize = (maxSize) => this.sandbox.maxSize = maxSize;
-}
\ No newline at end of file
+}
diff --git a/app/lib/dal/sqliteDAL/IdentityDAL.js b/app/lib/dal/sqliteDAL/IdentityDAL.js
index 8befe1751ad1aa259ae46e77d669ee6f1338b9c0..11d273675b7dfbe7aa116e140319a76dac89e7aa 100644
--- a/app/lib/dal/sqliteDAL/IdentityDAL.js
+++ b/app/lib/dal/sqliteDAL/IdentityDAL.js
@@ -2,9 +2,7 @@
  * Created by cgeek on 22/08/15.
  */
 
-const Q = require('q');
 const co = require('co');
-const logger = require('../../logger')('idtyDAL');
 const constants = require('../../constants');
 const AbstractSQLite = require('./AbstractSQLite');
 const SandBox = require('./SandBox');
diff --git a/app/lib/dal/sqliteDAL/MembershipDAL.js b/app/lib/dal/sqliteDAL/MembershipDAL.js
index d4338a4ce89ce1fa85debd2b3456f1546c8b7119..e3f5564a7f56facd7b4d92be66dc830934994c50 100644
--- a/app/lib/dal/sqliteDAL/MembershipDAL.js
+++ b/app/lib/dal/sqliteDAL/MembershipDAL.js
@@ -2,7 +2,6 @@
  * Created by cgeek on 22/08/15.
  */
 
-const Q = require('q');
 const co = require('co');
 const _ = require('underscore');
 const AbstractSQLite = require('./AbstractSQLite');
diff --git a/app/lib/dal/sqliteDAL/MetaDAL.js b/app/lib/dal/sqliteDAL/MetaDAL.js
index b6bf4f3d8eeb6540653da06c62d3518501c41ec4..69244c74b480a3a6909ae60fcd3e0b23bd513625 100644
--- a/app/lib/dal/sqliteDAL/MetaDAL.js
+++ b/app/lib/dal/sqliteDAL/MetaDAL.js
@@ -5,7 +5,6 @@
  */
 
 const co = require('co');
-const _ = require('underscore');
 const logger = require('../../logger')('metaDAL');
 const AbstractSQLite = require('./AbstractSQLite');
 
@@ -33,7 +32,15 @@ function MetaDAL(driver) {
     0: 'BEGIN; COMMIT;',
 
     // Test
-    1: 'BEGIN; COMMIT;',
+    1: 'BEGIN;' +
+    'CREATE VIEW IF NOT EXISTS identities_pending AS SELECT * FROM idty WHERE NOT written;' +
+    'CREATE VIEW IF NOT EXISTS certifications_pending AS SELECT * FROM cert WHERE NOT written;' +
+    'CREATE VIEW IF NOT EXISTS transactions_pending AS SELECT * FROM txs WHERE NOT written;' +
+    'CREATE VIEW IF NOT EXISTS transactions_desc AS SELECT * FROM txs ORDER BY time DESC;' +
+    'CREATE VIEW IF NOT EXISTS forks AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE fork ORDER BY number DESC;' +
+    'CREATE VIEW IF NOT EXISTS blockchain AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE NOT fork ORDER BY number DESC;' +
+    'CREATE VIEW IF NOT EXISTS network AS select i.uid, (last_try - first_down) / 1000 as down_delay_in_sec, p.* from peer p LEFT JOIN idty i on i.pubkey = p.pubkey ORDER by down_delay_in_sec;' +
+    'COMMIT;',
 
     // New `receveid` column
     2: 'BEGIN; ALTER TABLE txs ADD COLUMN received INTEGER NULL; COMMIT;',
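Note: the debug views that fileDAL's removed ghostDAL used to recreate on every startup are now created once by migration 1 above, so they go through metaDAL.upgradeDatabase() like any other schema change. A hypothetical sketch of how a further one-shot migration would be registered (version number and SQL are illustrative only, not part of this patch):

  // 3: 'BEGIN; CREATE INDEX IF NOT EXISTS idx_txs_time ON txs (time); COMMIT;',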
diff --git a/app/lib/dal/sqliteDAL/SandBox.js b/app/lib/dal/sqliteDAL/SandBox.js
index e452310a348976dac7abbd39de533f23d7a2819d..4e7d836ee66dcbba0906831cb14cd7bbf6d3afbf 100644
--- a/app/lib/dal/sqliteDAL/SandBox.js
+++ b/app/lib/dal/sqliteDAL/SandBox.js
@@ -1,8 +1,6 @@
 "use strict";
 
 const co = require('co');
-const colors = require('colors');
-const logger = require('../../logger')('sqlite');
 
 module.exports = SandBox;
 
@@ -24,8 +22,8 @@ function SandBox(maxSize, findElements, compareElements) {
     return comparison > 0;
   });
 
-  this.getSandboxRoom = (underBlock) => co(function *() {
+  this.getSandboxRoom = () => co(function *() {
     const elems = yield findElements();
     return that.maxSize - elems.length;
   });
-}
\ No newline at end of file
+}
diff --git a/app/lib/dal/sqliteDAL/index/BIndexDAL.js b/app/lib/dal/sqliteDAL/index/BIndexDAL.js
index dfc2cb3b03c7bd3a0b6c3caa19e777b723f52226..9d5b80b743e6116c2a953cbda6ec787fe3892be3 100644
--- a/app/lib/dal/sqliteDAL/index/BIndexDAL.js
+++ b/app/lib/dal/sqliteDAL/index/BIndexDAL.js
@@ -3,7 +3,6 @@
  */
 
 const co = require('co');
-const _ = require('underscore');
 const AbstractSQLite = require('./../AbstractSQLite');
 
 module.exports = BIndexDAL;
diff --git a/app/lib/dal/sqliteDAL/index/SIndexDAL.js b/app/lib/dal/sqliteDAL/index/SIndexDAL.js
index 07f77c9eaaa2dfcd3e885b05dbbc256ced1ac53a..d1b3ea817b1426b788a4f6357ec151a655bc1e1c 100644
--- a/app/lib/dal/sqliteDAL/index/SIndexDAL.js
+++ b/app/lib/dal/sqliteDAL/index/SIndexDAL.js
@@ -5,6 +5,7 @@
 const _ = require('underscore');
 const co = require('co');
 const indexer = require('../../../dup/indexer');
+const constants = require('../../../constants');
 const AbstractSQLite = require('./../AbstractSQLite');
 
 module.exports = SIndexDAL;
@@ -33,7 +34,7 @@ function SIndexDAL(driver) {
     'conditions'
   ];
   this.arrays = [];
-  this.bigintegers = ['amount'];
+  this.bigintegers = [];
   this.booleans = ['consumed'];
   this.pkFields = ['op', 'identifier', 'pos', 'written_on'];
   this.translated = {};
@@ -48,7 +49,7 @@ function SIndexDAL(driver) {
       'created_on VARCHAR(80) NULL,' +
       'written_on VARCHAR(80) NOT NULL,' +
       'written_time INTEGER NOT NULL,' +
-      'amount VARCHAR(50) NULL,' +
+      'amount INTEGER NULL,' +
       'base INTEGER NULL,' +
       'locktime INTEGER NULL,' +
       'consumed BOOLEAN NOT NULL,' +
@@ -97,6 +98,23 @@ function SIndexDAL(driver) {
     return _.sortBy(filtered, (row) => row.type == 'D' ? 0 : 1);
   });
 
+  this.findLowerThan = (amount, base) => co(function*() {
+    const baseConditions = Array.from({ length: (base + 1) }).map((el, index) => {
+      return '(base = ' + index + ' and amount < ' + (amount * Math.pow(10, base - index)) + ')';
+    }).join(' OR ');
+    const potentials = yield that.query('SELECT * FROM ' + that.table + ' s1 ' +
+      'WHERE s1.op = ? ' +
+      'AND (' + baseConditions + ') ' +
+      'AND NOT EXISTS (' +
+      ' SELECT * ' +
+      ' FROM s_index s2 ' +
+      ' WHERE s2.identifier = s1.identifier ' +
+      ' AND s2.pos = s1.pos ' +
+      ' AND s2.op = ?' +
+      ')', [constants.IDX_CREATE, constants.IDX_UPDATE]);
+    return potentials;
+  });
+
   this.trimConsumedSource = (belowNumber) => co(function*() {
     const toDelete = yield that.query('SELECT * FROM ' + that.table + ' WHERE consumed AND CAST(written_on as int) < ?', [belowNumber]);
     const queries = [];
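Note: findLowerThan(amount, base) above builds one amount condition per unit base from 0 up to `base`, scaling the threshold so the comparison is always made in base-0 units, and keeps only sources that have no matching UPDATE row (i.e. are still unconsumed). Worked example of the generated condition (values are illustrative):

  // findLowerThan(100, 1) produces the baseConditions string:
  //   (base = 0 and amount < 1000) OR (base = 1 and amount < 100)
  const amount = 100, base = 1;
  const baseConditions = Array.from({ length: base + 1 }).map((el, index) =>
    '(base = ' + index + ' and amount < ' + (amount * Math.pow(10, base - index)) + ')'
  ).join(' OR ');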
diff --git a/app/lib/dup/indexer.js b/app/lib/dup/indexer.js
index 7955f821b0bdf13242cae317dd67e6a713f2d5c8..70a67b4314b412824d0f658f9d015a7d00058513 100644
--- a/app/lib/dup/indexer.js
+++ b/app/lib/dup/indexer.js
@@ -3,9 +3,9 @@
 const co              = require('co');
 const _               = require('underscore');
 const constants       = require('../constants');
-const rawer           = require('../ucp/rawer');
+const rawer           = require('duniter-common').rawer;
 const unlock          = require('../ucp/txunlock');
-const keyring         = require('../crypto/keyring');
+const keyring         = require('duniter-common').keyring;
 const Block           = require('../entity/block');
 const Identity        = require('../entity/identity');
 const Certification   = require('../entity/certification');
@@ -233,8 +233,8 @@ const indexer = module.exports = {
           unlock: txObj.unlocks[k],
           amount: input.amount,
           base: input.base,
-          consumed: true,
           conditions: null,
+          consumed: true,
           txObj: txObj
         });
         k++;
@@ -253,8 +253,8 @@ const indexer = module.exports = {
           locktime: obj.locktime,
           amount: output.amount,
           base: output.base,
-          consumed: false,
           conditions: output.conditions,
+          consumed: false,
           txObj: obj
         });
       }
@@ -468,8 +468,8 @@ const indexer = module.exports = {
       const ratio = constants.POW_DIFFICULTY_RANGE_RATIO;
       const maxGenTime = Math.ceil(conf.avgGenTime * ratio);
       const minGenTime = Math.floor(conf.avgGenTime / ratio);
-      const minSpeed = 1/ maxGenTime;
-      const maxSpeed = 1/ minGenTime;
+      const minSpeed = 1 / maxGenTime;
+      const maxSpeed = 1 / minGenTime;
 
       if (HEAD.diffNumber != HEAD_1.diffNumber && HEAD.speed >= maxSpeed && (HEAD_1.powMin + 2) % 16 == 0) {
         HEAD.powMin = HEAD_1.powMin + 2;
@@ -713,6 +713,7 @@ const indexer = module.exports = {
         amount: ENTRY.amount,
         base: ENTRY.base
       });
+      ENTRY.conditions = reduce(reducable).conditions; // We valuate the input conditions so we can map these records to the same account

       ENTRY.available = reduce(reducable).consumed === false;
     }));
 
@@ -783,11 +784,11 @@ const indexer = module.exports = {
     } else {
       const issuersVar = (HEAD.issuersCount - HEAD_1.issuersCount);
       if (HEAD_1.issuersFrameVar > 0) {
-        HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5*issuersVar - 1;
+        HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5 * issuersVar - 1;
       } else if (HEAD_1.issuersFrameVar < 0) {
-        HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5*issuersVar + 1;
+        HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5 * issuersVar + 1;
       } else {
-        HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5*issuersVar;
+        HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5 * issuersVar;
       }
     }
   },
@@ -837,7 +838,7 @@ const indexer = module.exports = {
 
   // BR_G14
   prepareUnitBase: (HEAD) => {
-    if (HEAD.dividend >= Math.pow(10, 6)) {
+    if (HEAD.dividend >= Math.pow(10, constants.NB_DIGITS_UD)) {
       HEAD.dividend = Math.ceil(HEAD.dividend / 10);
       HEAD.new_dividend = HEAD.dividend;
       HEAD.unitBase = HEAD.unitBase + 1;
@@ -1279,6 +1280,42 @@ const indexer = module.exports = {
     return dividends;
   }),
 
+  // BR_G106
+  ruleIndexGarbageSmallAccounts: (HEAD, sindex, dal) => co(function*() {
+    const garbages = [];
+    let potentialSources = yield dal.sindexDAL.findLowerThan(constants.ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT, HEAD.unitBase);
+    potentialSources = potentialSources.concat(_.where(sindex, { op: constants.IDX_CREATE }));
+    const accountsBalance = potentialSources.reduce((map, src) => {
+      if (!map[src.conditions]) {
+        map[src.conditions] = { amount: 0, sources: [] };
+      }
+      map[src.conditions].amount += src.amount * Math.pow(10, src.base);
+      map[src.conditions].sources.push(src);
+      return map;
+    }, {});
+    const accounts = Object.keys(accountsBalance);
+    for (const account of accounts) {
+      const amount = accountsBalance[account].amount;
+      const sources = accountsBalance[account].sources;
+      if (amount < constants.ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT * Math.pow(10, HEAD.unitBase)) {
+        for (const src of sources) {
+          garbages.push({
+            op: 'UPDATE',
+            identifier: src.identifier,
+            pos: src.pos,
+            amount: src.amount,
+            base: src.base,
+            written_on: [HEAD.number, HEAD.hash].join('-'),
+            written_time: HEAD.medianTime,
+            conditions: src.conditions,
+            consumed: true // It is now consumed
+          });
+        }
+      }
+    }
+    return garbages;
+  }),
+
   // BR_G92
   ruleIndexGenCertificationExpiry: (HEAD, dal) => co(function*() {
     const expiries = [];
@@ -1447,7 +1484,7 @@ function median(values) {
     // Even number: the median is the average between the 2 central values, ceil rounded.
     const firstValue = values[nbValues / 2];
     const secondValue = values[nbValues / 2 - 1];
-    med = ((firstValue + secondValue) / 2); // TODO v1.0 median ceil rounded
+    med = ((firstValue + secondValue) / 2);
   } else {
     med = values[(nbValues + 1) / 2 - 1];
   }
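Note on BR_G106 (ruleIndexGarbageSmallAccounts) added above: sources are grouped by their `conditions` field, each source counted as amount * 10^base, and when an account's total stays below ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT * 10^HEAD.unitBase, every one of its sources is emitted as a consuming UPDATE entry. A small numeric sketch of the threshold test (values are illustrative):

  const ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT = 100; // constant added earlier in this diff
  const HEAD = { unitBase: 1 };
  const threshold = ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT * Math.pow(10, HEAD.unitBase); // 1000 base-0 units
  const balance = 50 * Math.pow(10, 1);  // one source of amount 50 at base 1 => 500
  const garbageCollected = balance < threshold; // true: the account's sources get consumed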
diff --git a/app/lib/entity/block.js b/app/lib/entity/block.js
index 743c16206a87df5fcbd69de29108d2b63793f337..01463aa0c85f8d13c84957be09713eee2d3cbf93 100644
--- a/app/lib/entity/block.js
+++ b/app/lib/entity/block.js
@@ -1,7 +1,6 @@
 "use strict";
 const _ = require('underscore');
-const constants = require('../constants');
-const hashf = require('../ucp/hashf');
+const hashf = require('duniter-common').hashf;
 const Transaction = require('./transaction');
 
 module.exports = Block;
@@ -69,8 +68,8 @@ function Block(json) {
       "issuersFrameVar",
       "len"
     ].forEach((field) => {
-        json[field] = parseInt(this[field], 10);
-      });
+      json[field] = parseInt(this[field], 10);
+    });
     [
       "currency",
       "issuer",
@@ -78,20 +77,20 @@ function Block(json) {
       "hash",
       "parameters"
     ].forEach((field) => {
-        json[field] = this[field] || "";
-      });
+      json[field] = this[field] || "";
+    });
     [
       "previousHash",
       "previousIssuer",
       "inner_hash"
     ].forEach((field) => {
-        json[field] = this[field] || null;
-      });
+      json[field] = this[field] || null;
+    });
     [
       "dividend"
     ].forEach((field) => {
-        json[field] = parseInt(this[field]) || null;
-      });
+      json[field] = parseInt(this[field]) || null;
+    });
     [
       "identities",
       "joiners",
@@ -101,19 +100,19 @@ function Block(json) {
       "excluded",
       "certifications"
     ].forEach((field) => {
-        json[field] = [];
-        this[field].forEach((raw) => {
-          json[field].push(raw);
-        });
+      json[field] = [];
+      this[field].forEach((raw) => {
+        json[field].push(raw);
       });
+    });
     [
       "transactions"
     ].forEach((field) => {
-        json[field] = [];
-        this[field].forEach((obj) => {
-          json[field].push(_(obj).omit('raw', 'certifiers', 'hash'));
-        });
+      json[field] = [];
+      this[field].forEach((obj) => {
+        json[field].push(_(obj).omit('raw', 'certifiers', 'hash'));
       });
+    });
     json.raw = this.getRaw();
     return json;
   };
@@ -126,23 +125,23 @@ function Block(json) {
   };
 
   this.getRawInnerPart = () => {
-    return require('../ucp/rawer').getBlockInnerPart(this);
+    return require('duniter-common').rawer.getBlockInnerPart(this);
   };
 
   this.getRaw = () => {
-    return require('../ucp/rawer').getBlockWithInnerHashAndNonce(this);
+    return require('duniter-common').rawer.getBlockWithInnerHashAndNonce(this);
   };
 
   this.getSignedPart = () => {
-    return require('../ucp/rawer').getBlockInnerHashAndNonce(this);
+    return require('duniter-common').rawer.getBlockInnerHashAndNonce(this);
   };
 
   this.getProofOfWorkPart = () => {
-    return require('../ucp/rawer').getBlockInnerHashAndNonceWithSignature(this);
+    return require('duniter-common').rawer.getBlockInnerHashAndNonceWithSignature(this);
   };
 
   this.getRawSigned = () => {
-    return require('../ucp/rawer').getBlock(this);
+    return require('duniter-common').rawer.getBlock(this);
   };
 
   this.quickDescription = () => {
diff --git a/app/lib/entity/certification.js b/app/lib/entity/certification.js
index 7f1d5776f5f663273ce096f29687ea96cc868fe6..6461b44d39e2f140cb7498f90b26021a3e166ee6 100644
--- a/app/lib/entity/certification.js
+++ b/app/lib/entity/certification.js
@@ -1,14 +1,14 @@
 "use strict";
 const _ = require('underscore');
-const rawer = require('../ucp/rawer');
-const ucp = require('../ucp/buid');
+const rawer = require('duniter-common').rawer;
+const ucp = require('duniter-common').buid;
 
 const Certification = function(json) {
 
   this.linked = false;
 
   _(json).keys().forEach((key) => {
-   this[key] = json[key];
+    this[key] = json[key];
   });
 
   this.from  = this.pubkey = this.from || this.pubkey || this.issuer;
diff --git a/app/lib/entity/configuration.js b/app/lib/entity/configuration.js
index cd840569933374b87dcf442e6e560c39a6958878..331342371dad7d4e6839306adfd6077d99cbf842 100644
--- a/app/lib/entity/configuration.js
+++ b/app/lib/entity/configuration.js
@@ -5,19 +5,8 @@ const constants = require('../constants');
 const defaultConf = function() {
   return {
     "currency": null,
-    "port": constants.NETWORK.DEFAULT_PORT,
-    "ipv4": "127.0.0.1",
-    "ipv6": null,
-    "upnp": true,
-    "remotehost": null,
-    "remoteipv4": null,
-    "remoteipv6": null,
-    "remoteport": constants.NETWORK.DEFAULT_PORT,
-    "salt": "",
-    "passwd": "",
     "endpoints": [],
     "rmEndpoints": [],
-    "cpu": constants.DEFAULT_CPU,
     "upInterval": 3600 * 1000,
     "c": constants.CONTRACT.DEFAULT.C,
     "dt": constants.CONTRACT.DEFAULT.DT,
@@ -33,11 +22,9 @@ const defaultConf = function() {
     "avgGenTime": constants.CONTRACT.DEFAULT.AVGGENTIME,
     "dtDiffEval": constants.CONTRACT.DEFAULT.DTDIFFEVAL,
     "medianTimeBlocks": constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS,
-    "participate": true,
     "httplogs": false,
     "udid2": false,
     "timeout": 3000,
-    "routing": false,
     "isolate": false,
     "forksize": constants.BRANCHES.DEFAULT_WINDOW_SIZE
   };
diff --git a/app/lib/entity/identity.js b/app/lib/entity/identity.js
index d02c23ea3b19373508b5e93cf03ff8a2f979281c..f4cdc9499387bae65964530238484717d5cd3840 100644
--- a/app/lib/entity/identity.js
+++ b/app/lib/entity/identity.js
@@ -1,7 +1,7 @@
 "use strict";
 const _ = require('underscore');
-const hashf = require('../ucp/hashf');
-const rawer = require('../ucp/rawer');
+const hashf = require('duniter-common').hashf;
+const rawer = require('duniter-common').rawer;
 
 const Identity = function(json) {
 
diff --git a/app/lib/entity/membership.js b/app/lib/entity/membership.js
index 95e960b20ab032702287a65b55cd80a21632f34b..92167fc66cee519fc77ff62f27e4eaf96c761f53 100644
--- a/app/lib/entity/membership.js
+++ b/app/lib/entity/membership.js
@@ -1,7 +1,7 @@
 "use strict";
 const _ = require('underscore');
 const moment = require('moment');
-const rawer = require('../ucp/rawer');
+const rawer = require('duniter-common').rawer;
 const constants = require('../constants');
 
 const Membership = function(json) {
diff --git a/app/lib/entity/peer.js b/app/lib/entity/peer.js
index 4be05dd88aba57ab3f086742ce05237f17e82359..8cc54e1a76a66bd0322ba492b7dab1b9ffa4f6cc 100644
--- a/app/lib/entity/peer.js
+++ b/app/lib/entity/peer.js
@@ -1,8 +1,7 @@
 "use strict";
-const Q = require('q');
 const _ = require('underscore');
-const contacter = require('../contacter');
-const rawer = require('../ucp/rawer');
+const contacter = require('duniter-crawler').duniter.methods.contacter;
+const rawer = require('duniter-common').rawer;
 const constants = require('../constants');
 
 module.exports = Peer;
@@ -14,7 +13,7 @@ function Peer(json) {
   this.documentType = 'peer';
 
   _(json).keys().forEach((key) => {
-   this[key] = json[key];
+    this[key] = json[key];
   });
 
   this.endpoints = this.endpoints || [];
diff --git a/app/lib/entity/revocation.js b/app/lib/entity/revocation.js
index 8187a624b18d0a2aadce3e2e8245beb17c1bd9d5..b3acbfd801c6de80f32f521c850309f13cc0b606 100644
--- a/app/lib/entity/revocation.js
+++ b/app/lib/entity/revocation.js
@@ -1,13 +1,12 @@
 "use strict";
 const _ = require('underscore');
-const rawer = require('../ucp/rawer');
-const ucp = require('../ucp/buid');
+const rawer = require('duniter-common').rawer;
 const Identity = require('./identity');
 
 const Revocation = function(json) {
 
   _(json).keys().forEach((key) => {
-   this[key] = json[key];
+    this[key] = json[key];
   });
 
   this.getRaw = () => rawer.getOfficialRevocation(this);
diff --git a/app/lib/entity/source.js b/app/lib/entity/source.js
index a903563aca3432c74c3f850af77c9717be41e6b8..05b6fcf452c7e6d51effb8900c7de49376050b14 100644
--- a/app/lib/entity/source.js
+++ b/app/lib/entity/source.js
@@ -35,4 +35,4 @@ function Source(json) {
       "base": this.base
     };
   };
-}
\ No newline at end of file
+}
diff --git a/app/lib/entity/stat.js b/app/lib/entity/stat.js
index 09e9f53307a357ce8487d497d48b5d815074a0b6..1b459dbd90d1be15bc1a7ecf43e80daba0e8001e 100644
--- a/app/lib/entity/stat.js
+++ b/app/lib/entity/stat.js
@@ -3,7 +3,7 @@ const _ = require('underscore');
 
 const Stat = function(json) {
   _(json).keys().forEach((key) => {
-   this[key] = json[key];
+    this[key] = json[key];
   });
 
   this.json = function () {
diff --git a/app/lib/entity/transaction.js b/app/lib/entity/transaction.js
index 868e4addbb9c2dd0467edb749a8968b111890c29..91a2baa92844ea2059dfd5fbbd3ee32d731cc6f4 100644
--- a/app/lib/entity/transaction.js
+++ b/app/lib/entity/transaction.js
@@ -1,8 +1,7 @@
 "use strict";
 let _ = require('underscore');
-let rawer = require('../ucp/rawer');
-let hashf = require('../ucp/hashf');
-let constants = require('../constants');
+let rawer = require('duniter-common').rawer;
+let hashf = require('duniter-common').hashf;
 
 let Transaction = function(obj, currency) {
 
@@ -15,7 +14,7 @@ let Transaction = function(obj, currency) {
   this.issuers = [];
 
   _(json).keys().forEach((key) => {
-   this[key] = json[key];
+    this[key] = json[key];
   });
 
   // Store the maximum output base
diff --git a/app/lib/helpers/http2raw.js b/app/lib/helpers/http2raw.js
deleted file mode 100644
index 76580f1908e290ffe88e7b6f26cce3a140a9c959..0000000000000000000000000000000000000000
--- a/app/lib/helpers/http2raw.js
+++ /dev/null
@@ -1,38 +0,0 @@
-"use strict";
-
-const constants = require('../constants');
-
-module.exports = {
-  identity:      requiresParameter('identity',    constants.ERRORS.HTTP_PARAM_IDENTITY_REQUIRED),
-  certification: requiresParameter('cert',        constants.ERRORS.HTTP_PARAM_CERT_REQUIRED),
-  revocation:    requiresParameter('revocation',  constants.ERRORS.HTTP_PARAM_REVOCATION_REQUIRED),
-  transaction:   requiresParameter('transaction', constants.ERRORS.HTTP_PARAM_TX_REQUIRED),
-  peer:          requiresParameter('peer',        constants.ERRORS.HTTP_PARAM_PEER_REQUIRED),
-  membership:    Http2RawMembership,
-  block:         requiresParameter('block',       constants.ERRORS.HTTP_PARAM_BLOCK_REQUIRED),
-  conf:          requiresParameter('conf',        constants.ERRORS.HTTP_PARAM_CONF_REQUIRED),
-  cpu:           requiresParameter('cpu',         constants.ERRORS.HTTP_PARAM_CPU_REQUIRED)
-};
-
-function requiresParameter(parameter, err) {
-  return (req) => {
-    if(!req.body || req.body[parameter] === undefined){
-      throw err;
-    }
-    return req.body[parameter];
-  };
-}
-
-function Http2RawMembership (req) {
-  if(!(req.body && req.body.membership)){
-    throw constants.ERRORS.HTTP_PARAM_MEMBERSHIP_REQUIRED;
-  }
-  let ms = req.body.membership;
-  if(req.body && req.body.signature){
-    ms = [ms, req.body.signature].join('');
-    if (!ms.match(/\n$/)) {
-      ms += '\n';
-    }
-  }
-  return ms;
-}
diff --git a/app/lib/helpers/http400.js b/app/lib/helpers/http400.js
deleted file mode 100644
index 5029d40cc20f2d00a50c993901c26aa1388d8d6c..0000000000000000000000000000000000000000
--- a/app/lib/helpers/http400.js
+++ /dev/null
@@ -1,9 +0,0 @@
-"use strict";
-const logger = require('../logger')('http');
-
-module.exports = function http400 (res) {
-  return function (err) {
-    logger.warn(err);
-    res.send(400, err);
-  };
-}
diff --git a/app/lib/helpers/parameters.js b/app/lib/helpers/parameters.js
deleted file mode 100644
index 2bb664b68203eb90da081f586a32c6ec71e56305..0000000000000000000000000000000000000000
--- a/app/lib/helpers/parameters.js
+++ /dev/null
@@ -1,101 +0,0 @@
-"use strict";
-const Q = require('q');
-const constants = require('../constants');
-
-module.exports = function () {
-  return new ParameterNamespace();
-};
-
-function ParameterNamespace () {
-
-  this.getSearch = function (req, callback) {
-    if(!req.params || !req.params.search){
-      callback("No search criteria given");
-      return;
-    }
-    callback(null, req.params.search);
-  };
-
-  this.getSearchP = (req) => Q.nbind(this.getSearch, this)(req);
-
-  this.getCountAndFrom = function (req){
-    if(!req.params.from){
-      throw "From is required";
-    }
-    if(!req.params.count){
-      throw "Count is required";
-    }
-    const matches = req.params.from.match(/^(\d+)$/);
-    if(!matches){
-      throw "From format is incorrect, must be a positive integer";
-    }
-    const matches2 = req.params.count.match(/^(\d+)$/);
-    if(!matches2){
-      throw "Count format is incorrect, must be a positive integer";
-    }
-    return {
-      count: matches2[1],
-      from: matches[1]
-    };
-  };
-
-  this.getPubkey = function (req, callback){
-    if(!req.params.pubkey){
-      callback('Parameter `pubkey` is required');
-      return;
-    }
-    const matches = req.params.pubkey.match(constants.PUBLIC_KEY);
-    if(!matches){
-      callback("Pubkey format is incorrect, must be a Base58 string");
-      return;
-    }
-    callback(null, matches[0]);
-  };
-
-  this.getPubkeyP = (req) => Q.nbind(this.getPubkey, this)(req);
-
-  this.getFrom = function (req, callback){
-    if(!req.params.from){
-      callback('Parameter `from` is required');
-      return;
-    }
-    const matches = req.params.from.match(/^(\d+)$/);
-    if(!matches){
-      callback("From format is incorrect, must be a positive or zero integer");
-      return;
-    }
-    callback(null, matches[0]);
-  };
-
-  this.getFromP = (req) => Q.nbind(this.getFrom, this)(req);
-
-  this.getTo = function (req, callback){
-    if(!req.params.to){
-      callback('Parameter `to` is required');
-      return;
-    }
-    const matches = req.params.to.match(/^(\d+)$/);
-    if(!matches){
-      callback("To format is incorrect, must be a positive or zero integer");
-      return;
-    }
-    callback(null, matches[0]);
-  };
-
-  this.getToP = (req) => Q.nbind(this.getTo, this)(req);
-
-  this.getNumber = function (req, callback){
-    if(!req.params.number){
-      callback("Number is required");
-      return;
-    }
-    const matches = req.params.number.match(/^(\d+)$/);
-    if(!matches){
-      callback("Number format is incorrect, must be a positive integer");
-      return;
-    }
-    callback(null, parseInt(matches[1]));
-  };
-
-  this.getNumberP = (req) => Q.nbind(this.getNumber, this)(req);
-}
diff --git a/app/lib/logger/index.js b/app/lib/logger/index.js
index 5c149af281e894a24e4857ca05873d60c3374578..81f9e37daa9383a69485b924f7dd278303fbc9f9 100644
--- a/app/lib/logger/index.js
+++ b/app/lib/logger/index.js
@@ -3,7 +3,6 @@ const moment = require('moment');
 const path = require('path');
 const winston = require('winston');
 const cbLogger = require('./callbackLogger');
-const directory = require('../system/directory');
 
 const customLevels = {
   levels: {
diff --git a/app/lib/pow/engine.js b/app/lib/pow/engine.js
deleted file mode 100644
index 07674f03fef060ad5a19310a264d0e83aed3c6c8..0000000000000000000000000000000000000000
--- a/app/lib/pow/engine.js
+++ /dev/null
@@ -1,99 +0,0 @@
-"use strict";
-
-const childProcess = require('child_process');
-const path = require('path');
-const co = require('co');
-const os = require('os');
-const nuuid = require('node-uuid');
-const querablep = require('../querablep');
-
-module.exports = function (opts) {
-  return new PowEngine(opts);
-};
-
-function PowEngine() {
-
-  const that = this;
-
-  // Super important for Node.js debugging
-  const debug = process.execArgv.toString().indexOf('--debug') !== -1;
-  if(debug) {
-    //Set an unused port number.
-    process.execArgv = [];
-  }
-
-  const logger = require('../logger')('engine');
-  let powProcess;
-  let onInfoMessage;
-
-  const exchanges = {};
-
-  const restart = () => co(function*(){
-    if (!powProcess || !powProcess.connected) {
-      powProcess = childProcess.fork(path.join(__dirname, '.', 'proof.js'));
-
-      powProcess.on('message', function(msg) {
-        if (!msg.uuid) {
-          if (onInfoMessage) {
-            onInfoMessage(msg);
-          }
-        } else if (!exchanges[msg.uuid]) {
-          logger.error('PoW engine has sent a message about an unknown uuid:');
-          logger.debug(msg);
-        } else if (exchanges[msg.uuid].isFulfilled()) {
-          logger.error('PoW engine has sent a message about an already fulfilled uuid:');
-          logger.debug(msg);
-        } else {
-          exchanges[msg.uuid].extras.resolve(msg.answer);
-        }
-      });
-
-      powProcess.on('exit', function() {
-        logger.warn('PoW engine has exited.');
-      });
-    }
-  });
-
-  const ask = (command, value) => co(function*(){
-    // Restart the engine as it regularly closes itself if not used (free memory + cpu)
-    yield restart();
-    const uuid = nuuid.v4();
-    let resolve, reject;
-    exchanges[uuid] = querablep(new Promise((res, rej) => {
-      resolve = res;
-      reject = rej;
-    }));
-    exchanges[uuid].extras = { resolve, reject };
-    // Send the message
-    powProcess.send({ command, uuid, value });
-    // Wait for the answer
-    return exchanges[uuid];
-  });
-
-  this.prove = (block, nonceBeginning, zeros, highMark, pair, forcedTime, medianTimeBlocks, avgGenTime, cpu, prefix) => {
-    if (os.arch().match(/arm/)) {
-      cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2
-    }
-    return ask('newPoW', { block, nonceBeginning, zeros, highMark, pair, forcedTime, conf: { medianTimeBlocks, avgGenTime, cpu, prefix } });
-  };
-
-  this.status = () => ask('state');
-
-  this.cancel = () => co(function*() {
-    if (that.isConnected()) {
-      return ask('cancel');
-    }
-  });
-
-  this.getValue = (key) => ask(key);
-
-  this.setValue = (key, value) => co(function*() {
-    if (that.isConnected()) {
-      return ask(key, value);
-    }
-  });
-
-  this.isConnected = () => powProcess ? powProcess.connected : false;
-
-  this.setOnInfoMessage = (callback) => onInfoMessage = callback;
-}
\ No newline at end of file
diff --git a/app/lib/pow/proof.js b/app/lib/pow/proof.js
deleted file mode 100644
index 35de280845db216f16366acf16c2486992bc5d4b..0000000000000000000000000000000000000000
--- a/app/lib/pow/proof.js
+++ /dev/null
@@ -1,270 +0,0 @@
-"use strict";
-const co = require('co');
-const moment = require('moment');
-const hashf = require('./../ucp/hashf');
-const dos2unix = require('./../system/dos2unix');
-const querablep = require('./../querablep');
-const rules = require('./../rules/index');
-const constants = require('./../constants');
-const keyring = require('./../crypto/keyring');
-const rawer = require('./../ucp/rawer');
-
-let AUTOKILL_TIMEOUT_DELAY = 10 * 1000;
-const TURN_DURATION_IN_MILLISEC = 1000;
-const PAUSES_PER_TURN = 5;
-
-let timeoutAutoKill = null;
-let computing = querablep(Promise.resolve(null));
-let askedStop = false;
-
-// By default, we do not prefix the PoW by any number
-let prefix = 0;
-
-let signatureFunc, id, lastPub, lastSecret, currentCPU = 1;
-
-process.on('uncaughtException', (err) => {
-  console.error(err.stack || Error(err));
-  process.send({error: err});
-});
-
-autoKillIfNoContact();
-
-process.on('message', (message) => co(function*() {
-
-  switch (message.command) {
-
-    case 'state':
-      answer(message, computing.isFulfilled() ? 'ready' : 'computing');
-      break;
-
-    case 'autokillTimeout':
-      AUTOKILL_TIMEOUT_DELAY = message.value;
-      answer(message, 'OK');
-      break;
-
-    case 'identify':
-      lastPub = message.value.pubkey;
-      id = message.value.identifier;
-      answer(message, 'OK');
-      break;
-
-    case 'pubkey': answer(message, lastPub);    break;
-    case 'id':     answer(message, id);         break;
-    case 'cpu':    answer(message, currentCPU); break;
-    case 'prefix': answer(message, prefix);     break;
-
-    case 'newPoW':
-      co(function*() {
-        yield computing;
-        const res = yield beginNewProofOfWork(message.value);
-        answer(message, res);
-      });
-      break;
-
-    case 'cancel':
-      if (computing.isFulfilled()) {
-        answer(message, 'ready');
-      } else {
-        askedStop = true;
-        answer(message, 'cancelling');
-      }
-      break;
-
-    case 'conf':
-      if (message.value.cpu !== undefined) {
-        currentCPU = message.value.cpu;
-      }
-      if (message.value.prefix !== undefined) {
-        prefix = parseInt(message.value.prefix) * 10 * constants.NONCE_RANGE;
-      }
-      answer(message, { currentCPU, prefix });
-      break;
-  }
-
-  // We received a message, so postpone the autokill protection trigger
-  autoKillIfNoContact();
-
-}));
-
-function beginNewProofOfWork(stuff) {
-  askedStop = false;
-  computing = querablep(co(function*() {
-    pSend({ powStatus: 'started block#' + stuff.block.number });
-    let nonce = 0;
-    let foundBlock = null;
-    const conf = stuff.conf;
-    const block = stuff.block;
-    const nonceBeginning = stuff.nonceBeginning;
-    const nbZeros = stuff.zeros;
-    const pair = stuff.pair;
-    const forcedTime = stuff.forcedTime;
-    currentCPU = conf.cpu || constants.DEFAULT_CPU;
-    prefix = parseInt(conf.prefix || prefix) * 10 * constants.NONCE_RANGE;
-    const highMark = stuff.highMark;
-    let sigFunc = null;
-    if (signatureFunc && lastSecret == pair.sec) {
-      sigFunc = signatureFunc;
-    }
-    else {
-      lastSecret = pair.sec;
-      lastPub = pair.pub;
-      sigFunc = keyring.Key(pair.pub, pair.sec).signSync;
-    }
-    signatureFunc = sigFunc;
-    let pow = "", sig = "", raw = "";
-
-    block.time = getBlockTime(block, conf, forcedTime);
-
-    // Really start now
-    let testsCount = 0;
-    if (nbZeros == 0) {
-      block.time = block.medianTime;
-    }
-    // Compute block's hash
-    block.inner_hash = getBlockInnerHash(block);
-    let found = false;
-    let score = 0;
-    let turn = 0;
-    while (!found && !askedStop) {
-
-      // We make a bunch of tests every second
-      yield Promise.race([
-        countDown(TURN_DURATION_IN_MILLISEC),
-        co(function*() {
-          try {
-
-            /*****
-             * A NEW TURN
-             */
-            // Prove
-            let i = 0;
-            const thisTurn = turn;
-            const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of tests between pauses (so we pause PAUSES_PER_TURN times per turn)
-            // We limit the number of tests per round according to the allowed CPU share
-            const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000;
-            // Time is updated regularly during the proof
-            block.time = getBlockTime(block, conf, forcedTime);
-            if (block.number == 0) {
-              block.medianTime = block.time;
-            }
-            block.inner_hash = getBlockInnerHash(block);
-            while(!found && i < testsPerRound && thisTurn == turn && !askedStop) {
-              nonce++;
-              // The final nonce is composed of 3 parts
-              block.nonce = prefix + nonceBeginning + nonce;
-              raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n");
-              sig = dos2unix(sigFunc(raw));
-              pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase();
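-              // Check that the first nbZeros hex characters of the hash are '0', then that the next one falls within [0-highMark]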
-              let j = 0, charOK = true;
-              while (j < nbZeros && charOK) {
-                charOK = pow[j] == '0';
-                j++;
-              }
-              if (charOK) {
-                found = pow[nbZeros].match(new RegExp('[0-' + highMark + ']'));
-              }
-              if (!found && nbZeros > 0 && j - 1 >= constants.PROOF_OF_WORK.MINIMAL_TO_SHOW) {
-                pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }});
-              }
-              if (!found) {
-                i++;
-                testsCount++;
-                if (i % pausePeriod == 0) {
-                  yield countDown(0); // Very low pause, just the time to process eventual end of the turn
-                }
-              }
-            }
-            if (!found) {
-              if (turn > 0 && !score) {
-                score = testsCount;
-              }
-              // We wait for the main "while" countdown to end the turn. This gives a bit of breath to the CPU (the amount
-              // of "breath" depends on the "cpu" parameter).
-              yield countDown(TURN_DURATION_IN_MILLISEC);
-            }
-          } catch (e) {
-            console.error(e);
-          }
-        })
-      ]);
-      turn++;
-    }
-    block.hash = pow;
-    block.signature = sig;
-    if (askedStop) {
-      askedStop = false;
-      yield pSend({ pow: { canceled: true }});
-      pSend({ powStatus: 'canceled block#' + block.number });
-      pSend({ powStatus: 'ready' });
-    } else {
-      foundBlock = {
-        pow: {
-          block: block,
-          testsCount: testsCount,
-          pow: pow
-        }
-      };
-      pSend({ powStatus: 'found' });
-    }
-    return foundBlock;
-  }));
-  return computing;
-}
-
-function countDown(duration) {
-  return new Promise((resolve) => setTimeout(resolve, duration));
-}
-
-function getBlockInnerHash(block) {
-  const raw = rawer.getBlockInnerPart(block);
-  return hash(raw);
-}
-
-function hash(str) {
-  return hashf(str).toUpperCase();
-}
-
-function getBlockTime (block, conf, forcedTime) {
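-  // Use the forced time if any; otherwise bound the block time between medianTime and medianTime + maxAcceleration (relative to the current time)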
-  if (forcedTime) {
-    return forcedTime;
-  }
-  const now = moment.utc().unix();
-  const maxAcceleration = rules.HELPERS.maxAcceleration(block, conf);
-  const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0;
-  const medianTime = block.medianTime;
-  const upperBound = block.number == 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset);
-  return Math.max(medianTime, upperBound);
-}
-
-function answer(message, answer) {
-  return new Promise(function (resolve, reject) {
-    process.send({
-      uuid: message.uuid,
-      answer,
-      pubkey: lastPub
-    }, function (error) {
-      !error && resolve();
-      error && reject();
-    });
-  });
-}
-
-function pSend(stuff) {
-  stuff.pubkey = lastPub;
-  return new Promise(function (resolve, reject) {
-    process.send(stuff, function (error) {
-      !error && resolve();
-      error && reject();
-    });
-  });
-}
-
-function autoKillIfNoContact() {
-  if (timeoutAutoKill) {
-    clearTimeout(timeoutAutoKill);
-  }
-  // If the timeout is not cleared in some way, the process exits
-  timeoutAutoKill = setTimeout(() => {
-    process.exit();
-  }, AUTOKILL_TIMEOUT_DELAY);
-}
diff --git a/app/lib/querablep.js b/app/lib/querablep.js
deleted file mode 100644
index e9653c9a7a2016d263e93851fff7075c5a304736..0000000000000000000000000000000000000000
--- a/app/lib/querablep.js
+++ /dev/null
@@ -1,17 +0,0 @@
-"use strict";
-
-module.exports = function makeQuerablePromise(promise) {
-
-  // Don't create a wrapper for promises that can already be queried.
-  if (promise.isResolved) return promise;
-
-  var isResolved = false;
-  var isRejected = false;
-
-  // Observe the promise, saving the fulfillment in a closure scope.
-  var result = promise.then((v) => { isResolved = true; return v; }, (e)  => { isRejected = true; throw e; });
-  result.isFulfilled = () => isResolved || isRejected;
-  result.isResolved = () => isResolved;
-  result.isRejected = () => isRejected;
-  return result;
-};
diff --git a/app/lib/rules/global_rules.js b/app/lib/rules/global_rules.js
index e837eb2ae232fc2ecaa3eefdc2844bb04d813de0..4dc3c55eb7caadb801ba3b72cfe100ae854167aa 100644
--- a/app/lib/rules/global_rules.js
+++ b/app/lib/rules/global_rules.js
@@ -4,12 +4,10 @@ const Q              = require('q');
 const co             = require('co');
 const _              = require('underscore');
 const constants      = require('../constants');
-const keyring         = require('../crypto/keyring');
-const rawer          = require('../ucp/rawer');
+const keyring         = require('duniter-common').keyring;
+const rawer          = require('duniter-common').rawer;
 const indexer        = require('../dup/indexer');
 const Identity       = require('../entity/identity');
-const Membership     = require('../entity/membership');
-const Certification  = require('../entity/certification');
 const Transaction    = require('../entity/transaction');
 const logger         = require('../logger')('globr');
 const unlock         = require('../ucp/txunlock');
diff --git a/app/lib/rules/index.js b/app/lib/rules/index.js
index ff8e25af4009e2f17f1a9cd407525357cc4133a5..284ce198aecd0b7aa7d4fdff858db41584b3e245 100644
--- a/app/lib/rules/index.js
+++ b/app/lib/rules/index.js
@@ -78,7 +78,7 @@ rules.ALIAS = {
 rules.CHECK = {
   ASYNC: {
     ALL_LOCAL: checkLocal(rules.ALIAS.ALL_LOCAL),
-    ALL_LOCAL_BUT_POW: checkLocal(rules.ALIAS.ALL_LOCAL_BUT_POW_AND_SIGNATURE),
+    ALL_LOCAL_BUT_POW: checkLocal(rules.ALIAS.ALL_LOCAL_BUT_POW_AND_SIGNATURE)
   }
 };
 
diff --git a/app/lib/rules/local_rules.js b/app/lib/rules/local_rules.js
index c7f4d95d7920f4930f3368bf868b8294b6f30df0..779c64400d4d1916933f4beab7f3d835fac0ad22 100644
--- a/app/lib/rules/local_rules.js
+++ b/app/lib/rules/local_rules.js
@@ -4,12 +4,13 @@ const co         = require('co');
 const _          = require('underscore');
 const constants  = require('../constants');
 const indexer    = require('../dup/indexer');
-const hashf      = require('../ucp/hashf');
-const keyring    = require('../crypto/keyring');
-const rawer      = require('../ucp/rawer');
+const hashf      = require('duniter-common').hashf;
+const keyring    = require('duniter-common').keyring;
+const rawer      = require('duniter-common').rawer;
 const Identity   = require('../entity/identity');
 const Membership = require('../entity/membership');
 const Transaction = require('../entity/transaction');
+const maxAcceleration = require('duniter-common').rules.HELPERS.maxAcceleration;
 
 let rules = {};
 
@@ -79,8 +80,8 @@ rules.FUNCTIONS = {
   checkBlockTimes: (block, conf) => co(function *() {
     const time = parseInt(block.time);
     const medianTime = parseInt(block.medianTime);
-    if (block.number > 0 && (time < medianTime || time > medianTime + maxAcceleration(block, conf)))
-      throw Error('A block must have its Time between MedianTime and MedianTime + ' + maxAcceleration(block, conf));
+    if (block.number > 0 && (time < medianTime || time > medianTime + maxAcceleration(conf)))
+      throw Error('A block must have its Time between MedianTime and MedianTime + ' + maxAcceleration(conf));
     else if (block.number == 0 && time != medianTime)
       throw Error('Root block must have Time equal MedianTime');
     return true;
@@ -143,7 +144,7 @@ rules.FUNCTIONS = {
       .pluck('pub')
       .value();
     for (const pub of revocations) {
-      const exclusions = _(iindex).where({ op: constants.IDX_UPDATE, member: false });
+      const exclusions = _(iindex).where({ op: constants.IDX_UPDATE, member: false, pub });
       if (exclusions.length == 0) {
         throw Error('A revoked member must be excluded');
       }
@@ -313,7 +314,7 @@ rules.FUNCTIONS = {
     return true;
   }),
 
-  checkTxSources: (block, conf) => co(function *() {
+  checkTxSources: (block) => co(function *() {
     const txs = block.getTransactions();
     for (const tx of txs) {
       if (!tx.inputs || tx.inputs.length == 0) {
@@ -370,16 +371,11 @@ rules.FUNCTIONS = {
   })
 };
 
-function maxAcceleration (block, conf) {
-  let maxGenTime = Math.ceil(conf.avgGenTime * constants.POW_DIFFICULTY_RANGE_RATIO);
-  return Math.ceil(maxGenTime * conf.medianTimeBlocks);
-}
-
 function checkSingleMembershipSignature(ms) {
   return keyring.verify(ms.getRaw(), ms.signature, ms.issuer);
 }
 
-function getSigResult(tx, a) {
+function getSigResult(tx) {
   let sigResult = { sigs: {}, matching: true };
   let json = { "version": tx.version, "currency": tx.currency, "blockstamp": tx.blockstamp, "locktime": tx.locktime, "inputs": [], "outputs": [], "issuers": tx.issuers, "signatures": [], "comment": tx.comment };
   tx.inputs.forEach(function (input) {
@@ -431,7 +427,7 @@ function checkBunchOfTransactions(txs, done){
 
 rules.HELPERS = {
 
-  maxAcceleration: maxAcceleration,
+  maxAcceleration: (block, conf) => maxAcceleration(conf),
 
   checkSingleMembershipSignature: checkSingleMembershipSignature,
 
@@ -489,7 +485,7 @@ rules.HELPERS = {
     }
   },
 
-  getMaxPossibleVersionNumber: (current, block) => co(function*() {
+  getMaxPossibleVersionNumber: (current) => co(function*() {
     // Looking at current blockchain, find what is the next maximum version we can produce
 
     // 1. We follow previous block's version
diff --git a/app/lib/streams/bma.js b/app/lib/streams/bma.js
deleted file mode 100644
index 7d1158749a9d9ff8f96ebef35065b10dd0c5aa89..0000000000000000000000000000000000000000
--- a/app/lib/streams/bma.js
+++ /dev/null
@@ -1,31 +0,0 @@
-"use strict";
-
-const network = require('../system/network');
-const routes = require('./routes');
-
-module.exports = function(server, interfaces, httpLogs) {
-
-  if (!interfaces) {
-    interfaces = [];
-    if (server.conf) {
-      if (server.conf.ipv4) {
-        interfaces = [{
-          ip: server.conf.ipv4,
-          port: server.conf.port
-        }];
-      }
-      if (server.conf.ipv6) {
-        interfaces.push({
-          ip: server.conf.ipv6,
-          port: (server.conf.remoteport || server.conf.port) // We try to get the best one
-        });
-      }
-    }
-  }
-
-  return network.createServersAndListen('Duniter server', interfaces, httpLogs, null, (app, httpMethods) => {
-    
-    routes.bma(server, '', app, httpMethods);
-
-  }, routes.bmaWS(server, ''));
-};
diff --git a/app/lib/streams/dtos.js b/app/lib/streams/dtos.js
deleted file mode 100644
index 92661f943ae46f19225c96b9c57ecfb3372b140f..0000000000000000000000000000000000000000
--- a/app/lib/streams/dtos.js
+++ /dev/null
@@ -1,484 +0,0 @@
-"use strict";
-
-let dtos;
-
-module.exports = dtos = {};
-
-dtos.Summary = {
-  duniter: {
-    "software": String,
-    "version": String,
-    "forkWindowSize": Number
-  }
-};
-
-dtos.Parameters = {
-  currency: String,
-  c: Number,
-  dt: Number,
-  ud0: Number,
-  sigPeriod: Number,
-  sigStock: Number,
-  sigWindow: Number,
-  sigValidity: Number,
-  sigQty: Number,
-  idtyWindow: Number,
-  msWindow: Number,
-  xpercent: Number,
-  msValidity: Number,
-  stepMax: Number,
-  medianTimeBlocks: Number,
-  avgGenTime: Number,
-  dtDiffEval: Number,
-  percentRot: Number
-};
-
-dtos.Membership = {
-  "signature": String,
-  "membership": {
-    "version": Number,
-    "currency": String,
-    "issuer": String,
-    "membership": String,
-    "date": Number,
-    "sigDate": Number,
-    "raw": String
-  }
-};
-
-dtos.Memberships = {
-  "pubkey": String,
-  "uid": String,
-  "sigDate": String,
-  "memberships": [
-    {
-      "version": Number,
-      "currency": String,
-      "membership": String,
-      "blockNumber": Number,
-      "blockHash": String,
-      "written": Number
-    }
-  ]
-};
-
-dtos.MembershipList = {
-  "memberships": [
-    {
-      "pubkey": String,
-      "uid": String,
-      "version": Number,
-      "currency": String,
-      "membership": String,
-      "blockNumber": Number,
-      "blockHash": String,
-      "written": Number
-    }
-  ]
-};
-
-dtos.TransactionOfBlock = {
-  "version": Number,
-  "currency": String,
-  "comment": String,
-  "locktime": Number,
-  "signatures": [String],
-  "outputs": [String],
-  "inputs": [String],
-  "unlocks": [String],
-  "block_number": Number,
-  "blockstamp": String,
-  "blockstampTime": Number,
-  "time": Number,
-  "issuers": [String]
-};
-
-dtos.Block = {
-  "version": Number,
-  "currency": String,
-  "number": Number,
-  "issuer": String,
-  "issuersFrame": Number,
-  "issuersFrameVar": Number,
-  "issuersCount": Number,
-  "parameters": String,
-  "membersCount": Number,
-  "monetaryMass": Number,
-  "powMin": Number,
-  "time": Number,
-  "medianTime": Number,
-  "dividend": Number,
-  "unitbase": Number,
-  "hash": String,
-  "previousHash": String,
-  "previousIssuer": String,
-  "identities": [String],
-  "certifications": [String],
-  "joiners": [String],
-  "actives": [String],
-  "leavers": [String],
-  "revoked": [String],
-  "excluded": [String],
-  "transactions": [dtos.TransactionOfBlock],
-  "nonce": Number,
-  "inner_hash": String,
-  "signature": String,
-  "raw": String
-};
-
-dtos.Hardship = {
-  "block": Number,
-  "level": Number
-};
-
-dtos.Difficulty = {
-  "uid": String,
-  "level": Number
-};
-
-dtos.Difficulties = {
-  "block": Number,
-  "levels": [dtos.Difficulty]
-};
-
-dtos.Blocks = [dtos.Block];
-
-dtos.Stat = {
-  "result": {
-    "blocks": [Number]
-  }
-};
-
-dtos.Branches = {
-  "blocks": [dtos.Block]
-};
-
-dtos.Peer = {
-  "version": Number,
-  "currency": String,
-  "pubkey": String,
-  "block": String,
-  "endpoints": [String],
-  "signature": String,
-  "raw": String
-};
-
-dtos.DBPeer = {
-  "version": Number,
-  "currency": String,
-  "pubkey": String,
-  "block": String,
-  "status": String,
-  "first_down": Number,
-  "last_try": Number,
-  "endpoints": [String],
-  "signature": String,
-  "raw": String
-};
-
-dtos.Peers = {
-  "peers": [dtos.DBPeer]
-};
-
-dtos.MerkleOfPeers = {
-  "depth": Number,
-  "nodesCount": Number,
-  "leavesCount": Number,
-  "root": String,
-  "leaves": [String],
-  "leaf": {
-    "hash": String,
-    "value": dtos.DBPeer
-  }
-};
-
-dtos.Other = {
-  "pubkey": String,
-  "meta": {
-    "block_number": Number,
-    "block_hash": String
-  },
-  "uids": [String],
-  "isMember": Boolean,
-  "wasMember": Boolean,
-  "signature": String
-};
-
-dtos.UID = {
-  "uid": String,
-  "meta": {
-    "timestamp": String
-  },
-  "self": String,
-  "revocation_sig": String,
-  "revoked": Boolean,
-  "revoked_on": Number,
-  "others": [dtos.Other]
-};
-
-dtos.Signed = {
-  "uid": String,
-  "pubkey": String,
-  "meta": {
-    "timestamp": String
-  },
-  "cert_time": {
-    "block": Number,
-    "block_hash": String
-  },
-  "isMember": Boolean,
-  "wasMember": Boolean,
-  "signature": String
-};
-
-dtos.CertIdentity = {
-  "issuer": String,
-  "uid": String,
-  "timestamp": String,
-  "sig": String
-};
-
-dtos.Cert = {
-  "issuer": String,
-  "timestamp": String,
-  "sig": String,
-  "target": dtos.CertIdentity
-};
-
-dtos.Identity = {
-  "pubkey": String,
-  "uids": [dtos.UID],
-  "signed": [dtos.Signed]
-};
-
-dtos.Result = {
-  "result": Boolean
-};
-
-dtos.Lookup = {
-  "partial": Boolean,
-  "results": [dtos.Identity]
-};
-
-dtos.Members = {
-  "results": [{
-    pubkey: String,
-    uid: String
-  }]
-};
-
-dtos.RequirementsCert = {
-  from: String,
-  to: String,
-  expiresIn: Number
-};
-
-dtos.Requirements = {
-  "identities": [{
-    pubkey: String,
-    uid: String,
-    meta: {
-      timestamp: String
-    },
-    expired: Boolean,
-    outdistanced: Boolean,
-    certifications: [dtos.RequirementsCert],
-    membershipPendingExpiresIn: Number,
-    membershipExpiresIn: Number
-  }]
-};
-
-dtos.Certification = {
-  "pubkey": String,
-  "uid": String,
-  "isMember": Boolean,
-  "wasMember": Boolean,
-  "cert_time": {
-    "block": Number,
-    "medianTime": Number
-  },
-  "sigDate": String,
-  "written": {
-    "number": Number,
-    "hash": String
-  },
-  "signature": String
-};
-
-dtos.Certifications = {
-  "pubkey": String,
-  "uid": String,
-  "sigDate": String,
-  "isMember": Boolean,
-  "certifications": [dtos.Certification]
-};
-
-dtos.SimpleIdentity = {
-  "pubkey": String,
-  "uid": String,
-  "sigDate": String
-};
-
-dtos.Transaction = {
-  "version": Number,
-  "currency": String,
-  "issuers": [String],
-  "inputs": [String],
-  "unlocks": [String],
-  "outputs": [String],
-  "comment": String,
-  "locktime": Number,
-  "signatures": [String],
-  "raw": String,
-  "hash": String
-};
-
-dtos.Source = {
-  "type": String,
-  "noffset": Number,
-  "identifier": String,
-  "amount": Number,
-  "base": Number
-};
-
-dtos.Sources = {
-  "currency": String,
-  "pubkey": String,
-  "sources": [dtos.Source]
-};
-
-dtos.TxOfHistory = {
-  "version": Number,
-  "issuers": [String],
-  "inputs": [String],
-  "unlocks": [String],
-  "outputs": [String],
-  "comment": String,
-  "locktime": Number,
-  "received": Number,
-  "signatures": [String],
-  "hash": String,
-  "block_number": Number,
-  "time": Number,
-  "blockstamp": String,
-  "blockstampTime": Number
-};
-
-dtos.TxHistory = {
-  "currency": String,
-  "pubkey": String,
-  "history": {
-    "sent": [dtos.TxOfHistory],
-    "received": [dtos.TxOfHistory],
-    "sending": [dtos.TxOfHistory],
-    "receiving": [dtos.TxOfHistory],
-    "pending": [dtos.TxOfHistory]
-  }
-};
-
-dtos.TxPending = {
-  "currency": String,
-  "pending": [dtos.Transaction]
-};
-
-dtos.UD = {
-  "block_number": Number,
-  "consumed": Boolean,
-  "time": Number,
-  "amount": Number,
-  "base": Number
-};
-
-dtos.UDHistory = {
-  "currency": String,
-  "pubkey": String,
-  "history": {
-    "history": [dtos.UD]
-  }
-};
-
-dtos.NetAddress = {
-  "address": String,
-  "family": String,
-  "internal": Boolean,
-  "mac": String,
-  "netmask": String
-};
-
-dtos.NetworkInterface = {
-  "name": String,
-  "addresses": [dtos.NetAddress]
-};
-
-dtos.NetworkConf = {
-  "local": {
-    "ipv4": String,
-    "ipv6": String,
-    "port": Number
-  },
-  "remote": {
-    "ipv4": String,
-    "ipv6": String,
-    "port": Number,
-    "dns": String,
-    "upnp": Boolean
-  }
-};
-
-dtos.NetworkInterfaces = {
-  "local": [dtos.NetworkInterface],
-  "remote": [dtos.NetworkInterface],
-  "auto": dtos.NetworkConf,
-  "conf": dtos.NetworkConf
-};
-
-dtos.Boolean = {
-  "success": Boolean
-};
-
-dtos.SummaryConf = {
-  "cpu": Number
-};
-
-dtos.AdminSummary = {
-  "version": String,
-  "host": String,
-  "current": dtos.Block,
-  "rootBlock": dtos.Block,
-  "pubkey": String,
-  "seckey": String,
-  "conf": dtos.SummaryConf,
-  "parameters": dtos.Parameters,
-  "lastUDBlock": dtos.Block
-};
-
-dtos.PoWSummary = {
-  "total": Number,
-  "mirror": Boolean,
-  "waiting": Boolean
-};
-
-dtos.PreviewPubkey = {
-  "pubkey": String
-};
-
-dtos.Sandbox = {
-  size: Number,
-  free: Number
-};
-
-dtos.IdentitySandbox = dtos.Sandbox;
-dtos.CertificationSandbox = dtos.Sandbox;
-dtos.MembershipSandbox = dtos.Sandbox;
-dtos.TransactionSandbox = dtos.Sandbox;
-
-dtos.Sandboxes = {
-  identities: dtos.IdentitySandbox,
-  certifications: dtos.CertificationSandbox,
-  memberships: dtos.MembershipSandbox,
-  transactions: dtos.TransactionSandbox
-};
-
-dtos.LogLink = {
-  link: String
-};
diff --git a/app/lib/streams/jsoner.js b/app/lib/streams/jsoner.js
deleted file mode 100644
index 2b342e35a25fb9a814a7e29df4998bb41dce7269..0000000000000000000000000000000000000000
--- a/app/lib/streams/jsoner.js
+++ /dev/null
@@ -1,21 +0,0 @@
-"use strict";
-const util     = require('util');
-const stream   = require('stream');
-
-module.exports = function () {
-  return new JSONer();
-};
-
-function JSONer () {
-
-  stream.Transform.call(this, { objectMode: true });
-
-  const that = this;
-
-  this._write = function (entity, enc, done) {
-    that.push(entity.json());
-    done();
-  };
-};
-
-util.inherits(JSONer, stream.Transform);
diff --git a/app/lib/streams/parsers/GenericParser.js b/app/lib/streams/parsers/GenericParser.js
index e5b60948c1891289caacc0e0339ba4e1b1d2ba0e..753752e7d8bcc8ae1fc4d9286d83c9366ed9d754 100644
--- a/app/lib/streams/parsers/GenericParser.js
+++ b/app/lib/streams/parsers/GenericParser.js
@@ -1,7 +1,7 @@
 "use strict";
 const util                 = require('util');
 const stream               = require('stream');
-const hashf                = require('../../ucp/hashf');
+const hashf                = require('duniter-common').hashf;
 const logger               = require('../../logger')('gen_parser');
 const constants            = require('../../constants');
 
@@ -14,7 +14,7 @@ function GenericParser (captures, multipleLinesFields, rawerFunc) {
 
   this.rawerFunc = rawerFunc;
 
-  this._simpleLineExtraction = (pr, rawEntry, cap, parser) => {
+  this._simpleLineExtraction = (pr, rawEntry, cap) => {
     const fieldValue = rawEntry.match(cap.regexp);
     if(fieldValue && fieldValue.length >= 2){
       pr[cap.prop] = cap.parser ? cap.parser(fieldValue[1], pr) : fieldValue[1];
diff --git a/app/lib/streams/parsers/block.js b/app/lib/streams/parsers/block.js
index eebd6528ca487d7185a6016b86ba5e13864ad8fd..58368da34851b718a45782faaca0d4ae84ae510b 100644
--- a/app/lib/streams/parsers/block.js
+++ b/app/lib/streams/parsers/block.js
@@ -2,8 +2,8 @@
 const util          = require('util');
 const GenericParser = require('./GenericParser');
 const Block         = require('../../entity/block');
-const hashf         = require('../../ucp/hashf');
-const rawer         = require('../../ucp/rawer');
+const hashf         = require('duniter-common').hashf;
+const rawer         = require('duniter-common').rawer;
 const constants     = require('../../constants');
 
 module.exports = BlockParser;
@@ -54,7 +54,7 @@ function BlockParser (onError) {
     obj.transactions = obj.transactions || [];
     obj.version = obj.version || '';
     obj.type = obj.type || '';
-    obj.hash = hashf(require('../../ucp/rawer').getBlockInnerHashAndNonceWithSignature(obj)).toUpperCase();
+    obj.hash = hashf(require('duniter-common').rawer.getBlockInnerHashAndNonceWithSignature(obj)).toUpperCase();
     obj.inner_hash = obj.inner_hash || '';
     obj.currency = obj.currency || '';
     obj.nonce = obj.nonce || '';
@@ -163,7 +163,7 @@ function splitAndMatch (separator, regexp) {
   };
 }
 
-function extractTransactions(raw, obj) {
+function extractTransactions(raw) {
   const regexps = {
     "issuers": constants.TRANSACTION.SENDER,
     "inputs": constants.TRANSACTION.SOURCE_V3,
diff --git a/app/lib/streams/parsers/certification.js b/app/lib/streams/parsers/certification.js
index 814fe80cb15c469036adef405861e10459b4c2dd..aa57b0ef41818fe519687777a161d75a7f4ac766 100644
--- a/app/lib/streams/parsers/certification.js
+++ b/app/lib/streams/parsers/certification.js
@@ -1,9 +1,7 @@
 "use strict";
 const GenericParser = require('./GenericParser');
 const util          = require('util');
-const ucp           = require('../../ucp/buid');
-const rawer         = require('../../ucp/rawer');
-const hashf         = require('../../ucp/hashf');
+const rawer         = require('duniter-common').rawer;
 const constants     = require('../../constants');
 
 module.exports = CertificationParser;
diff --git a/app/lib/streams/parsers/identity.js b/app/lib/streams/parsers/identity.js
index bdc2c7510f6ec59ffb3a6a11ddc59abfd37b2ba7..0f79a5d4b364a59017c801cabbca6774f4f88798 100644
--- a/app/lib/streams/parsers/identity.js
+++ b/app/lib/streams/parsers/identity.js
@@ -1,9 +1,8 @@
 "use strict";
 const GenericParser = require('./GenericParser');
 const util          = require('util');
-const ucp           = require('../../ucp/buid');
-const rawer         = require('../../ucp/rawer');
-const hashf         = require('../../ucp/hashf');
+const rawer         = require('duniter-common').rawer;
+const hashf         = require('duniter-common').hashf;
 const constants     = require('../../constants');
 
 module.exports = IdentityParser;
diff --git a/app/lib/streams/parsers/membership.js b/app/lib/streams/parsers/membership.js
index 19c6ba6cf52bcd8c1c2dddda243e96e0376f4426..a0243c4efff5ec2a4f18ee85f798ee35d442a92a 100644
--- a/app/lib/streams/parsers/membership.js
+++ b/app/lib/streams/parsers/membership.js
@@ -1,7 +1,7 @@
 "use strict";
 const GenericParser = require('./GenericParser');
-const ucp           = require('../../ucp/buid');
-const rawer         = require('../../ucp/rawer');
+const ucp           = require('duniter-common').buid;
+const rawer         = require('duniter-common').rawer;
 const util          = require('util');
 const constants     = require('../../constants');
 
diff --git a/app/lib/streams/parsers/peer.js b/app/lib/streams/parsers/peer.js
index 66a7437a77861a67c8bc219a3337be5e84e31d6a..64a58436b0276cedf22c61db782d9cf30bddbf0b 100644
--- a/app/lib/streams/parsers/peer.js
+++ b/app/lib/streams/parsers/peer.js
@@ -1,6 +1,6 @@
 "use strict";
 const GenericParser = require('./GenericParser');
-const rawer         = require('../../ucp/rawer');
+const rawer         = require('duniter-common').rawer;
 const util          = require('util');
 const constants     = require('../../constants');
 
diff --git a/app/lib/streams/parsers/revocation.js b/app/lib/streams/parsers/revocation.js
index 52903ce60e196c3e921d4018bbf241c3538cbfec..f77544659715501aa0043bb3581e46e6d8505ded 100644
--- a/app/lib/streams/parsers/revocation.js
+++ b/app/lib/streams/parsers/revocation.js
@@ -1,10 +1,8 @@
 "use strict";
 const GenericParser = require('./GenericParser');
 const util          = require('util');
-const moment        = require('moment');
-const ucp           = require('../../ucp/buid');
-const rawer         = require('../../ucp/rawer');
-const hashf         = require('../../ucp/hashf');
+const rawer         = require('duniter-common').rawer;
+const hashf         = require('duniter-common').hashf;
 const constants     = require('../../constants');
 
 module.exports = RevocationParser;
diff --git a/app/lib/streams/parsers/transaction.js b/app/lib/streams/parsers/transaction.js
index 06e3358becbdce182e2bab1ad49ac64564e6335d..25da6f462c8f46a8e82e96b78808418fc881ddf5 100644
--- a/app/lib/streams/parsers/transaction.js
+++ b/app/lib/streams/parsers/transaction.js
@@ -1,6 +1,6 @@
 "use strict";
 const GenericParser = require('./GenericParser');
-const rawer         = require('../../ucp/rawer');
+const rawer         = require('duniter-common').rawer;
 const constants     = require('../../constants');
 const util          = require('util');
 
@@ -72,7 +72,7 @@ function extractIssuers(raw) {
   return issuers;
 }
 
-function extractInputs(raw, obj) {
+function extractInputs(raw) {
   const inputs = [];
   const lines = raw.split(/\n/);
   for (const line of lines) {
diff --git a/app/lib/streams/router.js b/app/lib/streams/router.js
index c646dc64a83b3eb4de42f35d5915f211533ab4b1..a02a94a82bfb60474e393b1fe2a4d65c0b1ce5b8 100644
--- a/app/lib/streams/router.js
+++ b/app/lib/streams/router.js
@@ -1,19 +1,18 @@
 "use strict";
 
 const co = require('co');
-const _ = require('underscore');
 const util     = require('util');
 const stream   = require('stream');
 const Peer     = require('../entity/peer');
 const constants = require('../constants');
 
-module.exports = function (PeeringService, conf, dal) {
-  return new Router(PeeringService, conf, dal);
+module.exports = function (PeeringService, dal) {
+  return new Router(PeeringService, dal);
 };
 
-function Router (PeeringService, conf, dal) {
+function Router (PeeringService, dal) {
   
-  this.setConfDAL = (theConf, theDAL) => {
+  this.setConfDAL = (theDAL) => {
     dal = theDAL;
   };
 
diff --git a/app/lib/streams/routes.js b/app/lib/streams/routes.js
deleted file mode 100644
index a6cf8ad9c16e2fd041b066e0e6df982d2a63e92f..0000000000000000000000000000000000000000
--- a/app/lib/streams/routes.js
+++ /dev/null
@@ -1,126 +0,0 @@
-"use strict";
-
-const co = require('co');
-const es = require('event-stream');
-const dtos = require('./dtos');
-const sanitize = require('./sanitize');
-const limiter = require('../system/limiter');
-const constants = require('../../lib/constants');
-const logger = require('../logger')('routes');
-
-const WebSocketServer = require('ws').Server;
-
-module.exports = {
-
-  bma: function(server, prefix, app, httpMethods) {
-
-    const node         = require('../../controllers/node')(server);
-    const blockchain   = require('../../controllers/blockchain')(server);
-    const net          = require('../../controllers/network')(server, server.conf);
-    const wot          = require('../../controllers/wot')(server);
-    const transactions = require('../../controllers/transactions')(server);
-    const dividend     = require('../../controllers/uds')(server);
-    httpMethods.httpGET(  prefix + '/',                                      node.summary,                         dtos.Summary,        limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/node/summary',                          node.summary,                         dtos.Summary,        limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/node/sandboxes',                        node.sandboxes,                       dtos.Sandboxes,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/parameters',                 blockchain.parameters,                dtos.Parameters,     limiter.limitAsHighUsage());
-    httpMethods.httpPOST( prefix + '/blockchain/membership',                 blockchain.parseMembership,           dtos.Membership,     limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/memberships/:search',        blockchain.memberships,               dtos.Memberships,    limiter.limitAsHighUsage());
-    httpMethods.httpPOST( prefix + '/blockchain/block',                      blockchain.parseBlock,                dtos.Block,          limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/block/:number',              blockchain.promoted,                  dtos.Block,          limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/blocks/:count/:from',        blockchain.blocks,                    dtos.Blocks,         limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/current',                    blockchain.current,                   dtos.Block,          limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/hardship/:search',           blockchain.hardship,                  dtos.Hardship,       limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/difficulties',               blockchain.difficulties,              dtos.Difficulties,   limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/newcomers',             blockchain.with.newcomers,            dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/certs',                 blockchain.with.certs,                dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/joiners',               blockchain.with.joiners,              dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/actives',               blockchain.with.actives,              dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/leavers',               blockchain.with.leavers,              dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/excluded',              blockchain.with.excluded,             dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/revoked',               blockchain.with.revoked,              dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/ud',                    blockchain.with.ud,                   dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/with/tx',                    blockchain.with.tx,                   dtos.Stat,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/blockchain/branches',                   blockchain.branches,                  dtos.Branches,       limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/network/peering',                       net.peer,                             dtos.Peer,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/network/peering/peers',                 net.peersGet,                         dtos.MerkleOfPeers,  limiter.limitAsVeryHighUsage());
-    httpMethods.httpPOST( prefix + '/network/peering/peers',                 net.peersPost,                        dtos.Peer,           limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/network/peers',                         net.peers,                            dtos.Peers,          limiter.limitAsHighUsage());
-    httpMethods.httpPOST( prefix + '/wot/add',                               wot.add,                              dtos.Identity,       limiter.limitAsHighUsage());
-    httpMethods.httpPOST( prefix + '/wot/certify',                           wot.certify,                          dtos.Cert,           limiter.limitAsHighUsage());
-    httpMethods.httpPOST( prefix + '/wot/revoke',                            wot.revoke,                           dtos.Result,         limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/lookup/:search',                    wot.lookup,                           dtos.Lookup,         limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/members',                           wot.members,                          dtos.Members,        limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/pending',                           wot.pendingMemberships,               dtos.MembershipList, limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/requirements/:search',              wot.requirements,                     dtos.Requirements,   limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/certifiers-of/:search',             wot.certifiersOf,                     dtos.Certifications, limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/certified-by/:search',              wot.certifiedBy,                      dtos.Certifications, limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/wot/identity-of/:search',               wot.identityOf,                       dtos.SimpleIdentity, limiter.limitAsHighUsage());
-    httpMethods.httpPOST( prefix + '/tx/process',                            transactions.parseTransaction,        dtos.Transaction,    limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/tx/sources/:pubkey',                    transactions.getSources,              dtos.Sources,        limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/tx/history/:pubkey',                    transactions.getHistory,              dtos.TxHistory,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/tx/history/:pubkey/blocks/:from/:to',   transactions.getHistoryBetweenBlocks, dtos.TxHistory,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/tx/history/:pubkey/times/:from/:to',    transactions.getHistoryBetweenTimes,  dtos.TxHistory,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/tx/history/:pubkey/pending',            transactions.getPendingForPubkey,     dtos.TxHistory,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/tx/pending',                            transactions.getPending,              dtos.TxPending,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/ud/history/:pubkey',                    dividend.getHistory,                  dtos.UDHistory,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/ud/history/:pubkey/blocks/:from/:to',   dividend.getHistoryBetweenBlocks,     dtos.UDHistory,      limiter.limitAsHighUsage());
-    httpMethods.httpGET(  prefix + '/ud/history/:pubkey/times/:from/:to',    dividend.getHistoryBetweenTimes,      dtos.UDHistory,      limiter.limitAsHighUsage());
-  },
-  
-  bmaWS: function(server, prefix) {
-    return (httpServer) => {
-
-      let currentBlock = {};
-      let wssBlock = new WebSocketServer({
-        server: httpServer,
-        path: prefix + '/ws/block'
-      });
-      let wssPeer = new WebSocketServer({
-        server: httpServer,
-        path: prefix + '/ws/peer'
-      });
-
-      wssBlock.on('error', function (error) {
-        logger.error('Error on WS Server');
-        logger.error(error);
-      });
-
-      wssBlock.on('connection', function connection(ws) {
-        co(function *() {
-          currentBlock = yield server.dal.getCurrentBlockOrNull();
-          if (currentBlock) {
-            ws.send(JSON.stringify(sanitize(currentBlock, dtos.Block)));
-          }
-        });
-      });
-
-      wssBlock.broadcast = (data) => wssBlock.clients.forEach((client) => {
-        try {
-          client.send(data);
-        } catch (e) {
-          logger.error('error on ws: %s', e);
-        }
-      });
-      wssPeer.broadcast = (data) => wssPeer.clients.forEach((client) => client.send(data));
-
-      // Forward blocks & peers
-      server
-        .pipe(es.mapSync(function(data) {
-          try {
-            // Broadcast block
-            if (data.joiners) {
-              currentBlock = data;
-              wssBlock.broadcast(JSON.stringify(sanitize(currentBlock, dtos.Block)));
-            }
-            // Broadcast peer
-            if (data.endpoints) {
-              wssPeer.broadcast(JSON.stringify(sanitize(data, dtos.Peer)));
-            }
-          } catch (e) {
-            logger.error('error on ws mapSync:', e);
-          }
-        }));
-    };
-  }
-};
diff --git a/app/lib/streams/sanitize.js b/app/lib/streams/sanitize.js
deleted file mode 100644
index e339b7aadd79fe07e669e739ea4e3c7dd1a371a8..0000000000000000000000000000000000000000
--- a/app/lib/streams/sanitize.js
+++ /dev/null
@@ -1,97 +0,0 @@
-"use strict";
-
-let _ = require('underscore');
-
-module.exports = function sanitize (json, contract) {
-
-  // Tries to sanitize only if contract is given
-  if (contract) {
-
-    if (Object.prototype.toString.call(contract) === "[object Array]") {
-      // Contract is an array
-
-      if (Object.prototype.toString.call(json) !== "[object Array]") {
-        json = [];
-      }
-
-      for (let i = 0, len = json.length; i < len; i++) {
-        json[i] = sanitize(json[i], contract[0]);
-      }
-    } else {
-      // Contract is an object or native type
-
-      // Return type is either a string, a number or an object
-      if (typeof json != typeof contract) {
-        try {
-          // Cast value
-          json = contract(json);
-        } catch (e) {
-          // Cannot be cast: create an empty value
-          json = contract();
-        }
-      }
-
-      let contractFields = _(contract).keys();
-      let objectFields = _(json).keys();
-      let toDeleteFromObj = _.difference(objectFields, contractFields);
-
-      // Remove unwanted fields
-      for (let i = 0, len = toDeleteFromObj.length; i < len; i++) {
-        let field = toDeleteFromObj[i];
-        delete json[field];
-      }
-
-      // Format wanted fields
-      for (let i = 0, len = contractFields.length; i < len; i++) {
-        let prop = contractFields[i];
-        let propType = contract[prop];
-        let t = "";
-        if (propType.name) {
-          t = propType.name;
-        } else if (propType.length != undefined) {
-          t = 'Array';
-        } else {
-          t = 'Object';
-        }
-        // Test json member type
-        let tjson = typeof json[prop];
-        if (~['Array', 'Object'].indexOf(t)) {
-          if (tjson == 'object' && json[prop] !== null) {
-            tjson = json[prop].length == undefined ? 'Object' : 'Array';
-          }
-        }
-        // Check coherence & alter member if needed
-        if (!_(json[prop]).isNull() && t.toLowerCase() != tjson.toLowerCase()) {
-          try {
-            if (t == "String" || t == "Number") {
-              let s = json[prop] == undefined ? '' : json[prop];
-              eval('json[prop] = new ' + t + '(' + s + ').valueOf()');
-            }
-            else {
-              eval('json[prop] = new ' + t + '()');
-            }
-          } catch (ex) {
-            eval('json[prop] = new ' + t + '()');
-          }
-        }
-        // Arrays
-        if (t == 'Array') {
-          let subt = propType[0];
-          for (let j = 0, len2 = json[prop].length; j < len2; j++) {
-            if (subt == "String" || subt == "Number") {
-              eval('item = new ' + t + '(' + (json[prop] + '') + ').valueOf()');
-            }
-            else {
-              json[prop][j] = sanitize(json[prop][j], subt);
-            }
-          }
-        }
-        // Recursivity
-        if (t == 'Object' && json[prop] !== null) {
-          json[prop] = sanitize(json[prop], contract[prop]);
-        }
-      }
-    }
-  }
-  return json;
-};
diff --git a/app/lib/sync.js b/app/lib/sync.js
deleted file mode 100644
index cee39408d34004378c153be52560867bbebb960e..0000000000000000000000000000000000000000
--- a/app/lib/sync.js
+++ /dev/null
@@ -1,929 +0,0 @@
-"use strict";
-const util         = require('util');
-const stream       = require('stream');
-const co           = require('co');
-const _            = require('underscore');
-const moment       = require('moment');
-const contacter    = require('./contacter');
-const hashf        = require('./ucp/hashf');
-const indexer      = require('./dup/indexer');
-const dos2unix     = require('./system/dos2unix');
-const logger       = require('./logger')('sync');
-const rawer        = require('./ucp/rawer');
-const constants    = require('../lib/constants');
-const Block        = require('../lib/entity/block');
-const Transaction  = require('../lib/entity/transaction');
-const Peer         = require('../lib/entity/peer');
-const multimeter   = require('multimeter');
-const pulling      = require('../lib/pulling');
-const makeQuerablePromise = require('../lib/querablep');
-
-const CONST_BLOCKS_CHUNK = 250;
-const EVAL_REMAINING_INTERVAL = 1000;
-const INITIAL_DOWNLOAD_SLOTS = 1;
-
-
-module.exports = Synchroniser;
-
-function Synchroniser (server, host, port, conf, interactive) {
-
-  const that = this;
-
-  let speed = 0, blocksApplied = 0;
-  const baseWatcher = interactive ? new MultimeterWatcher() : new LoggerWatcher();
-
-  // Wrapper to also push event stream
-  const watcher = {
-    writeStatus: baseWatcher.writeStatus,
-    downloadPercent: (pct) => {
-      if (pct !== undefined && baseWatcher.downloadPercent() < pct) {
-        that.push({ download: pct });
-      }
-      return baseWatcher.downloadPercent(pct);
-    },
-    appliedPercent: (pct) => {
-      if (pct !== undefined && baseWatcher.appliedPercent() < pct) {
-        that.push({ applied: pct });
-      }
-      return baseWatcher.appliedPercent(pct);
-    },
-    end: baseWatcher.end
-  };
-
-  stream.Duplex.call(this, { objectMode: true });
-
-  // Unused, but made mandatory by Duplex interface
-  this._read = () => null;
-  this._write = () => null;
-
-  if (interactive) {
-    logger.mute();
-  }
-
-  // Services
-  const PeeringService     = server.PeeringService;
-  const BlockchainService  = server.BlockchainService;
-
-  const contacterOptions = {
-    timeout: constants.NETWORK.SYNC_LONG_TIMEOUT
-  };
-
-  const dal = server.dal;
-
-  const logRemaining = (to) => co(function*() {
-    const lCurrent = yield dal.getCurrentBlockOrNull();
-    const localNumber = lCurrent ? lCurrent.number : -1;
-
-    if (to > 1 && speed > 0) {
-      const remain = (to - (localNumber + 1 + blocksApplied));
-      const secondsLeft = remain / speed;
-      const momDuration = moment.duration(secondsLeft*1000);
-      watcher.writeStatus('Remaining ' + momDuration.humanize() + '');
-    }
-  });
-
-  this.test = (to, chunkLen, askedCautious, nopeers) => co(function*() {
-    const peering = yield contacter.statics.fetchPeer(host, port, contacterOptions);
-    const peer = new Peer(peering);
-    const node = yield peer.connect();
-    return node.getCurrent();
-  });
-
-  this.sync = (to, chunkLen, askedCautious, nopeers, noShufflePeers) => co(function*() {
-
-    try {
-
-      const peering = yield contacter.statics.fetchPeer(host, port, contacterOptions);
-
-      let peer = new Peer(peering);
-      logger.info("Try with %s %s", peer.getURL(), peer.pubkey.substr(0, 6));
-      let node = yield peer.connect();
-      node.pubkey = peer.pubkey;
-      logger.info('Sync started.');
-
-      const fullSync = !to;
-
-      //============
-      // Blockchain headers
-      //============
-      logger.info('Getting remote blockchain info...');
-      watcher.writeStatus('Connecting to ' + host + '...');
-      const lCurrent = yield dal.getCurrentBlockOrNull();
-      const localNumber = lCurrent ? lCurrent.number : -1;
-      let rCurrent;
-      if (isNaN(to)) {
-        rCurrent = yield node.getCurrent();
-      } else {
-        rCurrent = yield node.getBlock(to);
-      }
-      to = rCurrent.number;
-
-      //=======
-      // Peers (just for P2P download)
-      //=======
-      let peers = [];
-      if (!nopeers && (to - localNumber > 1000)) { // P2P download if more than 1000 blocks
-        watcher.writeStatus('Peers...');
-        const merkle = yield dal.merkleForPeers();
-        const getPeers = node.getPeers.bind(node);
-        const json2 = yield getPeers({});
-        const rm = new NodesMerkle(json2);
-        if(rm.root() != merkle.root()){
-          const leavesToAdd = [];
-          const json = yield getPeers({ leaves: true });
-          _(json.leaves).forEach((leaf) => {
-            if(merkle.leaves().indexOf(leaf) == -1){
-              leavesToAdd.push(leaf);
-            }
-          });
-          peers = yield leavesToAdd.map((leaf) => co(function*() {
-            try {
-              const json3 = yield getPeers({ "leaf": leaf });
-              const jsonEntry = json3.leaf.value;
-              const endpoint = jsonEntry.endpoints[0];
-              watcher.writeStatus('Peer ' + endpoint);
-              return jsonEntry;
-            } catch (e) {
-              logger.warn("Could not get peer of leaf %s, continue...", leaf);
-              return null;
-            }
-          }));
-        }
-        else {
-          watcher.writeStatus('Peers already known');
-        }
-      }
-
-      if (!peers.length) {
-        peers.push(peer);
-      }
-      peers = peers.filter((p) => p);
-
-      //============
-      // Blockchain
-      //============
-      logger.info('Downloading Blockchain...');
-
-      // We use cautious mode if it is explicitly asked, or if it is not asked but the local blockchain has already been started
-      const cautious = (askedCautious === true || localNumber >= 0);
-      const shuffledPeers = noShufflePeers ? peers : _.shuffle(peers);
-      const downloader = new P2PDownloader(localNumber, to, rCurrent.hash, shuffledPeers, watcher);
-
-      downloader.start();
-
-      let lastPullBlock = null;
-
-      let bindex = [];
-      let iindex = [];
-      let mindex = [];
-      let cindex = [];
-      let sindex = [];
-      let currConf = {};
-      let bindexSize = 0;
-      let allBlocks = [];
-
-      let dao = pulling.abstractDao({
-
-        // Get the local blockchain current block
-        localCurrent: () => co(function*() {
-          if (cautious) {
-            return yield dal.getCurrentBlockOrNull();
-          } else {
-            if (lCurrent && !lastPullBlock) {
-              lastPullBlock = lCurrent;
-            }
-            return lastPullBlock;
-          }
-        }),
-
-        // Get the remote blockchain (bc) current block
-        remoteCurrent: (peer) => Promise.resolve(rCurrent),
-
-        // Get the remote peers to be pulled
-        remotePeers: () => co(function*() {
-          return [node];
-        }),
-
-        // Get the local block with the given number
-        getLocalBlock: (number) => dal.getBlock(number),
-
-        // Get the block with the given number from the given peer
-        getRemoteBlock: (thePeer, number) => co(function *() {
-          let block = null;
-          try {
-            block = yield node.getBlock(number);
-            Transaction.statics.cleanSignatories(block.transactions);
-          } catch (e) {
-            if (e.httpCode != 404) {
-              throw e;
-            }
-          }
-          return block;
-        }),
-
-        downloadBlocks: (thePeer, number) => co(function *() {
-          // Note: we don't care which particular peer the method asked for. We use the network instead.
-          const numberOffseted = number - (localNumber + 1);
-          const targetChunk = Math.floor(numberOffseted / CONST_BLOCKS_CHUNK);
-          // Return the download promise! Simple.
-          return downloader.getChunk(targetChunk);
-        }),
-
-
-        applyBranch: (blocks) => co(function *() {
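-          // Cautious mode submits every block through full validation; otherwise indexes are built in memory and blocks are saved in batches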
-          if (cautious) {
-            for (const block of blocks) {
-              if (block.number == 0) {
-                yield BlockchainService.saveParametersForRootBlock(block);
-                currConf = Block.statics.getConf(block);
-              }
-              yield dao.applyMainBranch(block);
-            }
-          } else {
-            const ctx = BlockchainService.getContext();
-            let blocksToSave = [];
-
-            for (const block of blocks) {
-              allBlocks.push(block);
-
-              if (block.number == 0) {
-                currConf = Block.statics.getConf(block);
-              }
-
-              if (block.number != to) {
-                blocksToSave.push(block);
-                const index = indexer.localIndex(block, currConf);
-                const local_iindex = indexer.iindex(index);
-                const local_cindex = indexer.cindex(index);
-                iindex = iindex.concat(local_iindex);
-                cindex = cindex.concat(local_cindex);
-                sindex = sindex.concat(indexer.sindex(index));
-                mindex = mindex.concat(indexer.mindex(index));
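-                // Compute this block's global index entry (HEAD) from the in-memory indexes only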
-                const HEAD = yield indexer.quickCompleteGlobalScope(block, currConf, bindex, iindex, mindex, cindex, sindex, {
-                  getBlock: (number) => {
-                    return Promise.resolve(allBlocks[number - 1]);
-                  },
-                  getBlockByBlockstamp: (blockstamp) => {
-                    return Promise.resolve(allBlocks[parseInt(blockstamp) - 1]);
-                  }
-                });
-                bindex.push(HEAD);
-
-                yield ctx.createNewcomers(local_iindex);
-
-                if (block.dividend
-                  || block.joiners.length
-                  || block.actives.length
-                  || block.revoked.length
-                  || block.excluded.length
-                  || block.certifications.length) {
-                  // Flush the INDEX (except bindex, which is handled separately)
-                  yield dal.mindexDAL.insertBatch(mindex);
-                  yield dal.iindexDAL.insertBatch(iindex);
-                  yield dal.sindexDAL.insertBatch(sindex);
-                  yield dal.cindexDAL.insertBatch(cindex);
-                  mindex = [];
-                  iindex = [];
-                  cindex = [];
-                  sindex = yield indexer.ruleIndexGenDividend(HEAD, dal);
-
-                  // Create/Update nodes in wotb
-                  yield ctx.updateMembers(block);
-
-                  // --> Update links
-                  yield dal.updateWotbLinks(local_cindex);
-                }
-
-                // Trim the bindex
-                bindexSize = [
-                  block.issuersCount,
-                  block.issuersFrame,
-                  conf.medianTimeBlocks,
-                  conf.dtDiffEval,
-                  CONST_BLOCKS_CHUNK
-                ].reduce((max, value) => {
-                  return Math.max(max, value);
-                }, 0);
-
-                if (bindexSize && bindex.length >= 2 * bindexSize) {
-                  // We trim it, not necessary to store it all (we already store the full blocks)
-                  bindex.splice(0, bindexSize);
-
-                  // Process trimming continuously to avoid a super long ending of sync
-                  yield dal.trimIndexes(bindex[0].number);
-                }
-              } else {
-
-                if (blocksToSave.length) {
-                  yield server.BlockchainService.saveBlocksInMainBranch(blocksToSave);
-                }
-                blocksToSave = [];
-
-                // Save the INDEX
-                yield dal.bindexDAL.insertBatch(bindex);
-                yield dal.mindexDAL.insertBatch(mindex);
-                yield dal.iindexDAL.insertBatch(iindex);
-                yield dal.sindexDAL.insertBatch(sindex);
-                yield dal.cindexDAL.insertBatch(cindex);
-
-                // Last block: cautious mode to trigger all the INDEX expiry mechanisms
-                yield dao.applyMainBranch(block);
-              }
-            }
-            if (blocksToSave.length) {
-              yield server.BlockchainService.saveBlocksInMainBranch(blocksToSave);
-            }
-          }
-          lastPullBlock = blocks[blocks.length - 1];
-          watcher.appliedPercent(Math.floor(blocks[blocks.length - 1].number / to * 100));
-          return true;
-        }),
-
-        applyMainBranch: (block) => co(function *() {
-          const addedBlock = yield server.BlockchainService.submitBlock(block, true, constants.FORK_ALLOWED);
-          server.streamPush(addedBlock);
-          watcher.appliedPercent(Math.floor(block.number / to * 100));
-        }),
-
-        // Possibly remove forks later on
-        removeForks: () => co(function*() {}),
-
-        // Tells whether the given peer is a member peer
-        isMemberPeer: (thePeer) => co(function *() {
-          let idty = yield dal.getWrittenIdtyByPubkey(thePeer.pubkey);
-          return (idty && idty.member) || false;
-        })
-      });
-
-      const logInterval = setInterval(() => logRemaining(to), EVAL_REMAINING_INTERVAL);
-      yield pulling.pull(conf, dao);
-
-      // Finished blocks
-      watcher.downloadPercent(100.0);
-      watcher.appliedPercent(100.0);
-
-      if (logInterval) {
-        clearInterval(logInterval);
-      }
-
-      // Save currency parameters given by root block
-      const rootBlock = yield server.dal.getBlock(0);
-      yield BlockchainService.saveParametersForRootBlock(rootBlock);
-      server.dal.blockDAL.cleanCache();
-
-      //=======
-      // Peers
-      //=======
-      if (!nopeers && fullSync) {
-        watcher.writeStatus('Peers...');
-        yield syncPeer(node);
-        const merkle = yield dal.merkleForPeers();
-        const getPeers = node.getPeers.bind(node);
-        const json2 = yield getPeers({});
-        const rm = new NodesMerkle(json2);
-        if(rm.root() != merkle.root()){
-          const leavesToAdd = [];
-          const json = yield getPeers({ leaves: true });
-          _(json.leaves).forEach((leaf) => {
-            if(merkle.leaves().indexOf(leaf) == -1){
-              leavesToAdd.push(leaf);
-            }
-          });
-          for (const leaf of leavesToAdd) {
-            try {
-              const json3 = yield getPeers({ "leaf": leaf });
-              const jsonEntry = json3.leaf.value;
-              const sign = json3.leaf.value.signature;
-              const entry = {};
-              ["version", "currency", "pubkey", "endpoints", "block"].forEach((key) => {
-                entry[key] = jsonEntry[key];
-              });
-              entry.signature = sign;
-              watcher.writeStatus('Peer ' + entry.pubkey);
-              yield PeeringService.submitP(entry, false, to === undefined);
-            } catch (e) {
-              logger.warn(e);
-            }
-          }
-        }
-        else {
-          watcher.writeStatus('Peers already known');
-        }
-      }
-
-      watcher.end();
-      that.push({ sync: true });
-      logger.info('Sync finished.');
-    } catch (err) {
-      that.push({ sync: false, msg: err });
-      err && watcher.writeStatus(err.message || (err.uerr && err.uerr.message) || String(err));
-      watcher.end();
-      throw err;
-    }
-  });
-
-  //============
-  // Peer
-  //============
-  function syncPeer (node) {
-
-    // Global sync vars
-    const remotePeer = new Peer({});
-    let remoteJsonPeer = {};
-
-    return co(function *() {
-      const json = yield node.getPeer();
-      remotePeer.copyValuesFrom(json);
-      const entry = remotePeer.getRaw();
-      const signature = dos2unix(remotePeer.signature);
-      // Parameters
-      if(!(entry && signature)){
-        throw 'Requires a peering entry + signature';
-      }
-
-      remoteJsonPeer = json;
-      remoteJsonPeer.pubkey = json.pubkey;
-      let signatureOK = PeeringService.checkPeerSignature(remoteJsonPeer);
-      if (!signatureOK) {
-        watcher.writeStatus('Wrong signature for peer #' + remoteJsonPeer.pubkey);
-      }
-      try {
-        yield PeeringService.submitP(remoteJsonPeer);
-      } catch (err) {
-        if (err.indexOf !== undefined && err.indexOf(constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.message) !== -1 && err != constants.ERROR.PEER.UNKNOWN_REFERENCE_BLOCK) {
-          throw err;
-        }
-      }
-    });
-  }
-}
-
-function NodesMerkle (json) {
-  
-  const that = this;
-  ["depth", "nodesCount", "leavesCount"].forEach(function (key) {
-    that[key] = json[key];
-  });
-
-  this.merkleRoot = json.root;
-
-  // var i = 0;
-  // this.levels = [];
-  // while(json && json.levels[i]){
-  //   this.levels.push(json.levels[i]);
-  //   i++;
-  // }
-
-  this.root = function () {
-    return this.merkleRoot;
-  };
-}
-
-function MultimeterWatcher() {
-
-  const multi = multimeter(process);
-  const charm = multi.charm;
-  charm.on('^C', process.exit);
-  charm.reset();
-
-  multi.write('Progress:\n\n');
-
-  multi.write("Download: \n");
-  const downloadBar = multi("Download: \n".length, 3, {
-    width : 20,
-    solid : {
-      text : '|',
-      foreground : 'white',
-      background : 'blue'
-    },
-    empty : { text : ' ' }
-  });
-
-  multi.write("Apply:    \n");
-  const appliedBar = multi("Apply:    \n".length, 4, {
-    width : 20,
-    solid : {
-      text : '|',
-      foreground : 'white',
-      background : 'blue'
-    },
-    empty : { text : ' ' }
-  });
-
-  multi.write('\nStatus: ');
-
-  let xPos, yPos;
-  charm.position( (x, y) => {
-    xPos = x;
-    yPos = y;
-  });
-
-  const writtens = [];
-  this.writeStatus = (str) => {
-    writtens.push(str);
-    //require('fs').writeFileSync('writtens.json', JSON.stringify(writtens));
-    charm
-      .position(xPos, yPos)
-      .erase('end')
-      .write(str)
-    ;
-  };
-
-  this.downloadPercent = (pct) => downloadBar.percent(pct);
-
-  this.appliedPercent = (pct) => appliedBar.percent(pct);
-
-  this.end = () => {
-    multi.write('\nAll done.\n');
-    multi.destroy();
-  };
-
-  downloadBar.percent(0);
-  appliedBar.percent(0);
-}
-
-function LoggerWatcher() {
-
-  let downPct = 0, appliedPct = 0, lastMsg;
-
-  this.showProgress = () => logger.info('Downloaded %s%, Applied %s%', downPct, appliedPct);
-
-  this.writeStatus = (str) => {
-    if (str != lastMsg) {
-      lastMsg = str;
-      logger.info(str);
-    }
-  };
-
-  this.downloadPercent = (pct) => {
-    if (pct !== undefined) {
-      let changed = pct > downPct;
-      downPct = pct;
-      if (changed) this.showProgress();
-    }
-    return downPct;
-  };
-
-  this.appliedPercent = (pct) => {
-    if (pct !== undefined) {
-      let changed = pct > appliedPct;
-      appliedPct = pct;
-      if (changed) this.showProgress();
-    }
-    return appliedPct;
-  };
-
-  this.end = () => {
-  };
-
-}
-
-function P2PDownloader(localNumber, to, toHash, peers, watcher) {
-
-  const that = this;
-  const PARALLEL_PER_CHUNK = 1;
-  const MAX_DELAY_PER_DOWNLOAD = 15000;
-  const NO_NODES_AVAILABLE = "No node available for download";
-  const TOO_LONG_TIME_DOWNLOAD = "No answer after " + MAX_DELAY_PER_DOWNLOAD + "ms, will retry download later.";
-  const nbBlocksToDownload = Math.max(0, to - localNumber);
-  const numberOfChunksToDownload = Math.ceil(nbBlocksToDownload / CONST_BLOCKS_CHUNK);
-  const chunks          = Array.from({ length: numberOfChunksToDownload }).map(() => null);
-  const processing      = Array.from({ length: numberOfChunksToDownload }).map(() => false);
-  const handler         = Array.from({ length: numberOfChunksToDownload }).map(() => null);
-  const resultsDeferers = Array.from({ length: numberOfChunksToDownload }).map(() => null);
-  const resultsData     = Array.from({ length: numberOfChunksToDownload }).map((unused, index) => new Promise((resolve, reject) => {
-    resultsDeferers[index] = { resolve, reject };
-  }));
-
-  // Create slots of download, in a ready stage
-  let downloadSlots = Math.min(INITIAL_DOWNLOAD_SLOTS, peers.length);
-
-  let nodes = {};
-
-  let nbDownloadsTried = 0, nbDownloading = 0;
-  let lastAvgDelay = MAX_DELAY_PER_DOWNLOAD;
-  let aSlotWasAdded = false;
-
-  /**
-   * Get a list of P2P nodes to use for download.
-   * If a node is not yet correctly initialized (we can test a node before considering it good for downloading), then
-   * this method will not return it.
-   */
-  const getP2Pcandidates = () => co(function*() {
-    let promises = peers.reduce((chosens, other, index) => {
-      if (!nodes[index]) {
-        // Create the node
-        let p = new Peer(peers[index]);
-        nodes[index] = makeQuerablePromise(co(function*() {
-          // We wait for the download process to be triggered
-          // yield downloadStarter;
-          // if (nodes[index - 1]) {
-          //   try { yield nodes[index - 1]; } catch (e) {}
-          // }
-          const node = yield p.connect();
-          // We initialize nodes with close to the best possible rating
-          node.tta = 1;
-          node.nbSuccess = 0;
-          return node;
-        }));
-        chosens.push(nodes[index]);
-      } else {
-        chosens.push(nodes[index]);
-      }
-      // Continue
-      return chosens;
-    }, []);
-    let candidates = yield promises;
-    candidates.forEach((c) => {
-      c.tta = c.tta || 0; // By default we say a node is super slow to answer
-      c.ttas = c.ttas || []; // Memorize the answer delays
-    });
-    if (candidates.length === 0) {
-      throw NO_NODES_AVAILABLE;
-    }
-    // We remove the nodes impossible to reach (timeout)
-    let withGoodDelays = _.filter(candidates, (c) => c.tta <= MAX_DELAY_PER_DOWNLOAD);
-    if (withGoodDelays.length === 0) {
-      // No node can be reached, we can try to lower the number of nodes on which we download
-      downloadSlots = Math.floor(downloadSlots / 2);
-      // We reinitialize the nodes
-      nodes = {};
-      // And try it all again
-      return getP2Pcandidates();
-    }
-    const parallelMax = Math.min(PARALLEL_PER_CHUNK, withGoodDelays.length);
-    withGoodDelays = _.sortBy(withGoodDelays, (c) => c.tta);
-    withGoodDelays = withGoodDelays.slice(0, parallelMax);
-    // We temporarily augment the tta to avoid querying the same node several times in parallel
-    withGoodDelays.forEach((c) => c.tta = MAX_DELAY_PER_DOWNLOAD);
-    return withGoodDelays;
-  });
-
-  /**
-   * Download a chunk of blocks using P2P network through BMA API.
-   * @param from The starting block to download
-   * @param count The number of blocks to download.
-   * @param chunkIndex The # of the chunk in local algorithm (logging purposes only)
-   */
-  const p2pDownload = (from, count, chunkIndex) => co(function*() {
-    let candidates = yield getP2Pcandidates();
-    // Book the nodes
-    return yield raceOrCancelIfTimeout(MAX_DELAY_PER_DOWNLOAD, candidates.map((node) => co(function*() {
-      try {
-        const start = Date.now();
-        handler[chunkIndex] = node;
-        node.downloading = true;
-        nbDownloading++;
-        watcher.writeStatus('Getting chunk #' + chunkIndex + '/' + (numberOfChunksToDownload - 1) + ' from ' + from + ' to ' + (from + count - 1) + ' on peer ' + [node.host, node.port].join(':'));
-        let blocks = yield node.getBlocks(count, from);
-        node.ttas.push(Date.now() - start);
-        // Only keep a flow of 5 ttas for the node
-        if (node.ttas.length > 5) node.ttas.shift();
-        // Average time to answer
-        node.tta = Math.round(node.ttas.reduce((sum, tta) => sum + tta, 0) / node.ttas.length);
-        watcher.writeStatus('GOT chunk #' + chunkIndex + '/' + (numberOfChunksToDownload - 1) + ' from ' + from + ' to ' + (from + count - 1) + ' on peer ' + [node.host, node.port].join(':'));
-        node.nbSuccess++;
-
-        // Opening/Closing slots depending on the Internet connection
-        if (slots.length == downloadSlots) {
-          const peers = yield Object.values(nodes);
-          const downloading = _.filter(peers, (p) => p.downloading && p.ttas.length);
-          const currentAvgDelay = downloading.reduce((sum, c) => {
-            const tta = Math.round(c.ttas.reduce((sum, tta) => sum + tta, 0) / c.ttas.length);
-            return sum + tta;
-          }, 0) / downloading.length;
-          // Check the impact of an added node (not first time)
-          if (!aSlotWasAdded) {
-            // We try to add a node
-            const newValue = Math.min(peers.length, downloadSlots + 1);
-            if (newValue !== downloadSlots) {
-              downloadSlots = newValue;
-              aSlotWasAdded = true;
-              logger.info('AUGMENTED DOWNLOAD SLOTS! Now has %s slots', downloadSlots);
-            }
-          } else {
-            aSlotWasAdded = false;
-            const decelerationPercent = currentAvgDelay / lastAvgDelay - 1;
-            const addedNodePercent = 1 / nbDownloading;
-            logger.info('Deceleration = %s (%s/%s), AddedNodePercent = %s', decelerationPercent, currentAvgDelay, lastAvgDelay, addedNodePercent);
-            if (decelerationPercent > addedNodePercent) {
-              downloadSlots = Math.max(1, downloadSlots - 1); // We reduce the number of slots, but we keep at least 1 slot
-              logger.info('REDUCED DOWNLOAD SLOT! Now has %s slots', downloadSlots);
-            }
-          }
-          lastAvgDelay = currentAvgDelay;
-        }
-
-        nbDownloadsTried++;
-        nbDownloading--;
-        node.downloading = false;
-
-        return blocks;
-      } catch (e) {
-        nbDownloading--;
-        node.downloading = false;
-        nbDownloadsTried++;
-        node.ttas.push(MAX_DELAY_PER_DOWNLOAD + 1); // No more ask on this node
-        // Average time to answer
-        node.tta = Math.round(node.ttas.reduce((sum, tta) => sum + tta, 0) / node.ttas.length);
-        throw e;
-      }
-    })));
-  });
-
-  /**
-   * Function for downloading a chunk by its number.
-   * @param index Number of the chunk.
-   */
-  const downloadChunk = (index) => co(function*() {
-    // The algorithm to download a chunk
-    const from = localNumber + 1 + index * CONST_BLOCKS_CHUNK;
-    let count = CONST_BLOCKS_CHUNK;
-    if (index == numberOfChunksToDownload - 1) {
-      count = nbBlocksToDownload % CONST_BLOCKS_CHUNK || CONST_BLOCKS_CHUNK;
-    }
-    try {
-      return yield p2pDownload(from, count, index);
-    } catch (e) {
-      logger.error(e);
-      return downloadChunk(index);
-    }
-  });
-
-  const slots = [];
-  const downloads = {};
-
-  /**
-   * Utility function that starts a race between promises but cancels it if no answer is found before `timeout`
-   * @param timeout
-   * @param races
-   * @returns {Promise}
-   */
-  const raceOrCancelIfTimeout = (timeout, races) => {
-    return Promise.race([
-      // Process the race, but cancel it if we don't get an answer quickly enough
-      new Promise((resolve, reject) => {
-        setTimeout(() => {
-          reject(TOO_LONG_TIME_DOWNLOAD);
-        }, MAX_DELAY_PER_DOWNLOAD);
-      })
-    ].concat(races));
-  };
-
-  /**
-   * Triggers for starting the download.
-   */
-  let startResolver;
-  const downloadStarter = new Promise((resolve, reject) => startResolver = resolve);
-
-  const chainsCorrectly = (blocks, index) => co(function*() {
-
-    if (!blocks.length) {
-      logger.error('No block was downloaded');
-      return false;
-    }
-
-    for (let i = blocks.length - 1; i > 0; i--) {
-      if (blocks[i].number !== blocks[i - 1].number + 1 || blocks[i].previousHash !== blocks[i - 1].hash) {
-        logger.error("Blocks do not chaing correctly", blocks[i].number);
-        return false;
-      }
-      if (blocks[i].version != blocks[i - 1].version && blocks[i].version != blocks[i - 1].version + 1) {
-        logger.error("Version cannot be downgraded", blocks[i].number);
-        return false;
-      }
-    }
-
-    // Check hashes
-    for (let i = 0; i < blocks.length; i++) {
-      // Note: the hash, in Duniter, is made only on the **signing part** of the block: InnerHash + Nonce
-      if (blocks[i].version >= 6) {
-        for (const tx of blocks[i].transactions) {
-          tx.version = constants.TRANSACTION_VERSION;
-        }
-      }
-      if (blocks[i].inner_hash !== hashf(rawer.getBlockInnerPart(blocks[i])).toUpperCase()) {
-        logger.error("Inner hash of block#%s from %s does not match", blocks[i].number);
-        return false;
-      }
-      if (blocks[i].hash !== hashf(rawer.getBlockInnerHashAndNonceWithSignature(blocks[i])).toUpperCase()) {
-        logger.error("Hash of block#%s from %s does not match", blocks[i].number);
-        return false;
-      }
-    }
-
-    const lastBlockOfChunk = blocks[blocks.length - 1];
-    if (lastBlockOfChunk.number == to && lastBlockOfChunk.hash != toHash) {
-      // Top chunk
-      logger.error('Top block is not on the right chain');
-      return false;
-    } else {
-      // Chaining between downloads
-      const previousChunk = yield that.getChunk(index + 1);
-      const blockN = blocks[blocks.length - 1]; // The block n
-      const blockNp1 = previousChunk[0]; // The block n + 1
-      if (blockN && blockNp1 && (blockN.number + 1 !== blockNp1.number || blockN.hash != blockNp1.previousHash)) {
-        logger.error('Chunk is not referenced by the upper one');
-        return false;
-      }
-    }
-    return true;
-  });
-
-  /**
-   * Download worker
-   * @type {*|Promise} When finished.
-   */
-  co(function*() {
-    try {
-      yield downloadStarter;
-      let doneCount = 0, resolvedCount = 0;
-      while (resolvedCount < chunks.length) {
-        doneCount = 0;
-        resolvedCount = 0;
-        // Add as many downloads as possible, and count the ones already done
-        for (let i = chunks.length - 1; i >= 0; i--) {
-          if (chunks[i] === null && !processing[i] && slots.indexOf(i) === -1 && slots.length < downloadSlots) {
-            slots.push(i);
-            processing[i] = true;
-            downloads[i] = makeQuerablePromise(downloadChunk(i)); // Starts a new download
-          } else if (downloads[i] && downloads[i].isFulfilled() && processing[i]) {
-            doneCount++;
-          }
-          // We count the number of perfectly downloaded & validated chunks
-          if (chunks[i]) {
-            resolvedCount++;
-          }
-        }
-        watcher.downloadPercent(Math.round(doneCount / numberOfChunksToDownload * 100));
-        let races = slots.map((i) => downloads[i]);
-        if (races.length) {
-          try {
-            yield raceOrCancelIfTimeout(MAX_DELAY_PER_DOWNLOAD, races);
-          } catch (e) {
-            logger.warn(e);
-          }
-          for (let i = 0; i < slots.length; i++) {
-            // We must know the index of what resolved/rejected to free the slot
-            const doneIndex = slots.reduce((found, realIndex, index) => {
-              if (found !== null) return found;
-              if (downloads[realIndex].isFulfilled()) return index;
-              return null;
-            }, null);
-            if (doneIndex !== null) {
-              const realIndex = slots[doneIndex];
-              if (downloads[realIndex].isResolved()) {
-                const p = new Promise((resolve, reject) => co(function*() {
-                  const blocks = yield downloads[realIndex];
-                  if (realIndex < chunks.length - 1) {
-                    // We must wait for NEXT blocks to be STRONGLY validated before going any further, otherwise we
-                    // could be on the wrong chain
-                    yield that.getChunk(realIndex + 1);
-                  }
-                  const chainsWell = yield chainsCorrectly(blocks, realIndex);
-                  if (chainsWell) {
-                    // Chunk is COMPLETE
-                    logger.warn("Chunk #%s is COMPLETE from %s", realIndex, [handler[realIndex].host, handler[realIndex].port].join(':'));
-                    chunks[realIndex] = blocks;
-                    resultsDeferers[realIndex].resolve(chunks[realIndex]);
-                  } else {
-                    logger.warn("Chunk #%s DOES NOT CHAIN CORRECTLY from %s", realIndex, [handler[realIndex].host, handler[realIndex].port].join(':'));
-                    // Penalty on this node to avoid further usage
-                    handler[realIndex].tta += MAX_DELAY_PER_DOWNLOAD;
-                    // Need a retry
-                    processing[realIndex] = false;
-                  }
-                }));
-              } else {
-                processing[realIndex] = false; // Need a retry
-              }
-              slots.splice(doneIndex, 1);
-            }
-          }
-        }
-        // Wait a bit
-        yield new Promise((resolve, reject) => setTimeout(resolve, 10));
-      }
-    } catch (e) {
-      logger.error('Fatal error in the downloader:');
-      logger.error(e);
-    }
-  });
-
-  /**
-   * PUBLIC API
-   */
-
-  /***
-   * Triggers the downloading
-   */
-  this.start  = () => startResolver();
-
-  /***
-   * Promises a chunk to be downloaded and returned
-   * @param index The number of the chunk to download & return
-   */
-  this.getChunk = (index) => resultsData[index] || Promise.resolve([]);
-}
-
-util.inherits(Synchroniser, stream.Duplex);
diff --git a/app/lib/system/dos2unix.js b/app/lib/system/dos2unix.js
deleted file mode 100644
index 32fdce59314af70c76489f3140ba64bf76fd916b..0000000000000000000000000000000000000000
--- a/app/lib/system/dos2unix.js
+++ /dev/null
@@ -1,23 +0,0 @@
-"use strict";
-const util     = require('util');
-const stream   = require('stream');
-
-module.exports = function (str) {
-  if (str)
-    return dos2unix(str);
-  else
-    return new Dos2UnixStream();
-};
-
-const dos2unix = (str) => str.replace(/\r\n/g, '\n');
-
-function Dos2UnixStream () {
-  stream.Transform.apply(this);
-
-  this._write = function (str, enc, done) {
-    this.push(dos2unix(str.toString()));
-    done();
-  }
-}
-
-util.inherits(Dos2UnixStream, stream.Transform);
diff --git a/app/lib/system/limiter.js b/app/lib/system/limiter.js
deleted file mode 100644
index 29959e83dbb7063e0733f724d5e14c46ce2cb479..0000000000000000000000000000000000000000
--- a/app/lib/system/limiter.js
+++ /dev/null
@@ -1,114 +0,0 @@
-"use strict";
-
-const A_MINUTE = 60 * 1000;
-const A_SECOND = 1000;
-
-const Limiter = {
-
-  /**
-   * Tells whether the quota is reached at the current time or not.
-   */
-  canAnswerNow() {
-    // Rapid decision first.
-    // Note: we suppose limitPerSecond < limitPerMinute
-    if (this.reqsSecLen < this.limitPerSecond && this.reqsMinLen < this.limitPerMinute) {
-      return true;
-    }
-    this.updateRequests();
-    return this.reqsSecLen < this.limitPerSecond && this.reqsMinLen < this.limitPerMinute;
-  },
-
-  /**
-   * Filter the current request stock to remove entries that are too old
-   */
-  updateRequests() {
-    // Clean current requests stock and make the test again
-    const now = Date.now();
-    let i = 0, reqs = this.reqsMin, len = this.reqsMinLen;
-    // Reinit specific indicators
-    this.reqsSec = [];
-    this.reqsMin = [];
-    while (i < len) {
-      const duration = now - reqs[i];
-      if (duration < A_SECOND) {
-        this.reqsSec.push(reqs[i]);
-      }
-      if (duration < A_MINUTE) {
-        this.reqsMin.push(reqs[i]);
-      }
-      i++;
-    }
-    this.reqsSecLen = this.reqsSec.length;
-    this.reqsMinLen = this.reqsMin.length;
-  },
-  
-  processRequest() {
-    const now = Date.now();
-    this.reqsSec.push(now);
-    this.reqsSecLen++;
-    this.reqsMin.push(now);
-    this.reqsMinLen++;
-  }
-};
-
-let HIGH_USAGE_STRATEGY = Object.create(Limiter);
-HIGH_USAGE_STRATEGY.limitPerSecond = 10;
-HIGH_USAGE_STRATEGY.limitPerMinute = 300;
-
-let VERY_HIGH_USAGE_STRATEGY = Object.create(Limiter);
-VERY_HIGH_USAGE_STRATEGY.limitPerSecond = 30;
-VERY_HIGH_USAGE_STRATEGY.limitPerMinute = 30 * 60; // Limit is only per second
-
-let TEST_STRATEGY = Object.create(Limiter);
-TEST_STRATEGY.limitPerSecond = 5;
-TEST_STRATEGY.limitPerMinute = 6;
-
-let NO_LIMIT_STRATEGY = Object.create(Limiter);
-NO_LIMIT_STRATEGY.limitPerSecond = 1000000;
-NO_LIMIT_STRATEGY.limitPerMinute = 1000000 * 60;
-
-let disableLimits = false;
-
-module.exports = {
-  
-  limitAsHighUsage() {
-    return disableLimits ? createObject(NO_LIMIT_STRATEGY) : createObject(HIGH_USAGE_STRATEGY);
-  },
-
-  limitAsVeryHighUsage() {
-    return disableLimits ? createObject(NO_LIMIT_STRATEGY) : createObject(VERY_HIGH_USAGE_STRATEGY);
-  },
-
-  limitAsUnlimited() {
-    return createObject(NO_LIMIT_STRATEGY);
-  },
-
-  limitAsTest() {
-    return disableLimits ? createObject(NO_LIMIT_STRATEGY) : createObject(TEST_STRATEGY);
-  },
-
-  noLimit() {
-    disableLimits = true;
-  },
-
-  withLimit() {
-    disableLimits = false;
-  }
-};
-
-function createObject(strategy) {
-
-  const obj = Object.create(strategy);
-
-  // Stock of request times
-  obj.reqsSec = [];
-
-    // The length of reqs.
-    // It is better to have it instead of calling reqs.length
-  obj.reqsSecLen = 0;
-
-    // Minute specific
-  obj.reqsMin = [];
-  obj.reqsMinLen = 0;
-  return obj;
-}
\ No newline at end of file
diff --git a/app/lib/system/network.js b/app/lib/system/network.js
index aab7c95518aa5dc96062449c9adaae5a2267a688..b14be0f28efe0f056ddf9fd2cccaad35d727a0da 100644
--- a/app/lib/system/network.js
+++ b/app/lib/system/network.js
@@ -2,309 +2,26 @@
 
 const co = require('co');
 const os = require('os');
-const Q = require('q');
 const _ = require('underscore');
-const upnp = require('nnupnp');
-const ddos = require('ddos');
-const http = require('http');
-const express = require('express');
-const morgan = require('morgan');
-const errorhandler = require('errorhandler');
-const bodyParser = require('body-parser');
-const cors = require('cors');
-const fileUpload = require('express-fileupload');
-const constants = require('../constants');
-const sanitize = require('../streams/sanitize');
 const logger = require('../logger')('network');
 
+const bmapiMethods = require('duniter-bma').duniter.methods;
+
 module.exports = {
 
   getEndpoint: getEndpoint,
-  getBestLocalIPv4: () => getBestLocal('IPv4'),
-  getBestLocalIPv6: function getFirstGlobalIPv6() {
-    const osInterfaces = module.exports.listInterfaces();
-    for (let netInterface of osInterfaces) {
-      const addresses = netInterface.addresses;
-      const filtered = _(addresses).where({family: 'IPv6', scopeid: 0, internal: false });
-      const filtered2 = _(filtered).filter((address) => !address.address.match(/^fe80/) && !address.address.match(/^::1/));
-      if (filtered2[0]) {
-        return filtered2[0].address;
-      }
-    }
-    return null;
-  },
-  getLANIPv4: () => getLAN('IPv4'),
-  getLANIPv6: () => getLAN('IPv6'),
-
-  listInterfaces: () => {
-    const netInterfaces = os.networkInterfaces();
-    const keys = _.keys(netInterfaces);
-    const res = [];
-    for (const name of keys) {
-      res.push({
-        name: name,
-        addresses: netInterfaces[name]
-      });
-    }
-    return res;
-  },
-
-  upnpConf: (noupnp) => co(function *() {
-    const conf = {};
-    const client = upnp.createClient();
-    // Look for 2 random ports
-    const privatePort = module.exports.getRandomPort(conf);
-    const publicPort = privatePort;
-    logger.info('Checking UPnP features...');
-    if (noupnp) {
-      throw Error('No UPnP');
-    }
-    const publicIP = yield Q.nbind(client.externalIp, client)();
-    yield Q.nbind(client.portMapping, client)({
-      public: publicPort,
-      private: privatePort,
-      ttl: 120
-    });
-    const privateIP = yield Q.Promise((resolve, reject) => {
-      client.findGateway((err, res, localIP) => {
-        if (err) return reject(err);
-        resolve(localIP);
-      });
-    });
-    conf.remoteipv4 = publicIP.match(constants.IPV4_REGEXP) ? publicIP : null;
-    conf.remoteport = publicPort;
-    conf.port = privatePort;
-    conf.ipv4 = privateIP.match(constants.IPV4_REGEXP) ? privateIP : null;
-    return conf;
-  }),
-
-  getRandomPort: (conf) => {
-    if (conf && conf.remoteport) {
-      return conf.remoteport;
-    } else {
-      return ~~(Math.random() * (65536 - constants.NETWORK.PORT.START)) + constants.NETWORK.PORT.START;
-    }
-  },
-
-  createServersAndListen: (name, interfaces, httpLogs, staticPath, routingCallback, listenWebSocket, enableFileUpload) => co(function *() {
-
-    const app = express();
-
-    // all environments
-    if (httpLogs) {
-      app.use(morgan('\x1b[90m:remote-addr - :method :url HTTP/:http-version :status :res[content-length] - :response-time ms\x1b[0m', {
-        stream: {
-          write: function(message){
-            message && logger.trace(message.replace(/\n$/,''));
-          }
-        }
-      }));
-    }
-
-    // DDOS protection
-    const whitelist = interfaces.map(i => i.ip);
-    if (whitelist.indexOf('127.0.0.1') === -1) {
-      whitelist.push('127.0.0.1');
-    }
-    const ddosInstance = new ddos({ whitelist, silentStart: true });
-    app.use(ddosInstance.express);
-
-    // CORS for **any** HTTP request
-    app.use(cors());
-
-    if (enableFileUpload) {
-      // File upload for backup API
-      app.use(fileUpload());
-    }
-
-    app.use(bodyParser.urlencoded({
-      extended: true
-    }));
-    app.use(bodyParser.json());
-
-    // development only
-    if (app.get('env') == 'development') {
-      app.use(errorhandler());
-    }
-
-    routingCallback(app, {
-      httpGET:     (uri, promiseFunc, dtoContract, limiter) => handleRequest(app.get.bind(app), uri, promiseFunc, dtoContract, limiter),
-      httpPOST:    (uri, promiseFunc, dtoContract, limiter) => handleRequest(app.post.bind(app), uri, promiseFunc, dtoContract, limiter),
-      httpGETFile: (uri, promiseFunc, dtoContract, limiter) => handleFileRequest(app.get.bind(app), uri, promiseFunc, limiter)
-    });
-
-    if (staticPath) {
-      app.use(express.static(staticPath));
-    }
-
-    const httpServers = interfaces.map(() => {
-      const httpServer = http.createServer(app);
-      const sockets = {};
-      let nextSocketId = 0;
-      httpServer.on('connection', (socket) => {
-        const socketId = nextSocketId++;
-        sockets[socketId] = socket;
-        //logger.debug('socket %s opened', socketId);
-
-        socket.on('close', () => {
-          //logger.debug('socket %s closed', socketId);
-          delete sockets[socketId];
-        });
-      });
-      httpServer.on('error', (err) => {
-        httpServer.errorPropagates(err);
-      });
-      listenWebSocket && listenWebSocket(httpServer);
-      return {
-        http: httpServer,
-        closeSockets: () => {
-          _.keys(sockets).map((socketId) => {
-            sockets[socketId].destroy();
-          });
-        }
-      };
-    });
+  getBestLocalIPv4: bmapiMethods.getBestLocalIPv4,
+  getBestLocalIPv6: bmapiMethods.getBestLocalIPv6,
 
-    // May be removed when using Node 5.x where httpServer.listening boolean exists
-    const listenings = interfaces.map(() => false);
+  listInterfaces: bmapiMethods.listInterfaces,
 
-    if (httpServers.length == 0){
-      throw 'Duniter does not have any interface to listen to.';
-    }
+  upnpConf: (noupnp) => bmapiMethods.upnpConf(noupnp, logger),
 
-    // Return API
-    return {
+  getRandomPort: bmapiMethods.getRandomPort,
 
-      getDDOS: () => ddosInstance,
-
-      closeConnections: () => co(function *() {
-        for (let i = 0, len = httpServers.length; i < len; i++) {
-          const httpServer = httpServers[i].http;
-          const isListening = listenings[i];
-          if (isListening) {
-            listenings[i] = false;
-            logger.info(name + ' stop listening');
-            yield Q.Promise((resolve, reject) => {
-              httpServer.errorPropagates((err) => {
-                reject(err);
-              });
-              httpServers[i].closeSockets();
-              httpServer.close((err) => {
-                err && logger.error(err.stack || err);
-                resolve();
-              });
-            });
-          }
-        }
-        return [];
-      }),
-
-      openConnections: () => co(function *() {
-        for (let i = 0, len = httpServers.length; i < len; i++) {
-          const httpServer = httpServers[i].http;
-          const isListening = listenings[i];
-          if (!isListening) {
-            const netInterface = interfaces[i].ip;
-            const port = interfaces[i].port;
-            try {
-              yield Q.Promise((resolve, reject) => {
-                // Weird that such a hack is needed to catch an exception...
-                httpServer.errorPropagates = function(err) {
-                  reject(err);
-                };
-                //httpServer.on('listening', resolve.bind(this, httpServer));
-                httpServer.listen(port, netInterface, (err) => {
-                  if (err) return reject(err);
-                  listenings[i] = true;
-                  resolve(httpServer);
-                });
-              });
-              logger.info(name + ' listening on http://' + (netInterface.match(/:/) ? '[' + netInterface + ']' : netInterface) + ':' + port);
-            } catch (e) {
-              logger.warn('Could NOT listen to http://' + netInterface + ':' + port);
-              logger.warn(e);
-            }
-          }
-        }
-        return [];
-      })
-    };
-  })
-};
-
-const handleRequest = (method, uri, promiseFunc, dtoContract, theLimiter) => {
-  const limiter = theLimiter || require('../system/limiter').limitAsUnlimited();
-  method(uri, function(req, res) {
-    res.set('Access-Control-Allow-Origin', '*');
-    res.type('application/json');
-    co(function *() {
-      try {
-        if (!limiter.canAnswerNow()) {
-          throw constants.ERRORS.HTTP_LIMITATION;
-        }
-        limiter.processRequest();
-        let result = yield promiseFunc(req);
-        // Ensure the format of the answer
-        result = sanitize(result, dtoContract);
-        // HTTP answer
-        res.status(200).send(JSON.stringify(result, null, "  "));
-      } catch (e) {
-        let error = getResultingError(e);
-        // HTTP error
-        res.status(error.httpCode).send(JSON.stringify(error.uerr, null, "  "));
-      }
-    });
-  });
+  createServersAndListen: require('duniter-bma').duniter.methods.createServersAndListen
 };
 
-const handleFileRequest = (method, uri, promiseFunc, theLimiter) => {
-  const limiter = theLimiter || require('../system/limiter').limitAsUnlimited();
-  method(uri, function(req, res) {
-    res.set('Access-Control-Allow-Origin', '*');
-    co(function *() {
-      try {
-        if (!limiter.canAnswerNow()) {
-          throw constants.ERRORS.HTTP_LIMITATION;
-        }
-        limiter.processRequest();
-        let fileStream = yield promiseFunc(req);
-        // HTTP answer
-        fileStream.pipe(res);
-      } catch (e) {
-        let error = getResultingError(e);
-        // HTTP error
-        res.status(error.httpCode).send(JSON.stringify(error.uerr, null, "  "));
-        throw e
-      }
-    });
-  });
-};
-
-function getResultingError(e) {
-  // Default is 500 unknown error
-  let error = constants.ERRORS.UNKNOWN;
-  if (e) {
-    // Print eventual stack trace
-    typeof e == 'string' && logger.error(e);
-    e.stack && logger.error(e.stack);
-    e.message && logger.warn(e.message);
-    // BusinessException
-    if (e.uerr) {
-      error = e;
-    } else {
-      const cp = constants.ERRORS.UNHANDLED;
-      error = {
-        httpCode: cp.httpCode,
-        uerr: {
-          ucode: cp.uerr.ucode,
-          message: e.message || e || error.uerr.message
-        }
-      };
-    }
-  }
-  return error;
-}
-
 function getEndpoint(theConf) {
   let endpoint = 'BASIC_MERKLED_API';
   if (theConf.remotehost) {
@@ -321,62 +38,3 @@ function getEndpoint(theConf) {
   }
   return endpoint;
 }
-
-function getBestLocal(family) {
-  let netInterfaces = os.networkInterfaces();
-  let keys = _.keys(netInterfaces);
-  let res = [];
-  for (const name of keys) {
-    let addresses = netInterfaces[name];
-    for (const addr of addresses) {
-      if (!family || addr.family == family) {
-        res.push({
-          name: name,
-          value: addr.address
-        });
-      }
-    }
-  }
-  const interfacePriorityRegCatcher = [
-    /^tun\d/,
-    /^enp\ds\d/,
-    /^enp\ds\df\d/,
-    /^eth\d/,
-    /^Ethernet/,
-    /^wlp\ds\d/,
-    /^wlan\d/,
-    /^Wi-Fi/,
-    /^lo/,
-    /^Loopback/,
-    /^None/
-  ];
-  const best = _.sortBy(res, function(entry) {
-    for(let priority in interfacePriorityRegCatcher){
-      if (entry.name.match(interfacePriorityRegCatcher[priority])) return priority;
-    }
-    return interfacePriorityRegCatcher.length;
-  })[0];
-  return (best && best.value) || "";
-}
-
-function getLAN(family) {
-  let netInterfaces = os.networkInterfaces();
-  let keys = _.keys(netInterfaces);
-  let res = [];
-  for (const name of keys) {
-    let addresses = netInterfaces[name];
-    for (const addr of addresses) {
-      if ((addr.family == "IPv4" && family == "IPv4"
-          && addr.address != "127.0.0.1" && addr.address != "lo" && addr.address != "localhost")
-          || (addr.family == "IPv6" && family == "IPv6"
-          && addr.address != "::1" && addr.address != "lo" && addr.address != "localhost"))
-      {
-        res.push({
-          name: name,
-          value: addr.address
-        });
-      }
-    }
-  }
-  return res;
-}
diff --git a/app/lib/system/unix2dos.js b/app/lib/system/unix2dos.js
index e52b0fadcdf711c22d526210fe7f59af0b655d93..980e2af01b76ed68cdbbb2f28b37b2a597f57bf2 100644
--- a/app/lib/system/unix2dos.js
+++ b/app/lib/system/unix2dos.js
@@ -1,5 +1,5 @@
 "use strict";
-const dos2unix = require('./dos2unix');
+const dos2unix = require('duniter-common').dos2unix;
 const util     = require('util');
 const stream   = require('stream');
 
diff --git a/app/lib/system/upnp.js b/app/lib/system/upnp.js
deleted file mode 100644
index 5a562290c408a96568f4bc0b8ead259acf2f4b80..0000000000000000000000000000000000000000
--- a/app/lib/system/upnp.js
+++ /dev/null
@@ -1,66 +0,0 @@
-const upnp = require('nnupnp');
-const async = require('async');
-const constants  = require('../constants');
-const logger = require('../logger')('upnp');
-const co = require('co');
-const Q = require('q');
-
-module.exports = function (localPort, remotePort) {
-  "use strict";
-  return co(function *() {
-    logger.info('UPnP: configuring...');
-    return co(function *() {
-      try {
-        yield openPort(localPort, remotePort);
-      } catch (e) {
-        const client = upnp.createClient();
-        try {
-          yield Q.nbind(client.externalIp, client)();
-        } catch (err) {
-          if (err && err.message == 'timeout') {
-            throw 'No UPnP gateway found: your node won\'t be reachable from the Internet. Use --noupnp option to avoid this message.'
-          }
-          throw err;
-        } finally {
-          client.close();
-        }
-      }
-      let interval, upnpService = {
-        openPort: () => {
-          return openPort(localPort, remotePort);
-        },
-        startRegular: () => {
-          upnpService.stopRegular();
-          // Update UPnP IGD every INTERVAL seconds
-          interval = setInterval(async.apply(openPort, localPort, remotePort), 1000 * constants.NETWORK.UPNP.INTERVAL);
-        },
-        stopRegular: () => {
-          if (interval) {
-            clearInterval(interval);
-          }
-        }
-      };
-      return upnpService;
-    });
-  });
-};
-
-function openPort (localPort, remotePort) {
-  "use strict";
-  return Q.Promise(function(resolve, reject){
-    logger.trace('UPnP: mapping external port %s to local %s...', remotePort, localPort);
-    const client = upnp.createClient();
-    client.portMapping({
-      'public': parseInt(remotePort),
-      'private': parseInt(localPort),
-      'ttl': constants.NETWORK.UPNP.TTL
-    }, function(err) {
-      client.close();
-      if (err) {
-        logger.warn(err);
-        return reject(err);
-      }
-      resolve();
-    });
-  });
-}
\ No newline at end of file
diff --git a/app/lib/ucp/buid.js b/app/lib/ucp/buid.js
deleted file mode 100644
index 561ae1687ef534ddb84cc83610a37fdb15063fb0..0000000000000000000000000000000000000000
--- a/app/lib/ucp/buid.js
+++ /dev/null
@@ -1,36 +0,0 @@
-"use strict";
-const hashf = require('./hashf');
-const constants = require('../constants');
-
-const buidFunctions = function(number, hash) {
-  if (arguments.length === 2) {
-    return [number, hash].join('-');
-  }
-  if (arguments[0]) {
-    return [arguments[0].number, arguments[0].hash].join('-');
-  }
-  return '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855';
-};
-
-buidFunctions.fromTS = (line) => line.match(/TS:(.*)/)[1];
-buidFunctions.fromIdty = (idty) => this(idty.ts_number, idty.ts_hash);
-
-module.exports = {
-
-  format: {
-
-    hashf: (value) => hashf(String(value)).toUpperCase(),
-
-    isBuid: (value) => (typeof value === 'string') && value.match(constants.BLOCK_UID) ? true : false,
-
-    buid: buidFunctions,
-
-    obuid: (line) => {
-      let sp = this.buid.fromTS(line).split('-');
-      return {
-        number: sp[0],
-        hash: sp[1]
-      };
-    }
-  }
-};
diff --git a/app/lib/ucp/hashf.js b/app/lib/ucp/hashf.js
deleted file mode 100644
index f1a1bd2ee50e8f580c866ca2b602c74d796621bc..0000000000000000000000000000000000000000
--- a/app/lib/ucp/hashf.js
+++ /dev/null
@@ -1,8 +0,0 @@
-"use strict";
-
-module.exports = function (str){
-  return require("crypto")
-    .createHash("sha256")
-    .update(str)
-    .digest("hex");
-};
diff --git a/app/lib/ucp/rawer.js b/app/lib/ucp/rawer.js
deleted file mode 100644
index 34ba78e7f571715e4e31053b9469f00f48abba82..0000000000000000000000000000000000000000
--- a/app/lib/ucp/rawer.js
+++ /dev/null
@@ -1,228 +0,0 @@
-"use strict";
-let dos2unix = require('../system/dos2unix');
-let constants = require('../constants');
-
-module.exports = new function() {
-
-  this.getOfficialIdentity = (json) => {
-    let raw = "";
-    raw += "Version: " + (json.version || constants.DOCUMENTS_VERSION) + "\n";
-    raw += "Type: Identity\n";
-    raw += "Currency: " + json.currency + "\n";
-    raw += "Issuer: " + (json.issuer || json.pubkey) + "\n";
-    raw += "UniqueID: " + json.uid + '\n';
-    raw += "Timestamp: " + json.buid + '\n';
-    if (json.sig) {
-      raw += json.sig + '\n';
-    }
-    return dos2unix(raw);
-  };
-
-  this.getOfficialCertification = (json) => {
-    let raw = getNormalHeader('Certification', json);
-    raw += "IdtyIssuer: " + json.idty_issuer + '\n';
-    raw += "IdtyUniqueID: " + json.idty_uid + '\n';
-    raw += "IdtyTimestamp: " + json.idty_buid + '\n';
-    raw += "IdtySignature: " + json.idty_sig + '\n';
-    raw += "CertTimestamp: " + json.buid + '\n';
-    if (json.sig) {
-      raw += json.sig + '\n';
-    }
-    return dos2unix(raw);
-  };
-
-  this.getOfficialRevocation = (json) => {
-    let raw = getNormalHeader('Revocation', json);
-    raw += "IdtyUniqueID: " + json.uid + '\n';
-    raw += "IdtyTimestamp: " + json.buid + '\n';
-    raw += "IdtySignature: " + json.sig + '\n';
-    if (json.revocation) {
-      raw += json.revocation + '\n';
-    }
-    return dos2unix(raw);
-  };
-
-  this.getPeerWithoutSignature = (json) => {
-    let raw = "";
-    raw += "Version: " + (json.version || constants.DOCUMENTS_VERSION) + "\n";
-    raw += "Type: Peer\n";
-    raw += "Currency: " + json.currency + "\n";
-    raw += "PublicKey: " + json.pubkey + "\n";
-    raw += "Block: " + json.block + "\n";
-    raw += "Endpoints:" + "\n";
-    (json.endpoints || []).forEach((ep) => {
-      raw += ep + "\n";
-    });
-    return dos2unix(raw);
-  };
-
-  this.getPeer = (json) => {
-    return dos2unix(signed(this.getPeerWithoutSignature(json), json));
-  };
-
-  this.getMembershipWithoutSignature = (json) => {
-    let raw = getNormalHeader('Membership', json);
-    raw += "Block: " + json.block + "\n";
-    raw += "Membership: " + json.membership + "\n";
-    if (json.userid)
-      raw += "UserID: " + json.userid + "\n";
-    if (json.certts)
-      raw += "CertTS: " + json.certts + "\n";
-    return dos2unix(raw);
-  };
-
-  this.getMembership = (json) => {
-    return dos2unix(signed(this.getMembershipWithoutSignature(json), json));
-  };
-
-  this.getBlockInnerPart = (json) => {
-    let raw = "";
-    raw += "Version: " + (json.version || constants.DOCUMENTS_VERSION) + "\n";
-    raw += "Type: Block\n";
-    raw += "Currency: " + json.currency + "\n";
-    raw += "Number: " + json.number + "\n";
-    raw += "PoWMin: " + json.powMin + "\n";
-    raw += "Time: " + json.time + "\n";
-    raw += "MedianTime: " + json.medianTime + "\n";
-    if (json.dividend)
-      raw += "UniversalDividend: " + json.dividend + "\n";
-    raw += "UnitBase: " + json.unitbase + "\n";
-    raw += "Issuer: " + json.issuer + "\n";
-    raw += "IssuersFrame: " + json.issuersFrame + "\n";
-    raw += "IssuersFrameVar: " + json.issuersFrameVar + "\n";
-    raw += "DifferentIssuersCount: " + json.issuersCount + "\n";
-    if(json.previousHash)
-      raw += "PreviousHash: " + json.previousHash + "\n";
-    if(json.previousIssuer)
-      raw += "PreviousIssuer: " + json.previousIssuer + "\n";
-    if(json.parameters)
-      raw += "Parameters: " + json.parameters + "\n";
-    raw += "MembersCount: " + json.membersCount + "\n";
-    raw += "Identities:\n";
-    for (const idty of (json.identities || [])){
-      raw += idty + "\n";
-    }
-    raw += "Joiners:\n";
-    for (const joiner of (json.joiners || [])){
-      raw += joiner + "\n";
-    }
-    raw += "Actives:\n";
-    for (const active of (json.actives || [])){
-      raw += active + "\n";
-    }
-    raw += "Leavers:\n";
-    for (const leaver of (json.leavers || [])){
-      raw += leaver + "\n";
-    }
-    raw += "Revoked:\n";
-    for (const revoked of (json.revoked || [])){
-      raw += revoked + "\n";
-    }
-    raw += "Excluded:\n";
-    for (const excluded of (json.excluded || [])){
-      raw += excluded + "\n";
-    }
-    raw += "Certifications:\n";
-    for (const cert of (json.certifications || [])){
-      raw += cert + "\n";
-    }
-    raw += "Transactions:\n";
-    for (const tx of (json.transactions || [])){
-      raw += tx.raw || this.getCompactTransaction(tx);
-    }
-    return dos2unix(raw);
-  };
-
-  this.getBlockWithInnerHashAndNonce = (json) => {
-    let raw = this.getBlockInnerPart(json);
-    raw += "InnerHash: " + json.inner_hash + "\n";
-    raw += "Nonce: " + json.nonce + "\n";
-    return dos2unix(raw);
-  };
-
-  this.getBlockInnerHashAndNonce = (json) => {
-    let raw = "" +
-      "InnerHash: " + json.inner_hash + "\n" +
-      "Nonce: " + json.nonce + "\n";
-    return dos2unix(raw);
-  };
-
-  this.getBlockInnerHashAndNonceWithSignature = (json) => {
-    let raw = "" +
-      "InnerHash: " + json.inner_hash + "\n" +
-      "Nonce: " + json.nonce + "\n";
-    return dos2unix(signed(raw, json));
-  };
-
-  this.getBlock = (json) => {
-    return dos2unix(signed(this.getBlockWithInnerHashAndNonce(json), json));
-  };
-
-  this.getTransaction = (json) => {
-    let raw = "";
-    raw += "Version: " + (json.version) + "\n";
-    raw += "Type: Transaction\n";
-    raw += "Currency: " + json.currency + "\n";
-    raw += "Blockstamp: " + json.blockstamp + "\n";
-    raw += "Locktime: " + json.locktime + "\n";
-    raw += "Issuers:\n";
-    (json.issuers || []).forEach((issuer) => {
-      raw += issuer + '\n';
-    });
-    raw += "Inputs:\n";
-    (json.inputs || []).forEach((input) => {
-      raw += input + '\n';
-    });
-    raw += "Unlocks:\n";
-    (json.unlocks || []).forEach((input) => {
-      raw += input + '\n';
-    });
-    raw += "Outputs:\n";
-    (json.outputs || []).forEach((output) => {
-      raw += output + '\n';
-    });
-    raw += "Comment: " + (json.comment || "") + "\n";
-    (json.signatures || []).forEach((signature) => {
-      raw += signature + '\n';
-    });
-    return dos2unix(raw);
-  };
-
-  this.getCompactTransaction = (json) => {
-    let issuers = json.issuers;
-    let raw = ["TX", json.version, issuers.length, json.inputs.length, json.unlocks.length, json.outputs.length, json.comment ? 1 : 0, json.locktime || 0].join(':') + '\n';
-    raw += json.blockstamp + "\n";
-    (issuers || []).forEach((issuer) => {
-      raw += issuer + '\n';
-    });
-    (json.inputs || []).forEach((input) => {
-      raw += input + '\n';
-    });
-    (json.unlocks || []).forEach((input) => {
-      raw += input + '\n';
-    });
-    (json.outputs || []).forEach((output) => {
-      raw += output + '\n';
-    });
-    if (json.comment)
-      raw += json.comment + '\n';
-    (json.signatures || []).forEach((signature) => {
-      raw += signature + '\n';
-    });
-    return dos2unix(raw);
-  };
-
-  let getNormalHeader = (doctype, json) => {
-    let raw = "";
-    raw += "Version: " + (json.version || constants.DOCUMENTS_VERSION) + "\n";
-    raw += "Type: " + doctype + "\n";
-    raw += "Currency: " + json.currency + "\n";
-    raw += "Issuer: " + json.issuer + "\n";
-    return raw;
-  };
-
-  let signed = (raw, json) => {
-    raw += json.signature + '\n';
-    return raw;
-  };
-};
diff --git a/app/lib/ucp/txunlock.js b/app/lib/ucp/txunlock.js
index 85ed557545728cc551e5f3e8c5021010dd6951bf..beacb2a8525072e9bfa478f501919fb3cd95c16f 100644
--- a/app/lib/ucp/txunlock.js
+++ b/app/lib/ucp/txunlock.js
@@ -1,7 +1,7 @@
 "use strict";
 
 let Parser = require("jison").Parser;
-let ucp = require('./buid');
+let ucp = require('duniter-common').buid;
 
 let grammar = {
   "lex": {
diff --git a/app/lib/wizard.js b/app/lib/wizard.js
index 6612e6a09510d9cabef6d157035212a8d1a76240..d8255af7ee6ea6f0b53cee24a2eeaad71004f3be 100644
--- a/app/lib/wizard.js
+++ b/app/lib/wizard.js
@@ -1,10 +1,8 @@
 "use strict";
 const co        = require('co');
-const Q         = require('q');
 const constants = require('./constants');
 const network   = require('./system/network');
 const async     = require('async');
-const _         = require('underscore');
 const inquirer  = require('inquirer');
 const logger    = require('./logger')('wizard');
 
@@ -14,74 +12,21 @@ module.exports = function () {
 
 function Wizard () {
 
-  this.configAll = function (conf, done) {
-    doTasks(['currency', 'network', 'key', 'pow', 'ucp'], conf, done);
-  };
-
-  this.configBasic = function (conf, done) {
-    doTasks(['key', 'network', 'pow'], conf, done);
-  };
-
-  this.configPoW = function (conf, done) {
+  this.configPoW = function (conf, program, logger, done) {
     doTasks(['pow'], conf, done);
   };
 
-  this.configCurrency = function (conf, done) {
+  this.configCurrency = function (conf, program, logger, done) {
     doTasks(['currency'], conf, done);
   };
 
-  this.configNetwork = function (conf, done) {
-    doTasks(['network'], conf, done);
-  };
-
-  this.configNetworkReconfigure = function (conf, done) {
-    doTasks(['networkReconfigure'], conf, done);
-  };
-
-  this.configKey = function (conf, done) {
-    doTasks(['key'], conf, done);
-  };
-
-  this.configUCP = function (conf, done) {
-    doTasks(['ucp'], conf, done);
+  this.configUCP = function (conf, program, logger, done) {
+    doTasks(['parameters'], conf, done);
   };
-
-  this.networkReconfiguration = networkReconfiguration;
-  this.keyReconfigure = keyReconfigure;
-}
-
-function keyReconfigure(conf, autoconf, done) {
-  return co(function *() {
-    try {
-      if (autoconf) {
-        conf.salt = ~~(Math.random() * 2147483647) + "";
-        conf.passwd = ~~(Math.random() * 2147483647) + "";
-        logger.info('Key: %s', 'generated');
-      } else {
-        yield Q.Promise(function(resolve, reject){
-          choose('You need a keypair to identify your node on the network. Would you like to automatically generate it?', true,
-            function(){
-              conf.salt = ~~(Math.random() * 2147483647) + "";
-              conf.passwd = ~~(Math.random() * 2147483647) + "";
-              resolve();
-            },
-            function(){
-              doTasks(['key'], conf, (err) => err ? reject(err) : resolve());
-            });
-        });
-      }
-      done();
-    } catch(e) {
-      done(e);
-    }
-  });
 }
 
 function doTasks (todos, conf, done) {
   async.forEachSeries(todos, function(task, callback){
-    if (task == 'networkReconfigure') {
-      return tasks[task] && tasks[task](conf, false, false, callback);
-    }
     tasks[task] && tasks[task](conf, callback);
   }, done);
 }
@@ -107,51 +52,7 @@ const tasks = {
     ], done);
   },
 
-  network: function (conf, done) {
-    networkConfiguration(conf, done);
-  },
-
-  networkReconfigure: function (conf, autoconf, noupnp, done) {
-    networkReconfiguration(conf, autoconf, noupnp, done);
-  },
-
-  key: function (conf, done) {
-    async.waterfall([
-      function (next){
-        inquirer.prompt([{
-          type: "input",
-          name: "salt",
-          message: "Key's salt",
-          default: conf.salt ? conf.salt : undefined,
-          validate: function (input) {
-            return input.match(constants.SALT) ? true : false;
-          }
-        }], function (answers) {
-          conf.salt = answers.salt;
-          next();
-        });
-      },
-      function (next) {
-        var obfuscated = (conf.passwd || "").replace(/./g, '*');
-        inquirer.prompt([{
-          type: "password",
-          name: "passwd",
-          message: "Key\'s password",
-          default: obfuscated ? obfuscated : undefined,
-          validate: function (input) {
-            return input.match(constants.PASSWORD) ? true : false;
-          }
-        }], function (answers) {
-          var keepOld = obfuscated.length > 0 && obfuscated == answers.passwd;
-          conf.passwd = keepOld ? conf.passwd : answers.passwd;
-          conf.pair = undefined;
-          next();
-        });
-      }
-    ], done);
-  },
-
-  ucp: function (conf, done) {
+  parameters: function (conf, done) {
     async.waterfall([
       async.apply(simpleFloat,   "Universal Dividend %growth",                                             "c", conf),
       async.apply(simpleInteger, "Universal Dividend period (in seconds)",                                 "dt", conf),
@@ -175,37 +76,12 @@ const tasks = {
   pow: function (conf, done) {
     async.waterfall([
       function (next){
-        choose("Participate writing the blockchain (when member)", conf.participate,
-          function participate () {
-            conf.participate = true;
-            next();
-          },
-          function doNotParticipate () {
-            conf.participate = false;
-            next();
-          });
-      },
-      function (next) {
-        if (conf.participate) {
-          simpleInteger("Start computation of a new block if none received since (seconds)", "powDelay", conf, next);
-        }
-        else next();
+        simpleInteger("Start computation of a new block if none received since (seconds)", "powDelay", conf, next);
       }
     ], done);
   }
 };
 
-function choose (question, defaultValue, ifOK, ifNotOK) {
-  inquirer.prompt([{
-    type: "confirm",
-    name: "q",
-    message: question,
-    default: defaultValue
-  }], function (answer) {
-    answer.q ? ifOK() : ifNotOK();
-  });
-}
-
 function simpleValue (question, property, defaultValue, conf, validation, done) {
   inquirer.prompt([{
     type: "input",
@@ -230,317 +106,3 @@ function simpleFloat (question, property, conf, done) {
     return input && input.toString().match(/^[0-9]+(\.[0-9]+)?$/) ? true : false;
   }, done);
 }
-
-function upnpResolve(noupnp, done) {
-  return co(function *() {
-    try {
-      let conf = yield network.upnpConf(noupnp);
-      done(null, true, conf);
-    } catch (err) {
-      done(null, false, {});
-    }
-  });
-}
-
-function networkConfiguration(conf, done) {
-  async.waterfall([
-    upnpResolve.bind(this, !conf.upnp),
-    function(upnpSuccess, upnpConf, next) {
-
-      var operations = getLocalNetworkOperations(conf)
-        .concat(getRemoteNetworkOperations(conf, upnpConf.remoteipv4, upnpConf.remoteipv6));
-
-      if (upnpSuccess) {
-        operations = operations.concat(getUseUPnPOperations(conf));
-      }
-
-      async.waterfall(operations.concat(getHostnameOperations(conf, false)), next);
-    }
-  ], done);
-}
-
-function networkReconfiguration(conf, autoconf, noupnp, done) {
-  async.waterfall([
-    upnpResolve.bind(this, noupnp),
-    function(upnpSuccess, upnpConf, next) {
-
-      // Default values
-      conf.port = conf.port || constants.NETWORK.DEFAULT_PORT;
-      conf.remoteport = conf.remoteport || constants.NETWORK.DEFAULT_PORT;
-
-      var localOperations = getLocalNetworkOperations(conf, autoconf);
-      var remoteOpertions = getRemoteNetworkOperations(conf, upnpConf.remoteipv4, upnpConf.remoteipv6, autoconf);
-      var dnsOperations = getHostnameOperations(conf, autoconf);
-      var useUPnPOperations = getUseUPnPOperations(conf, autoconf);
-
-      if (upnpSuccess) {
-        _.extend(conf, upnpConf);
-        var local = [conf.ipv4, conf.port].join(':');
-        var remote = [conf.remoteipv4, conf.remoteport].join(':');
-        if (autoconf) {
-          conf.ipv6 = conf.remoteipv6 = network.getBestLocalIPv6();
-          logger.info('IPv6: %s', conf.ipv6 || "");
-          logger.info('Local IPv4: %s', local);
-          logger.info('Remote IPv4: %s', remote);
-          // Use proposed local + remote with UPnP binding
-          return async.waterfall(useUPnPOperations
-            .concat(dnsOperations), next);
-        }
-        choose("UPnP is available: duniter will be bound: \n  from " + local + "\n  to " + remote + "\nKeep this configuration?", true,
-          function () {
-            // Yes: not network changes
-            conf.ipv6 = conf.remoteipv6 = network.getBestLocalIPv6();
-            async.waterfall(useUPnPOperations
-              .concat(dnsOperations), next);
-          },
-          function () {
-            // No: want to change
-            async.waterfall(
-              localOperations
-                .concat(remoteOpertions)
-                .concat(useUPnPOperations)
-                .concat(dnsOperations), next);
-          });
-      } else {
-        conf.upnp = false;
-        if (autoconf) {
-          // Yes: local configuration = remote configuration
-          return async.waterfall(
-            localOperations
-              .concat(getHostnameOperations(conf, autoconf))
-              .concat([function (confDone) {
-                conf.remoteipv4 = conf.ipv4;
-                conf.remoteipv6 = conf.ipv6;
-                conf.remoteport = conf.port;
-                logger.info('Local & Remote IPv4: %s', [conf.ipv4, conf.port].join(':'));
-                logger.info('Local & Remote IPv6: %s', [conf.ipv6, conf.port].join(':'));
-                confDone();
-              }]), next);
-        }
-        choose("UPnP is *not* available: is this a public server (like a VPS)?", true,
-          function () {
-            // Yes: local configuration = remote configuration
-            async.waterfall(
-              localOperations
-                .concat(getHostnameOperations(conf))
-                .concat([function(confDone) {
-                  conf.remoteipv4 = conf.ipv4;
-                  conf.remoteipv6 = conf.ipv6;
-                  conf.remoteport = conf.port;
-                  confDone();
-                }]), next);
-          },
-          function () {
-            // No: must give all details
-            async.waterfall(
-              localOperations
-                .concat(remoteOpertions)
-                .concat(dnsOperations), next);
-          });
-      }
-    }
-  ], done);
-}
-
-function getLocalNetworkOperations(conf, autoconf) {
-  return [
-    function (next){
-      var osInterfaces = network.listInterfaces();
-      var interfaces = [{ name: "None", value: null }];
-      osInterfaces.forEach(function(netInterface){
-        var addresses = netInterface.addresses;
-        var filtered = _(addresses).where({family: 'IPv4'});
-        filtered.forEach(function(addr){
-          interfaces.push({
-            name: [netInterface.name, addr.address].join(' '),
-            value: addr.address
-          });
-        });
-      });
-      if (autoconf) {
-        conf.ipv4 = network.getBestLocalIPv4();
-        return next();
-      }
-      inquirer.prompt([{
-        type: "list",
-        name: "ipv4",
-        message: "IPv4 interface",
-        default: conf.ipv4,
-        choices: interfaces
-      }], function (answers) {
-        conf.ipv4 = answers.ipv4;
-        next();
-      });
-    },
-    function (next){
-      var osInterfaces = network.listInterfaces();
-      var interfaces = [{ name: "None", value: null }];
-      osInterfaces.forEach(function(netInterface){
-        var addresses = netInterface.addresses;
-        var filtered = _(addresses).where({ family: 'IPv6' });
-        filtered.forEach(function(addr){
-          var address = addr.address
-          if (addr.scopeid)
-            address += "%" + netInterface.name
-          let nameSuffix = "";
-          if (addr.scopeid == 0 && !addr.internal) {
-            nameSuffix = " (Global)";
-          }
-          interfaces.push({
-            name: [netInterface.name, address, nameSuffix].join(' '),
-            internal: addr.internal,
-            scopeid: addr.scopeid,
-            value: address
-          });
-        });
-      });
-      interfaces.sort((addr1, addr2) => {
-        if (addr1.value === null) return -1;
-        if (addr1.internal && !addr2.internal) return 1;
-        if (addr1.scopeid && !addr2.scopeid) return 1;
-        return 0;
-      });
-      if (autoconf || !conf.ipv6) {
-        conf.ipv6 = conf.remoteipv6 = network.getBestLocalIPv6();
-      }
-      if (autoconf) {
-        return next();
-      }
-      inquirer.prompt([{
-        type: "list",
-        name: "ipv6",
-        message: "IPv6 interface",
-        default: conf.ipv6,
-        choices: interfaces
-      }], function (answers) {
-        conf.ipv6 = conf.remoteipv6 = answers.ipv6;
-        next();
-      });
-    },
-    autoconf ? (done) => {
-      conf.port = network.getRandomPort(conf);
-      done();
-    } : async.apply(simpleInteger, "Port", "port", conf)
-  ];
-}
-
-function getRemoteNetworkOperations(conf, remoteipv4, remoteipv6, autoconf) {
-  return [
-    function (next){
-      if (!conf.ipv4) {
-        conf.remoteipv4 = null;
-        return next(null, {});
-      }
-      var choices = [{ name: "None", value: null }];
-      // Local interfaces
-      var osInterfaces = network.listInterfaces();
-      osInterfaces.forEach(function(netInterface){
-        var addresses = netInterface.addresses;
-        var filtered = _(addresses).where({family: 'IPv4'});
-        filtered.forEach(function(addr){
-          choices.push({
-            name: [netInterface.name, addr.address].join(' '),
-            value: addr.address
-          });
-        });
-      });
-      if (conf.remoteipv4) {
-        choices.push({ name: conf.remoteipv4, value: conf.remoteipv4 });
-      }
-      if (remoteipv4 && remoteipv4 != conf.remoteipv4) {
-        choices.push({ name: remoteipv4, value: remoteipv4 });
-      }
-      choices.push({ name: "Enter new one", value: "new" });
-      inquirer.prompt([{
-        type: "list",
-        name: "remoteipv4",
-        message: "Remote IPv4",
-        default: conf.remoteipv4 || conf.ipv4 || null,
-        choices: choices,
-        validate: function (input) {
-          return input && input.toString().match(constants.IPV4_REGEXP) ? true : false;
-        }
-      }], function (answers) {
-        if (answers.remoteipv4 == "new") {
-          inquirer.prompt([{
-            type: "input",
-            name: "remoteipv4",
-            message: "Remote IPv4",
-            default: conf.remoteipv4 || conf.ipv4,
-            validate: function (input) {
-              return input && input.toString().match(constants.IPV4_REGEXP) ? true : false;
-            }
-          }], async.apply(next, null));
-        } else {
-          next(null, answers);
-        }
-      });
-    },
-    function (answers, next){
-      conf.remoteipv4 = answers.remoteipv4;
-      return co(function*() {
-        try {
-          if (conf.remoteipv4 || conf.remotehost) {
-            yield new Promise((resolve, reject) => {
-              const getPort = async.apply(simpleInteger, "Remote port", "remoteport", conf);
-              getPort((err) => {
-                if (err) return reject(err);
-                resolve();
-              });
-            });
-          } else if (conf.remoteipv6) {
-            conf.remoteport = conf.port;
-          }
-          next();
-        } catch (e) {
-          next(e);
-        }
-      });
-    }
-  ];
-}
-
-function getHostnameOperations(conf, autoconf) {
-  return [function(next) {
-    if (!conf.ipv4) {
-      conf.remotehost = null;
-      return next();
-    }
-    if (autoconf) {
-      logger.info('DNS: %s', conf.remotehost || 'No');
-      return next();
-    }
-    choose("Does this server has a DNS name?", !!conf.remotehost,
-      function() {
-        // Yes
-        simpleValue("DNS name:", "remotehost", "", conf, function(){ return true; }, next);
-      },
-      function() {
-        conf.remotehost = null;
-        next();
-      });
-  }];
-}
-
-function getUseUPnPOperations(conf, autoconf) {
-  return [function(next) {
-    if (!conf.ipv4) {
-      conf.upnp = false;
-      return next();
-    }
-    if (autoconf) {
-      logger.info('UPnP: %s', 'Yes');
-      conf.upnp = true;
-      return next();
-    }
-    choose("UPnP is available: use automatic port mapping? (easier)", conf.upnp,
-      function() {
-        conf.upnp = true;
-        next();
-      },
-      function() {
-        conf.upnp = false;
-        next();
-      });
-  }];
-}
diff --git a/app/lib/wot.js b/app/lib/wot.js
index 359096a230fb44c28060e43ff237601654ac5699..d2173e31e410a84f13c0c54beee12c7f7944e010 100644
--- a/app/lib/wot.js
+++ b/app/lib/wot.js
@@ -45,7 +45,7 @@ function WoTBWrapper(instance) {
     instance.addLink(from, to);
   };
 
-  this.removeLink = (from, to, debug) => {
+  this.removeLink = (from, to) => {
     logger.trace('Link %s X> %s', from, to);
     instance.removeLink(from, to);
   };
diff --git a/app/modules/check-config.js b/app/modules/check-config.js
new file mode 100644
index 0000000000000000000000000000000000000000..614f6e37bbb3c6219415fa44257b661406a07031
--- /dev/null
+++ b/app/modules/check-config.js
@@ -0,0 +1,21 @@
+"use strict";
+
+const co = require('co');
+const constants = require('../lib/constants');
+const wizard = require('../lib/wizard');
+const logger = require('../lib/logger')('wizard');
+
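+// A Duniter module: `duniter.cli` declares commands whose `onConfiguredExecute` hook
+// runs once the configuration is loaded (the DAL is not plugged at that point; other
+// modules of this patch use `onDatabaseExecute` when they need the database).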
+module.exports = {
+  duniter: {
+
+    cli: [{
+      name: 'check-config',
+      desc: 'Checks the node\'s configuration',
+
+      onConfiguredExecute: (server, conf, program, params, wizardTasks) => co(function*() {
+        yield server.checkConfig()
+        logger.warn('Configuration seems correct.');
+      })
+    }]
+  }
+};
diff --git a/app/modules/config.js b/app/modules/config.js
new file mode 100644
index 0000000000000000000000000000000000000000..a3f320b761069ba15bf32d768d3830b916e8ace2
--- /dev/null
+++ b/app/modules/config.js
@@ -0,0 +1,12 @@
+"use strict";
+
+module.exports = {
+  duniter: {
+    cli: [{
+      name: 'config',
+      desc: 'Register configuration in database',
+      // The command does nothing particular, it just stops the process right after configuration phase is over
+      onConfiguredExecute: (server, conf) => Promise.resolve(conf)
+    }]
+  }
+}
diff --git a/app/modules/daemon.js b/app/modules/daemon.js
new file mode 100644
index 0000000000000000000000000000000000000000..b228bbe34157a06966568bb284eefcb4ed9c9897
--- /dev/null
+++ b/app/modules/daemon.js
@@ -0,0 +1,54 @@
+"use strict";
+
+const co = require('co');
+
+module.exports = {
+  duniter: {
+
+    service: {
+      process: (server) => ServerService(server)
+    },
+
+    cli: [{
+      name: 'start',
+      desc: 'Start Duniter node daemon.',
+      onDatabaseExecute: (server, conf, program, params, startServices) => co(function*() {
+        const logger = server.logger;
+
+        logger.info(">> Server starting...");
+
+        yield server.checkConfig();
+        // Log the node's version and public key
+        logger.info('Node version: ' + server.version);
+        logger.info('Node pubkey: ' + server.conf.pair.pub);
+
+        // Services
+        yield startServices();
+
+        logger.info('>> Server ready!');
+
+        return new Promise(() => null); // Never ending
+      })
+    },{
+      name: 'stop',
+      desc: 'Stop Duniter node daemon.',
+      logs: false,
+      onConfiguredExecute: (server) => needsToBeLaunchedByScript(server.logger)
+    },{
+      name: 'restart',
+      desc: 'Restart Duniter node daemon.',
+      logs: false,
+      onConfiguredExecute: (server) => needsToBeLaunchedByScript(server.logger)
+    }]
+  }
+};
+
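+// The 'process' service exposes no-op lifecycle hooks: the actual services are
+// launched through the `startServices` callback of the `start` command above.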
+function ServerService(server) {
+  server.startService = () => Promise.resolve();
+  server.stopService = () => Promise.resolve();
+  return server;
+}
+
+function needsToBeLaunchedByScript(logger) {
+  logger.error('This command must not be launched directly, please use the duniter.sh script');
+}
diff --git a/app/modules/export-bc.js b/app/modules/export-bc.js
new file mode 100644
index 0000000000000000000000000000000000000000..e5d2915e85ce8aa1a96d0432421649f038e9c972
--- /dev/null
+++ b/app/modules/export-bc.js
@@ -0,0 +1,52 @@
+"use strict";
+
+const co = require('co');
+const _ = require('underscore');
+const Block = require('../lib/entity/block');
+
+module.exports = {
+  duniter: {
+    cli: [{
+      name: 'export-bc [upto]',
+      desc: 'Exports the whole blockchain as a JSON array, up to block number [upto] (excluded).',
+      logs: false,
+      onDatabaseExecute: (server, conf, program, params) => co(function*() {
+        const upto = params[0];
+        const logger = server.logger;
+        try {
+          let CHUNK_SIZE = 500;
+          let jsoned = [];
+          let current = yield server.dal.getCurrentBlockOrNull();
+          let lastNumber = current ? current.number + 1 : -1;
+          if (upto !== undefined && upto.match(/\d+/)) {
+            lastNumber = Math.min(parseInt(upto), lastNumber);
+          }
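+          // Chunked export: e.g. with lastNumber = 1250 and CHUNK_SIZE = 500, this
+          // builds the chunks [0..499], [500..999] and a final [1000..1250].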
+          let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
+          let chunks = [];
+          // Max-size chunks
+          for (let i = 0, len = chunksCount; i < len; i++) {
+            chunks.push({start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1});
+          }
+          // A last chunk
+          if (lastNumber > chunksCount * CHUNK_SIZE) {
+            chunks.push({start: chunksCount * CHUNK_SIZE, to: lastNumber});
+          }
+          for (const chunk of chunks) {
+            let blocks = yield server.dal.getBlocksBetween(chunk.start, chunk.to);
+            blocks.forEach(function (block) {
+              jsoned.push(_(new Block(block).json()).omit('raw'));
+            });
+          }
+          if (!program.nostdout) {
+            console.log(JSON.stringify(jsoned, null, "  "));
+          }
+          yield server.disconnect();
+          return jsoned;
+        } catch(err) {
+          logger.warn(err.message || err);
+          yield server.disconnect();
+        }
+      })
+    }]
+  }
+}
diff --git a/app/modules/peersignal.js b/app/modules/peersignal.js
new file mode 100644
index 0000000000000000000000000000000000000000..b8c9361f05931af799ff5ea0a96bb82bb9704004
--- /dev/null
+++ b/app/modules/peersignal.js
@@ -0,0 +1,58 @@
+"use strict";
+
+const co = require('co');
+const async = require('async');
+const constants = require('../lib/constants');
+
+module.exports = {
+  duniter: {
+    service: {
+      neutral: (server, conf, logger) => new PeerSignalEmitter(server, conf, logger)
+    }
+  }
+}
+
+/**
+ * Service which triggers the server's peering generation (refresh of the Peer document).
+ * @constructor
+ */
+function PeerSignalEmitter(server, conf) {
+
+  let INTERVAL = null;
+
+  const peerFifo = async.queue(function (task, callback) {
+    task(callback);
+  }, 1);
+
+  this.startService = () => co(function*() {
+
+    // The interval duration
+    const SIGNAL_INTERVAL = 1000 * conf.avgGenTime * constants.NETWORK.STATUS_INTERVAL.UPDATE;
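+    // conf.avgGenTime is expressed in seconds, hence the * 1000 to feed setInterval with milliseconds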
+
+    // Clear any previously set interval
+    if (INTERVAL)
+      clearInterval(INTERVAL);
+
+    // Set up the new periodic emission
+    INTERVAL = setInterval(function () {
+      peerFifo.push((done) => co(function*(){
+        try {
+          yield server.PeeringService.generateSelfPeer(conf, SIGNAL_INTERVAL);
+          done();
+        } catch (e) {
+          done(e);
+        }
+      }))
+    }, SIGNAL_INTERVAL);
+
+    // Launches it a first time, immediately
+    yield server.PeeringService.generateSelfPeer(conf, SIGNAL_INTERVAL);
+  });
+
+  this.stopService = () => co(function*() {
+    // Stop the interval
+    clearInterval(INTERVAL);
+    // Empty the fifo
+    peerFifo.kill();
+  });
+}
diff --git a/app/modules/reapply.js b/app/modules/reapply.js
new file mode 100644
index 0000000000000000000000000000000000000000..1adb83f31bdc21cb7ff8ef15418f169e05ff2c20
--- /dev/null
+++ b/app/modules/reapply.js
@@ -0,0 +1,25 @@
+"use strict";
+
+const co = require('co');
+
+module.exports = {
+  duniter: {
+    cli: [{
+      name: 'reapply-to [number]',
+      desc: 'Reapply reverted blocks until block #[number] is reached. EXPERIMENTAL',
+      onDatabaseExecute: (server, conf, program, params) => co(function*() {
+        const number = params[0];
+        const logger = server.logger;
+        try {
+          yield server.reapplyTo(number);
+        } catch (err) {
+          logger.error('Error during reapply:', err);
+        }
+        // Save DB
+        if (server) {
+          yield server.disconnect();
+        }
+      })
+    }]
+  }
+}
diff --git a/app/modules/reset.js b/app/modules/reset.js
new file mode 100644
index 0000000000000000000000000000000000000000..fa7396d1836d6880cc4ad5342008c18c007b7345
--- /dev/null
+++ b/app/modules/reset.js
@@ -0,0 +1,48 @@
+"use strict";
+
+const co = require('co');
+const constants = require('../lib/constants');
+const wizard = require('../lib/wizard');
+const logger = require('../lib/logger')('wizard');
+
+module.exports = {
+  duniter: {
+
+    cli: [{
+      name: 'reset [config|data|peers|tx|stats|all]',
+      desc: 'Reset configuration, data, peers, transactions or everything in the database',
+
+      onConfiguredExecute: (server, conf, program, params, wizardTasks) => co(function*() {
+        const type = params[0];
+        if (type === 'peers') {
+          // Needs the DAL plugged
+          yield server.initDAL();
+        }
+        switch (type) {
+          case 'data':
+            yield server.resetData();
+            logger.warn('Data successfully reset.');
+            break;
+          case 'peers':
+            yield server.resetPeers();
+            logger.warn('Peers successfully reset.');
+            break;
+          case 'stats':
+            yield server.resetStats();
+            logger.warn('Stats successfully reset.');
+            break;
+          case 'config':
+            yield server.resetConf();
+            logger.warn('Configuration successfully reset.');
+            break;
+          case 'all':
+            yield server.resetAll();
+            logger.warn('Data & Configuration successfully reset.');
+            break;
+          default:
+            throw constants.ERRORS.CLI_CALLERR_RESET;
+        }
+      })
+    }]
+  }
+};
diff --git a/app/modules/revert.js b/app/modules/revert.js
new file mode 100644
index 0000000000000000000000000000000000000000..1fb00afd6bc2af88106b470f9b32c5737a84b31c
--- /dev/null
+++ b/app/modules/revert.js
@@ -0,0 +1,41 @@
+"use strict";
+
+const co = require('co');
+
+module.exports = {
+  duniter: {
+    cli: [{
+      name: 'revert [count]',
+      desc: 'Revert (undo + remove) the top [count] blocks from the blockchain. EXPERIMENTAL',
+      onDatabaseExecute: (server, conf, program, params) => co(function*() {
+        const count = params[0];
+        const logger = server.logger;
+        try {
+          for (let i = 0; i < count; i++) {
+            yield server.revert();
+          }
+        } catch (err) {
+          logger.error('Error during revert:', err);
+        }
+        // Save DB
+        yield server.disconnect();
+      })
+    },{
+      name: 'revert-to [number]',
+      desc: 'Revert (undo + remove) top blockchain blocks until block #[number] is reached. EXPERIMENTAL',
+      onDatabaseExecute: (server, conf, program, params) => co(function*() {
+        const number = params[0];
+        const logger = server.logger;
+        try {
+          yield server.revertTo(number);
+        } catch (err) {
+          logger.error('Error during revert:', err);
+        }
+        // Save DB
+        if (server) {
+          yield server.disconnect();
+        }
+      })
+    }]
+  }
+}
diff --git a/app/modules/router.js b/app/modules/router.js
new file mode 100644
index 0000000000000000000000000000000000000000..60f1f6aef3e8383374de35076b38eb8dda30f992
--- /dev/null
+++ b/app/modules/router.js
@@ -0,0 +1,72 @@
+"use strict";
+
+const co = require('co');
+const constants = require('../lib/constants');
+const util = require('util');
+const stream = require('stream');
+const router = require('../lib/streams/router');
+const multicaster = require('../lib/streams/multicaster');
+
+module.exports = {
+  duniter: {
+    service: {
+      output: (server, conf, logger) => new Router(server, conf, logger)
+    },
+    methods: {
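+      // Convenience method: pipe the server's stream into a Router and start it immediately.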
+      routeToNetwork: (server) => {
+        const router = new Router(server);
+        router.startService();
+        server.pipe(router);
+      }
+    }
+  }
+}
+
+/**
+ * Service which routes the documents emitted by the server to the network, through the multicaster.
+ * @constructor
+ */
+function Router(server) {
+
+  const that = this;
+  let theRouter, theMulticaster = multicaster();
+
+  stream.Transform.call(this, { objectMode: true });
+
+  this._write = function (obj, enc, done) {
+    // Never close the stream
+    if (obj) {
+      that.push(obj);
+    }
+    done && done();
+  };
+
+  this.startService = () => co(function*() {
+    if (!theRouter) {
+      theRouter = router(server.PeeringService, server.dal);
+    }
+    theRouter.setActive(true);
+    theRouter.setConfDAL(server.dal);
+
+    /**
+     * Enable routing features:
+     *   - The server will try to send documents to the network
+     *   - The server will be notified of network failures (e.g. unreachable peers)
+     */
+    // The router asks for multicasting of documents
+    that
+      .pipe(theRouter)
+    // The documents get sent to peers
+      .pipe(theMulticaster)
+      // The multicaster may answer 'unreachable peer'
+      .pipe(theRouter);
+  });
+
+  this.stopService = () => co(function*() {
+    that.unpipe();
+    theRouter && theRouter.unpipe();
+    theMulticaster && theMulticaster.unpipe();
+  });
+}
+
+util.inherits(Router, stream.Transform);
diff --git a/app/modules/wizard.js b/app/modules/wizard.js
new file mode 100644
index 0000000000000000000000000000000000000000..c6eb18e829f84f8a6dfe6f7ab66da66c49990299
--- /dev/null
+++ b/app/modules/wizard.js
@@ -0,0 +1,38 @@
+"use strict";
+
+const Q = require('q');
+const co = require('co');
+const wizard = require('../lib/wizard');
+const logger = require('../lib/logger')('wizard');
+
+module.exports = {
+  duniter: {
+
+    wizard: {
+      // The wizard itself also defines its own tasks
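+      // (Q.nbind wraps these callback-style wizard functions into promise-returning tasks)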
+      'currency': Q.nbind(wizard().configCurrency, null),
+      'pow': Q.nbind(wizard().configPoW, null),
+      'parameters': Q.nbind(wizard().configUCP, null)
+    },
+
+    cli: [{
+      name: 'wizard [step]',
+      desc: 'Launch the configuration wizard.',
+
+      onConfiguredExecute: (server, conf, program, params, wizardTasks) => co(function*() {
+        const step = params[0];
+        const tasks = step ? [wizardTasks[step]] : Object.values(wizardTasks);
+        for (const task of tasks) {
+          if (!task) {
+            throw 'Unknown task';
+          }
+          yield task(conf, program, server.logger);
+        }
+        // Check config
+        yield server.checkConfig();
+        yield server.dal.saveConf(conf);
+        logger.debug("Configuration saved.");
+      })
+    }]
+  }
+};
diff --git a/app/service/BlockchainService.js b/app/service/BlockchainService.js
index 8ada969aa691254fe4499e65b1b744b537d0e8a2..22c93ff869fda6637b01b7c910264b09693b3769 100644
--- a/app/service/BlockchainService.js
+++ b/app/service/BlockchainService.js
@@ -1,17 +1,13 @@
 "use strict";
 
-const async           = require('async');
 const _               = require('underscore');
 const co              = require('co');
 const Q               = require('q');
 const parsers         = require('../lib/streams/parsers');
 const rules           = require('../lib/rules');
-const base58          = require('../lib/crypto/base58');
-const keyring         = require('../lib/crypto/keyring');
 const constants       = require('../lib/constants');
 const blockchainCtx   = require('../lib/computation/blockchainContext');
-const blockGenerator  = require('../lib/computation/blockGenerator');
-const blockProver     = require('../lib/computation/blockProver');
+const blockGenerator  = require('duniter-prover').duniter.methods.blockGenerator;
 const Block           = require('../lib/entity/block');
 const Identity        = require('../lib/entity/identity');
 const Transaction     = require('../lib/entity/transaction');
@@ -29,19 +25,14 @@ function BlockchainService (server) {
 
   let that = this;
   const mainContext = blockchainCtx();
-  const prover = this.prover = blockProver(server);
-  const generator = blockGenerator(mainContext, prover);
-  let conf, dal, keyPair, logger, selfPubkey;
+  let conf, dal, logger, selfPubkey;
 
   this.getContext = () => mainContext;
 
   this.setConfDAL = (newConf, newDAL, newKeyPair) => {
     dal = newDAL;
     conf = newConf;
-    keyPair = newKeyPair;
     mainContext.setConfDAL(conf, dal);
-    prover.setConfDAL(conf, dal, newKeyPair);
-    generator.setConfDAL(conf, dal, newKeyPair);
     selfPubkey = newKeyPair.publicKey;
     logger = require('../lib/logger')(dal.profile);
   };
@@ -141,7 +132,7 @@ function BlockchainService (server) {
       Transaction.statics.cleanSignatories(obj.transactions);
     }
     catch (e) {
-        throw e;
+      throw e;
     }
     let existing = yield dal.getBlockByNumberAndHashOrNull(obj.number, obj.hash);
     if (existing) {
@@ -157,7 +148,6 @@ function BlockchainService (server) {
       let res = yield mainContext.addBlock(obj);
       try {
         yield pushStatsForBlocks([res]);
-        server.permaProver.blockchainChanged(res);
       } catch (e) {
         logger.warn("An error occurred after the add of the block", e.stack || e);
       }
@@ -180,19 +170,12 @@ function BlockchainService (server) {
   });
 
 
-  that.tryToFork = (current) => co(function *() {
-    yield eventuallySwitchOnSideChain(current);
-    let newCurrent = yield mainContext.current();
-    let forked = newCurrent.number != current.number || newCurrent.hash != current.hash;
-    if (forked) {
-      server.permaProver.blockchainChanged();
-    }
-  });
+  that.tryToFork = (current) => eventuallySwitchOnSideChain(current);
 
   const eventuallySwitchOnSideChain = (current) => co(function *() {
     const branches = yield that.branches();
-    const blocksAdvance = constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES / (conf.avgGenTime / 60);
-    const timeAdvance = constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES * 60;
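+    // swichOnTimeAheadBy is expressed in minutes: dividing by (avgGenTime / 60) minutes
+    // per block gives a number of blocks, multiplying by 60 gives seconds.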
+    const blocksAdvance = conf.swichOnTimeAheadBy / (conf.avgGenTime / 60);
+    const timeAdvance = conf.swichOnTimeAheadBy * 60;
     let potentials = _.without(branches, current);
     // We switch only to blockchain with X_MIN advance considering both theoretical time by block + written time
     potentials = _.filter(potentials, (p) => p.number - current.number >= blocksAdvance
@@ -262,16 +245,6 @@ function BlockchainService (server) {
 
   this.applyNextAvailableFork = () => this.pushFIFO(() => mainContext.applyNextAvailableFork());
 
-  /**
-   * Generates root block with manual selection of root members.
-   */
-  this.generateManualRoot = () => generator.manualRoot();
-
-  /**
-   * Generates next block, finding newcomers, renewers, leavers, certs, transactions, etc.
-   */
-  this.generateNext = (params) => generator.nextBlock(params);
-
   this.requirementsOfIdentities = (identities) => co(function *() {
     let all = [];
     let current = yield dal.getCurrentBlockOrNull();
@@ -295,6 +268,7 @@ function BlockchainService (server) {
     let expiresPending = 0;
     let certs = [];
     try {
+      const generator = blockGenerator(server);
       const join = yield generator.getSinglePreJoinData(current, idty.hash);
       const pubkey = join.identity.pubkey;
       // Check WoT stability
@@ -377,13 +351,9 @@ function BlockchainService (server) {
     return certsFromLinks.concat(certsFromCerts);
   });
 
-  this.prove = prover.prove;
-
   this.isMember = () => dal.isMember(selfPubkey);
   this.getCountOfSelfMadePoW = () => dal.getCountOfPoW(selfPubkey);
 
-  this.makeNextBlock = generator.makeNextBlock;
-
   this.saveParametersForRootBlock = (block) => co(function *() {
     let mainFork = mainContext;
     let rootBlock = block || (yield dal.getBlock(0));
@@ -455,6 +425,4 @@ function BlockchainService (server) {
     }
     return dal.getBlocksBetween(from, from + count - 1);
   });
-
-  this.changeProverCPUSetting = (cpu) => prover.changeCPU(cpu);
 }
diff --git a/app/service/IdentityService.js b/app/service/IdentityService.js
index 4d0d5c7e5f2bc9812c711c78e684bebb2a08775f..6dda990864e3c629a12c1f88eedb47c82809584e 100644
--- a/app/service/IdentityService.js
+++ b/app/service/IdentityService.js
@@ -1,7 +1,7 @@
 "use strict";
 const Q               = require('q');
 const rules           = require('../lib/rules');
-const keyring          = require('../lib/crypto/keyring');
+const keyring          = require('duniter-common').keyring;
 const constants       = require('../lib/constants');
 const Block           = require('../../app/lib/entity/block');
 const Identity        = require('../../app/lib/entity/identity');
diff --git a/app/service/MembershipService.js b/app/service/MembershipService.js
index 3eb0ccb488da0e550fcc2df9d2c458eada1f3911..a31075bf38513a32be5042250f085674ed60123f 100644
--- a/app/service/MembershipService.js
+++ b/app/service/MembershipService.js
@@ -2,7 +2,7 @@
 
 const co              = require('co');
 const rules           = require('../lib/rules');
-const hashf           = require('../lib/ucp/hashf');
+const hashf           = require('duniter-common').hashf;
 const constants       = require('../lib/constants');
 const Membership      = require('../lib/entity/membership');
 const AbstractService = require('./AbstractService');
diff --git a/app/service/PeeringService.js b/app/service/PeeringService.js
index 537056fde61d2b7d2f66b20f48f28f4bcc1bb6cc..2f9dd485bc50f88d6691297f35ff5bad06f4c117 100644
--- a/app/service/PeeringService.js
+++ b/app/service/PeeringService.js
@@ -1,32 +1,21 @@
 "use strict";
 const co             = require('co');
 const util           = require('util');
-const async          = require('async');
 const _              = require('underscore');
 const Q              = require('q');
 const events         = require('events');
 const rp             = require('request-promise');
 const multicaster    = require('../lib/streams/multicaster');
-const keyring        = require('../lib/crypto/keyring');
+const keyring        = require('duniter-common').keyring;
 const logger         = require('../lib/logger')('peering');
-const base58         = require('../lib/crypto/base58');
-const dos2unix       = require('../lib/system/dos2unix');
-const hashf          = require('../lib/ucp/hashf');
-const rawer          = require('../lib/ucp/rawer');
-const pulling        = require('../lib/pulling');
+const dos2unix       = require('duniter-common').dos2unix;
+const hashf          = require('duniter-common').hashf;
+const rawer          = require('duniter-common').rawer;
 const constants      = require('../lib/constants');
-const querablep      = require('../lib/querablep');
 const Peer           = require('../lib/entity/peer');
-const Transaction    = require('../lib/entity/transaction');
 const AbstractService = require('./AbstractService');
 const network = require('../lib/system/network');
 
-const DONT_IF_MORE_THAN_FOUR_PEERS = true;
-const CONST_BLOCKS_CHUNK = 50;
-
-const programStart = Date.now();
-let pullingActualIntervalDuration = constants.PULLING_MINIMAL_DELAY;
-
 function PeeringService(server) {
 
   AbstractService.call(this);
@@ -139,7 +128,9 @@ function PeeringService(server) {
         if (!localNodeNotListed) {
           const indexOfThisNode = peerEntity.endpoints.indexOf(localEndpoint);
           if (indexOfThisNode !== -1) {
-            server.BlockchainService.prover.changePoWPrefix((indexOfThisNode + 1) * 10); // We multiply by 10 to give room to computers with < 100 cores
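+            // The PoW prefix is no longer set directly here: the node's index is pushed
+            // as an event so that an external module (e.g. duniter-prover) can handle it.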
+            server.push({
+              nodeIndexInPeers: indexOfThisNode
+            });
           } else {
             logger.warn('This node has his interface listed in the peer document, but its index cannot be found.');
           }
@@ -160,78 +151,6 @@ function PeeringService(server) {
     return server.singleWritePromise(_.extend({ documentType: 'peer' }, pretendedNewer));
   };
 
-  const peerFifo = async.queue(function (task, callback) {
-    task(callback);
-  }, 1);
-  let peerInterval = null;
-
-  this.regularPeerSignal =  () => co(function*() {
-    let signalTimeInterval = 1000 * conf.avgGenTime * constants.NETWORK.STATUS_INTERVAL.UPDATE;
-    if (peerInterval)
-      clearInterval(peerInterval);
-    peerInterval = setInterval(function () {
-      peerFifo.push((done) => co(function*(){
-        try {
-          yield that.generateSelfPeer(conf, signalTimeInterval);
-          done();
-        } catch (e) {
-          done(e);
-        }
-      }))
-    }, signalTimeInterval);
-    yield that.generateSelfPeer(conf, signalTimeInterval);
-  });
-
-  const crawlPeersFifo = async.queue((task, callback) => task(callback), 1);
-  let crawlPeersInterval = null;
-  this.regularCrawlPeers = function (done) {
-    if (crawlPeersInterval)
-      clearInterval(crawlPeersInterval);
-    crawlPeersInterval = setInterval(()  => crawlPeersFifo.push(crawlPeers), 1000 * conf.avgGenTime * constants.NETWORK.SYNC_PEERS_INTERVAL);
-    crawlPeers(DONT_IF_MORE_THAN_FOUR_PEERS, done);
-  };
-
-  let askedCancel = false;
-  let currentSyncP = Q();
-  const syncBlockFifo = async.queue((task, callback) => task(callback), 1);
-  let syncBlockInterval = null;
-  this.regularSyncBlock = function (done) {
-    if (syncBlockInterval)
-      clearInterval(syncBlockInterval);
-    syncBlockInterval = setInterval(()  => syncBlockFifo.push(syncBlock), 1000 * pullingActualIntervalDuration);
-    syncBlock(done);
-  };
-
-  this.pullingPromise = () => currentSyncP;
-
-  this.pullBlocks = (pubkey) => syncBlock(null, pubkey);
-
-  const FIRST_CALL = true;
-  const testPeerFifo = async.queue((task, callback) => task(callback), 1);
-  let testPeerFifoInterval = null;
-  this.regularTestPeers = function (done) {
-    if (testPeerFifoInterval)
-      clearInterval(testPeerFifoInterval);
-    testPeerFifoInterval = setInterval(() => testPeerFifo.push(testPeers.bind(null, !FIRST_CALL)), 1000 * constants.NETWORK.TEST_PEERS_INTERVAL);
-    testPeers(FIRST_CALL, done);
-  };
-
-  this.stopRegular = () => {
-    askedCancel = true;
-    clearInterval(peerInterval);
-    clearInterval(crawlPeersInterval);
-    clearInterval(syncBlockInterval);
-    clearInterval(testPeerFifoInterval);
-    peerFifo.kill();
-    crawlPeersFifo.kill();
-    syncBlockFifo.kill();
-    testPeerFifo.kill();
-    return co(function *() {
-      yield currentSyncP;
-      askedCancel = false;
-    });
-  };
-
   this.generateSelfPeer = (theConf, signalTimeInterval) => co(function*() {
     const current = yield server.dal.getCurrentBlockOrNull();
     const currency = theConf.currency || constants.DEFAULT_CURRENCY_NAME;
@@ -274,7 +193,7 @@ function PeeringService(server) {
       logger.error('It seems there is an issue with your configuration.');
       logger.error('Please restart your node with:');
       logger.error('$ duniter restart');
-      return Q.Promise((resolve) => null);
+      return Q.Promise(() => null);
     }
     // Choosing next based-block for our peer record: we basically want the most distant possible from current
     let minBlock = current ? current.number - 30 : 0;
@@ -318,348 +237,6 @@ function PeeringService(server) {
           ep.includes(theConf.remotehost) || ep.includes(theConf.remoteipv6) || ep.includes(theConf.remoteipv4))));
     });
   }
-
-  const crawlPeers = (dontCrawlIfEnoughPeers, done) => {
-    if (arguments.length == 1) {
-      done = dontCrawlIfEnoughPeers;
-      dontCrawlIfEnoughPeers = false;
-    }
-    logger.info('Crawling the network...');
-    return co(function *() {
-      try {
-        const peers = yield dal.listAllPeersWithStatusNewUPWithtout(selfPubkey);
-        if (peers.length > constants.NETWORK.COUNT_FOR_ENOUGH_PEERS && dontCrawlIfEnoughPeers == DONT_IF_MORE_THAN_FOUR_PEERS) {
-          return;
-        }
-        let peersToTest = peers.slice().map((p) => Peer.statics.peerize(p));
-        let tested = [];
-        const found = [];
-        while (peersToTest.length > 0) {
-          const results = yield peersToTest.map(crawlPeer);
-          tested = tested.concat(peersToTest.map((p) => p.pubkey));
-          // End loop condition
-          peersToTest.splice(0);
-          // Eventually continue the loop
-          for (let i = 0, len = results.length; i < len; i++) {
-            const res = results[i];
-            for (let j = 0, len2 = res.length; j < len2; j++) {
-              try {
-                const subpeer = res[j].leaf.value;
-                if (subpeer.currency && tested.indexOf(subpeer.pubkey) === -1) {
-                  const p = Peer.statics.peerize(subpeer);
-                  peersToTest.push(p);
-                  found.push(p);
-                }
-              } catch (e) {
-                logger.warn('Invalid peer %s', res[j]);
-              }
-            }
-          }
-          // Make unique list
-          peersToTest = _.uniq(peersToTest, false, (p) => p.pubkey);
-        }
-        logger.info('Crawling done.');
-        for (let i = 0, len = found.length; i < len; i++) {
-          let p = found[i];
-          try {
-            // Try to write it
-            p.documentType = 'peer';
-            yield server.singleWritePromise(p);
-          } catch(e) {
-            // Silent error
-          }
-        }
-        done();
-      } catch (e) {
-        done(e);
-      }
-    });
-  };
-
-  const crawlPeer = (aPeer) => co(function *() {
-    let subpeers = [];
-    try {
-      logger.debug('Crawling peers of %s %s', aPeer.pubkey.substr(0, 6), aPeer.getNamedURL());
-      const node = yield aPeer.connect();
-      yield checkPeerValidity(aPeer, node);
-      //let remotePeer = yield Q.nbind(node.network.peering.get)();
-      const json = yield node.getPeers.bind(node)({ leaves: true });
-      for (let i = 0, len = json.leaves.length; i < len; i++) {
-        let leaf = json.leaves[i];
-        let subpeer = yield node.getPeers.bind(node)({ leaf: leaf });
-        subpeers.push(subpeer);
-      }
-      return subpeers;
-    } catch (e) {
-      return subpeers;
-    }
-  });
-
-  const testPeers = (displayDelays, done) => co(function *() {
-    try {
-      let peers = yield dal.listAllPeers();
-      let now = (new Date().getTime());
-      peers = _.filter(peers, (p) => p.pubkey != selfPubkey);
-      for (let i = 0, len = peers.length; i < len; i++) {
-        let p = new Peer(peers[i]);
-        if (p.status == 'DOWN') {
-          let shouldDisplayDelays = displayDelays;
-          let downAt = p.first_down || now;
-          let waitRemaining = getWaitRemaining(now, downAt, p.last_try);
-          let nextWaitRemaining = getWaitRemaining(now, downAt, now);
-          let testIt = waitRemaining <= 0;
-          if (testIt) {
-            // We try to reconnect only with peers marked as DOWN
-            try {
-              logger.trace('Checking if node %s is UP... (%s:%s) ', p.pubkey.substr(0, 6), p.getHostPreferDNS(), p.getPort());
-              // We register the try anyway
-              yield dal.setPeerDown(p.pubkey);
-              // Now we test
-              let node = yield p.connect();
-              let peering = yield node.getPeer();
-              yield checkPeerValidity(p, node);
-              // The node answered, it is no more DOWN!
-              logger.info('Node %s (%s:%s) is UP!', p.pubkey.substr(0, 6), p.getHostPreferDNS(), p.getPort());
-              yield dal.setPeerUP(p.pubkey);
-              // We try to forward its peering entry
-              let sp1 = peering.block.split('-');
-              let currentBlockNumber = sp1[0];
-              let currentBlockHash = sp1[1];
-              let sp2 = peering.block.split('-');
-              let blockNumber = sp2[0];
-              let blockHash = sp2[1];
-              if (!(currentBlockNumber == blockNumber && currentBlockHash == blockHash)) {
-                // The peering changed
-                yield that.submitP(peering);
-              }
-              // Do not need to display when next check will occur: the node is now UP
-              shouldDisplayDelays = false;
-            } catch (err) {
-              // Error: we set the peer as DOWN
-              logger.trace("Peer %s is DOWN (%s)", p.pubkey, (err.httpCode && 'HTTP ' + err.httpCode) || err.code || err.message || err);
-              yield dal.setPeerDown(p.pubkey);
-              shouldDisplayDelays = true;
-            }
-          }
-          if (shouldDisplayDelays) {
-            logger.debug('Will check that node %s (%s:%s) is UP in %s min...', p.pubkey.substr(0, 6), p.getHostPreferDNS(), p.getPort(), (nextWaitRemaining / 60).toFixed(0));
-          }
-        }
-      }
-      done();
-    } catch (e) {
-      done(e);
-    }
-  });
-
-  function getWaitRemaining(now, downAt, last_try) {
-    let downDelay = Math.floor((now - downAt) / 1000);
-    let waitedSinceLastTest = Math.floor((now - (last_try || now)) / 1000);
-    let waitRemaining = 1;
-    if (downDelay <= constants.DURATIONS.A_MINUTE) {
-      waitRemaining = constants.DURATIONS.TEN_SECONDS - waitedSinceLastTest;
-    }
-    else if (downDelay <= constants.DURATIONS.TEN_MINUTES) {
-      waitRemaining = constants.DURATIONS.A_MINUTE - waitedSinceLastTest;
-    }
-    else if (downDelay <= constants.DURATIONS.AN_HOUR) {
-      waitRemaining = constants.DURATIONS.TEN_MINUTES - waitedSinceLastTest;
-    }
-    else if (downDelay <= constants.DURATIONS.A_DAY) {
-      waitRemaining = constants.DURATIONS.AN_HOUR - waitedSinceLastTest;
-    }
-    else if (downDelay <= constants.DURATIONS.A_WEEK) {
-      waitRemaining = constants.DURATIONS.A_DAY - waitedSinceLastTest;
-    }
-    else if (downDelay <= constants.DURATIONS.A_MONTH) {
-      waitRemaining = constants.DURATIONS.A_WEEK - waitedSinceLastTest;
-    }
-    // Else do not check it, DOWN for too long
-    return waitRemaining;
-  }
-
-  const checkPeerValidity = (p, node) => co(function *() {
-    try {
-      let document = yield node.getPeer();
-      let thePeer = Peer.statics.peerize(document);
-      let goodSignature = that.checkPeerSignature(thePeer);
-      if (!goodSignature) {
-        throw 'Signature from a peer must match';
-      }
-      if (p.currency !== thePeer.currency) {
-        throw 'Currency has changed from ' + p.currency + ' to ' + thePeer.currency;
-      }
-      if (p.pubkey !== thePeer.pubkey) {
-        throw 'Public key of the peer has changed from ' + p.pubkey + ' to ' + thePeer.pubkey;
-      }
-      let sp1 = p.block.split('-');
-      let sp2 = thePeer.block.split('-');
-      let blockNumber1 = parseInt(sp1[0]);
-      let blockNumber2 = parseInt(sp2[0]);
-      if (blockNumber2 < blockNumber1) {
-        throw 'Signature date has changed from block ' + blockNumber1 + ' to older block ' + blockNumber2;
-      }
-    } catch (e) {
-      logger.warn(e);
-      throw { code: "E_DUNITER_PEER_CHANGED" };
-    }
-  });
-
-  function pullingEvent(type, number) {
-    server.push({
-      pulling: {
-        type: type,
-        data: number
-      }
-    });
-  }
-
-  function syncBlock(callback, pubkey) {
-
-    // Eventually change the interval duration
-    const minutesElapsed = Math.ceil((Date.now() - programStart) / (60 * 1000));
-    const FACTOR = Math.sin((minutesElapsed / constants.PULLING_INTERVAL_TARGET) * (Math.PI / 2));
-    // Make the interval always higher than before
-    const pullingTheoreticalIntervalNow = Math.max(parseInt(Math.max(FACTOR * constants.PULLING_INTERVAL_TARGET, constants.PULLING_MINIMAL_DELAY)), pullingActualIntervalDuration);
-    if (pullingTheoreticalIntervalNow !== pullingActualIntervalDuration) {
-      pullingActualIntervalDuration = pullingTheoreticalIntervalNow;
-      // Change the interval
-      if (syncBlockInterval)
-        clearInterval(syncBlockInterval);
-      syncBlockInterval = setInterval(()  => syncBlockFifo.push(syncBlock), 1000 * pullingActualIntervalDuration);
-    }
-
-    currentSyncP = querablep(co(function *() {
-      try {
-        let current = yield dal.getCurrentBlockOrNull();
-        if (current) {
-          pullingEvent('start', current.number);
-          logger.info("Pulling blocks from the network...");
-          let peers = yield dal.findAllPeersNEWUPBut([selfPubkey]);
-          peers = _.shuffle(peers);
-          if (pubkey) {
-            _(peers).filter((p) => p.pubkey == pubkey);
-          }
-          // Shuffle the peers
-          peers = _.shuffle(peers);
-          // Only take at max X of them
-          peers = peers.slice(0, constants.MAX_NUMBER_OF_PEERS_FOR_PULLING);
-          for (let i = 0, len = peers.length; i < len; i++) {
-            let p = new Peer(peers[i]);
-            pullingEvent('peer', _.extend({number: i, length: peers.length}, p));
-            logger.trace("Try with %s %s", p.getURL(), p.pubkey.substr(0, 6));
-            try {
-              let node = yield p.connect();
-              node.pubkey = p.pubkey;
-              yield checkPeerValidity(p, node);
-              let lastDownloaded;
-              let dao = pulling.abstractDao({
-
-                // Get the local blockchain current block
-                localCurrent: () => dal.getCurrentBlockOrNull(),
-
-                // Get the remote blockchain (bc) current block
-                remoteCurrent: (thePeer) => thePeer.getCurrent(),
-
-                // Get the remote peers to be pulled
-                remotePeers: () => Q([node]),
-
-                // Get block of given peer with given block number
-                getLocalBlock: (number) => dal.getBlock(number),
-
-                // Get block of given peer with given block number
-                getRemoteBlock: (thePeer, number) => co(function *() {
-                  let block = null;
-                  try {
-                    block = yield thePeer.getBlock(number);
-                    Transaction.statics.cleanSignatories(block.transactions);
-                  } catch (e) {
-                    if (e.httpCode != 404) {
-                      throw e;
-                    }
-                  }
-                  return block;
-                }),
-
-                // Simulate the adding of a single new block on local blockchain
-                applyMainBranch: (block) => co(function *() {
-                  let addedBlock = yield server.BlockchainService.submitBlock(block, true, constants.FORK_ALLOWED);
-                  if (!lastDownloaded) {
-                    lastDownloaded = yield dao.remoteCurrent(node);
-                  }
-                  pullingEvent('applying', {number: block.number, last: lastDownloaded.number});
-                  if (addedBlock) {
-                    current = addedBlock;
-                    server.streamPush(addedBlock);
-                  }
-                }),
-
-                // Eventually remove forks later on
-                removeForks: () => Q(),
-
-                // Tells wether given peer is a member peer
-                isMemberPeer: (thePeer) => co(function *() {
-                  let idty = yield dal.getWrittenIdtyByPubkey(thePeer.pubkey);
-                  return (idty && idty.member) || false;
-                }),
-
-                // Simulates the downloading of blocks from a peer
-                downloadBlocks: (thePeer, fromNumber, count) => co(function*() {
-                  if (!count) {
-                    count = CONST_BLOCKS_CHUNK;
-                  }
-                  let blocks = yield thePeer.getBlocks(count, fromNumber);
-                  // Fix for #734
-                  for (const block of blocks) {
-                    for (const tx of block.transactions) {
-                      tx.version = constants.TRANSACTION_VERSION;
-                    }
-                  }
-                  return blocks;
-                })
-              });
-
-              yield pulling.pull(conf, dao);
-
-              // To stop the processing
-              if (askedCancel) {
-                len = 0;
-              }
-            } catch (e) {
-              if (isConnectionError(e)) {
-                logger.info("Peer %s unreachable: now considered as DOWN.", p.pubkey);
-                yield dal.setPeerDown(p.pubkey);
-              }
-              else if (e.httpCode == 404) {
-                logger.trace("No new block from %s %s", p.pubkey.substr(0, 6), p.getURL());
-              }
-              else {
-                logger.warn(e);
-              }
-            }
-          }
-          pullingEvent('end', current.number);
-        }
-        logger.info('Will pull blocks from the network in %s min %s sec', Math.floor(pullingActualIntervalDuration / 60), Math.floor(pullingActualIntervalDuration % 60));
-
-        callback && callback();
-      } catch(err) {
-        pullingEvent('error');
-        logger.warn(err.code || err.stack || err.message || err);
-        callback && callback();
-      }
-    }));
-    return currentSyncP;
-  }
-
-  function isConnectionError(err) {
-    return err && (
-      err.code == "E_DUNITER_PEER_CHANGED"
-      || err.code == "EINVAL"
-      || err.code == "ECONNREFUSED"
-      || err.code == "ETIMEDOUT"
-      || (err.httpCode !== undefined && err.httpCode !== 404));
-  }
 }
 
 util.inherits(PeeringService, events.EventEmitter);
diff --git a/appveyor.yml b/appveyor.yml
index ec651bc2f1ec0277c571e80556241eebdb7108a1..38c766e3804ebcf1b4855b9a9d29314a55225510 100644
--- a/appveyor.yml
+++ b/appveyor.yml
@@ -70,7 +70,7 @@ artifacts:
     name: Duniter
 
 deploy:
-  release: v0.81.0
+  release: v0.90.0
   provider: GitHub
   auth_token:
     secure: Vp/M0r0i1yhGR2nhrPWEbTiDIF6r0cmwbNDFZUzdFe5clWxPXtuC0lgIpOQI78zt
diff --git a/bin/duniter b/bin/duniter
index cc6a71a91ee649d1bace05fd873a9b323c4b4ee3..153524c40864e8aa710a83505ec03f7d031bf916 100755
--- a/bin/duniter
+++ b/bin/duniter
@@ -5,20 +5,27 @@ const co = require('co');
 const duniter = require('../index');
 const stack = duniter.statics.autoStack();
 
-stack.registerDependency({
-  duniter: {
-    cli: [{
-      name: 'hello',
-      desc: 'Says hello to the world.',
-      requires: ['service'],
-      promiseCallback: (duniterServer) => co(function*(){
-        console.log('Hello, world.');
-      })
-    }]
-  }
-});
+return co(function*() {
 
-return co(function*(){
-  yield stack.executeStack();
-  console.log('Done');
+  // Specific error handling
+  process.on('uncaughtException', (err) => {
+    // It is unclear why this specific exception is not caught
+    if (err.code !== "EADDRNOTAVAIL" && err.code !== "EINVAL") {
+      duniter.statics.logger.error(err);
+      process.exit(2);
+    }
+  });
+
+  try {
+    yield stack.executeStack(process.argv);
+    // Everything went well, close Duniter quietly.
+    process.exit();
+  } catch (e) {
+    // If an unhandled error occurred
+    duniter.statics.logger.error(e);
+    process.exit(1);
+  } finally {
+    // If we did not manage to exit above, force the exit with an error code.
+    process.exit(100);
+  }
 });
diff --git a/ci/travis/before_deploy.sh b/ci/travis/before_deploy.sh
index c4c3ff88ef91cc4472d58554f28e0e080d5ec243..58c79b57aff4d720691cee5d962a1419befceeb9 100755
--- a/ci/travis/before_deploy.sh
+++ b/ci/travis/before_deploy.sh
@@ -7,7 +7,7 @@ if [[ ! -f before_deploy ]]; then
 
   # Prepare
   NVER=`node -v`
-  DUNITER_VER=0.81.0
+  DUNITER_VER=0.90.0
   DUNITER_DEB_VER=" $DUNITER_VER"
   ADDON_VERSION=48
   NW_VERSION=0.17.6
diff --git a/ci/travis/debian/DEBIAN/control b/ci/travis/debian/DEBIAN/control
index da712ccd3bc99e96399d3d62fb7751a4f7c2fc8e..f4749164698984799f922e6d0406fadfbc018cf8 100644
--- a/ci/travis/debian/DEBIAN/control
+++ b/ci/travis/debian/DEBIAN/control
@@ -1,5 +1,5 @@
 Package: duniter
-Version: 0.81.0
+Version: 0.90.0
 Section: misc
 Priority: optional
 Architecture: all
diff --git a/doc/HTTP_API.md b/doc/HTTP_API.md
deleted file mode 100644
index 6cd7f026cb97859980a2aa0db37e62fc254b7f11..0000000000000000000000000000000000000000
--- a/doc/HTTP_API.md
+++ /dev/null
@@ -1,1812 +0,0 @@
-# Duniter HTTP API
-
-## Contents
-
-* [Contents](#contents)
-* [Overview](#overview)
-* [Merkle URLs](#merkle-urls)
-* [API](#api)
-  * [node/](#node)
-      * [summary](#nodesummary)
-  * [wot/](#wot)
-      * [add](#wotadd)
-      * [certify](#wotcertify)
-      * [revoke](#wotrevoke)
-      * [lookup/[search]](#wotlookupsearch)
-      * [requirements/[PUBKEY]](#wotrequirementspubkey)
-      * [certifiers-of/[search]](#wotcertifiers-ofsearch)
-      * [certified-by/[search]](#wotcertified-bysearch)
-  * [blockchain/](#blockchain)
-      * [parameters](#blockchainparameters)
-      * [membership](#blockchainmembership)
-      * [memberships/[search]](#blockchainmembershipssearch)
-      * [block](#blockchainblock)
-      * [block/[number]](#blockchainblocknumber)
-      * [blocks/[count]/[from]](#blockchainblockscountfrom)
-      * [current](#blockchaincurrent)
-      * [hardship/[PUBKEY]](#blockchainhardshippubkey)
-      * [difficulties](#blockchaindifficulties)
-      * [with/](#blockchainwith)
-          * [newcomers](#blockchainwithnewcomers)
-          * [certs](#blockchainwithcerts)
-          * [actives](#blockchainwithactives)
-          * [leavers](#blockchainwithleavers)
-          * [excluded](#blockchainwithexcluded)
-          * [ud](#blockchainwithud)
-          * [tx](#blockchainwithtx)
-      * [branches](#blockchainbranches)
-  * [network/](#network)
-      * [peers](#networkpeers)
-      * [peering](#networkpeering)
-      * [peering/peers (GET)](#networkpeeringpeers-get)
-      * [peering/peers (POST)](#networkpeeringpeers-post)
-  * [tx/](#tx)
-      * [process](#txprocess)
-      * [sources/[pubkey]](#txsourcespubkey)
-      * [history/[pubkey]](#txhistorypubkey)
-      * [history/[pubkey]/blocks/[from]/[to]](#txhistorypubkeyblocksfromto)
-      * [history/[pubkey]/times/[from]/[to]](#txhistorypubkeytimesfromto)
-  * [ud/](#ud)
-      * [history/[pubkey]](#udhistorypubkey)
-  * [ws/](#ws)
-      * [block](#wsblock)
-      * [peer](#wspeer)
-
-## Overview
-
-Data is made accessible through an HTTP API mainly inspired from [OpenUDC_exchange_formats draft](https://github.com/Open-UDC/open-udc/blob/master/docs/OpenUDC_exchange_formats.draft.txt), and has been adapted to fit Duniter specificities.
-
-    http[s]://Node[:port]/...
-    |-- wot/
-    |   |-- add
-    |   |-- certify
-    |   |-- revoke
-    |   |-- requirements/[pubkey]
-    |   |-- certifiers-of/[uid|pubkey]
-    |   |-- certified-by/[uid|pubkey]
-    |   |-- members
-    |   `-- lookup
-    |-- blockchain/
-    |   |-- parameters
-    |   |-- membership
-    |   |-- with/
-    |       |-- newcomers
-    |       |-- certs
-    |       |-- joiners
-    |       |-- actives
-    |       |-- leavers
-    |       |-- excluded
-    |       |-- ud
-    |       `-- tx
-    |   |-- hardship
-    |   |   `-- [PUBKEY]
-    |   |-- block
-    |   |   `-- [NUMBER]
-    |   |-- difficulties
-    |   `-- current
-    |-- network/
-    |   |-- peers
-    |   `-- peering
-    |       `-- peers
-    |-- tx/
-    |   |-- process
-    |   |-- sources
-    |   `-- history
-    |-- ud/
-    |   `-- history
-    `-- ws/
-        |-- block
-        `-- peer
-
-## Merkle URLs
-
-Merkle URL is a special kind of URL applicable for resources:
-
-* `network/peering/peers (GET)`
-
-Such kind of URL returns Merkle tree hashes informations. In Duniter, Merkle trees are an easy way to detect unsynced data and where the differences come from. For example, `network/peering/peers` is a Merkle tree whose leaves are peers' key fingerprint sorted ascending way. Thus, if any new peer is added, a branch of the tree will see its hash modified and propagated to the root hash. Change is then easy to detect.
-
-For commodity issues, this URL uses query parameters to retrieve partial data of the tree, as most of the time all the data is not required. Duniter Merkle tree has a determined number of parent nodes (given a number of leaves), which allows to ask only for interval of them.
-
-Here is an example of members Merkle tree with 5 members (taken from [Tree Hash EXchange format (THEX)](http://web.archive.org/web/20080316033726/http://www.open-content.net/specs/draft-jchapweske-thex-02.html)):
-
-                       ROOT=H(H+E)
-                        /        \
-                       /          \
-                 H=H(F+G)          E
-                /       \           \
-               /         \           \
-        F=H(A+B)         G=H(C+D)     E
-        /     \           /     \      \
-       /       \         /       \      \
-      A         B       C         D      E
-
-
-    Note: H() is some hash function
-
-Where A,B,C,D,E are already hashed data.
-
-With such a tree structure, Duniter consider the tree has exactly 6 nodes: `[ROOT,H,E,F,G,E]`. Nodes are just an array, and for a Lambda Server LS1, it is easy to ask for the values of another server LS2 for level 1 (`H` and `E`, the second level): it requires nodes interval `[1;2]`.
-
-Hence it is quite easy for anyone who wants to check if a `Z` member joined the Duniter community as it would alter the `E` branch of the tree:
-
-                        ROOT'=H(H+E')
-                        /            \
-                       /              \
-                 H=H(F+G)              E'
-                /       \               \
-               /         \               \
-        F=H(A+B)          G=H(C+D)       E'=H(E+Z)
-        /     \           /     \         /     \
-       /       \         /       \       /       \
-      A         B       C         D     E         Z
-
-`ROOT` changed to `ROOT'`, `E` to `E'`, but `H` did not. The whole `E'` branch should be updated with the proper new data.
-
-For that purpose, Merkle URL defines different parameters and results:
-
-**Parameters**
-
-Parameter | Description
---------- | -----------
-`leaves`  | Defines wether or not leaves hashes should be returned too. Defaults to `false`.
-`leaf`    | Hash of a leaf whose content should be returned. Ignore `leaves` parameter.
-
-**Returns**
-
-Merkle URL result with `leaves=false`.
-```json
-{
-  "depth": 3,
-  "nodesCount": 6,
-  "leavesCount": 5,
-  "root": "6513D6A1582DAE614D8A3B364BF3C64C513D236B"
-}
-```
-
-Merkle URL result with `leaves=true`.
-```json
-{
-  "depth": 3,
-  "nodesCount": 6,
-  "leavesCount": 5,
-  "root": "6513D6A1582DAE614D8A3B364BF3C64C513D236B",
-  "leaves": [
-    "32096C2E0EFF33D844EE6D675407ACE18289357D",
-    "50C9E8D5FC98727B4BBC93CF5D64A68DB647F04F",
-    "6DCD4CE23D88E2EE9568BA546C007C63D9131C1B",
-    "AE4F281DF5A5D0FF3CAD6371F76D5C29B6D953EC",
-    "E0184ADEDF913B076626646D3F52C3B49C39AD6D"
-  ]
-}
-```
-
-Merkle URL result with `leaf=AE4F281DF5A5D0FF3CAD6371F76D5C29B6D953EC`.
-```json
-{
-  "depth": 3,
-  "nodesCount": 6,
-  "leavesCount": 5,
-  "root": "6513D6A1582DAE614D8A3B364BF3C64C513D236B",
-  "leaf": {
-    "hash": "AE4F281DF5A5D0FF3CAD6371F76D5C29B6D953EC",
-    "value": // JSON value (object, string, int, ...)
-  }
-}
-```
-
-### Duniter Merkle trees leaves
-
-Each tree manages different data, and has a different goal. Hence, each tree has its own rules on how are generated and sorted tree leaves.
-Here is a summup of such rules:
-
-
-Merkle URL             | Leaf                      | Sort
----------------------- | --------------------------| -------------
-`network/peers (GET)`    | Hash of the peers' pubkey | By hash string sort, ascending.
-
-#### Unicity
-
-It has to be noted that **possible conflict exists** for leaves, as every leaf is hash, but is rather unlikely.
-
-## API
-
-### node/*
-
-#### `node/summary`
-**Goal**
-
-GET technical informations about this peer.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Technical informations about the node.
-```json
-{
-  "duniter": {
-    "software": "duniter",
-    "version": "0.10.3",
-    "forkWindowSize": 10
-  }
-}
-```
-
-### wot/*
-
-#### `wot/add`
-
-
-**Goal**
-
-POST [Identity](./Protocol.md#identity) data.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`identity` | The raw identity. | POST
-
-**Returns**
-
-The available validated data for this public key.
-```json
-{
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "uids": [
-    {
-      "uid": "udid2;c;TOCQUEVILLE;FRANCOIS-XAVIER-ROBE;1989-07-14;e+48.84+002.30;0;",
-      "meta": {
-        "timestamp": "44-76522E321B3380B058DB6D9E66121705EEA63610869A7C5B3E701CF6AF2D55A8"
-      },
-      "self": "J3G9oM5AKYZNLAB5Wx499w61NuUoS57JVccTShUbGpCMjCqj9yXXqNq7dyZpDWA6BxipsiaMZhujMeBfCznzyci",
-      "others": [
-        {
-          "pubkey": "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB",
-          "meta": {
-            "timestamp": "22-2E910FCCCEE008C4B978040CA68211C2395C84C3E6BFB432A267384ED8CD22E5"
-          },
-          "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-        }
-      ]
-    }
-  ]
-}
-```
-
-#### `wot/certify`
-
-
-**Goal**
-
-POST [Certification](./Protocol.md#certification) data.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`cert` | The raw certification. | POST
-
-**Returns**
-
-The available validated data for this public key.
-```json
-{
-  "issuer": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "timestamp": "44-76522E321B3380B058DB6D9E66121705EEA63610869A7C5B3E701CF6AF2D55A8",
-  "sig": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r",
-  "target": {
-    "issuer": "CqwuWfMsPqtUkWdUK6FxV6hPWeHaUfEcz7dFZZJA49BS",
-    "uid": "johnsnow",
-    "timestamp": "44-76522E321B3380B058DB6D9E66121705EEA63610869A7C5B3E701CF6AF2D55A8",
-    "sig": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r",
-  }
-}
-```
-
-#### `wot/revoke`
-
-
-**Goal**
-
-Remove an identity from Identity pool.
-
-> N.B.: An identity **written in the blockchain cannot be removed**.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`revocation` | The raw revocation. | POST
-
-**Returns**
-
-True if operation went well. An HTTP error otherwise with body as error message.
-```json
-{
-  "result": true
-}
-```
-
-#### `wot/lookup/[search]`
-
-
-**Goal**
-
-GET [Public key](./Protocol.md#publickey) data.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`search` | A string of data to look for (public key, uid). | URL
-
-**Returns**
-
-A list of public key data matching search string (may not return all results, check `partial` flag which is set to `false` if all results are here, ` true` otherwise).
-```json
-{
-  "partial": false,
-  "results": [
-    {
-      "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-      "uids": [
-        {
-          "uid": "udid2;c;TOCQUEVILLE;FRANCOIS-XAVIER-ROBE;1989-07-14;e+48.84+002.30;0;",
-          "meta": {
-            "timestamp": "56-97A56CCE04A1B7A03264ADE09545B262CBE65E62DDA481B7D7C89EB4669F5435"
-          },
-          "self": "J3G9oM5AKYZNLAB5Wx499w61NuUoS57JVccTShUbGpCMjCqj9yXXqNq7dyZpDWA6BxipsiaMZhujMeBfCznzyci",
-          "revocation_sig": "CTmlh3tO4B8f8IbL8iDy5ZEr3jZDcxkPmDmRPQY74C39MRLXi0CKUP+oFzTZPYmyUC7fZrUXrb3LwRKWw1jEBQ==",
-          "revoked": false,
-          "others": [
-            {
-              "pubkey": "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB",
-              "meta": {
-                "timestamp": "32-DB30D958EE5CB75186972286ED3F4686B8A1C2CD"
-              },
-              "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-            }
-          ]
-        }
-      ],
-      "signed": [
-        {
-          "uid": "snow",
-          "pubkey": "2P7y2UDiCcvsgSSt8sgHF3BPKS4m9waqKw4yXHCuP6CN",
-          "meta": {
-            "timestamp": "33-AB30D958EE5CB75186972286ED3F4686B8A1C2CD"
-          },
-          "revocation_sig": "CK6UDDJM3d0weE1RVtzFJnw/+J507lPAtspleHc59T4+N1tzQj1RRGWrzPiTknCjnCO6SxBSJX0B+MIUWrpNAw==",
-          "revoked": false,
-          "signature": "Xbr7qhyGNCmLoVuuKnKIbrdmtCvb9VBIEY19izUNwA5nufsjNm8iEsBTwKWOo0lq5O1+AAPMnht8cm2JjMq8AQ=="
-        },
-        {
-          "uid": "snow",
-          "pubkey": "2P7y2UDiCcvsgSSt8sgHF3BPKS4m9waqKw4yXHCuP6CN",
-          "meta": {
-            "timestamp": "978-B38F54242807DFA1A12F17E012D355D8DB92CA6E947FC5D147F131B30C639163"
-          },
-          "revocation_sig": "a7SFapoVaXq27NU+wZj4afmxp0SbwLGqLJih8pfX6TRKPvNp/V93fbKixbqg10cwa1CadNenztxq3ZgOivqADw==",
-          "revoked": false,
-          "signature": "HU9VPwC4EqPJwATPuyUJM7HLjfig5Ke1CKonL9Q78n5/uNSL2hkgE9Pxsor8CCJfkwCxh66NjGyqnGYqZnQMAg=="
-        },
-        {
-          "uid": "snow",
-          "pubkey": "7xapQvvxQ6367bs8DsskEf3nvQAgJv97Yu11aPbkCLQj",
-          "meta": {
-            "timestamp": "76-0DC977717C49E69A78A67C6A1526EC17ED380BC68F0C38D290A954471A1349B7"
-          },
-          "revocation_sig": "h8D/dx/z5K2dx06ktp7fnmLRdxkdV5wRkJgnmEvKy2k55mM2RyREpHfD7t/1CC5Ew+UD0V9N27PfaoLxZc1KCQ==",
-          "revoked": true,
-          "signature": "6S3x3NwiHB2QqYEY79x4wCUYHcDctbazfxIyxejs38V1uRAl4DuC8R3HJUfD6wMSiWKPqbO+td+8ZMuIn0L8AA=="
-        },
-        {
-          "uid": "cat",
-          "pubkey": "CK2hBp25sYQhdBf9oFMGHyokkmYFfzSCmwio27jYWAN7",
-          "meta": {
-            "timestamp": "63-999677597FC04E6148860AE888A2E1942DF0E1E732C31500BA8EFF07F06FEC0C"
-          },
-          "revocation_sig": "bJyoM2Tz4hltVXkLvYHOOmLP4qqh2fx7aMLkS5q0cMoEg5AFER3iETj13uoFyhz8yiAKESyAZSDjjQwp8A1QDw==",
-          "revoked": false,
-          "signature": "AhgblSOdxUkLwpUN9Ec46St3JGaw2jPyDn/mLcR4j3EjKxUOwHBYqqkxcQdRz/6K4Qo/xMa941MgUp6NjNbKBA=="
-        }
-      ]
-    }
-  ]
-}
-```
-
-#### `wot/members`
-
-
-**Goal**
-
-GET the list of current Web of Trust members.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-A list of public key + uid.
-```json
-{
-  "results": [
-    { "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY", "uid": "cat" },
-    { "pubkey": "9kNEiyseUNoPn3pmNUhWpvCCwPRgavsLu7YFKZuzzd1L", "uid": "tac" },
-    { "pubkey": "9HJ9VXa9wc6EKC6NkCi8b5TKWBot68VhYDg7kDk5T8Cz", "uid": "toc" }
-  ]
-}
-```
-
-#### `wot/requirements/[pubkey]`
-
-
-**Goal**
-
-GET requirements to be filled by pubkey to become a member.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`pbkey` | Public key to check. | URL
-
-**Returns**
-
-A list of identities matching this pubkey and the requirements of each identities to become a member.
-
-> If the pubkey is matching a member, only one identity may be displayed: the one which is a member.
-
-```json
-{
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "identities": [{
-      "uid": "tobi",
-      "meta": {
-        "timestamp": "12-20504546F37853625C1E695B757D93CFCC6E494069D53F73748E428947933E45"
-      },
-      "outdistanced": true,
-      "certifications": 2,
-      "membershipMissing": true
-    }
-    ...
-  ]
-}
-```
-
-#### `wot/certifiers-of/[search]`
-
-
-**Goal**
-
-GET [Certification](./Protocol.md#certification) data over a member.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`search` | Public key or uid of a *member* (or someone who *was a member*) we want see the certifications. | URL
-
-**Returns**
-
-A list of certifications issued to the member by other members (or who used to be members), with `written` data indicating wether the certification is written in the blockchain or not.
-
-Each certification also has:
-
-* a `isMember` field to indicate wether the issuer of the certification is still a member or not.
-* a `written` field to indicate the block where the certification was written (or null if not written yet).
-
-```json
-{
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "uid": "user identifier",
-  "isMember": true,
-  sigDate: 1421787461,
-  "certifications": [
-    {
-      "pubkey": "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB",
-      "uid": "certifier uid",
-      "cert_time": {
-        "block": 88,
-        "medianTime": 1509991044
-      },
-      sigDate: 1421787461,
-      "written": {
-        "number": 872768,
-        "hash": "D30978C9D6C5A348A8188603F039423D90E50DC5"
-      },
-      "isMember": true,
-      "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-    },
-    ...
-  ]
-}
-```
-
-#### `wot/certified-by/[search]`
-
-
-**Goal**
-
-GET [Certification](./Protocol.md#certification) data over a member.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`search` | Public key or uid of a *member* (or someone who *was a member*) we want see the certifications. | URL
-
-**Returns**
-
-A list of certifications issued by the member to other members (or who used to be members), with `written` data indicating wether the certification is written in the blockchain or not.
-
-Each certification also has:
-
-* a `isMember` field to indicate wether the issuer of the certification is still a member or not.
-* a `written` field to indicate the block where the certification was written (or null if not written yet).
-```json
-{
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "uid": "user identifier",
-  "isMember": true,
-  sigDate: 1421787461,
-  "certifications": [
-    {
-      "pubkey": "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB",
-      "uid": "certifier uid",
-      "cert_time": {
-        "block": 88,
-        "medianTime": 1509991044
-      },
-      sigDate: 1421787461,
-      "written": {
-        "number": 872768,
-        "hash": "D30978C9D6C5A348A8188603F039423D90E50DC5"
-      },
-      "isMember": true,
-      "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-    },
-    ...
-  ]
-}
-```
-
-#### `wot/identity-of/[search]`
-
-
-**Goal**
-
-GET identity data written for a member.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`search` | Public key or uid of a *member* we want see the attached identity. | URL
-
-**Returns**
-
-Identity data written in the blockchain.
-```json
-{
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "uid": "user identifier",
-  "sigDate": "21-EB18A8D89256EA80195990C91AD399B798F92EE8187F775DF7F4823C46E61F00"
-}
-```
-
-
-### blockchain/*
-
-#### `blockchain/parameters`
-
-**Goal**
-
-GET the blockchain parameters used by this node.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-The synchronization parameters.
-```json
-{
-  currency: "beta_brouzouf",
-  c: 0.01,
-  dt: 302400,
-  ud0: 100,
-  sigPeriod: 7200,
-  sigStock: 45,
-  sigWindow: 604800,
-  sigValidity: 2629800,
-  sigQty: 3,
-  idtyWindow: 604800,
-  msWindow: 604800,
-  xpercent: 5,
-  msValidity: 2629800,
-  stepMax: 3,
-  medianTimeBlocks: 11,
-  avgGenTime: 600,
-  dtDiffEval: 10,
-  percentRot: 0.67
-}
-```
-
-Parameters meaning is described under [Protocol parameters](./Protocol.md#protocol-parameters).
-
-#### `blockchain/membership`
-
-
-**Goal**
-
-POST a [Membership](./Protocol.md#membership) document.
-
-**Parameters**
-
-Name | Value | Method
----- | ----- | ------
-`membership` | The membership document (with signature). | POST
-
-**Returns**
-
-The posted membership request.
-```json
-{
-  "signature": "H41/8OGV2W4CLKbE35kk5t1HJQsb3jEM0/QGLUf80CwJvGZf3HvVCcNtHPUFoUBKEDQO9mPK3KJkqOoxHpqHCw==",
-  "membership": {
-    "version": "2",
-    "currency": "beta_brouzouf",
-    "issuer": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-    "membership": "IN",
-    "sigDate": 1390739944,
-    "uid": "superman63"
-  }
-}
-```
-
-#### `blockchain/memberships/[search]`
-
-
-**Goal**
-
-GET [Membership](./Protocol.md#membership) data written for a member.
-
-**Parameters**
-
-Name  | Value | Method
-----  | ----- | ------
-`search` | Public key or uid of a *member* we want see the memberships. | URL
-
-**Returns**
-
-A list of memberships issued by the *member* and written in the blockchain.
-```json
-{
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "uid": "user identifier",
-  "sigDate": 1390739944,
-  "memberships": [
-    {
-	  "version": "2",
-	  "currency": "beta_brouzouf",
-	  "membership": "IN",
-	  "blockNumber": 678,
-	  "blockHash": "000007936DF3CC32BFCC1023D1258EC9E485D474",
-	  "written_number": null
-	},
-    ...
-  ]
-}
-```
-
-#### `blockchain/block`
-
-**Goal**
-
-POST a new block to add to the blockchain.
-
-**Parameters**
-
-Name               | Value                          | Method
------------------- | ------------------------------ | ------
-`block`             | The raw block to be added     | POST
-`signature`         | Signature of the raw block    | POST
-
-**Returns**
-
-The promoted block if successfully added to the blockchain (see [block/[number]](#blockchainblocknumber) return object).
-
-#### `blockchain/block/[NUMBER]`
-
-**Goal**
-
-GET the promoted block whose number `NUMBER`.
-
-**Parameters**
-
-Name               | Value                                                         | Method
------------------- | ------------------------------------------------------------- | ------
-`NUMBER`           | The promoted block number (integer value) we want to see.  | URL
-
-**Returns**
-
-The promoted block if it exists (otherwise return HTTP 404).
-```json
-{
-  "version": 2,
-  "currency": "beta_brouzouf",
-  "nonce": 28,
-  "inner_hash": "FD09B0F7CEC5A575CA6E528DC4C854B612AE77B7283F48E0D28677F5C9C9D0DD",
-  "number": 1,
-  "time": 1408996317,
-  "medianTime": 1408992543,
-  "dividend": 254,
-  "monetaryMass": 18948,
-  "issuer": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "previousHash": "0009A7A62703F976F683BBA500FC0CB832B8220D",
-  "previousIssuer": "CYYjHsNyg3HMRMpTHqCJAN9McjH5BwFLmDKGV3PmCuKp",
-  "membersCount": 4,
-  "hash": "0000F40BDC0399F2E84000468628F50A122B5F16",
-  "identities": [
-    "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB:2D96KZwNUvVtcapQPq2mm7J9isFcDCfykwJpVEZwBc7tCgL4qPyu17BT5ePozAE9HS6Yvj51f62Mp4n9d9dkzJoX:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:crowlin"
-  ],
-  "joiners": [
-    "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB:2XiBDpuUdu6zCPWGzHXXy8c4ATSscfFQG9DjmqMZUxDZVt1Dp4m2N5oHYVUfoPdrU9SLk4qxi65RNrfCVnvQtQJk:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:crowlin"
-  ],
-  "leavers": [
-    "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB:2XiBDpuUdu6zCPWGzHXXy8c4ATSscfFQG9DjmqMZUxDZVt1Dp4m2N5oHYVUfoPdrU9SLk4qxi65RNrfCVnvQtQJk:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:crowlin"
-  ],
-  "revoked": [
-    "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB:2D96KZwNUvVtcapQPq2mm7J9isFcDCfykwJpVEZwBc7tCgL4qPyu17BT5ePozAE9HS6Yvj51f62Mp4n9d9dkzJoX"
-  ],
-  "excluded": [
-    "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB"
-  ],
-  "certifications": [
-    "CYYjHsNyg3HMRMpTHqCJAN9McjH5BwFLmDKGV3PmCuKp:9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB:1505900000:2XiBDpuUdu6zCPWGzHXXy8c4ATSscfFQG9DjmqMZUxDZVt1Dp4m2N5oHYVUfoPdrU9SLk4qxi65RNrfCVnvQtQJk"
-  ],
-  "transactions": [
-    {
-      "signatures": [
-        "H41/8OGV2W4CLKbE35kk5t1HJQsb3jEM0/QGLUf80CwJvGZf3HvVCcNtHPUFoUBKEDQO9mPK3KJkqOoxHpqHCw=="
-        "2D96KZwNUvVtcapQPq2mm7J9isFcDCfykwJpVEZwBc7tCgL4qPyu17BT5ePozAE9HS6Yvj51f62Mp4n9d9dkzJoX",
-        "2XiBDpuUdu6zCPWGzHXXy8c4ATSscfFQG9DjmqMZUxDZVt1Dp4m2N5oHYVUfoPdrU9SLk4qxi65RNrfCVnvQtQJk"
-      ],
-        "version": 2,
-        "currency": "beta_brouzouf",
-        "issuers": [
-          "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-          "CYYjHsNyg3HMRMpTHqCJAN9McjH5BwFLmDKGV3PmCuKp",
-          "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB"
-        ],
-        "inputs": [
-          "T:6991C993631BED4733972ED7538E41CCC33660F554E3C51963E2A0AC4D6453D3:0",
-          "T:3A09A20E9014110FD224889F13357BAB4EC78A72F95CA03394D8CCA2936A7435:0",
-          "D:4745EEBA84D4E3C2BDAE4768D4E0F5A671531EE1B0B9F5206744B4551C664FDF:243",
-          "T:3A09A20E9014110FD224889F13357BAB4EC78A72F95CA03394D8CCA2936A7435:1",
-          "T:67F2045B5318777CC52CD38B424F3E40DDA823FA0364625F124BABE0030E7B5B:0",
-          "D:521A760049DF4FAA602FEF86B7A8E306654502FA3A345F6169B8468B81E71AD3:187"
-       ],
-       "unlocks": [
-          "0:SIG(0)",
-          "1:SIG(2)",
-          "2:SIG(1)",
-          "3:SIG(1)",
-          "4:SIG(0)",
-          "5:SIG(0)"
-       ],
-      "outputs": [
-        "30:SIG(BYfWYFrsyjpvpFysgu19rGK3VHBkz4MqmQbNyEuVU64g)",
-        "156:SIG(DSz4rgncXCytsUMW2JU2yhLquZECD2XpEkpP9gG5HyAx)",
-        "49:SIG(6DyGr5LFtFmbaJYRvcs9WmBsr4cbJbJ1EV9zBbqG7A6i)"
-      ]
-    }
-  ],
-  "signature": "H41/8OGV2W4CLKbE35kk5t1HJQsb3jEM0/QGLUf80CwJvGZf3HvVCcNtHPUFoUBKEDQO9mPK3KJkqOoxHpqHCw==",
-}
-```
-
-#### `blockchain/blocks/[COUNT]/[FROM]`
-
-**Goal**
-
-GET the `[COUNT]` promoted blocks from `[FROM]` number, inclusive.
-
-**Parameters**
-
-Name               | Value                                                         | Method
------------------- | ------------------------------------------------------------- | ------
-`COUNT`           | The number of blocks we want to see.  | URL
-`FROM`           | The starting block.  | URL
-
-**Returns**
-
-The promoted blocks if it exists block `[FROM]` (otherwise return HTTP 404). Result is an array whose values are the same structure as [/blockchain/block/[number]](#blockchainblocknumber).
-```json
-{
-  "blocks": [
-    { number: 2, ... },
-    { number: 3, ... }
-  ]
-}
-```
-
-#### `blockchain/current`
-
-Same as [block/[number]](#blockchainblocknumber), but return last accepted block.
-
-#### `blockchain/hardship/[PUBKEY]`
-
-**Goal**
-
-GET hardship level for given member's pubkey for writing next block.
-
-**Parameters**
-
-Name              | Value                     | Method
-----              | -----                     | ------
-`PUBKEY` | Member's pubkey.   | URL
-
-**Returns**
-
-The hardship value (`level`) + `block` number.
-```json
-{
-  "block": 598,
-  "level": 3
-}
-
-```
-
-#### `blockchain/difficulties`
-
-**Goal**
-
-GET hardship level for given member's pubkey for writing next block.
-
-**Parameters**
-
-None.
-
-**Returns**
-
-The respective difficulty of each member in the last `IssuersFrame` blocks for current block.
-```json
-{
-  "block": 598,
-  "levels": [{
-    "uid": "jack",
-    "level": 8
-  },{
-    "uid": "cat",
-    "level": 4
-  }]
-}
-
-```
-
-#### `blockchain/with/newcomers`
-**Goal**
-
-GET the block numbers containing newcomers (new identities).
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/certs`
-**Goal**
-
-GET the block numbers containing certifications.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/joiners`
-**Goal**
-
-GET the block numbers containing joiners (newcomers or people coming back after exclusion).
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/actives`
-**Goal**
-
-GET the block numbers containing actives (members updating their membership).
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/leavers`
-**Goal**
-
-GET the block numbers containing leavers (members leaving definitely the currency).
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/revoked`
-**Goal**
-
-GET the block numbers containing revoked members.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/excluded`
-**Goal**
-
-GET the block numbers containing excluded members.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/ud`
-**Goal**
-
-GET the block numbers containing Universal Dividend.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/with/tx`
-**Goal**
-
-GET the block numbers containing transactions.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Block numbers.
-```json
-{
-  "result": {
-    "blocks": [223,813]
-  }
-}
-```
-
-#### `blockchain/branches`
-
-**Goal**
-
-GET current branches of the node.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Top block of each branch, i.e. the last received block of each branch. An array of 4 blocks would mean the node has 4 branches,
-3 would mean 3 branches, and so on.
-
-```json
-{
-  "blocks": [
-    { number: 2, ... },
-    { number: 3, ... }
-  ]
-}
-```
-
-### network/*
-
-This URL is used for Duniter Gossip protocol (exchanging UCG messages).
-
-#### `network/peers`
-**Goal**
-
-GET the exhaustive list of peers known by the node.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-List of peering entries.
-```json
-{
-  "peers": [
-    {
-      "version": "2",
-      "currency": "meta_brouzouf",
-      "status": "UP",
-      "first_down": null,
-      "last_try": null,
-      "pubkey": "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk",
-      "block": "45180-00000E577DD4B308B98D0ED3E43926CE4D22E9A8",
-      "signature": "GKTrlUc4um9lQuj9UI8fyA/n/JKieYqBYcl9keIWfAVOnvHamLHaqGzijsdX1kNt64cadcle/zkd7xOgMTdQAQ==",
-      "endpoints": [
-        "BASIC_MERKLED_API metab.ucoin.io 88.174.120.187 9201"
-      ]
-    },
-    {
-      "version": "2",
-      "currency": "meta_brouzouf",
-      "status": "UP",
-      "first_down": null,
-      "last_try": null,
-      "pubkey": "2aeLmae5d466y8D42wLK5MknwUBCR6MWWeixRzdTQ4Hu",
-      "block": "45182-0000064EEF412C1CDD1B370CC45A3BC3B9743464",
-      "signature": "kbdTay1OirDqG/E3jyCaDlL7HVVHb9/BXvNHAg+xO9sSA+NgmBo/4mEqL9b7hH0UnbXHss6TfuvxAHZLmBqsCw==",
-      "endpoints": [
-        "BASIC_MERKLED_API twiced.fr 88.174.120.187 9223"
-      ]
-    },
-    ...
-  ]
-}
-```
-
-#### `network/peering`
-**Goal**
-
-GET the peering informations of this node.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Peering entry of the node.
-```json
-{
-  "version": "2",
-  "currency": "beta_brouzouf",
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "endpoints": [
-    "BASIC_MERKLED_API some.dns.name 88.77.66.55 2001:0db8:0000:85a3:0000:0000:ac1f 9001",
-    "BASIC_MERKLED_API some.dns.name 88.77.66.55 2001:0db8:0000:85a3:0000:0000:ac1f 9002",
-    "OTHER_PROTOCOL 88.77.66.55 9001",
-  ],
-  "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-}
-```
-
-#### `network/peering/peers (GET)`
-**Goal**
-
-Merkle URL refering to peering entries of every node inside the currency network.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Merkle URL result.
-```json
-{
-  "depth": 3,
-  "nodesCount": 6,
-  "leavesCount": 5,
-  "root": "114B6E61CB5BB93D862CA3C1DFA8B99E313E66E9"
-}
-```
-
-Merkle URL leaf: peering entry
-```json
-{
-  "hash": "2E69197FAB029D8669EF85E82457A1587CA0ED9C",
-  "value": {
-    "version": "2",
-    "currency": "beta_brouzouf",
-    "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-    "endpoints": [
-      "BASIC_MERKLED_API some.dns.name 88.77.66.55 2001:0db8:0000:85a3:0000:0000:ac1f 9001",
-      "BASIC_MERKLED_API some.dns.name 88.77.66.55 2001:0db8:0000:85a3:0000:0000:ac1f 9002",
-      "OTHER_PROTOCOL 88.77.66.55 9001",
-    ],
-    "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-  }
-}
-```
-
-#### `network/peering/peers (POST)`
-**Goal**
-
-POST a peering entry document.
-
-**Parameters**
-
-Name        | Value                               | Method
------------ | ----------------------------------- | ------
-`peer`      | The peering entry document.         | POST
-
-**Returns**
-
-The posted entry.
-```json
-{
-  "version": "2",
-  "currency": "beta_brouzouf",
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "endpoints": [
-    "BASIC_MERKLED_API some.dns.name 88.77.66.55 2001:0db8:0000:85a3:0000:0000:ac1f 9001",
-    "BASIC_MERKLED_API some.dns.name 88.77.66.55 2001:0db8:0000:85a3:0000:0000:ac1f 9002",
-    "OTHER_PROTOCOL 88.77.66.55 9001",
-  ],
-  "signature": "42yQm4hGTJYWkPg39hQAUgP6S6EQ4vTfXdJuxKEHL1ih6YHiDL2hcwrFgBHjXLRgxRhj2VNVqqc6b4JayKqTE14r"
-}
-```
-
-### tx/*
-
-#### `tx/process`
-**Goal**
-
-POST a transaction.
-
-**Parameters**
-
-Name              | Value                                                         | Method
------------------ | ------------------------------------------------------------- | ------
-`transaction`     | The raw transaction.                                          | POST
-
-**Returns**
-
-The recorded transaction.
-```json
-{
-  "raw": "Version: 2\r\n...\r\n",
-  "transaction":
-  {
-      "signatures": [
-        "H41/8OGV2W4CLKbE35kk5t1HJQsb3jEM0/QGLUf80CwJvGZf3HvVCcNtHPUFoUBKEDQO9mPK3KJkqOoxHpqHCw=="
-        "2D96KZwNUvVtcapQPq2mm7J9isFcDCfykwJpVEZwBc7tCgL4qPyu17BT5ePozAE9HS6Yvj51f62Mp4n9d9dkzJoX",
-        "2XiBDpuUdu6zCPWGzHXXy8c4ATSscfFQG9DjmqMZUxDZVt1Dp4m2N5oHYVUfoPdrU9SLk4qxi65RNrfCVnvQtQJk"
-      ],
-        "version": 2,
-        "currency": "beta_brouzouf",
-        "issuers": [
-          "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-          "CYYjHsNyg3HMRMpTHqCJAN9McjH5BwFLmDKGV3PmCuKp",
-          "9WYHTavL1pmhunFCzUwiiq4pXwvgGG5ysjZnjz9H8yB"
-        ],
-        "inputs": [
-          "T:6991C993631BED4733972ED7538E41CCC33660F554E3C51963E2A0AC4D6453D3:0",
-          "T:3A09A20E9014110FD224889F13357BAB4EC78A72F95CA03394D8CCA2936A7435:0",
-          "D:4745EEBA84D4E3C2BDAE4768D4E0F5A671531EE1B0B9F5206744B4551C664FDF:243",
-          "T:3A09A20E9014110FD224889F13357BAB4EC78A72F95CA03394D8CCA2936A7435:1",
-          "T:67F2045B5318777CC52CD38B424F3E40DDA823FA0364625F124BABE0030E7B5B:0",
-          "D:521A760049DF4FAA602FEF86B7A8E306654502FA3A345F6169B8468B81E71AD3:187"
-       ],
-       "unlocks": [
-          "0:SIG(0)",
-          "1:SIG(2)",
-          "2:SIG(1)",
-          "3:SIG(1)",
-          "4:SIG(0)",
-          "5:SIG(0)"
-       ],
-      "outputs": [
-        "30:SIG(BYfWYFrsyjpvpFysgu19rGK3VHBkz4MqmQbNyEuVU64g)",
-        "156:SIG(DSz4rgncXCytsUMW2JU2yhLquZECD2XpEkpP9gG5HyAx)",
-        "49:SIG(6DyGr5LFtFmbaJYRvcs9WmBsr4cbJbJ1EV9zBbqG7A6i)"
-      ]
-  }
-}
-```
-
-
-#### `tx/sources/[pubkey]`
-
-**Goal**
-
-GET a list of available sources.
-
-**Parameters**
-
-Name              | Value                       | Method
-----              | -----                       | ------
-`pubkey`           | Owner of the coins' pubkey. | URL
-
-**Returns**
-
-A list of available sources for the given `pubkey`.
-```json
-{
-  "currency": "beta_brouzouf",
-  "pubkey": "HsLShAtzXTVxeUtQd7yi5Z5Zh4zNvbu8sTEZ53nfKcqY",
-  "sources": [
-    {
-      "type: "D",
-      "noffset": 5,
-      "identifier": "6C20752F6AD06AEA8D0BB46BB8C4F68641A34C79",
-      "amount": 100
-    },
-    {
-      "type: "D",
-      "noffset": 18,
-      "identifier": "DB7D88E795E42CF8CFBFAAFC77379E97847F9B42",
-      "amount": 110
-    },
-    {
-      "type: "T",
-      "noffset": 55,
-      "identifier": "E614E814179F313B1113475E6319EF4A3D470AD0",
-      "amount": 30
-    }
-  ]
-}
-```
-
-
-#### `tx/history/[pubkey]`
-
-**Goal**
-
-Get the wallet transaction history
-
-**parameters**
-
-Name              | Value							| Method
-----              | -----							| ------
-`pubkey`          | Wallet public key.				| URL
-
-**Returns**
-
-The full transaction history for the given `pubkey`
-```json
-{
-  "currency": "meta_brouzouf",
-  "pubkey": "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk",
-  "history": {
-    "sent": [
-      {
-        "version": 2,
-        "received": null,
-        "issuers": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk"
-        ],
-        "inputs": [
-          "D:000A8362AE0C1B8045569CE07735DE4C18E81586:125"
-        ],
-        "outputs": [
-          "5:SIG(8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU)",
-          "95:SIG(HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk)"
-        ],
-        "comment": "Essai",
-        "signatures": [
-          "8zzWSU+GNSNURnH1NKPT/TBoxngEc/0wcpPSbs7FqknGxud+94knvT+dpe99k6NwyB5RXvOVnKAr4p9/KEluCw=="
-        ],
-        "hash": "FC7BAC2D94AC9C16AFC5C0150C2C9E7FBB2E2A09",
-        "block_number": 173,
-        "time": 1421932545
-      }
-    ],
-    "received": [
-      {
-        "version": 2,
-        "received": null,
-        "issuers": [
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU"
-        ],
-        "inputs": [
-          "D:000A8362AE0C1B8045569CE07735DE4C18E81586:125"
-        ],
-        "outputs": [
-          "7:SIG(HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk)",
-          "93:SIG(8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU)"
-        ],
-        "comment": "",
-        "signatures": [
-          "1Mn8q3K7N+R4GZEpAUm+XSyty1Uu+BuOy5t7BIRqgZcKqiaxfhAUfDBOcuk2i4TJy1oA5Rntby8hDN+cUCpvDg=="
-        ],
-        "hash": "5FB3CB80A982E2BDFBB3EA94673A74763F58CB2A",
-        "block_number": 207,
-        "time": 1421955525
-      },
-      {
-        "version": 2,
-        "received": null,
-        "issuers": [
-          "J78bPUvLjxmjaEkdjxWLeENQtcfXm7iobqB49uT1Bgp3"
-        ],
-        "inputs": [
-          "T:6A50FF82410387B239489CE38B34E0FDDE1697FE:0"
-        ],
-        "outputs": [
-          "42:SIG(HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk)",
-          "9958:SIG(J78bPUvLjxmjaEkdjxWLeENQtcfXm7iobqB49uT1Bgp3)"
-        ],
-        "comment": "",
-        "signatures": [
-          "XhBcCPizPiWdKeXWg1DX/FTQst6DppEjsYEtoAZNA0P11reXtgc9IduiIxNWzNjt/KvTw8APkSI8/Uf31QQVDA=="
-        ],
-        "hash": "ADE7D1C4002D6BC10013C34CE22733A55173BAD4",
-        "block_number": 15778,
-        "time": 1432314584
-      }
-    ],
-    "sending": [
-	  {
-        "version": 2,
-        "received": 1459691641,
-        "issuers": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk"
-        ],
-        "inputs": [
-          "0:D:8196:000022AD426FE727C707D847EC2168A64C577706:5872"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:5871"
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX:1",
-        ],
-        "comment": "some comment",
-        "signatures": [
-          "kLOAAy7/UldQk7zz4I7Jhv9ICuGYRx7upl8wH8RYL43MMF6+7MbPh3QRN1qNFGpAfa3XMWIQmbUWtjZKP6OfDA=="
-        ],
-        "hash": "BA41013F2CD38EDFFA9D38A275F8532DD906A2DE"
-      }
-    ],
-    "receiving": [
-	 {
-        "version": 2,
-        "received": 1459691641,
-        "issuers": [
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX"
-        ],
-        "inputs": [
-          "0:D:8196:000022AD426FE727C707D847EC2168A64C577706:4334"
-        ],
-        "outputs": [
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX:1",
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:4333"
-        ],
-        "comment": "some comment",
-        "signatures": [
-          "DRiZinUEKrrLiJNogtydzwEbmETrvWiLNYXCiJsRekxTLyU5g4LjnwiLp/XlvmIekjJK5n/gullLWrHUBvFSAw==
-        ],
-        "hash": "A0A511131CD0E837204A9441B3354918AC4CE671"
-      }
-	]
-  }
-}
-```
-
-#### `tx/history/[PUBKEY]/blocks/[from]/[to]`
-
-**Goal**
-
-Get the wallet transaction history
-
-**parameters**
-
-Name				| Value							| Method
-----				| -----							| ------
-`pubkey`			| Wallet public key.			| URL
-`from`				| The starting block.			| URL
-`to`				| the ending block.				| URL
-
-**Returns**
-
-The transaction history for the given `pubkey` and between the given `from` and `to` blocks. 
-```json
-{
-  "currency": "meta_brouzouf",
-  "pubkey": "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk",
-  "history": {
-    "sent": [
-      {
-        "version": 2,
-        "issuers": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk"
-        ],
-        "inputs": [
-          "0:D:125:000A8362AE0C1B8045569CE07735DE4C18E81586:100"
-        ],
-        "outputs": [
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU:5",
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:95"
-        ],
-        "comment": "Essai",
-        "signatures": [
-          "8zzWSU+GNSNURnH1NKPT/TBoxngEc/0wcpPSbs7FqknGxud+94knvT+dpe99k6NwyB5RXvOVnKAr4p9/KEluCw=="
-        ],
-        "hash": "FC7BAC2D94AC9C16AFC5C0150C2C9E7FBB2E2A09",
-        "block_number": 173,
-        "time": 1421932545
-      }
-    ],
-    "received": [
-      {
-        "version": 2,
-        "issuers": [
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU"
-        ],
-        "inputs": [
-          "0:D:125:000A8362AE0C1B8045569CE07735DE4C18E81586:100"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:7",
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU:93"
-        ],
-        "comment": "",
-        "signatures": [
-          "1Mn8q3K7N+R4GZEpAUm+XSyty1Uu+BuOy5t7BIRqgZcKqiaxfhAUfDBOcuk2i4TJy1oA5Rntby8hDN+cUCpvDg=="
-        ],
-        "hash": "5FB3CB80A982E2BDFBB3EA94673A74763F58CB2A",
-        "block_number": 207,
-        "time": 1421955525
-      },
-      {
-        "version": 2,
-        "issuers": [
-          "J78bPUvLjxmjaEkdjxWLeENQtcfXm7iobqB49uT1Bgp3"
-        ],
-        "inputs": [
-          "0:T:15128:6A50FF82410387B239489CE38B34E0FDDE1697FE:10000"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:42",
-          "J78bPUvLjxmjaEkdjxWLeENQtcfXm7iobqB49uT1Bgp3:9958"
-        ],
-        "comment": "",
-        "signatures": [
-          "XhBcCPizPiWdKeXWg1DX/FTQst6DppEjsYEtoAZNA0P11reXtgc9IduiIxNWzNjt/KvTw8APkSI8/Uf31QQVDA=="
-        ],
-        "hash": "ADE7D1C4002D6BC10013C34CE22733A55173BAD4",
-        "block_number": 15778,
-        "time": 1432314584
-      }
-    ],
-    "sending": [
-	  {
-        "version": 2,
-        "issuers": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk"
-        ],
-        "inputs": [
-          "0:D:8196:000022AD426FE727C707D847EC2168A64C577706:5872"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:5871"
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX:1",
-        ],
-        "comment": "some comment",
-        "signatures": [
-          "kLOAAy7/UldQk7zz4I7Jhv9ICuGYRx7upl8wH8RYL43MMF6+7MbPh3QRN1qNFGpAfa3XMWIQmbUWtjZKP6OfDA=="
-        ],
-        "hash": "BA41013F2CD38EDFFA9D38A275F8532DD906A2DE"
-      }
-    ],
-    "receiving": [
-	 {
-        "version": 2,
-        "issuers": [
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX"
-        ],
-        "inputs": [
-          "0:D:8196:000022AD426FE727C707D847EC2168A64C577706:4334"
-        ],
-        "outputs": [
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX:1",
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:4333"
-        ],
-        "comment": "some comment",
-        "signatures": [
-          "DRiZinUEKrrLiJNogtydzwEbmETrvWiLNYXCiJsRekxTLyU5g4LjnwiLp/XlvmIekjJK5n/gullLWrHUBvFSAw==
-        ],
-        "hash": "A0A511131CD0E837204A9441B3354918AC4CE671"
-      }
-	]
-  }
-}
-```
-
-#### `tx/history/[pubkey]/times/[from]/[to]`
-
-**Goal**
-
-Get the wallet transaction history
-
-**parameters**
-
-Name              | Value							| Method
-----              | -----							| ------
-`pubkey`          | Wallet public key.				| URL
-`from` | The starting timestamp limit. (optionnal) | URL
-`to`        | The ending timestamp. (optionnal)	| URL
-
-**Returns**
-
-The transaction history for the given `pubkey` and between the given `from` and `to` dates. 
-```json
-{
-  "currency": "meta_brouzouf",
-  "pubkey": "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk",
-  "history": {
-    "sent": [
-      {
-        "version": 2,
-        "issuers": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk"
-        ],
-        "inputs": [
-          "0:D:125:000A8362AE0C1B8045569CE07735DE4C18E81586:100"
-        ],
-        "outputs": [
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU:5",
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:95"
-        ],
-        "comment": "Essai",
-        "signatures": [
-          "8zzWSU+GNSNURnH1NKPT/TBoxngEc/0wcpPSbs7FqknGxud+94knvT+dpe99k6NwyB5RXvOVnKAr4p9/KEluCw=="
-        ],
-        "hash": "FC7BAC2D94AC9C16AFC5C0150C2C9E7FBB2E2A09",
-        "block_number": 173,
-        "time": 1421932545
-      }
-    ],
-    "received": [
-      {
-        "version": 2,
-        "issuers": [
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU"
-        ],
-        "inputs": [
-          "0:D:125:000A8362AE0C1B8045569CE07735DE4C18E81586:100"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:7",
-          "8Fi1VSTbjkXguwThF4v2ZxC5whK7pwG2vcGTkPUPjPGU:93"
-        ],
-        "comment": "",
-        "signatures": [
-          "1Mn8q3K7N+R4GZEpAUm+XSyty1Uu+BuOy5t7BIRqgZcKqiaxfhAUfDBOcuk2i4TJy1oA5Rntby8hDN+cUCpvDg=="
-        ],
-        "hash": "5FB3CB80A982E2BDFBB3EA94673A74763F58CB2A",
-        "block_number": 207,
-        "time": 1421955525
-      },
-      {
-        "version": 2,
-        "issuers": [
-          "J78bPUvLjxmjaEkdjxWLeENQtcfXm7iobqB49uT1Bgp3"
-        ],
-        "inputs": [
-          "0:T:15128:6A50FF82410387B239489CE38B34E0FDDE1697FE:10000"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:42",
-          "J78bPUvLjxmjaEkdjxWLeENQtcfXm7iobqB49uT1Bgp3:9958"
-        ],
-        "comment": "",
-        "signatures": [
-          "XhBcCPizPiWdKeXWg1DX/FTQst6DppEjsYEtoAZNA0P11reXtgc9IduiIxNWzNjt/KvTw8APkSI8/Uf31QQVDA=="
-        ],
-        "hash": "ADE7D1C4002D6BC10013C34CE22733A55173BAD4",
-        "block_number": 15778,
-        "time": 1432314584
-      }
-    ],
-    "sending": [
-	  {
-        "version": 2,
-        "issuers": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk"
-        ],
-        "inputs": [
-          "0:D:8196:000022AD426FE727C707D847EC2168A64C577706:5872"
-        ],
-        "outputs": [
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:5871"
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX:1",
-        ],
-        "comment": "some comment",
-        "signatures": [
-          "kLOAAy7/UldQk7zz4I7Jhv9ICuGYRx7upl8wH8RYL43MMF6+7MbPh3QRN1qNFGpAfa3XMWIQmbUWtjZKP6OfDA=="
-        ],
-        "hash": "BA41013F2CD38EDFFA9D38A275F8532DD906A2DE"
-      }
-    ],
-    "receiving": [
-	 {
-        "version": 2,
-        "issuers": [
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX"
-        ],
-        "inputs": [
-          "0:D:8196:000022AD426FE727C707D847EC2168A64C577706:4334"
-        ],
-        "outputs": [
-          "2sq8bBDQGK74f1eD3mAPQVgHCmFdijZr9nbv16FwbokX:1",
-          "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:4333"
-        ],
-        "comment": "some comment",
-        "signatures": [
-          "DRiZinUEKrrLiJNogtydzwEbmETrvWiLNYXCiJsRekxTLyU5g4LjnwiLp/XlvmIekjJK5n/gullLWrHUBvFSAw==
-        ],
-        "hash": "A0A511131CD0E837204A9441B3354918AC4CE671"
-      }
-	]
-  }
-}
-```
-### ud/*
-
-#### `ud/history/[pubkey]`
-
-**Goal**
-
-Get the wallet universal dividend history
-
-**parameters**
-
-Name              | Value							| Method
-----              | -----							| ------
-`pubkey`          | Wallet public key.				| URL
-
-**Returns**
-
-The universal dividend history for the given `pubkey`. 
-```json
-{
-  "currency": "meta_brouzouf",
-  "pubkey": "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk",
-  "history": {
-    "history": [
-      {
-        "block_number": 125,
-        "consumed": true,
-        "time": 1421927007,
-        "amount": 100
-      },
-      {
-        "block_number": 410,
-        "consumed": false,
-        "time": 1422012828,
-        "amount": 100
-      },
-      {
-        "block_number": 585,
-        "consumed": true,
-        "time": 1422098800,
-        "amount": 100
-      }
-    ]
-  }
-}
-```
-
-
-### ws/*
-
-#### `ws/block`
-
-**Goal**
-
-A websocket entry point for receiving blocks.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Websocket connection.
-
-#### `ws/peer`
-
-**Goal**
-
-A websocket entry point for receiving peers.
-
-**Parameters**
-
-*None*.
-
-**Returns**
-
-Websocket connection.
diff --git a/doc/Protocol.md b/doc/Protocol.md
index 83915e5e04428a217ec93b238530a11be271d1a1..8553ce0de0841cb23c500a5948d315fe04db6359 100644
--- a/doc/Protocol.md
+++ b/doc/Protocol.md
@@ -1365,6 +1365,7 @@ TRUE
 * *Rule*: The sum of all inputs in `CommonBase` must equal the sum of all outputs in `CommonBase`
 
 > Functionally: we cannot create nor lose money through transactions. We can only transfer coins we own.
+> Functionally: also, we cannot convert a higher unit base into a lower one.
 
 #### Global
 
@@ -1401,6 +1402,7 @@ Function references:
 * *FIRST* return the first element in a list of values matching the given condition
 * *REDUCE* merges a set of elements into a single one, by extending the non-null properties from each record into the resulting record.
 * *REDUCE_BY* merges a set of elements into a new set, where each new element is the reduction of the first set sharing a given key.
+* *CONCAT* concatenates two sets of elements into a new one.
 
 > If there is no elements, all its properties are `null`.
 
@@ -1692,7 +1694,7 @@ Else:
     nbPersonalBlocksInFrame = COUNT(blocksOfIssuer)
     blocksPerIssuerInFrame = MAP(
         UNIQ((HEAD~1..<HEAD~1.issuersFrame>).issuer)
-            => COUNT(HEAD~1..<HEAD~1.issuersFrame>[issuer=HEAD.issuer]))
+            => ISSUER: COUNT(HEAD~1..<HEAD~1.issuersFrame>[issuer=ISSUER]))
     medianOfBlocksInFrame = MEDIAN(blocksPerIssuerInFrame)
     
 EndIf
@@ -2026,11 +2028,13 @@ Else:
     
 EndIf
 
-####### BR_G46 - ENTRY.available
+####### BR_G46 - ENTRY.available and ENTRY.conditions
 
 For each `LOCAL_SINDEX[op='UPDATE'] as ENTRY`:
 
-    ENTRY.available = REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base]).consumed == false
+    INPUT = REDUCE(GLOBAL_SINDEX[identifier=ENTRY.identifier,pos=ENTRY.pos,amount=ENTRY.amount,base=ENTRY.base])
+    ENTRY.conditions = INPUT.conditions
+    ENTRY.available = INPUT.consumed == false
 
 ####### BR_G47 - ENTRY.isLocked
 
@@ -2375,6 +2379,31 @@ For each `REDUCE_BY(GLOBAL_IINDEX[member=true], 'pub') as IDTY` then if `IDTY.me
         consumed = false
     )
 
+###### BR_G106 - Low accounts
+
+Set:
+
+    ACCOUNTS = UNIQ(GLOBAL_SINDEX, 'conditions')
+
+For each `ACCOUNTS as ACCOUNT` then:
+
+Set:
+
+    ALL_SOURCES = CONCAT(GLOBAL_SINDEX[conditions=ACCOUNT.conditions], LOCAL_SINDEX[conditions=ACCOUNT.conditions])
+    SOURCES = REDUCE_BY(ALL_SOURCES, 'identifier', 'pos')[consumed=false]
+    BALANCE = SUM(MAP(SOURCES => SRC: SRC.amount * POW(10, SRC.base)))
+
+If `BALANCE < 100 * POW(10, HEAD.unitBase)`, then for each `SOURCES as SRC` add a new LOCAL_SINDEX entry:
+
+    SINDEX (
+        op = 'UPDATE'
+        identifier = SRC.identifier
+        pos = SRC.pos
+        written_on = BLOCKSTAMP
+        written_time = MedianTime
+        consumed = true
+    )
+
 ###### BR_G92 - Certification expiry
 
 For each `GLOBAL_CINDEX[expires_on<=HEAD.medianTime] as CERT`, add a new LOCAL_CINDEX entry:
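The BR_G106 rule added above consumes every source of an account whose total balance falls below 100 units in the current unit base. A rough JavaScript sketch of that balance test (field names mirror the protocol pseudocode; this is not the actual Duniter implementation):

```js
// Sketch of the BR_G106 balance test, assuming plain source objects { amount, base }.
function isLowAccount(sources, unitBase) {
  // BALANCE = SUM(MAP(SOURCES => SRC: SRC.amount * POW(10, SRC.base)))
  const balance = sources.reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0);
  // Low account if BALANCE < 100 * POW(10, HEAD.unitBase)
  return balance < 100 * Math.pow(10, unitBase);
}

// Example: sources of 40 and 55 units at base 0, with unitBase 0 => 95 < 100 => sources get consumed.
console.log(isLowAccount([{ amount: 40, base: 0 }, { amount: 55, base: 0 }], 0)); // true
```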
diff --git a/doc/manual-config.md b/doc/manual-config.md
index b8dd1b35f49d0dfa4b9ed4f515826a5cd4f4de5b..90282120fc1ce4915e8c05f4479e2518f7212747 100644
--- a/doc/manual-config.md
+++ b/doc/manual-config.md
@@ -19,7 +19,7 @@ Replace `mycurrency` by the name of the currency you want to manage.
 
 ## Network parameters
 
-By default, ucoin runs on port 8033. You may change it using the --port parameter:
+By default, Duniter runs on port 8033. You may change it using the --port parameter:
 
 ```bash
 $ duniter config --port 80
@@ -60,9 +60,9 @@ Note too that listening to multiple interfaces doesn't imply mutiple program ins
 
 ### Peering informations
 
-Duniter protocol uses peering mecanisms, hence needs any ucoin node to be reachable through the network.
+The Duniter protocol uses peering mechanisms, hence any Duniter node needs to be reachable through the network.
 
-As the server may be behind a reverse proxy, or because hosts may change of address, remote informations are likely to be different from listening host and port parameters. ucoin software defines 4 remote parameters you need to precise for your ucoin instance to be working:
+As the server may be behind a reverse proxy, or because hosts may change address, the remote information is likely to differ from the listening host and port parameters. Duniter defines 4 remote parameters you need to specify for your instance to work:
 
 * `--remoteh`
 * `--remote4`
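For illustration (placeholder values; use the host and address other peers can actually reach), each remote parameter can be set with the same `config` command shown above for the port:

```bash
$ duniter config --remoteh duniter.example.org
$ duniter config --remote4 88.77.66.55
```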
diff --git a/gui/index.html b/gui/index.html
index 215d7f646d0272475affa66184a5f61482e2b06b..5202d5dda749fd305682ff49dfacca1b0b210467 100644
--- a/gui/index.html
+++ b/gui/index.html
@@ -3,7 +3,7 @@
 <head>
   <meta charset="utf-8">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
-  <title>Duniter 0.81.0</title>
+  <title>Duniter 0.90.0</title>
   <style>
     html {
       font-family: "Courier New", Courier, monospace;
diff --git a/gui/package.json b/gui/package.json
index e5e0f5770c554ffa12ca07105427cb76a218e643..59a350319a73717e86d551e49593a8553cb40f5f 100644
--- a/gui/package.json
+++ b/gui/package.json
@@ -1,10 +1,10 @@
 {
-  "name": "v0.81.0",
+  "name": "v0.90.0",
   "main": "index.html",
   "node-main": "../sources/bin/duniter",
   "window": {
     "icon": "duniter.png",
-    "title": "v0.81.0",
+    "title": "v0.90.0",
     "width": 800,
     "height": 800,
     "min_width": 750,
diff --git a/index.js b/index.js
index f449dc6fcdcb956b85aa20be178e473295a0ec07..151cf07d64df3a45e6ab553e8d085cf736fc13ed 100644
--- a/index.js
+++ b/index.js
@@ -1,133 +1,422 @@
 "use strict";
 
+const Q = require('q');
 const co = require('co');
+const es = require('event-stream');
+const util = require('util');
+const stream = require('stream');
 const _ = require('underscore');
 const Server = require('./server');
+const directory = require('./app/lib/system/directory');
+const constants = require('./app/lib/constants');
+const wizard = require('./app/lib/wizard');
 const logger = require('./app/lib/logger')('duniter');
 
-module.exports = function (dbConf, overConf) {
-  return new Server(dbConf, overConf);
+const configDependency    = require('./app/modules/config');
+const wizardDependency    = require('./app/modules/wizard');
+const resetDependency     = require('./app/modules/reset');
+const checkConfDependency = require('./app/modules/check-config');
+const exportBcDependency  = require('./app/modules/export-bc');
+const reapplyDependency   = require('./app/modules/reapply');
+const revertDependency    = require('./app/modules/revert');
+const daemonDependency    = require('./app/modules/daemon');
+const pSignalDependency   = require('./app/modules/peersignal');
+const proverDependency    = require('duniter-prover');//require('./app/modules/prover');
+const routerDependency    = require('./app/modules/router');
+
+const MINIMAL_DEPENDENCIES = [
+  { name: 'duniter-config',    required: configDependency }
+];
+
+const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([
+  { name: 'duniter-wizard',    required: wizardDependency },
+  { name: 'duniter-reset',     required: resetDependency },
+  { name: 'duniter-chkconf',   required: checkConfDependency },
+  { name: 'duniter-exportbc',  required: exportBcDependency },
+  { name: 'duniter-reapply',   required: reapplyDependency },
+  { name: 'duniter-revert',    required: revertDependency },
+  { name: 'duniter-daemon',    required: daemonDependency },
+  { name: 'duniter-psignal',   required: pSignalDependency },
+  { name: 'duniter-router',    required: routerDependency }
+]);
+
+const PRODUCTION_DEPENDENCIES = DEFAULT_DEPENDENCIES.concat([
+  { name: 'duniter-prover',   required: proverDependency }
+]);
+
+module.exports = function (home, memory, overConf) {
+  return new Server(home, memory, overConf);
 };
 
 module.exports.statics = {
 
   logger: logger,
 
-  /**************
-   * Duniter used by its Command Line Interface
-   * @param onService A callback for external usage when Duniter server is ready
+  /**
+   * Creates a new stack with minimal registrations only.
    */
-  cli: (onService) => {
+  minimalStack: () => new Stack(MINIMAL_DEPENDENCIES),
 
-    const cli = require('./app/cli');
-
-    // Specific errors handling
-    process.on('uncaughtException', (err) => {
-      // Dunno why this specific exception is not caught
-      if (err.code !== "EADDRNOTAVAIL" && err.code !== "EINVAL") {
-        logger.error(err);
-        process.exit(1);
-      }
-    });
+  /**
+   * Creates a new stack with core registrations only.
+   */
+  simpleStack: () => new Stack(DEFAULT_DEPENDENCIES),
 
-    process.on('unhandledRejection', (reason) => {
-      logger.error('Unhandled rejection: ' + reason);
-    });
+  /**
+   * Creates a new stack pre-registered with compliant modules found in package.json
+   */
+  autoStack: () => {
+    const pjson = require('./package.json');
+    const duniterModules = [];
 
-    return co(function*() {
-      try {
-        // Prepare the command
-        const command = cli(process.argv);
-        // If ever the process gets interrupted
-        process.on('SIGINT', () => {
-          co(function*() {
-            yield command.closeCommand();
-            process.exit();
-          });
+    // Look for compliant packages
+    const prodDeps = Object.keys(pjson.dependencies);
+    const devDeps = Object.keys(pjson.devDependencies);
+    const duniterDeps = _.filter(prodDeps.concat(devDeps), (dep) => dep.match(/^duniter-/));
+    for(const dep of duniterDeps) {
+      const required = require(dep);
+      if (required.duniter) {
+        duniterModules.push({
+          name: dep,
+          required
         });
-        // Executes the command
-        yield command.execute(onService);
-        process.exit();
-      } catch (e) {
-        logger.error(e);
-        process.exit(1);
       }
-    });
+    }
 
-  },
+    // The final stack
+    return new Stack(PRODUCTION_DEPENDENCIES.concat(duniterModules));
+  }
+};
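+
+// Typical usage (sketch; `duniter` is this package's entry point):
+//   const duniter = require('duniter');
+//   duniter.statics.autoStack().executeStack(process.argv);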
 
-  autoStack: () => {
+function Stack(dependencies) {
 
-    const cli = require('./app/cli');
-    const stack = {
+  const that = this;
+  const cli = require('./app/cli')();
+  const configLoadingCallbacks = [];
+  const configBeforeSaveCallbacks = [];
+  const INPUT = new InputStream();
+  const PROCESS = new ProcessStream();
+  const loaded = {};
+  const wizardTasks = {};
 
-      registerDependency: (requiredObject) => {
-        for (const opt of (requiredObject.duniter.cliOptions || [])) {
-          cli.addOption(opt.value, opt.desc, opt.parser);
-        }
-        for (const command of (requiredObject.duniter.cli || [])) {
-          cli.addCommand({ name: command.name, desc: command.desc }, command.requires, command.promiseCallback);
-        }
-      },
+  const definitions = [];
+  const streams = {
+    input: [],
+    process: [],
+    output: [],
+    neutral: []
+  };
 
-      executeStack: () => {
+  this.registerDependency = (requiredObject, name) => {
+    if (name && loaded[name]) {
+      // Do not try to load it twice
+      return;
+    }
+    loaded[name] = true;
+    const def = requiredObject.duniter;
+    definitions.push(def);
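+    // Expected shape of a module definition `def` (sketch; every field is optional):
+    //   cliOptions: [{ value: '--my-flag <value>', desc: '...', parser: fn }]
+    //   cli:        [{ name: 'my-command <param>', desc: '...', logs: false /* mute logs */,
+    //                  onConfiguredExecute(server, conf, program, params, wizardTasks),
+    //                  onDatabaseExecute(server, conf, program, params, startServices, stopServices) }]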
+    for (const opt of (def.cliOptions || [])) {
+      cli.addOption(opt.value, opt.desc, opt.parser);
+    }
+    for (const command of (def.cli || [])) {
+      cli.addCommand({
+        name: command.name,
+        desc: command.desc
+      }, (...args) => that.processCommand.apply(null, [command].concat(args)));
+    }
 
-        // Specific errors handling
-        process.on('uncaughtException', (err) => {
-          // Dunno why this specific exception is not caught
-          if (err.code !== "EADDRNOTAVAIL" && err.code !== "EINVAL") {
-            logger.error(err);
-            process.exit(1);
-          }
-        });
+    /**
+     * Configuration injection
+     * -----------------------
+     */
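+    // Sketch of a module's `config` hooks (both optional):
+    //   config: {
+    //     onLoading:  (conf, program, logger) => Promise,  // may mutate the loaded conf
+    //     beforeSave: (conf, program, logger) => Promise   // may clean a clone of conf before saving
+    //   }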
+    if (def.config) {
+      if (def.config.onLoading) {
+        configLoadingCallbacks.push(def.config.onLoading);
+      }
+      // Before the configuration is saved, the module can make some injection/cleaning
+      if (def.config.beforeSave) {
+        configBeforeSaveCallbacks.push(def.config.beforeSave);
+      }
+    }
 
-        process.on('unhandledRejection', (reason) => {
-          logger.error('Unhandled rejection: ' + reason);
-        });
+    /**
+     * Wizard injection
+     * -----------------------
+     */
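+    // Sketch: `def.wizard` maps task names to interactive functions; they are gathered
+    // here and handed to commands through the `wizardTasks` argument of onConfiguredExecute.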
+    if (def.wizard) {
+      const tasks = Object.keys(def.wizard);
+      for (const name of tasks) {
+        wizardTasks[name] = def.wizard[name];
+      }
+    }
+  };
+
+  this.processCommand = (...args) => co(function*() {
+    const command = args[0];
+    const program = args[1];
+    const params  = args.slice(2);
+    params.pop(); // Don't need the command argument
+
+    const dbName = program.mdb;
+    const dbHome = program.home;
+    const home = directory.getHome(dbName, dbHome);
+
+    if (command.logs === false) {
+      logger.mute();
+    }
+
+    // Add log files for this instance
+    logger.addHomeLogs(home);
 
-        return co(function*() {
+    const server = new Server(home, program.memory === true, commandLineConf(program));
+
+    // If ever the process gets interrupted
+    let isSaving = false;
+    process.on('SIGINT', () => {
+      co(function*() {
+        if (!isSaving) {
+          isSaving = true;
+          // Save DB
           try {
-            // Prepare the command
-            const command = cli(process.argv);
-            // If ever the process gets interrupted
-            process.on('SIGINT', () => {
-              co(function*() {
-                yield command.closeCommand();
-                process.exit();
-              });
-            });
-            // Executes the command
-            yield command.execute();
+            yield server.disconnect();
             process.exit();
           } catch (e) {
             logger.error(e);
-            process.exit(1);
+            process.exit(3);
           }
-        });
+        }
+      });
+    });
+
+    // Initialize server (db connection, ...)
+    try {
+      yield server.plugFileSystem();
+
+      // Register the configuration hook for loading phase (overrides the loaded data)
+      server.dal.loadConfHook = (conf) => co(function*() {
+        // Loading injection
+        for (const callback of configLoadingCallbacks) {
+          yield callback(conf, program, logger);
+        }
+      });
+
+      // Register the configuration hook for saving phase (overrides the saved data)
+      server.dal.saveConfHook = (conf) => co(function*() {
+        const clonedConf = _.clone(conf);
+        for (const callback of configBeforeSaveCallbacks) {
+          yield callback(clonedConf, program, logger);
+        }
+        return clonedConf;
+      });
+
+      const conf = yield server.loadConf();
+      // Apply command-line adjustments to the configuration (e.g. endpoints to add/remove)
+      yield configure(program, server, server.conf || {});
+      // Autosave conf
+      try {
+        yield server.dal.saveConf(conf);
+        logger.debug("Configuration saved.");
+      } catch (e) {
+        logger.error("Configuration could not be saved: " + e);
+        throw Error(e);
+      }
+      // First possible class of commands: post-config
+      if (command.onConfiguredExecute) {
+        return yield command.onConfiguredExecute(server, conf, program, params, wizardTasks);
       }
-    };
+      // Second possible class of commands: post-service
+      yield server.initDAL();
 
-    const pjson = require('./package.json');
-    const duniterModules = [];
+      /**
+       * Service injection
+       * -----------------
+       */
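+      // Sketch of a module's `service` section: factories taking (server, conf, logger)
+      // and returning streams that implement startService()/stopService():
+      //   service: { input: fn, process: fn, output: fn, neutral: fn }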
+      for (const def of definitions) {
+        if (def.service) {
+          // To feed data coming from some I/O (network, disk, other module, ...)
+          if (def.service.input) {
+            streams.input.push(def.service.input(server, conf, logger));
+          }
+          // To handle data that has been submitted by INPUT stream
+          if (def.service.process) {
+            streams.process.push(def.service.process(server, conf, logger));
+          }
+          // To handle data that has been validated by PROCESS stream
+          if (def.service.output) {
+            streams.output.push(def.service.output(server, conf, logger));
+          }
+          // Special service which does not stream anything in particular (e.g. piloting the `server` object)
+          if (def.service.neutral) {
+            streams.neutral.push(def.service.neutral(server, conf, logger));
+          }
+        }
+      }
 
-    // Look for compliant packages
-    const prodDeps = Object.keys(pjson.dependencies);
-    const devDeps = Object.keys(pjson.devDependencies);
-    const duniterDeps = _.filter(prodDeps.concat(devDeps), (dep) => dep.match(/^duniter-/));
-    for(const dep of duniterDeps) {
-      const required = require(dep);
-      if (required.duniter) {
-        duniterModules.push({
-          name: dep,
-          required
-        });
+      return yield command.onDatabaseExecute(server, conf, program, params,
+
+        // Start services and streaming between them
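+        // (data flow: each input -> INPUT -> each process -> PROCESS -> each output)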
+        () => co(function*() {
+          // All inputs write to global INPUT stream
+          for (const module of streams.input) module.pipe(INPUT);
+          // All processes read from global INPUT stream
+          for (const module of streams.process) INPUT.pipe(module);
+          // All processes write to global PROCESS stream
+          for (const module of streams.process) module.pipe(PROCESS);
+          // All outputs read from global PROCESS stream
+          for (const module of streams.output) PROCESS.pipe(module);
+          // Any streaming module must implement a `startService` method
+          const modules = streams.input.concat(streams.process).concat(streams.output).concat(streams.neutral);
+          yield modules.map(module => module.startService());
+        }),
+
+        // Stop services and streaming between them
+        () => co(function*() {
+          const modules = streams.input.concat(streams.process).concat(streams.output);
+          // Any streaming module must implement a `stopService` method
+          yield modules.map(module => module.stopService());
+          // Stop reading inputs
+          for (const module of streams.input) module.unpipe();
+          // Stop reading from global INPUT
+          INPUT.unpipe();
+          for (const module of streams.process) module.unpipe();
+          // Stop reading from global PROCESS
+          PROCESS.unpipe();
+        }));
+    } catch (e) {
+      server.disconnect();
+      throw e;
+    }
+  });
+
+  this.executeStack = (argv) => {
+
+    // Trace unhandled promise rejections
+    process.on('unhandledRejection', (reason) => {
+      logger.error('Unhandled rejection: ' + reason);
+      logger.error(reason);
+    });
+
+    // Executes the command
+    return cli.execute(argv);
+  };
+
+  // We register the initial dependencies right now. Others can be added thereafter.
+  for (const dep of dependencies) {
+    that.registerDependency(dep.required, dep.name);
+  }
+}
+
+function commandLineConf(program, conf) {
+
+  conf = conf || {};
+  conf.sync = conf.sync || {};
+  const cli = {
+    currency: program.currency,
+    cpu: program.cpu,
+    server: {
+      port: program.port,
+    },
+    db: {
+      mport: program.mport,
+      mdb: program.mdb,
+      home: program.home
+    },
+    logs: {
+      http: program.httplogs,
+      nohttp: program.nohttplogs
+    },
+    endpoints: [],
+    rmEndpoints: [],
+    isolate: program.isolate,
+    forksize: program.forksize,
+    nofork: program.nofork,
+    timeout: program.timeout
+  };
+
+  // Update conf
+  if (cli.currency)                         conf.currency = cli.currency;
+  if (cli.server.port)                      conf.port = cli.server.port;
+  if (cli.cpu)                              conf.cpu = Math.max(0.01, Math.min(1.0, cli.cpu));
+  if (cli.logs.http)                        conf.httplogs = true;
+  if (cli.logs.nohttp)                      conf.httplogs = false;
+  if (cli.db.mport)                         conf.mport = cli.db.mport;
+  if (cli.db.home)                          conf.home = cli.db.home;
+  if (cli.db.mdb)                           conf.mdb = cli.db.mdb;
+  if (cli.isolate)                          conf.isolate = cli.isolate;
+  if (cli.timeout)                          conf.timeout = cli.timeout;
+  if (cli.forksize != null)                 conf.forksize = cli.forksize;
+
+  return conf;
+}
+
+function configure(program, server, conf) {
+  return co(function *() {
+    if (typeof server == "string" || typeof conf == "string") {
+      throw constants.ERRORS.CLI_CALLERR_CONFIG;
+    }
+    // Try to add an endpoint if provided
+    if (program.addep) {
+      if (conf.endpoints.indexOf(program.addep) === -1) {
+        conf.endpoints.push(program.addep);
+      }
+      // Remove it from "to be removed" list
+      const indexInRemove = conf.rmEndpoints.indexOf(program.addep);
+      if (indexInRemove !== -1) {
+        conf.rmEndpoints.splice(indexInRemove, 1);
       }
     }
+    // Try to remove an endpoint if provided
+    if (program.remep) {
+      if (conf.rmEndpoints.indexOf(program.remep) === -1) {
+        conf.rmEndpoints.push(program.remep);
+      }
+      // Remove it from "to be added" list
+      const indexInToAdd = conf.endpoints.indexOf(program.remep);
+      if (indexInToAdd !== -1) {
+        conf.endpoints.splice(indexInToAdd, 1);
+      }
+    }
+  });
+}
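+
+// Example (sketch): `configure(program, server, conf)` with `program.addep` set to an
+// endpoint string appends it to conf.endpoints and drops it from conf.rmEndpoints;
+// `program.remep` does the symmetric removal.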
+
+/**
+ * InputStream is a special stream that filters what passes in.
+ * Only DUP-like documents should be treated by the processing tools, to avoid JSON injection and save CPU cycles.
+ * @constructor
+ */
+function InputStream() {
+
+  const that = this;
+
+  stream.Transform.call(this, { objectMode: true });
+
+  this._write = function (str, enc, done) {
+    if (typeof str === 'string') {
+      // Keep only strings
+      const matches = str.match(/Type: (.*)\n/);
+      if (matches && matches[1].match(/(Block|Membership|Identity|Certification|Transaction|Peer)/)) {
+        const type = matches[1].toLowerCase();
+        that.push({ type, doc: str });
+      }
+    }
+    done && done();
+  };
+}
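+
+// Example (sketch): writing a raw document that starts with "Type: Peer\n" pushes
+// { type: 'peer', doc: <raw string> } downstream; non-string input and unknown
+// document types are silently dropped.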
+
+function ProcessStream() {
+
+  const that = this;
 
-    for (const duniterModule of duniterModules) {
-      stack.registerDependency(duniterModule.required);
+  stream.Transform.call(this, { objectMode: true });
+
+  this._write = function (obj, enc, done) {
+    // Never push null/undefined, which would close the stream
+    if (obj !== undefined && obj !== null) {
+      that.push(obj);
     }
+    done && done();
+  };
+}
 
-    return stack;
-  }
-};
+util.inherits(InputStream, stream.Transform);
+util.inherits(ProcessStream, stream.Transform);
diff --git a/install.sh b/install.sh
index 4ad4d825efaa7315b4a388dae80ef0cf677588f3..1f8c8722924320a434246b19431505c02e1b362d 100755
--- a/install.sh
+++ b/install.sh
@@ -11,7 +11,7 @@ if [ -z "$DUNITER_DIR" ]; then
 fi
 
 latest_version() {
-  echo "v0.81.0"
+  echo "v0.90.0"
 }
 
 repo_url() {
diff --git a/package.json b/package.json
index 40f8f461b7c0b2ba4eeb8c3788bdadc1d0d4b271..a53f122b4288045e3373e729e51ec62620d6a936 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "duniter",
-  "version": "0.81.0",
+  "version": "0.90.0",
   "engines": {
     "node": ">=4.2.0",
     "npm": ">=2.11"
@@ -38,51 +38,41 @@
     "archiver": "1.0.1",
     "async": "1.5.2",
     "bindings": "1.2.1",
-    "body-parser": "1.15.2",
     "co": "4.6.0",
     "colors": "1.1.2",
     "commander": "2.9.0",
-    "cors": "2.8.1",
     "daemonize2": "0.4.2",
-    "ddos": "0.1.16",
-    "errorhandler": "1.4.3",
+    "duniter-bma": "0.2.0",
+    "duniter-crawler": "0.2.1",
+    "duniter-keypair": "0.1.2",
+    "duniter-prover": "0.2.0",
+    "duniter-common": "0.1.0",
     "event-stream": "3.3.4",
-    "express": "4.14.0",
-    "express-cors": "0.0.3",
-    "express-fileupload": "0.0.5",
     "inquirer": "0.8.5",
     "jison": "0.4.17",
     "merkle": "0.5.1",
     "moment": "2.15.1",
-    "morgan": "1.7.0",
-    "multimeter": "0.1.1",
-    "naclb": "1.3.7",
-    "nnupnp": "1.0.2",
     "node-pre-gyp": "0.6.32",
-    "node-uuid": "1.4.7",
     "optimist": "0.6.1",
     "parallelshell": "2.0.0",
     "q": "1.4.1",
     "q-io": "1.13.2",
+    "querablep": "^0.1.0",
     "request": "2.75.0",
     "request-promise": "4.1.1",
-    "scryptb": "6.0.4",
     "sha1": "1.1.1",
     "sqlite3": "3.1.4",
     "superagent": "2.3.0",
-    "tweetnacl": "0.14.3",
     "underscore": "1.8.3",
     "unzip": "0.1.11",
     "unzip2": "0.2.5",
     "winston": "2.2.0",
-    "wotb": "0.4.14",
-    "ws": "1.1.1"
+    "wotb": "0.4.14"
   },
   "devDependencies": {
     "coveralls": "2.11.4",
-    "duniter-ui": "0.2.14",
-    "eslint": "0.21.1",
-    "eslint-plugin-mocha": "0.2.2",
+    "eslint": "3.13.1",
+    "eslint-plugin-mocha": "4.8.0",
     "istanbul": "0.4.0",
     "mocha": "2.2.5",
     "mocha-eslint": "0.1.7",
diff --git a/server.js b/server.js
index 8699dac40ccb4e8cb5cd707b4e457b2dcca3d1ed..954f082e9352e740cfbf3030a303f0b39a77f9ca 100644
--- a/server.js
+++ b/server.js
@@ -12,25 +12,16 @@ const parsers     = require('./app/lib/streams/parsers');
 const constants   = require('./app/lib/constants');
 const fileDAL     = require('./app/lib/dal/fileDAL');
 const jsonpckg    = require('./package.json');
-const router      = require('./app/lib/streams/router');
-const base58      = require('./app/lib/crypto/base58');
-const keyring      = require('./app/lib/crypto/keyring');
+const keyring      = require('duniter-common').keyring;
 const directory   = require('./app/lib/system/directory');
-const dos2unix    = require('./app/lib/system/dos2unix');
-const Synchroniser = require('./app/lib/sync');
-const multicaster = require('./app/lib/streams/multicaster');
-const upnp        = require('./app/lib/system/upnp');
-const rawer       = require('./app/lib/ucp/rawer');
-const permanentProver = require('./app/lib/computation/permanentProver');
+const rawer       = require('duniter-common').rawer;
 
-function Server (dbConf, overrideConf) {
+function Server (home, memoryOnly, overrideConf) {
 
   stream.Duplex.call(this, { objectMode: true });
 
-  const home = directory.getHome(dbConf.name, dbConf.home);
-  const paramsP = directory.getHomeParams(dbConf && dbConf.memory, home);
+  const paramsP = directory.getHomeParams(memoryOnly, home);
   const logger = require('./app/lib/logger')('server');
-  const permaProver = this.permaProver = permanentProver(this);
   const that = this;
   that.home = home;
   that.conf = null;
@@ -40,19 +31,28 @@ function Server (dbConf, overrideConf) {
 
   // External libs
   that.lib = {};
-  that.lib.keyring = require('./app/lib/crypto/keyring');
+  that.lib.keyring = require('duniter-common').keyring;
   that.lib.Identity = require('./app/lib/entity/identity');
-  that.lib.rawer = require('./app/lib/ucp/rawer');
-  that.lib.http2raw = require('./app/lib/helpers/http2raw');
-  that.lib.dos2unix = require('./app/lib/system/dos2unix');
-  that.lib.contacter = require('./app/lib/contacter');
-  that.lib.bma = require('./app/lib/streams/bma');
+  that.lib.Certification = require('./app/lib/entity/certification');
+  that.lib.Transaction = require('./app/lib/entity/transaction');
+  that.lib.Peer = require('./app/lib/entity/peer');
+  that.lib.Membership = require('./app/lib/entity/membership');
+  that.lib.Block = require('./app/lib/entity/block');
+  that.lib.Stat = require('./app/lib/entity/stat');
+  that.lib.rawer = require('duniter-common').rawer;
+  that.lib.parsers = require('./app/lib/streams/parsers');
+  that.lib.http2raw = require('duniter-bma').duniter.methods.http2raw;
+  that.lib.dos2unix = require('duniter-common').dos2unix;
+  that.lib.contacter = require('duniter-crawler').duniter.methods.contacter;
+  that.lib.bma = require('duniter-bma').duniter.methods.bma;
   that.lib.network = require('./app/lib/system/network');
   that.lib.constants = require('./app/lib/constants');
-  that.lib.ucp = require('./app/lib/ucp/buid');
+  that.lib.ucp = require('duniter-common').buid;
+  that.lib.hashf = require('duniter-common').hashf;
+  that.lib.indexer = require('./app/lib/dup/indexer');
+  that.lib.rules = require('./app/lib/rules');
 
   that.MerkleService       = require("./app/lib/helpers/merkle");
-  that.ParametersService   = require("./app/lib/helpers/parameters")();
   that.IdentityService     = require('./app/service/IdentityService')();
   that.MembershipService   = require('./app/service/MembershipService')();
   that.PeeringService      = require('./app/service/PeeringService')(that);
@@ -105,7 +105,6 @@ function Server (dbConf, overrideConf) {
     const defaultValues = {
       remoteipv6:         that.conf.ipv6,
       remoteport:         that.conf.port,
-      cpu:                constants.DEFAULT_CPU,
       c:                  constants.CONTRACT.DEFAULT.C,
       dt:                 constants.CONTRACT.DEFAULT.DT,
       ud0:                constants.CONTRACT.DEFAULT.UD0,
@@ -132,25 +131,13 @@ function Server (dbConf, overrideConf) {
         that.conf[key] = defaultValues[key];
       }
     });
-    logger.debug('Loading crypto functions...');
     // Extract key pair
-    let keyPair = null;
-    const keypairOverriden = overrideConf && (overrideConf.salt || overrideConf.passwd);
-    if (!keypairOverriden && that.conf.pair) {
-      keyPair = keyring.Key(that.conf.pair.pub, that.conf.pair.sec);
-    }
-    else if (that.conf.passwd || that.conf.salt) {
-      keyPair = yield keyring.scryptKeyPair(that.conf.salt, that.conf.passwd);
-    }
-    if (keyPair) {
-      that.keyPair = keyPair;
-      that.sign = keyPair.sign;
-      // Update services
-      [that.IdentityService, that.MembershipService, that.PeeringService, that.BlockchainService, that.TransactionsService].map((service) => {
-        service.setConfDAL(that.conf, that.dal, that.keyPair);
-      });
-      that.router().setConfDAL(that.conf, that.dal);
-    }
+    that.keyPair = keyring.Key(that.conf.pair.pub, that.conf.pair.sec);
+    that.sign = that.keyPair.sign;
+    // Update services
+    [that.IdentityService, that.MembershipService, that.PeeringService, that.BlockchainService, that.TransactionsService].map((service) => {
+      service.setConfDAL(that.conf, that.dal, that.keyPair);
+    });
     return that.conf;
   });
 
@@ -202,63 +189,15 @@ function Server (dbConf, overrideConf) {
 
   this.initDAL = () => this.dal.init();
 
-  this.start = () => co(function*(){
-    yield that.checkConfig();
-    // Add signing & public key functions to PeeringService
-    logger.info('Node version: ' + that.version);
-    logger.info('Node pubkey: ' + that.PeeringService.pubkey);
-    return that.initPeer();
-  });
-
   this.recomputeSelfPeer = () => that.PeeringService.generateSelfPeer(that.conf, 0);
 
-  this.initPeer = () => co(function*(){
-      yield that.checkConfig();
-      yield Q.nbind(that.PeeringService.regularCrawlPeers, that.PeeringService);
-      logger.info('Storing self peer...');
-      yield that.PeeringService.regularPeerSignal();
-      yield Q.nbind(that.PeeringService.regularTestPeers, that.PeeringService);
-      yield Q.nbind(that.PeeringService.regularSyncBlock, that.PeeringService);
-  });
-
-  this.stopBlockComputation = () => permaProver.stopEveryting();
-  
   this.getCountOfSelfMadePoW = () => this.BlockchainService.getCountOfSelfMadePoW();
   this.isServerMember = () => this.BlockchainService.isMember();
 
-  this.isPoWWaiting = () => permaProver.isPoWWaiting();
-
-  this.startBlockComputation = () => permaProver.allowedToStart();
-
-  permaProver.onBlockComputed((block) => co(function*() {
-    try {
-      const obj = parsers.parseBlock.syncWrite(dos2unix(block.getRawSigned()));
-      yield that.singleWritePromise(obj);
-    } catch (err) {
-      logger.warn('Proof-of-work self-submission: %s', err.message || err);
+  this.checkConfig = () => co(function*() {
+    if (!that.conf.pair) {
+      throw new Error('No keypair was given.');
     }
-  }));
-
-  this.checkConfig = () => {
-    return that.checkPeeringConf(that.conf);
-  };
-
-  this.checkPeeringConf = (conf) => co(function*() {
-      if (!conf.pair && conf.passwd == null) {
-        throw new Error('No key password was given.');
-      }
-      if (!conf.pair && conf.salt == null) {
-        throw new Error('No key salt was given.');
-      }
-      if(!conf.ipv4 && !conf.ipv6){
-        throw new Error("No interface to listen to.");
-      }
-      if(!conf.remoteipv4 && !conf.remoteipv6 && !conf.remotehost){
-        throw new Error('No interface for remote contact.');
-      }
-      if (!conf.remoteport) {
-        throw new Error('No port for remote contact.');
-      }
   });
 
   this.resetHome = () => co(function *() {
@@ -382,16 +321,6 @@ function Server (dbConf, overrideConf) {
 
   this.disconnect = () => Promise.resolve(that.dal && that.dal.close());
 
-  this.pullBlocks = that.PeeringService.pullBlocks;
-
-  // Unit Tests or Preview method
-  this.doMakeNextBlock = (manualValues) => that.BlockchainService.makeNextBlock(null, null, manualValues);
-
-  this.doCheckBlock = (block) => {
-    const parsed = parsers.parseBlock.syncWrite(block.getRawSigned());
-    return that.BlockchainService.checkBlock(parsed, false);
-  };
-
   this.revert = () => this.BlockchainService.revertCurrentBlock();
 
   this.revertTo = (number) => co(function *() {
@@ -417,67 +346,6 @@ function Server (dbConf, overrideConf) {
 
   this.singleWritePromise = (obj) => that.submit(obj);
 
-  let theRouter;
-
-  this.router = (active) => {
-    if (!theRouter) {
-      theRouter = router(that.PeeringService, that.conf, that.dal);
-    }
-    theRouter.setActive(active !== false);
-    return theRouter;
-  };
-
-  /**
-   * Synchronize the server with another server.
-   *
-   * If local server's blockchain is empty, process a fast sync: **no block is verified in such a case**, unless
-   * you force value `askedCautious` to true.
-   *
-   * @param onHost Syncs on given host.
-   * @param onPort Syncs on given port.
-   * @param upTo Sync up to this number, if `upTo` value is a positive integer.
-   * @param chunkLength Length of each chunk of blocks to download. Kind of buffer size.
-   * @param interactive Tell if the loading bars should be used for console output.
-   * @param askedCautious If true, force the verification of each downloaded block. This is the right way to have a valid blockchain for sure.
-   * @param nopeers If true, sync will omit to retrieve peer documents.
-   * @param noShufflePeers If true, sync will NOT shuffle the retrieved peers before downloading on them.
-   */
-  this.synchronize = (onHost, onPort, upTo, chunkLength, interactive, askedCautious, nopeers, noShufflePeers) => {
-    const remote = new Synchroniser(that, onHost, onPort, that.conf, interactive === true);
-    const syncPromise = remote.sync(upTo, chunkLength, askedCautious, nopeers, noShufflePeers === true);
-    return {
-      flow: remote,
-      syncPromise: syncPromise
-    };
-  };
-  
-  this.testForSync = (onHost, onPort) => {
-    const remote = new Synchroniser(that, onHost, onPort);
-    return remote.test();
-  };
-
-  /**
-   * Enable routing features:
-   *   - The server will try to send documents to the network
-   *   - The server will eventually be notified of network failures
-   */
-  this.routing = () => {
-    // The router asks for multicasting of documents
-    this.pipe(this.router())
-      // The documents get sent to peers
-      .pipe(multicaster(this.conf))
-      // The multicaster may answer 'unreachable peer'
-      .pipe(this.router());
-  };
-
-  this.upnp = () => co(function *() {
-    const upnpAPI = yield upnp(that.conf.port, that.conf.remoteport);
-    that.upnpAPI = upnpAPI;
-    return upnpAPI;
-  });
-
-  this.applyCPU = (cpu) => that.BlockchainService.changeProverCPUSetting(cpu);
-  
   this.rawer = rawer;
 
   this.writeRaw = (raw, type) => co(function *() {
@@ -491,49 +359,6 @@ function Server (dbConf, overrideConf) {
    * @param linesQuantity
    */
   this.getLastLogLines = (linesQuantity) => this.dal.getLogContent(linesQuantity);
-
-  this.startServices = () => co(function*(){
-
-    /***************
-     * HTTP ROUTING
-     **************/
-    that.router(that.conf.routing);
-
-    /***************
-     *    UPnP
-     **************/
-    if (that.conf.upnp) {
-      try {
-        if (that.upnpAPI) {
-          that.upnpAPI.stopRegular();
-        }
-        yield that.upnp();
-        that.upnpAPI.startRegular();
-      } catch (e) {
-        logger.warn(e);
-      }
-    }
-
-    /*******************
-     * BLOCK COMPUTING
-     ******************/
-    if (that.conf.participate) {
-      that.startBlockComputation();
-    }
-
-    /***********************
-     * CRYPTO NETWORK LAYER
-     **********************/
-    yield that.start();
-  });
-
-  this.stopServices = () => co(function*(){
-    that.router(false);
-    if (that.conf.participate) {
-      that.stopBlockComputation();
-    }
-    return that.PeeringService.stopRegular();
-  });
 }
 
 util.inherits(Server, stream.Duplex);
diff --git a/test/dal/triming.js b/test/dal/triming.js
index 4fb4fc2583b21020ed2a1344b4010e68a97c9708..770a344013cddbd4044010d35e200f4b121f41f9 100644
--- a/test/dal/triming.js
+++ b/test/dal/triming.js
@@ -1,13 +1,10 @@
 "use strict";
 const co = require('co');
-const _ = require('underscore');
 const should = require('should');
 const FileDAL = require('../../app/lib/dal/fileDAL');
 const dir = require('../../app/lib/system/directory');
-const constants = require('../../app/lib/constants');
 const indexer = require('../../app/lib/dup/indexer');
 const toolbox = require('../integration/tools/toolbox');
-const limiter = require('../../app/lib/system/limiter');
 
 let dal;
 
@@ -16,7 +13,6 @@ describe("Triming", function(){
   before(() => co(function *() {
     dal = FileDAL(yield dir.getHomeParams(true, 'db0'));
     yield dal.init();
-    limiter.noLimit();
   }));
 
   it('should be able to feed the bindex', () => co(function *() {
@@ -46,7 +42,7 @@ describe("Triming", function(){
     yield dal.iindexDAL.insertBatch([
       { op: 'CREATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: 'cat', created_on: '121-H', written_on: '122-H', member: true,  wasMember: true, kick: false },
       { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: null,  created_on: '121-H', written_on: '123-H', member: null,  wasMember: null, kick: true },
-      { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: null,  created_on: '121-H', written_on: '124-H', member: false, wasMember: null, kick: false },
+      { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: null,  created_on: '121-H', written_on: '124-H', member: false, wasMember: null, kick: false }
     ]);
     let lignes = yield dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
     lignes.should.have.length(3);
@@ -74,7 +70,7 @@ describe("Triming", function(){
       { op: 'CREATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '122-H', expires_on: 1000, expired_on: null },
       { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '123-H', expires_on: 1200, expired_on: null },
       { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '124-H', expires_on: null, expired_on: null },
-      { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '125-H', expires_on: 1400, expired_on: null },
+      { op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '125-H', expires_on: 1400, expired_on: null }
     ]);
     const lignes = yield dal.mindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
     lignes.should.have.length(4);
@@ -93,7 +89,7 @@ describe("Triming", function(){
     yield dal.cindexDAL.insertBatch([
       { op: 'CREATE', issuer: 'HgTT', receiver: 'DNan', created_on: '121-H', written_on: '126-H', expires_on: 1000, expired_on: null },
       { op: 'UPDATE', issuer: 'HgTT', receiver: 'DNan', created_on: '121-H', written_on: '126-H', expires_on: null, expired_on: 3000 },
-      { op: 'CREATE', issuer: 'DNan', receiver: 'HgTT', created_on: '125-H', written_on: '126-H', expires_on: null, expired_on: null },
+      { op: 'CREATE', issuer: 'DNan', receiver: 'HgTT', created_on: '125-H', written_on: '126-H', expires_on: null, expired_on: null }
     ]);
     (yield dal.cindexDAL.sqlFind({ issuer: 'HgTT' })).should.have.length(2);
     (yield dal.cindexDAL.sqlFind({ issuer: 'DNan' })).should.have.length(1);
@@ -112,7 +108,7 @@ describe("Triming", function(){
       { op: 'CREATE', identifier: 'SOURCE_1', pos: 4, written_on: '126-H', written_time: 2000, consumed: false },
       { op: 'UPDATE', identifier: 'SOURCE_1', pos: 4, written_on: '139-H', written_time: 4500, consumed: true },
       { op: 'CREATE', identifier: 'SOURCE_2', pos: 4, written_on: '126-H', written_time: 2000, consumed: false },
-      { op: 'CREATE', identifier: 'SOURCE_3', pos: 4, written_on: '126-H', written_time: 2000, consumed: false },
+      { op: 'CREATE', identifier: 'SOURCE_3', pos: 4, written_on: '126-H', written_time: 2000, consumed: false }
     ]);
     (yield dal.sindexDAL.sqlFind({ identifier: 'SOURCE_1' })).should.have.length(2);
     (yield dal.sindexDAL.sqlFind({ pos: 4 })).should.have.length(4);
@@ -132,13 +128,12 @@ describe("Triming", function(){
         pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
         sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
       },
-      participate: false,
       sigQty: 1,
       dtDiffEval: 2,
       medianTimeBlocks: 3
     })).s1;
     // const s1 = server.s1;
-    for (const i of new Array(13)) {
+    for (let i = 0; i < 13; i++) {
       yield server.commit();
     }
     (yield server.dal.bindexDAL.head(1)).should.have.property('number').equal(12);
diff --git a/test/data/blockchain.json b/test/data/blockchain.json
index 5e8074246c3adac716a2ee59326d47da89edc8d0..75cc03f829593d89285bbbce415a058549549b2b 100644
--- a/test/data/blockchain.json
+++ b/test/data/blockchain.json
@@ -5,8 +5,8 @@
       "nonce": 100000000001,
       "number": 0,
       "powMin": 0,
-      "time": 1483970065,
-      "medianTime": 1483970065,
+      "time": 1480000000,
+      "medianTime": 1480000000,
       "membersCount": 2,
       "monetaryMass": null,
       "unitbase": 0,
@@ -15,39 +15,39 @@
       "issuersFrameVar": 0,
       "len": 6,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "xsABrNW4d4SwiJzuJrAfSJkENuMC1yyedtvIlQXDJENmi2zeHKLcMfe4tK7qVpFPbBgK01EewljOom6y7jbkBg==",
-      "hash": "CAEDB205C3B995922A78D167DD9A078312E6A169F8C3800F5060242D7A159751",
-      "parameters": "0.007376575:3600:120:0:40:604800:31536000:1:604800:604800:0.9:31536000:3:20:960:10:0.6666666666666666",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "+ROmUNXfqITwDAe6laDRLKImdmBNGseQGzvIhoYv2sqXnsbaE4HhKNCFZuw2DrOO//Z8uouMgZUQCaqIacHsBw==",
+      "hash": "8E336B655B18CD7E2AFE5E9E178D46D6640EBC6354A3DC89ADF6A4C5EE9E83B8",
+      "parameters": "0.99:300:9995:0:40:604800:31536000:1:604800:604800:0.9:31536000:3:1:5000:10:0.6666666666666666",
       "previousHash": null,
       "previousIssuer": null,
-      "inner_hash": "503EA11DD809C1FCCA8D4E71EF84B29DA9C341CDCF392933FE4B628437A47368",
+      "inner_hash": "CB76B02685CFF0071A14B9FF7DC6B2A96C1A6A8D5059D300ADAC8675734A8777",
       "dividend": null,
       "identities": [
-        "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV:1eubHHbuNfilHMM0G2bI30iZzebQ2cQ1PC7uPAw08FGMMmQCRerlF/3pc4sAcsnexsxBseA/3lY03KlONqJBAg==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:tic",
-        "DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo:lcekuS0eP2dpFL99imJcwvDAwx49diiDMkG8Lj7FLkC/6IJ0tgNjUzCIZgMGi7bL5tODRiWi9B49UMXb8b3MAw==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:toc"
+        "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd:kdg4GA3wNnXt+u/gP9uD34lGhzClhMylDpJew5zhtkQAz84oscNQtDXpvooCWzVsS1kZ25MaH42eL7kOq+sUCg==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:cat",
+        "2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc:oA9lMYvK8CiqQ0K05q5JL42A9nTrWrv2aITJsunPHccB+HuInv3+u/MFmopBiMuUsNL3CSNF6ycyLkVkhcNNAQ==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:tac"
       ],
       "joiners": [
-        "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV:s2hUbokkibTAWGEwErw6hyXSWlWFQ2UWs2PWx8d/kkElAyuuWaQq4Tsonuweh1xn4AC1TVWt4yMR3WrDdkhnAw==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:tic",
-        "DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo:80pUx9YBk0RwqrVrQQA+PuxoNn21A8NwQ3824CQPU1ad9R1oDXc/pU6NVpQv92LM8gaWs/Pm1mLXNNVnr+m6BA==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:toc"
+        "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd:/1zplmVFOvsw8Ko8Y7I2o2eq+cIJOPDwe770Guzjvoq9WiKDbL//ETqUIF2oydyOn46hj9xPaLFCvafCBHZbBg==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:cat",
+        "2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc:9Fj76dW4K/IQfXCZcspwEji3pNdpEuNIx+UWPi86S8xab35wu0Q1Fj/f8bcjlCnlo1PURW/ijaMJiSYIKqiSBg==:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:tac"
       ],
       "actives": [],
       "leavers": [],
       "revoked": [],
       "excluded": [],
       "certifications": [
-        "DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo:DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV:0:vMaYgBSnU+83AYOVQCZAx1XLpg/F1MmMztDfCnZvl8hPs4LE9tcDvCrrFogAwMEW2N7Y0gCH62/fBMgw4KrGCA==",
-        "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV:DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo:0:RKIGMgYIhB9FmjPbmyo4egPufg/iTpBznYGZp5hjK1WZ1a9imQldLNUMe0eiPlSKJTK/JD3gOlCiynOEY2csBA=="
+        "2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc:HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd:0:4QfdSLspQh0c3DlEOZMiadC7GvlX4L6WB/dPdNjh1scVEmeRUEnAFoaa2gec5wdAzAq3vu2gKnQrKb9EAaNwDA==",
+        "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd:2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc:0:hnOxOjpoL7+KVDmaosBgJXX8BBpcUWi5hzWrhjCT/NByZzfQYAknQHg98vGRVVVbsn+oKZO6worHFbbIAP7kAQ=="
       ],
       "transactions": []
     },
     {
       "version": 10,
-      "nonce": 300000000001,
+      "nonce": 100000000001,
       "number": 1,
       "powMin": 0,
-      "time": 1483977274,
-      "medianTime": 1483970065,
+      "time": 1480000300,
+      "medianTime": 1480000000,
       "membersCount": 2,
       "monetaryMass": 0,
       "unitbase": 0,
@@ -56,13 +56,13 @@
       "issuersFrameVar": 5,
       "len": 0,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "cUx2yWePZnAL0V1f+ZgXvArJAhY4JSz79CfL7zj+T43rZNf/zNLWvDMrv4o/atXOmHQKOi2VFks8WLJzd2sgDA==",
-      "hash": "BAAD4C0F600C83BA806B48EFD9106446E767D0DFA8C5EAA2014901683F3CC61D",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "M0JJFbwqlBRqH7xZSfavJV7jyDILVJjgyEAJEdg+uss3P+GDqvlR1tb5qeQ4uhqxRZ81ugp6if04wAOfBeskDg==",
+      "hash": "5B92D05E2949D56FEDD1C24A76CF039861EFB54725E8E635568B96142A067D4E",
       "parameters": "",
-      "previousHash": "CAEDB205C3B995922A78D167DD9A078312E6A169F8C3800F5060242D7A159751",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "650FDC31CBE8D6BED87C0E118A7D6FE5749152AB9C0B9025E9D4543E31BC797E",
+      "previousHash": "8E336B655B18CD7E2AFE5E9E178D46D6640EBC6354A3DC89ADF6A4C5EE9E83B8",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "8C288BEC7B81AA66E3E10445B9587DA4A9EE481DE916FCB9896D808CC0FCD98C",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -78,24 +78,24 @@
       "nonce": 100000000001,
       "number": 2,
       "powMin": 0,
-      "time": 1483977274,
-      "medianTime": 1483973669,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 240,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 2,
       "issuersFrameVar": 4,
       "len": 0,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "G3Z15ArGeoUrqSwaEJMbLTOxtHgrXnNZwbKCOksJohnQX0qsXaoUmz+fLTVqebt1x84cOd7brmyMT1VYTqMgAg==",
-      "hash": "5B506AEB491358614C9ED5C06214F62012250143A74745002CE289DD3DA1499E",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "mVjTQmQPsz49IMDyWXZARvn5kfonX81qcZUS6wKmKXte0kSem1Eu/q6ZJMV0/LMcILyXMrUrPsvSAwl+QzzOAQ==",
+      "hash": "224A337B7EF2D9206C19C81070E90EF37DEAF5678308790E63096119ABF7FED6",
       "parameters": "",
-      "previousHash": "BAAD4C0F600C83BA806B48EFD9106446E767D0DFA8C5EAA2014901683F3CC61D",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "8644E2EE9E3571832B6CFB9FF89119884B7D05AEB809A19CB491B31466BB0E2C",
-      "dividend": 120,
+      "previousHash": "5B92D05E2949D56FEDD1C24A76CF039861EFB54725E8E635568B96142A067D4E",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "91D0BE95402369E619DCB5BA008D12FCCF96853E26F67BCFC3F640B852E9188B",
+      "dividend": 9995,
       "identities": [],
       "joiners": [],
       "actives": [],
@@ -110,23 +110,23 @@
       "nonce": 100000000001,
       "number": 3,
       "powMin": 0,
-      "time": 1483977284,
-      "medianTime": 1483974871,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 240,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 3,
       "issuersFrameVar": 3,
       "len": 8,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "f/Dhe87Mmf+KoUl0+GWrNA2g6QlSP0jP0z7U2ebcBNfvx6feBJVHbSoj95GPsNr4yxo7oJDd9iV/dAEyUK1OBg==",
-      "hash": "2C52F3391B43B0E7AA5370F1FA58DA7E97A835057805757BF27B0C1C400A0148",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "i+VIzH3ipC2C0oLtUNozRZJ1Sfyz1DWjmXG6Z9KHyH+nLQpXW2eiQRfsuiXrAVMQE/5wemyJCspER4u0F5xFAw==",
+      "hash": "82DB0F5ABA211E7A9BE4FCE1DC5DC7DBDF37CA07FDA9E552323975C9DDD6BF4F",
       "parameters": "",
-      "previousHash": "5B506AEB491358614C9ED5C06214F62012250143A74745002CE289DD3DA1499E",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "7C9A44C18642D9B1A8972069E16739FAC64ED4D5E96BF0C1BD12616E050657A3",
+      "previousHash": "224A337B7EF2D9206C19C81070E90EF37DEAF5678308790E63096119ABF7FED6",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "4076D1E3B8A4E00C3CEFA5BAC88CB615FDD1520A917FB5A512FC7EA4B3929A12",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -138,54 +138,54 @@
       "transactions": [
         {
           "version": 10,
-          "blockstamp": "2-5B506AEB491358614C9ED5C06214F62012250143A74745002CE289DD3DA1499E",
+          "blockstamp": "2-224A337B7EF2D9206C19C81070E90EF37DEAF5678308790E63096119ABF7FED6",
           "locktime": 0,
           "issuers": [
-            "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV"
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
           ],
           "inputs": [
-            "120:0:D:DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV:2"
+            "9995:0:D:HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd:2"
           ],
           "unlocks": [
             "0:SIG(0)"
           ],
           "outputs": [
-            "51:0:SIG(DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo)",
-            "69:0:SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV)"
+            "3000:0:SIG(2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc)",
+            "6995:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
           ],
           "comments": [],
           "signatures": [
-            "VnvnY4jKD+XqDLrhsoZ67z4Qr03ugwmaVIFwor2cMoAlTlxpFU4L/+e2oFaTpMal3wr/2VcHMFkf3DfiygJADg=="
+            "9FcU1O2xETZhjYzzeH5h1m6bNElvH19FlGH+UyrrXZr471rIc1NpyKA+g+0gw01coUG0gFfheRGpCsMgsq0iAg=="
           ],
           "comment": "",
           "currency": "duniter_unit_test_currency",
           "block_number": 3,
-          "time": 1483974871
+          "time": 1480000300
         }
       ]
     },
     {
       "version": 10,
-      "nonce": 200000000001,
+      "nonce": 100000000001,
       "number": 4,
       "powMin": 0,
-      "time": 1483975474,
-      "medianTime": 1483975474,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 240,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 4,
       "issuersFrameVar": 2,
-      "len": 9,
+      "len": 8,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "mewLNlB1lbzulripYw5RXZF9+NY1rA1LLteRJJ4WCNbtSO2/zTOS1vJDhJHTyVKpeU92hUtCxmrb6qI/cV0pAA==",
-      "hash": "53CEBCF358C44862DEDD31F714749FD5D405041B1B57459676560E7B097B616F",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "ElgbeXSenPf2nA/3mceCP7uSUy5TWqxzK9FZ+rkcM+k6ICFM5aLyVa8EZP3oAMitUDvI0RInp010rMid9Kt8Dw==",
+      "hash": "B663792962579F4B7FF35C428AA5C352CA54492A4634B69AE42EFAB9840D4EB6",
       "parameters": "",
-      "previousHash": "2C52F3391B43B0E7AA5370F1FA58DA7E97A835057805757BF27B0C1C400A0148",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "4C5784AFDBBD77C19835690E1F0B990B6B41A8A265DC5F5E55BA640D372FDE83",
+      "previousHash": "82DB0F5ABA211E7A9BE4FCE1DC5DC7DBDF37CA07FDA9E552323975C9DDD6BF4F",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "F3FCF1ACF928991D558AE59A388525FB9F78B03BE5723E67082D6C6EB5F51331",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -197,30 +197,29 @@
       "transactions": [
         {
           "version": 10,
-          "blockstamp": "3-2C52F3391B43B0E7AA5370F1FA58DA7E97A835057805757BF27B0C1C400A0148",
+          "blockstamp": "3-82DB0F5ABA211E7A9BE4FCE1DC5DC7DBDF37CA07FDA9E552323975C9DDD6BF4F",
           "locktime": 0,
           "issuers": [
-            "DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo"
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
           ],
           "inputs": [
-            "120:0:D:DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo:2",
-            "51:0:T:3F8D6726BA783AF1136E9F25DE260A097A3B491FDBBD50A68FC1EB556FE96956:0"
+            "6995:0:T:693C54130D1D393767347F657D074FA471E0844FC1CF35A6FDEAC68849737A01:1"
           ],
           "unlocks": [
-            "0:SIG(0)",
-            "1:SIG(0)"
+            "0:SIG(0)"
           ],
           "outputs": [
-            "171:0:SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV)"
+            "5495:0:SIG(2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc)",
+            "1500:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
           ],
           "comments": [],
           "signatures": [
-            "gunVyw4Vuceso5NvZJl8cwd0QSJug53mnEAT0v+3Mwfz7mE7tzZKrYwnCubQkgYI1FNp+wDd/G6e3T+BQsX0Bg=="
+            "m1ilWGfhXnkngZ4ZBNAESLAct83vDSK0GzvFgkV8Mp9Fg7T4gax07lG7RIFj7vPn8K4BUObnFQ4C73eD318NBQ=="
           ],
           "comment": "",
           "currency": "duniter_unit_test_currency",
           "block_number": 4,
-          "time": 1483975474
+          "time": 1480000300
         }
       ]
     },
@@ -229,23 +228,23 @@
       "nonce": 100000000001,
       "number": 5,
       "powMin": 0,
-      "time": 1483989904,
-      "medianTime": 1483975474,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 240,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 5,
       "issuersFrameVar": 1,
-      "len": 0,
+      "len": 8,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "D6S4lNuwDdhoqh9oixqYHIBcnnSBDhAt9e0/tct0z/lFKJ8OdB0hz2FHXx7XPU76cMEYxL4RLE9AnyslM8+rCg==",
-      "hash": "096B52435524FDE970C3F03D54BE7C94D1966B0050E781F7601991D68A828551",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "sylWufEKQyN5hinCRg9pI2v9Lf60wBa+lqI1CLq8KbN1v91y1FaQj8QEj4hqsnPRSOvZd+3I5uotDcxnd2VLAA==",
+      "hash": "17E879B63BB95A6C04D150E9F6F3171F44E644AE4A6E21C8FEE1FA88526593EA",
       "parameters": "",
-      "previousHash": "53CEBCF358C44862DEDD31F714749FD5D405041B1B57459676560E7B097B616F",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "BC82EF017BC76FCF26FDFFC24FFEF92E207B1EC9C2971C3C567A118328FB0D79",
+      "previousHash": "B663792962579F4B7FF35C428AA5C352CA54492A4634B69AE42EFAB9840D4EB6",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "68424CBE1D58AA15A4A446606DD48E7CE718663DF3B6DACC693D4AC227DFEC19",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -254,31 +253,58 @@
       "revoked": [],
       "excluded": [],
       "certifications": [],
-      "transactions": []
+      "transactions": [
+        {
+          "version": 10,
+          "blockstamp": "4-B663792962579F4B7FF35C428AA5C352CA54492A4634B69AE42EFAB9840D4EB6",
+          "locktime": 0,
+          "issuers": [
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
+          ],
+          "inputs": [
+            "1500:0:T:1E47AF2308490CD7480CD509F3D031B9F1E0DEE9E40FEC9CF9462CEE412C0710:1"
+          ],
+          "unlocks": [
+            "0:SIG(0)"
+          ],
+          "outputs": [
+            "1:0:SIG(6EQoFVnFf2xpaRzieNTXmAKU6XkDHYrvgorJ8ppMFa8b)",
+            "1499:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
+          ],
+          "comments": [],
+          "signatures": [
+            "aFgEWGJAt8jk9elmCO1MAxqR+EO4a4C4zt6mfPD3FZNQOgqOPc3RaQ0BLVHdcTaefk/wrWEaiB30kmcKuKbOCA=="
+          ],
+          "comment": "",
+          "currency": "duniter_unit_test_currency",
+          "block_number": 5,
+          "time": 1480000300
+        }
+      ]
     },
     {
       "version": 10,
       "nonce": 100000000001,
       "number": 6,
       "powMin": 0,
-      "time": 1483977879,
-      "medianTime": 1483977879,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 482,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 6,
       "issuersFrameVar": 0,
-      "len": 0,
+      "len": 8,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "JooERXZDZ1hCmS4LImX2R9txI6fGX5COS4JT4yYxTgPfBf2xSDEaNZ0g8Gkrxw2bMSeA/K8b1wS83YQDi+9hBQ==",
-      "hash": "D9BE62C77EAFAC96D28D38E71C06F02B6C302D2E679889F6D107F38F71E33841",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "gSlXYjIn2Cy9p4iCOvu6awEy5JPkgs3bTehjlR91KSPn28z8R8GUUZWtLLCp1/RFTxV+EzWmGIacUeShSzn2AA==",
+      "hash": "550B5EB68E256519244BB7AB8C20786E59318EA7F7937B39BE4838641C4427E6",
       "parameters": "",
-      "previousHash": "096B52435524FDE970C3F03D54BE7C94D1966B0050E781F7601991D68A828551",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "E3B8C442B0F305256CA9C9ACDF832669E25FF6EE6E3BD9706F9EC2C367A2AAF4",
-      "dividend": 121,
+      "previousHash": "17E879B63BB95A6C04D150E9F6F3171F44E644AE4A6E21C8FEE1FA88526593EA",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "A1FCB7719F9DFE2701B59333CB8140FEEE07112564370EBDF79D0DFAD4F72461",
+      "dividend": null,
       "identities": [],
       "joiners": [],
       "actives": [],
@@ -286,30 +312,57 @@
       "revoked": [],
       "excluded": [],
       "certifications": [],
-      "transactions": []
+      "transactions": [
+        {
+          "version": 10,
+          "blockstamp": "5-17E879B63BB95A6C04D150E9F6F3171F44E644AE4A6E21C8FEE1FA88526593EA",
+          "locktime": 0,
+          "issuers": [
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
+          ],
+          "inputs": [
+            "1499:0:T:EC82DA278CC068C456480E783847814BD7B94A8B4C01751FA98ECB6269420055:1"
+          ],
+          "unlocks": [
+            "0:SIG(0)"
+          ],
+          "outputs": [
+            "99:0:SIG(2EvWF9XM6TY3zUDjwi3qfGRW5zhN11TXcUDXdgK2XK41)",
+            "1400:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
+          ],
+          "comments": [],
+          "signatures": [
+            "4tFYR6F8xLztyWzWy/9V9K1p2eFTSnCxoir3nfMAo7esz8+WdFsNHYRxSoN8izYj7oESvRZezkOxgFZYcCwbCA=="
+          ],
+          "comment": "",
+          "currency": "duniter_unit_test_currency",
+          "block_number": 6,
+          "time": 1480000300
+        }
+      ]
     },
     {
       "version": 10,
-      "nonce": 200000000001,
+      "nonce": 100000000001,
       "number": 7,
       "powMin": 0,
-      "time": 1483977879,
-      "medianTime": 1483977879,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 482,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 6,
       "issuersFrameVar": 0,
-      "len": 7,
+      "len": 8,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "apWYTLEJyjJm8ggE2B4S6CbFad1Ch3TjYLFLfuLIpIs9wOY3ha4NuLlJu/d9g1dO86Xnl84KmoL/+4laDyaIDA==",
-      "hash": "AD83DE9C185699AF79BB14F83AC470407D35171129040E8B34C25893675225C0",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "ZQrIH3Dwc/01SEAecFzJXD2PML7LTZoRFnrk99g4dMs0jmSZDvMA48p7Xq1Nkm0bSZ/lKNC8WnCikRoF0sDFCQ==",
+      "hash": "F0B4504EAB57E3F7314A73A18BC936D6C2D7838277AE79F523635F532F008F11",
       "parameters": "",
-      "previousHash": "D9BE62C77EAFAC96D28D38E71C06F02B6C302D2E679889F6D107F38F71E33841",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "499B336E509305DF60014B7D4A3951806EEA1184C850FA68488B968495672C41",
+      "previousHash": "550B5EB68E256519244BB7AB8C20786E59318EA7F7937B39BE4838641C4427E6",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "CB738835E9AC4CB5FC2F0CF9472A0560EBC4A18CD4A9FE5B7D44BECDC19C4471",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -321,28 +374,29 @@
       "transactions": [
         {
           "version": 10,
-          "blockstamp": "6-D9BE62C77EAFAC96D28D38E71C06F02B6C302D2E679889F6D107F38F71E33841",
+          "blockstamp": "6-550B5EB68E256519244BB7AB8C20786E59318EA7F7937B39BE4838641C4427E6",
           "locktime": 0,
           "issuers": [
-            "DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo"
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
           ],
           "inputs": [
-            "121:0:D:DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo:6"
+            "1400:0:T:3245C4AA564213EE8C292ADD5C35D7AEA5AC3CE7932E68F3C37DED1C60990CA5:1"
           ],
           "unlocks": [
             "0:SIG(0)"
           ],
           "outputs": [
-            "121:0:SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV)"
+            "100:0:SIG(DPFgnVSB14QnYFjKNhbFRYLxroSmaXZ53TzgFZBcCxbF)",
+            "1300:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
           ],
           "comments": [],
           "signatures": [
-            "hCsFFk7xGDDWv0AP2DqxmJRRjziJT83ZCfXJq5jf9Vyw99PP+60u8ZsSksScbUGOtjuMkISWqg9VwY2bHfwyDw=="
+            "4/2neIATkINNT8L+0KhIaD8sktRAsySyaE+7yalw7Ay9/Xg6c+Wnzb0ZDF76xIWmS338Gd78l8jUwDs0q6Y0Bw=="
           ],
           "comment": "",
           "currency": "duniter_unit_test_currency",
           "block_number": 7,
-          "time": 1483977879
+          "time": 1480000300
         }
       ]
     },
@@ -351,23 +405,23 @@
       "nonce": 100000000001,
       "number": 8,
       "powMin": 0,
-      "time": 1483977879,
-      "medianTime": 1483977879,
+      "time": 1480000300,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 482,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 6,
       "issuersFrameVar": 0,
       "len": 8,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "djssjpSOuJPW0ze83lqHS/7aRQ7L7N2/XYR2O6OA+pJATrfHh6WRj2fByznM3Q2DWJzqRVqBkJxNxvXA3pjeBw==",
-      "hash": "E286600C0B91A7BC63949F3081FC2E5D5423ACD760DB61E8BC7D4E6697C48CEC",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "2qLOlrXOgb0e+BYPDVLh7GNR+jiEOgeptnCS45rO0U1BMREh1Gtb790eCulHUFIY73beX608vfdWVuRYNHGlAg==",
+      "hash": "F14FF7F1BB7B4FE7376995508DC52BC0071A7796C68D3F5225FF2054A98B7B95",
       "parameters": "",
-      "previousHash": "AD83DE9C185699AF79BB14F83AC470407D35171129040E8B34C25893675225C0",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "1501C49E7F5D3974893677214426ED8D9DA3560E0256AB48B1412B7A81394C09",
+      "previousHash": "F0B4504EAB57E3F7314A73A18BC936D6C2D7838277AE79F523635F532F008F11",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "306287D1F14E984D75B20DF42B787E5A1252D2543228B7F4532CB2B5077A913D",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -379,30 +433,29 @@
       "transactions": [
         {
           "version": 10,
-          "blockstamp": "7-AD83DE9C185699AF79BB14F83AC470407D35171129040E8B34C25893675225C0",
+          "blockstamp": "7-F0B4504EAB57E3F7314A73A18BC936D6C2D7838277AE79F523635F532F008F11",
           "locktime": 0,
           "issuers": [
-            "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV"
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
           ],
           "inputs": [
-            "121:0:T:E625C9976FDC3C1B5DE12C7D8B1230B3B45656443CE14B7CB025AA285932F9CB:0"
+            "1300:0:T:5218AA814F5AE71BF9ECF2DC86D8E8D85968F98E220D2E12DB6AAEFD2CD9EEE0:1"
           ],
           "unlocks": [
             "0:SIG(0)"
           ],
           "outputs": [
-            "121:0:XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB)"
-          ],
-          "comments": [
-            "ok"
+            "999:0:SIG(4WmQWq4NuJtu6mzFDKkmmu6Cm6BZvgoY4b4MMDMwVvu7)",
+            "301:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
           ],
+          "comments": [],
           "signatures": [
-            "RHy+IFdIUqZpLGIjiPSWWM1y102KKS8SJ2EReXpvb0m6bY7xgBUM9f3Wq3oBHUUjp0oj+MJXmBfCHpoMN18zDQ=="
+            "CAegVJIEaysMrv6sRZRRQPO9BTZfJB5oSpo2/y/9OCUWwv7cA6LvYMtXgSdDvoGgzJJa2NzxCkQjA6vBrni+Dw=="
           ],
-          "comment": "ok",
+          "comment": "",
           "currency": "duniter_unit_test_currency",
           "block_number": 8,
-          "time": 1483977879
+          "time": 1480000300
         }
       ]
     },
@@ -411,23 +464,23 @@
       "nonce": 100000000001,
       "number": 9,
       "powMin": 0,
-      "time": 1483977879,
-      "medianTime": 1483977879,
+      "time": 1480000900,
+      "medianTime": 1480000300,
       "membersCount": 2,
-      "monetaryMass": 482,
+      "monetaryMass": 19990,
       "unitbase": 0,
       "issuersCount": 1,
       "issuersFrame": 6,
       "issuersFrameVar": 0,
-      "len": 8,
+      "len": 16,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "8N3YT1DJEXVfyMBxYagkVvI2llzPk6CMgBDIiuD3ZBPCbWIrpYGpfwQ6N6AG6Dc5dWh2xEV5P9RAQY+zFijECA==",
-      "hash": "EE55B59D8415A02D4B6062627A5654A5BEDAA2DDF9F75A309BB0E9B96A9A9E64",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "pyNNE4Afc7ZCLn/OdRtvrnhJf9oL/X/qswRsqF75m3bX8ldVP4TJJqa6H+KB1XlV920wMdXSmQA3M5AdCt0mBA==",
+      "hash": "78CB9C74839CAC5726032810DCD2BE30B7C1B664CF8111C733D2FE548CCE38A2",
       "parameters": "",
-      "previousHash": "E286600C0B91A7BC63949F3081FC2E5D5423ACD760DB61E8BC7D4E6697C48CEC",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "C31343A0A0644ACA65DD9C4EA4BA707EFC66AD2D84A92D918082A9800A6338D2",
+      "previousHash": "F14FF7F1BB7B4FE7376995508DC52BC0071A7796C68D3F5225FF2054A98B7B95",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "B576E194DC1D6DA0F8FB7D7FB20C9CCBA457C6382C0DE4CFBC6B464C777ADD88",
       "dividend": null,
       "identities": [],
       "joiners": [],
@@ -439,30 +492,55 @@
       "transactions": [
         {
           "version": 10,
-          "blockstamp": "7-AD83DE9C185699AF79BB14F83AC470407D35171129040E8B34C25893675225C0",
+          "blockstamp": "8-F14FF7F1BB7B4FE7376995508DC52BC0071A7796C68D3F5225FF2054A98B7B95",
           "locktime": 0,
           "issuers": [
-            "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV"
+            "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd"
           ],
           "inputs": [
-            "121:0:T:68BC9CC4D6B5238671492D4863D174346B60389B3EB721F61116CDFDD38C6BFF:0"
+            "301:0:T:F603AD88714A83A0B3C68BA14E311C55CD81F609C033B18501BAE1C8A21CB174:1"
           ],
           "unlocks": [
-            "0:XHX(1872767826647264)"
+            "0:SIG(0)"
           ],
           "outputs": [
-            "121:0:SIG(DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo)"
+            "300:0:SIG(7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2)",
+            "1:0:SIG(HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd)"
+          ],
+          "comments": [],
+          "signatures": [
+            "pHEcJB+FBgDEWKxTeI0LTpdMUm4YaFZc9VLkpjXnf/xYgEq0joks2cT7LU6Nr3PMpqR58IfTLiIuWeni5XtLBQ=="
+          ],
+          "comment": "",
+          "currency": "duniter_unit_test_currency",
+          "block_number": 9,
+          "time": 1480000300
+        },
+        {
+          "version": 10,
+          "blockstamp": "8-F14FF7F1BB7B4FE7376995508DC52BC0071A7796C68D3F5225FF2054A98B7B95",
+          "locktime": 0,
+          "issuers": [
+            "2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc"
           ],
-          "comments": [
-            "okk"
+          "inputs": [
+            "9995:0:D:2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc:2"
+          ],
+          "unlocks": [
+            "0:SIG(0)"
           ],
+          "outputs": [
+            "700:0:SIG(7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2)",
+            "9295:0:SIG(2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc)"
+          ],
+          "comments": [],
           "signatures": [
-            "GGhRugcizcjQ7yAv9LJGTkpJ6dr24gitAaV3nsOTIRONuBlzkwj6lff0BbjwdYc4QRlymL6Xnfc5M6G1XeDcCQ=="
+            "wSrAUv8DGtaT7kHnmklxAXDHaKc1SGj3ICYqpyppOte2ihs4NCwN4YjNLJhfFr6qEHUcfLKQf3iwlTvAcMzVBg=="
           ],
-          "comment": "okk",
+          "comment": "",
           "currency": "duniter_unit_test_currency",
           "block_number": 9,
-          "time": 1483977879
+          "time": 1480000300
         }
       ]
     },
@@ -471,24 +549,24 @@
       "nonce": 100000000001,
       "number": 10,
       "powMin": 1,
-      "time": 1483977879,
-      "medianTime": 1483977879,
+      "time": 1480000900,
+      "medianTime": 1480000900,
       "membersCount": 2,
-      "monetaryMass": 482,
-      "unitbase": 0,
+      "monetaryMass": 59590,
+      "unitbase": 1,
       "issuersCount": 1,
       "issuersFrame": 6,
       "issuersFrameVar": 0,
-      "len": 7,
+      "len": 0,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "lqdeL3H5fqxg/oPbmeIcw2FDlfYLzaPEwwkymlj+W1HSR5ZjS8V5UGKgJS+b16AE614FT4wBxf7bgbMPi/4MCw==",
-      "hash": "37D397CCC32887BBD63BE1CCB0D3CB234E02F3B21792D59D9239D2F0C883C6A6",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "Ffa54r3CpYrd8/k4oiFSXIfxA/xaM5+X8MGZdqoqqt6emrObGlOgsnh5OUWWlpCdgA/b6ZFmRb2aG6DVciFBAQ==",
+      "hash": "144275EBEB7CB3E5B508F80CED5D75ACEA0A65B933E381B5DB0A31FAF0E6EA29",
       "parameters": "",
-      "previousHash": "EE55B59D8415A02D4B6062627A5654A5BEDAA2DDF9F75A309BB0E9B96A9A9E64",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "6AE9E4A7309704A00485C347CCC844197477107D295B120333185F609A2FE8B3",
-      "dividend": null,
+      "previousHash": "78CB9C74839CAC5726032810DCD2BE30B7C1B664CF8111C733D2FE548CCE38A2",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "DFACF4DE1A328D54055E034CA812B77F8F0FBCCC3E3D1B547F575EC5A103C469",
+      "dividend": 1980,
       "identities": [],
       "joiners": [],
       "actives": [],
@@ -496,57 +574,31 @@
       "revoked": [],
       "excluded": [],
       "certifications": [],
-      "transactions": [
-        {
-          "version": 10,
-          "blockstamp": "9-EE55B59D8415A02D4B6062627A5654A5BEDAA2DDF9F75A309BB0E9B96A9A9E64",
-          "locktime": 0,
-          "issuers": [
-            "DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo"
-          ],
-          "inputs": [
-            "121:0:T:46DCF9F9448E86434D41477881BFEE5C154B121DE9F6524D304C1F2D261CC066:0"
-          ],
-          "unlocks": [
-            "0:SIG(0)"
-          ],
-          "outputs": [
-            "121:0:SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV)"
-          ],
-          "comments": [],
-          "signatures": [
-            "IODsd3oOGj90zZNvrEJ9CSJnnDGQFcOY+UuP4xWK/Iad54V8EP2EzgCnPuydaKJrzem/LqlZDdIXfRLXAnYSDQ=="
-          ],
-          "comment": "",
-          "currency": "duniter_unit_test_currency",
-          "block_number": 10,
-          "time": 1483977879
-        }
-      ]
+      "transactions": []
     },
     {
       "version": 10,
       "nonce": 100000000001,
       "number": 11,
       "powMin": 1,
-      "time": 1483977879,
-      "medianTime": 1483977879,
+      "time": 1480001800,
+      "medianTime": 1480000900,
       "membersCount": 2,
-      "monetaryMass": 482,
-      "unitbase": 0,
+      "monetaryMass": 157610,
+      "unitbase": 1,
       "issuersCount": 1,
       "issuersFrame": 6,
       "issuersFrameVar": 0,
-      "len": 8,
+      "len": 0,
       "currency": "duniter_unit_test_currency",
-      "issuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "signature": "eyFqgSzUsCECDvkgkh/DX7qVJ3OKq86mR4vgQrXeHuynlLJ+HXMSnX5wnQ2csbOgpYUHzWlRTwm0mGQInKoCBQ==",
-      "hash": "CF191C963B47F2BA6CD11ECEF2374C0052E863D73B5E59487A7C3E46EE19B6BE",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "A2w0d4LZtqdSOMGA5zIf4+cG/WPu1Yf0RGM4UFA4UdSNH9oXcAOOEjVK8tcNdDR85QXzUCMGGhYONnMKHCioCQ==",
+      "hash": "A46C0ED42C43C43769E98FB15C6CB050B2DDACBDB8A977010B4F77BF9D6BBD0D",
       "parameters": "",
-      "previousHash": "37D397CCC32887BBD63BE1CCB0D3CB234E02F3B21792D59D9239D2F0C883C6A6",
-      "previousIssuer": "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV",
-      "inner_hash": "26126CD22C10AA988D1B04D1D826BFAF31B05CC209CEB97C01D33B0EE436635A",
-      "dividend": null,
+      "previousHash": "144275EBEB7CB3E5B508F80CED5D75ACEA0A65B933E381B5DB0A31FAF0E6EA29",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "39B02A6B0B205144BA85F19FD50A5B1C272BFEFA97A31574412EED538BA7B668",
+      "dividend": 4901,
       "identities": [],
       "joiners": [],
       "actives": [],
@@ -554,35 +606,39 @@
       "revoked": [],
       "excluded": [],
       "certifications": [],
-      "transactions": [
-        {
-          "version": 10,
-          "blockstamp": "10-37D397CCC32887BBD63BE1CCB0D3CB234E02F3B21792D59D9239D2F0C883C6A6",
-          "locktime": 0,
-          "issuers": [
-            "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV"
-          ],
-          "inputs": [
-            "121:0:T:E9BCB40174140FF58037FC2D9AD51E0D3EA67FB19FE4D6D50E39BF5442E73103:0"
-          ],
-          "unlocks": [
-            "0:SIG(0)"
-          ],
-          "outputs": [
-            "121:0:(XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB) && SIG(DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo)) || (SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV) && SIG(DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo))"
-          ],
-          "comments": [
-            "cross1"
-          ],
-          "signatures": [
-            "Ify7JqbxdqBO/CwVVlFAgVXphUL9W8imqKmdy4rBUx7T/WgLuowuhGe+8UlO0cEFZlZhXA2eGw19XSzErNawBg=="
-          ],
-          "comment": "cross1",
-          "currency": "duniter_unit_test_currency",
-          "block_number": 11,
-          "time": 1483977879
-        }
-      ]
+      "transactions": []
+    },
+    {
+      "version": 10,
+      "nonce": 100000000001,
+      "number": 12,
+      "powMin": 1,
+      "time": 1480003600,
+      "medianTime": 1480001800,
+      "membersCount": 2,
+      "monetaryMass": 410210,
+      "unitbase": 2,
+      "issuersCount": 1,
+      "issuersFrame": 6,
+      "issuersFrameVar": 0,
+      "len": 0,
+      "currency": "duniter_unit_test_currency",
+      "issuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "signature": "+7cEZBqjuIsu/Ape+2xNBw+5jqVmHPbOo21sP0MjlJG1SQJrYA1fIDX4BuwcUzS31R3qyC0nnvwQ2zRkZITBAQ==",
+      "hash": "0D58377496261505F48CF46359138349811D8F104DB6E8DAAE1BE3A845DD3620",
+      "parameters": "",
+      "previousHash": "A46C0ED42C43C43769E98FB15C6CB050B2DDACBDB8A977010B4F77BF9D6BBD0D",
+      "previousIssuer": "HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd",
+      "inner_hash": "D5578A9FE63386D894599D8E6B814F05CA918D4C5C0882F32613E46C065C1DAA",
+      "dividend": 1263,
+      "identities": [],
+      "joiners": [],
+      "actives": [],
+      "leavers": [],
+      "revoked": [],
+      "excluded": [],
+      "certifications": [],
+      "transactions": []
     }
   ]
 }
diff --git a/test/eslint.js b/test/eslint.js
index bcb4e0fbf533f0402594467c4669a060afaa5bf4..24994f994e1cf09c918a364227605f4855d01650 100644
--- a/test/eslint.js
+++ b/test/eslint.js
@@ -1,16 +1,16 @@
-var lint = require('mocha-eslint');
+const lint = require('mocha-eslint');
 
 // Array of paths to lint
 // Note: a separate Mocha test will be run for each path and each file which
 // matches a glob pattern
-var paths = [
+const paths = [
   'app',
   'bin/duniter',
   'test'
 ];
 
 // Specify style of output
-var options = {};
+const options = {};
 options.formatter = 'stylish';
 
 // Run the tests
diff --git a/test/fast/block_pulling.js b/test/fast/block_pulling.js
deleted file mode 100644
index ae497c9d450d00eb2423a9d68c19a3f9ed8e9bd7..0000000000000000000000000000000000000000
--- a/test/fast/block_pulling.js
+++ /dev/null
@@ -1,257 +0,0 @@
-"use strict";
-var should = require('should');
-var _ = require('underscore');
-var co = require('co');
-var Q = require('q');
-var pulling = require('../../app/lib/pulling');
-var constants = require("../../app/lib/constants.js");
-
-let commonConf = {
-  avgGenTime: constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES * 60,
-  forksize: 100
-};
-
-describe('Pulling blocks', () => {
-
-  it('from genesis with good sidechain should work', pullinTest({
-    blockchain: [
-      newBlock(0, 'A')
-    ],
-    sidechains: [
-      [
-        newBlock(0, 'A'),
-        newBlock(1, 'A')  // <-- 1) checks this block: is good, we add it
-      ]
-    ],
-    expectHash: 'A1'
-  }));
-
-  it('from genesis with fork sidechain should not work', pullinTest({
-    blockchain: [
-      newBlock(0, 'A')
-    ],
-    sidechains: [
-      [
-        newBlock(0, 'B'), // <-- 2) oh no this not common with blockchain A, leave this blockchain B alone
-        newBlock(1, 'B')  // <-- 1) checks this block: ah, a fork! let's find common root ...
-      ]
-    ],
-    expectHash: 'A0'
-  }));
-
-  it('from genesis with multiple good sidechains should work', pullinTest({
-    blockchain: [
-      newBlock(0, 'A')
-    ],
-    sidechains: [
-      [
-        newBlock(0, 'A'),
-        newBlock(1, 'A'), // <-- 1) checks this block: is good, we add it
-        newBlock(2, 'A')  // <-- 2) checks this block: is good, we add it
-      ],
-      [
-        newBlock(0, 'A'),
-        newBlock(1, 'A')  // <-- 3) you are a bit late ... we are on A2 yet!
-      ],
-      [
-        newBlock(0, 'A'),
-        newBlock(1, 'A'),
-        newBlock(2, 'A'),
-        newBlock(3, 'A')  // <-- 4) checks this block: is good, we add it
-      ],
-      [
-        newBlock(0, 'A'),
-        newBlock(1, 'A')  // <-- 5 really too late
-      ]
-    ],
-    expectHash: 'A3'
-  }));
-
-  it('sync with a single fork', pullinTest({
-    blockchain: [
-      newBlock(0, 'A'),
-      newBlock(1, 'A'),
-      newBlock(2, 'A'),
-      newBlock(3, 'A')
-    ],
-    sidechains: [
-      [
-        newBlock(0, 'A'), // <-- 2) sees a common root, yet not *the* common root (A1 is not a fork block)
-        newBlock(1, 'A'), // <-- 4) yep this is the good one! sync from B2 to B5
-        newBlock(2, 'B'), // <-- 3) check the middle, not the common root
-        newBlock(3, 'B'),
-        newBlock(4, 'B'), // <-- 1) checks this block: a fork, let's find common root
-        newBlock(5, 'B')
-      ]
-    ],
-    expectHash: 'B5'
-  }));
-
-  it('sync with multiple forks', pullinTest({
-    blockchain: [
-      newBlock(0, 'A'),
-      newBlock(1, 'A'),
-      newBlock(2, 'A'),
-      newBlock(3, 'A')
-    ],
-    sidechains: [
-      [
-        newBlock(0, 'A'), // <-- 2) sees a common root, yet not *the* common root (A1 is not a fork block)
-        newBlock(1, 'A'), // <-- 4) yep this is the good one! sync from B2 to B5
-        newBlock(2, 'B'), // <-- 3) check the middle, not the common root
-        newBlock(3, 'B'),
-        newBlock(4, 'B'), // <-- 1) checks this block: a fork, let's find common root
-        newBlock(5, 'B')
-      ],
-      // This fork should not be followed because we switch only one time per pulling, and B5 is already OK
-      [
-        newBlock(0, 'A'),
-        newBlock(1, 'A'),
-        newBlock(2, 'B'),
-        newBlock(3, 'B'),
-        newBlock(4, 'B'),
-        newBlock(5, 'B'),
-        newBlock(6, 'B')
-      ]
-    ],
-    expectHash: 'B5'
-  }));
-
-  it('sync with inconsistant fork should skip it', pullinTest({
-    blockchain: [
-      newBlock(0, 'A'),
-      newBlock(1, 'A'),
-      newBlock(2, 'A'),
-      newBlock(3, 'A')
-    ],
-    sidechains: [
-      [
-        newBlock(0, 'A'), // <-- 2) sees a common root, yet not *the* common root (A1 is not a fork block)
-        qwaBlock(1, 'A'), // <-- 4) checks the middle: the block has changed and now displays C! this is inconsistent
-        newBlock(2, 'C'), // <-- 3) checks the middle (binary search): too high, go downwards
-        newBlock(3, 'C'),
-        newBlock(4, 'C'), // <-- 1) sees a fork, try to find common root
-        newBlock(5, 'C')
-      ]
-    ],
-    expectHash: 'A3'
-  }));
-});
-
-function newBlock(number, branch, rootBranch, quantum) {
-  let previousNumber = number - 1;
-  let previousBranch = rootBranch || branch;
-  let previousHash = previousNumber >= 0 ? previousBranch + previousNumber : '';
-  return {
-    number: number,
-    medianTime: number * constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES * 60,
-    hash: branch + number,
-    previousHash: previousHash,
-    // this is not a real field, just here for the sake of demonstration: a quantum block changes itself
-    // when we consult it, making the chain inconsistent
-    quantum: quantum
-  };
-}
-
-function qwaBlock(number, branch, rootBranch) {
-  return newBlock(number, branch, rootBranch, true);
-}
-
-function pullinTest(testConfiguration) {
-  return () => co(function *() {
-
-    // The blockchains we are testing against
-    let blockchain = testConfiguration.blockchain;
-    let sidechains = testConfiguration.sidechains;
-
-    // The data access object simulating network access
-    let dao = mockDao(blockchain, sidechains);
-
-    // The very last block of a blockchain should have the good number
-    (yield dao.localCurrent()).should.have.property('number').equal(blockchain[blockchain.length - 1].number);
-
-    // And after we make a pulling...
-    yield pulling.pull(commonConf, dao);
-
-    // We test the new local blockchain current block (it should have changed in case of successful pull)
-    let localCurrent = yield dao.localCurrent();
-    if (testConfiguration.expectHash !== undefined && testConfiguration.expectHash !== null) {
-      localCurrent.should.have.property('hash').equal(testConfiguration.expectHash);
-    }
-    if (testConfiguration.expectFunc !== undefined && testConfiguration.expectFunc !== null) {
-      testConfiguration.expectFunc(dao);
-    }
-  });
-}
-
-/**
- * Network mocker
- * @param blockchain
- * @param sideChains
- * @returns {{localCurrent: (function(): (*|Q.Promise<*>|Q.Promise<T>)), remoteCurrent: (function(): (*|Q.Promise<*>|Q.Promise<T>)), remotePeers: (function(): (*|Q.Promise<*>|Q.Promise<T>)), getRemoteBlock: (function(): (*|Q.Promise<*|null>|Q.Promise<T>)), applyMainBranch: (function(): (*|Q.Promise<Number|*|_Chain<*>>|Q.Promise<T>)), removeForks: (function(): (*|Q.Promise<T>)), isMemberPeer: (function(): (*|Q.Promise<boolean>|Q.Promise<T>)), findCommonRoot: (function(): (*|Promise)), downloadBlocks: (function(): (*|Q.Promise<Buffer|ArrayBuffer|Array.<any>|string|*|_Chain<any>>|Q.Promise<T>)), applyBranch: (function())}}
- */
-function mockDao(blockchain, sideChains) {
-  const dao = pulling.abstractDao({
-
-    // Get the local blockchain current block
-    localCurrent: () => co(function*() {
-      return blockchain[blockchain.length - 1]
-    }),
-
-    // Get the remote blockchain (bc) current block
-    remoteCurrent: (bc) => co(function*() {
-      return bc[bc.length - 1]
-    }),
-
-    // Get the remote peers to be pulled
-    remotePeers: () => Q(sideChains.map((sc, index) => {
-      sc.pubkey = 'PUBK' + index;
-      return sc;
-    })),
-
-    // Get block of given peer with given block number
-    getLocalBlock: (number) => co(function*() {
-      return blockchain[number] || null
-    }),
-
-    // Get block of given peer with given block number
-    getRemoteBlock: (bc, number) => co(function *() {
-      let block = bc[number] || null;
-      // Quantum block implementation
-      if (block && block.quantum) {
-        bc[number] = _.clone(block);
-        bc[number].hash = 'Q' + block.hash;
-      }
-      return block;
-    }),
-
-    // Simulate the adding of a single new block on local blockchain
-    applyMainBranch: (block) => co(function*() {
-      return blockchain.push(block)
-    }),
-
-    // Clean the eventual fork blocks already registered in DB (since real fork mechanism uses them, so we want
-    // every old fork block to be removed)
-    removeForks: () => co(function*() {}),
-
-    // Tells wether given peer is a member peer
-    isMemberPeer: (peer) => co(function*() {
-      return true;
-    }),
-
-    // Simulates the downloading of blocks from a peer
-    downloadBlocks: (bc, fromNumber, count) => co(function*() {
-      if (!count) {
-        const block = yield dao.getRemoteBlock(bc, fromNumber);
-        if (block) {
-          return [block];
-        }
-        else {
-          return [];
-        }
-      }
-      return bc.slice(fromNumber, fromNumber + count);
-    }),
-  });
-  return dao;
-}
diff --git a/test/fast/crypto/crypto.js b/test/fast/crypto/crypto.js
deleted file mode 100644
index e5cb180a819efcee1cf8f55995128e759f34e587..0000000000000000000000000000000000000000
--- a/test/fast/crypto/crypto.js
+++ /dev/null
@@ -1,52 +0,0 @@
-"use strict";
-var should = require('should');
-var co  = require('co');
-var nacl   = require('tweetnacl');
-var base58 = require('../../../app/lib/crypto/base58');
-var keyring      = require('../../../app/lib/crypto/keyring');
-
-var enc = nacl.util.encodeBase64,
-    dec = nacl.util.decodeBase64;
-
-var passphrase = "abc";
-var salt = "abc";
-var pub, sec, rawPub, rawSec;
-
-before(() => co(function*() {
-  // Generate the keypair
-  const keyPair = yield keyring.scryptKeyPair(salt, passphrase);
-  pub = base58.decode(keyPair.publicKey);
-  sec = base58.decode(keyPair.secretKey);
-  rawPub = base58.encode(pub);
-  rawSec = base58.encode(sec);
-}));
-
-describe('ed25519 tests:', function(){
-
-  //it('good signature from existing secret key should be verified', function(done){
-  //  var keys = nacl.sign.scryptKeyPair.fromSecretKey(dec("TM0Imyj/ltqdtsNG7BFOD1uKMZ81q6Yk2oz27U+4pvs9QBfD6EOJWpK3CqdNG368nJgszy7ElozAzVXxKvRmDA=="));
-  //  var msg = "cg==";
-  //  var goodSig = dec("52Hh9omo9rxklulAE7gvVeYvAq0GgXYoZE2NB/gzehpCYIT04bMcGIs5bhYLaH93oib34jsVMWs9Udadr1B+AQ==");
-  //  var sig = crypto.signSync(msg, keys.secretKey);
-  //  sig.should.equal(enc(goodSig));
-  //  crypto.verify(msg, sig, enc(keys.publicKey)).should.be.true;
-  //  done();
-  //});
-
-  it('good signature from generated key should be verified', function(done){
-    var msg = "Some message to be signed";
-    var sig = keyring.Key(rawPub, rawSec).signSync(msg);
-    var verified = keyring.verify(msg, sig, rawPub);
-    verified.should.be.true;
-    done();
-  });
-
-  it('wrong signature from generated key should NOT be verified', function(done){
-    var msg = "Some message to be signed";
-    var cor = dec(enc(msg) + 'delta');
-    var sig = keyring.Key(rawPub, rawSec).signSync(msg);
-    var verified = keyring.verify(cor, sig, rawPub);
-    verified.should.be.false;
-    done();
-  });
-});
diff --git a/test/fast/ddos-test.js b/test/fast/ddos-test.js
deleted file mode 100644
index a2c4f74f180372b020f553b7c66b881959c714b8..0000000000000000000000000000000000000000
--- a/test/fast/ddos-test.js
+++ /dev/null
@@ -1,39 +0,0 @@
-"use strict";
-const should = require('should');
-const co = require('co');
-const limiter = require('../../app/lib/system/limiter');
-const toolbox = require('../integration/tools/toolbox');
-const user    = require('../integration/tools/user');
-const bma     = require('../../app/lib/streams/bma');
-
-limiter.noLimit();
-
-const s1 = toolbox.server({
-  pair: {
-    pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-    sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-  }
-});
-
-describe('DDOS', () => {
-
-  before(() => co(function*() {
-    limiter.noLimit();
-    yield s1.initWithDAL().then(bma).then((bmapi) => {
-      s1.bma = bmapi;
-      bmapi.openConnections();
-    });
-  }));
-
-  it('should not be able to send more than 10 reqs/s', () => co(function*() {
-    try {
-      s1.bma.getDDOS().params.limit = 3;
-      s1.bma.getDDOS().params.burst = 3;
-      s1.bma.getDDOS().params.whitelist = [];
-      yield Array.from({ length: 4 }).map(() => s1.get('/blockchain/current'));
-      throw 'Wrong error thrown';
-    } catch (e) {
-      e.should.have.property('statusCode').equal(429);
-    }
-  }));
-});
diff --git a/test/fast/javascript_test.js b/test/fast/javascript_test.js
deleted file mode 100644
index c0063cbdccb723fb20e7e83d405303d014403283..0000000000000000000000000000000000000000
--- a/test/fast/javascript_test.js
+++ /dev/null
@@ -1,37 +0,0 @@
-"use strict";
-var should = require('should');
-var co = require('co');
-
-describe('JavaScript', () => {
-
-    describe('for loops', () => {
-
-        const array1 = [1, 2, 3];
-        array1.abc = 2;
-
-        it('for (.. in ..)', () => {
-            array1.abc = 2;
-            let sum = 0;
-            for (const i in array1) {
-                sum += array1[i];
-            }
-            sum.should.equal(8); // <-- Yes, it does not equal 6! Because `for .. in` is not `hasOwnProperty` checked.
-        });
-
-        it('for (.. of ..)', () => {
-            let sum = 0;
-            for (const value of array1) {
-                sum += value;
-            }
-            sum.should.equal(6);
-        });
-
-        it('with promises', () => co(function*() {
-            let sum = 0;
-            for (const value of array1) {
-                sum += yield Promise.resolve(value);
-            }
-            sum.should.equal(6);
-        }));
-    });
-});
diff --git a/test/fast/limiter-test.js b/test/fast/limiter-test.js
deleted file mode 100644
index efc1ca239db1b5ec8c764f9901a2052c691e7478..0000000000000000000000000000000000000000
--- a/test/fast/limiter-test.js
+++ /dev/null
@@ -1,60 +0,0 @@
-"use strict";
-const should = require('should');
-const co = require('co');
-const limiter = require('../../app/lib/system/limiter');
-const toolbox = require('../integration/tools/toolbox');
-const user    = require('../integration/tools/user');
-const bma     = require('../../app/lib/streams/bma');
-
-limiter.noLimit();
-
-const s1 = toolbox.server({
-  pair: {
-    pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-    sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-  }
-});
-
-const cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 });
-
-let theLimiter;
-
-describe('Limiter', () => {
-
-  before(() => {
-    limiter.withLimit();
-    theLimiter = limiter.limitAsTest();
-  });
-
-  it('should not be able to send more than 10 reqs/s', () => {
-    theLimiter.canAnswerNow().should.equal(true);
-    for (let i = 1; i <= 4; i++) {
-        theLimiter.processRequest();
-    }
-    theLimiter.canAnswerNow().should.equal(true);
-    theLimiter.processRequest(); // 5 in 1sec
-    theLimiter.canAnswerNow().should.equal(false);
-  });
-
-  it('should be able to send 1 more request (by minute constraint)', () => co(function*(){
-    yield new Promise((resolve) => setTimeout(resolve, 1000));
-    theLimiter.canAnswerNow().should.equal(true);
-    theLimiter.processRequest(); // 1 in 1sec, 6 in 1min
-    theLimiter.canAnswerNow().should.equal(false);
-  }));
-
-  it('should work with BMA API', () => co(function*(){
-    yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
-    yield cat.createIdentity();
-    try {
-      for (let i = 0; i < 11; i++) {
-        yield s1.get('/wot/lookup/cat');
-      }
-      throw 'Should have thrown a limiter error';
-    } catch (e) {
-      e.should.have.property('error').property('ucode').equal(1006);
-    }
-  }));
-
-  after(() => limiter.noLimit());
-});
diff --git a/test/fast/peering.js b/test/fast/peering.js
index e6ec6d75bb9fb17ed311e4eed22257fe07feee72..2bfb93ea8fe79db03eb0bec71b18a8f229ad6795 100644
--- a/test/fast/peering.js
+++ b/test/fast/peering.js
@@ -10,7 +10,7 @@ var rawPeer = "" +
   "PublicKey: 3Z7w5g4gC9oxwEbATnmK2UFgGWhLZPmZQb5dRxvNrXDu\n" +
   "Block: 0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855\n" +
   "Endpoints:\n" +
-  "BASIC_MERKLED_API ucoin.twiced.fr 88.163.127.43 9101\n" +
+  "BASIC_MERKLED_API duniter.twiced.fr 88.163.127.43 9101\n" +
   "OTHER_PROTOCOL 88.163.127.43 9102\n" +
   "bvuKzc6+cGWMGC8FIkZHN8kdQhaRL/MK60KYyw5vJqkKEgxXbygQHAzfoojeSY4gPKIu4FggBkR1HndSEm2FAQ==\n";
 
@@ -44,7 +44,7 @@ describe('Peer', function(){
     });
 
     it('should have DNS', function(){
-      assert.equal(pr.getDns(), 'ucoin.twiced.fr');
+      assert.equal(pr.getDns(), 'duniter.twiced.fr');
     });
 
     it('should have IPv4', function(){
diff --git a/test/fast/pow/pow-engine.js b/test/fast/pow/pow-engine.js
deleted file mode 100644
index cf96ee93b8c0238649176e044f4cdabddfa9d5b9..0000000000000000000000000000000000000000
--- a/test/fast/pow/pow-engine.js
+++ /dev/null
@@ -1,119 +0,0 @@
-"use strict";
-
-const co = require('co');
-const should = require('should');
-const engine = require('../../../app/lib/pow/engine');
-
-describe('PoW Engine', () => {
-
-  it('should start with status "ready", then "idle"', () => co(function*(){
-    const e1 = engine();
-    (yield e1.status()).should.equal('ready');
-    (yield e1.status()).should.equal('ready');
-    yield e1.setValue('autokillTimeout', 10);
-  }));
-
-  it('should automatically close itself if no signal is sent', () => co(function*(){
-    const e1 = engine();
-    (yield e1.status()).should.equal('ready');
-    (yield e1.status()).should.equal('ready');
-    yield e1.setValue('autokillTimeout', 50);
-    (yield e1.status()).should.equal('ready');
-    (yield e1.status()).should.equal('ready');
-    yield new Promise((res) => setTimeout(res, 100));
-    e1.isConnected().should.equal(false);
-  }));
-
-  it('should NOT automatically close itself too early', () => co(function*(){
-    const e1 = engine();
-    e1.isConnected().should.equal(false);
-    (yield e1.status()).should.equal('ready');
-    e1.isConnected().should.equal(true);
-    (yield e1.status()).should.equal('ready');
-    yield e1.setValue('autokillTimeout', 200);
-    yield new Promise((res) => setTimeout(res, 100));
-    e1.isConnected().should.equal(true);
-    yield new Promise((res) => setTimeout(res, 50));
-    e1.isConnected().should.equal(true);
-    yield new Promise((res) => setTimeout(res, 30));
-    e1.isConnected().should.equal(true);
-    yield new Promise((res) => setTimeout(res, 30));
-    e1.isConnected().should.equal(false);
-  }));
-
-  it('should be identifiable', () => co(function*(){
-    const e1 = engine();
-    e1.isConnected().should.equal(false);
-    (yield e1.status()).should.equal('ready');
-    e1.isConnected().should.equal(true);
-    (yield e1.setValue('identify', { pubkey: 'pub1', identifier: 'id1' })).should.equal('OK');
-    (yield e1.getValue('pubkey')).should.equal('pub1');
-    (yield e1.getValue('id')).should.equal('id1');
-    yield new Promise((res) => setTimeout(res, 10));
-  }));
-
-  it('should be configurable', () => co(function*(){
-    const e1 = engine();
-    e1.isConnected().should.equal(false);
-    (yield e1.status()).should.equal('ready');
-    e1.isConnected().should.equal(true);
-    (yield e1.setValue('conf', { cpu: 0.2, prefix: '34' })).should.deepEqual({ currentCPU: 0.2, prefix: 34000000000000 });
-    (yield e1.getValue('cpu')).should.equal(0.2);
-    (yield e1.getValue('prefix')).should.equal(34000000000000);
-  }));
-
-  it('should be able to make a proof', () => co(function*(){
-    const e1 = engine();
-    (yield e1.status()).should.equal('ready');
-    (yield e1.setValue('identify', { pubkey: 'pub1', identifier: 'id1' })).should.equal('OK');
-    const block = { number: 35 };
-    const nonceBeginning = 0;
-    const zeros = 2;
-    const highMark = 'A';
-    const pair = {
-      pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-      sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-    };
-    const forcedTime = 1;
-    const medianTimeBlocks = 20;
-    const avgGenTime = 5 * 60;
-    const proof = yield e1.prove(block, nonceBeginning, zeros, highMark, pair, forcedTime, medianTimeBlocks, avgGenTime);
-    proof.should.deepEqual({
-      pow: {
-        block: {
-          number: 35,
-          time: 1,
-          inner_hash: '785EEAC3FB737D67FA56B9C82F5F6E7D4DF87988FF1785AFD4A080272C0C9414',
-          nonce: 202,
-          hash: '00729594838C945EC291D00997FB8C4254B3949CFE8295A68C78A9FAABE90106',
-          signature: 'cTFtHlldwptnm6hjw+f0DzkUtYiCbhHAnxXsj1B7V6V8tDUcFNNlGAPqZvVXrZevwjkAGm0pgdUCrdFGB6+FCw=='
-        },
-        testsCount: 201,
-        pow: '00729594838C945EC291D00997FB8C4254B3949CFE8295A68C78A9FAABE90106',
-      }
-    });
-  }));
-
-  it('should be able to stop a proof', () => co(function*(){
-    const e1 = engine();
-    (yield e1.status()).should.equal('ready');
-    (yield e1.setValue('identify', { pubkey: 'pub1', identifier: 'id1' })).should.equal('OK');
-    const block = { number: 26 };
-    const nonceBeginning = 0;
-    const zeros = 10; // Requires hundreds of thousands of tries probably
-    const highMark = 'A';
-    const pair = {
-      pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-      sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-    };
-    const forcedTime = 1;
-    const medianTimeBlocks = 20;
-    const proofPromise = e1.prove(block, nonceBeginning, zeros, highMark, pair, forcedTime, medianTimeBlocks);
-    yield new Promise((res) => setTimeout(res, 100));
-    (yield e1.cancel()).should.equal('cancelling');
-    (yield e1.cancel()).should.equal('cancelling');
-    const proof = yield proofPromise;
-    (yield e1.cancel()).should.equal('ready');
-    should.not.exist(proof);
-  }));
-});
diff --git a/test/integration/branches.js b/test/integration/branches.js
index 6b5fa3c33181ae3034f0046526ec6c7dc9238316..a71747bd425ad8a23eccd5599faf87670ac779df 100644
--- a/test/integration/branches.js
+++ b/test/integration/branches.js
@@ -3,8 +3,8 @@
 const _         = require('underscore');
 const co        = require('co');
 const should    = require('should');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 
@@ -16,14 +16,13 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb1'
-}, _.extend({
+const s1 = duniter(
+  '/bb1',
+  MEMORY_MODE,
+  _.extend({
   port: '7778',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -44,7 +43,7 @@ describe("Branches", () => co(function*() {
     it('should have a 3 blocks fork window size', function() {
       return expectAnswer(rp('http://127.0.0.1:7778/node/summary', { json: true }), function(res) {
         res.should.have.property('duniter').property('software').equal('duniter');
-        res.should.have.property('duniter').property('version').equal('0.81.0');
+        res.should.have.property('duniter').property('version').equal('0.90.0');
         res.should.have.property('duniter').property('forkWindowSize').equal(3);
       });
     });
diff --git a/test/integration/branches2.js b/test/integration/branches2.js
index 85944e6602596c8a6563b52d7c659124283a4f71..e5265a9a265681991ffcc9878ccea278c3027be3 100644
--- a/test/integration/branches2.js
+++ b/test/integration/branches2.js
@@ -2,8 +2,8 @@
 
 const co        = require('co');
 const _         = require('underscore');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
@@ -18,21 +18,27 @@ if (constants.MUTE_LOGS_DURING_UNIT_TESTS) {
   require('../../app/lib/logger')().mute();
 }
 
+// Trace these errors
+process.on('unhandledRejection', (reason) => {
+  console.error('Unhandled rejection: ' + reason);
+  console.error(reason);
+});
+
 const MEMORY_MODE = true;
 const commonConf = {
   ipv4: '127.0.0.1',
   currency: 'bb',
   httpLogs: true,
   forksize: 10,
-  avgGenTime: constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES * 60,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
+  swichOnTimeAheadBy: 30,
+  avgGenTime: 30 * 60,
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb4'
-}, _.extend({
+const s1 = duniter(
+  '/bb4',
+  MEMORY_MODE,
+  _.extend({
   port: '7781',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -40,10 +46,10 @@ const s1 = ucoin({
   }
 }, commonConf));
 
-const s2 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb5'
-}, _.extend({
+const s2 = duniter(
+  '/bb5',
+  MEMORY_MODE,
+  _.extend({
   port: '7782',
   pair: {
     pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
@@ -98,7 +104,7 @@ describe("SelfFork", function() {
     yield s1.singleWritePromise(s2p);
 
     // Forking S1 from S2
-    return s1.pullBlocks(s2p.pubkey);
+    return require('duniter-crawler').duniter.methods.pullBlocks(s1, s2p.pubkey);
   }));
 
   describe("Server 1 /blockchain", function() {
diff --git a/test/integration/branches_pending_data.js b/test/integration/branches_pending_data.js
index 5eb7f35da4cb08801c18a44341917d258dd6f765..9cf29a9b09b27ff136b72841c8cb8be1f5780e6b 100644
--- a/test/integration/branches_pending_data.js
+++ b/test/integration/branches_pending_data.js
@@ -3,7 +3,7 @@
 const co = require('co');
 const _         = require('underscore');
 const duniter   = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -18,14 +18,13 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = duniter({
-  memory: MEMORY_MODE,
-  name: 'bb6'
-}, _.extend({
+const s1 = duniter(
+  '/bb6',
+  MEMORY_MODE,
+  _.extend({
   port: '7783',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/branches_revert.js b/test/integration/branches_revert.js
index 066b05ad5226add5711016dd69dbf9d37d8c9a54..ed00d5e766e5310c5a14553cb226b235e4c79f5f 100644
--- a/test/integration/branches_revert.js
+++ b/test/integration/branches_revert.js
@@ -2,7 +2,7 @@
 
 const co = require('co');
 const _         = require('underscore');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const toolbox   = require('./tools/toolbox');
 const commit    = require('./tools/commit');
@@ -12,7 +12,6 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
@@ -21,7 +20,7 @@ const s1 = toolbox.server(_.extend({
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 1, ud0: 120
 }, commonConf));
 
diff --git a/test/integration/branches_revert2.js b/test/integration/branches_revert2.js
index daf5dbc7f51901aa118e5eef41994f2e4048604a..8dddeff3e19f1e6c5658945b87ae699eb189a69f 100644
--- a/test/integration/branches_revert2.js
+++ b/test/integration/branches_revert2.js
@@ -2,8 +2,8 @@
 
 const co = require('co');
 const _         = require('underscore');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -19,20 +19,19 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '7712',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 1, ud0: 120
 }, commonConf));
 
diff --git a/test/integration/branches_revert_memberships.js b/test/integration/branches_revert_memberships.js
index 35479468077a5d325d0b67825569f710ebc887c2..4b12f96d8cb87fd2eb42e6b3c9fa885d43e1693e 100644
--- a/test/integration/branches_revert_memberships.js
+++ b/test/integration/branches_revert_memberships.js
@@ -2,12 +2,10 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
 const toolbox   = require('./tools/toolbox');
-const limiter   = require('../../app/lib/system/limiter');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const s1 = toolbox.server({
   memory: true,
@@ -28,8 +26,6 @@ describe("Revert memberships", function() {
 
   before(() => co(function*() {
 
-    limiter.noLimit();
-
     yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
 
     yield i1.createIdentity();
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
index c16b1ec12a6f00e3190c1b99c1ca6226c85bd014..b2463e2ec3733801580956c088182931efc6381c 100644
--- a/test/integration/branches_switch.js
+++ b/test/integration/branches_switch.js
@@ -2,8 +2,8 @@
 
 const co = require('co');
 const _         = require('underscore');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -20,27 +20,28 @@ const commonConf = {
   httpLogs: true,
   forksize: 30,
   avgGenTime: 1,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
+  swichOnTimeAheadBy: 0,
   port: '7788',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 1, ud0: 120
 }, commonConf));
 
-const s2 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb12'
-}, _.extend({
+const s2 = duniter(
+  '/bb12',
+  MEMORY_MODE,
+  _.extend({
+  swichOnTimeAheadBy: 0,
   port: '7789',
   pair: {
     pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
@@ -81,14 +82,10 @@ describe("Switch", function() {
     // So we now have:
     // S1 01234
     // S2   `3456789
-    let oldVal = constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES = 0;
-
     yield s1.singleWritePromise(s2p);
 
     // Forking S1 from S2
-    yield s1.pullBlocks(s2p.pubkey);
-
-    constants.BRANCHES.SWITCH_ON_BRANCH_AHEAD_BY_X_MINUTES = oldVal;
+    yield require('duniter-crawler').duniter.methods.pullBlocks(s1, s2p.pubkey);
     // S1 should have switched to the other branch
   }));
 
diff --git a/test/integration/certification_chainability.js b/test/integration/certification_chainability.js
index 4fa896612010befedfa7a24d242b115ea0c85977..5731249fde98864ad1c17c4d77e75f4da0c67709 100644
--- a/test/integration/certification_chainability.js
+++ b/test/integration/certification_chainability.js
@@ -3,8 +3,8 @@
 const _         = require('underscore');
 const co        = require('co');
 const should    = require('should');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
@@ -20,14 +20,13 @@ const commonConf = {
   xpercent: 0.9,
   sigPeriod: 200, // every 200 seconds
   msValidity: 10000,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '9225',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/cli.js b/test/integration/cli.js
index 1c5ce1a1f478a1831522203a6822d8093b04e448..940cad33fc31f35aef74317f47349e2848b2a4d0 100644
--- a/test/integration/cli.js
+++ b/test/integration/cli.js
@@ -6,9 +6,9 @@ const co        = require('co');
 const should    = require('should');
 const _         = require('underscore');
 const toolbox   = require('./tools/toolbox');
-const cli       = require('../../app/cli');
+const duniter   = require('../../index');
 const merkleh   = require('../../app/lib/helpers/merkle');
-const hashf     = require('../../app/lib/ucp/hashf');
+const hashf     = require('duniter-common').hashf;
 const constants = require('../../app/lib/constants');
 const Merkle    = require('../../app/lib/entity/merkle');
 
@@ -161,9 +161,10 @@ describe("CLI", function() {
 function execute(args) {
   const finalArgs = [process.argv[0], __filename].concat(args).concat(['--mdb', DB_NAME]);
   return co(function*() {
-    const command = cli(finalArgs);
+
+    const stack = duniter.statics.autoStack();
     // Executes the command
-    return command.execute();
+    return stack.executeStack(finalArgs);
   });
 }
 
diff --git a/test/integration/collapse.js b/test/integration/collapse.js
index 79878d625a71ab4e63421e908469a623c3bd26b1..9b602e302b759bea4ef8982e4cf54736f6e98eef 100644
--- a/test/integration/collapse.js
+++ b/test/integration/collapse.js
@@ -2,8 +2,8 @@
 
 const co        = require('co');
 const _         = require('underscore');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
 const httpTest  = require('./tools/http');
@@ -15,20 +15,19 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '9340',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 100, ud0: 120, sigValidity: 1
 }, commonConf));
 
diff --git a/test/integration/continuous-proof.js b/test/integration/continuous-proof.js
index 5e0867208481483551dbc93565f33fad0252e0ed..34dcd21a9700b57eac63b9f9c4e2004b9834a41d 100644
--- a/test/integration/continuous-proof.js
+++ b/test/integration/continuous-proof.js
@@ -5,13 +5,18 @@ const should    = require('should');
 const user      = require('./tools/user');
 const toolbox   = require('./tools/toolbox');
 const constants = require('../../app/lib/constants');
-const keyring   = require('../../app/lib/crypto/keyring');
-const blockProver = require('../../app/lib/computation/blockProver');
+const keyring   = require('duniter-common').keyring;
+
+// Trace these errors
+process.on('unhandledRejection', (reason) => {
+  console.error('Unhandled rejection: ' + reason);
+  console.error(reason);
+});
 
 const s1 = toolbox.server({
+  cpu: 1,
   powDelay: 1000,
   powMin: 32,
-  participate: true, // TODO: to remove when startGeneration will be an explicit call
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
@@ -37,18 +42,17 @@ describe("Continuous proof-of-work", function() {
   }));
 
   it('should automatically stop waiting if nothing happens', () => co(function*() {
-    s1.permaProver.should.have.property('loops').equal(0);
-    const PREVIOUS_VALUE = constants.POW_SECURITY_RETRY_DELAY;
-    constants.POW_SECURITY_RETRY_DELAY = 10;
+    s1.conf.powSecurityRetryDelay = 10;
     let start = Date.now();
     s1.startBlockComputation();
+    s1.permaProver.should.have.property('loops').equal(0);
     yield s1.until('block', 1);
     s1.permaProver.should.have.property('loops').equal(1);
     (start - Date.now()).should.be.belowOrEqual(1000);
     yield s1.stopBlockComputation();
     yield new Promise((resolve) => setTimeout(resolve, 100));
     s1.permaProver.should.have.property('loops').equal(2);
-    constants.POW_SECURITY_RETRY_DELAY = PREVIOUS_VALUE;
+    s1.conf.powSecurityRetryDelay = 10 * 60 * 1000;
     yield s1.revert();
     s1.permaProver.loops = 0;
   }));
@@ -93,23 +97,23 @@ describe("Continuous proof-of-work", function() {
 
   it('testing a network', () => co(function*() {
     const res = yield toolbox.simpleNetworkOf2NodesAnd2Users({
-      participate: true,
       powMin: 16
     }), s2 = res.s1, s3 = res.s2;
     yield s2.commit();
     s2.conf.cpu = 0.5;
     s3.conf.cpu = 0.5;
-    s2.startBlockComputation();
-    s3.startBlockComputation();
     yield [
       s2.until('block', 10),
-      s3.until('block', 10)
+      s3.until('block', 10),
+      co(function*() {
+        s2.startBlockComputation();
+        s3.startBlockComputation();
+      })
     ];
   }));
 
   it('testing proof-of-work during a block pulling', () => co(function*() {
     const res = yield toolbox.simpleNetworkOf2NodesAnd2Users({
-      participate: true,
       powMin: 0
     }), s2 = res.s1, s3 = res.s2;
     yield s2.commit();
@@ -118,7 +122,7 @@ describe("Continous proof-of-work", function() {
     yield s2.until('block', 15);
     s2.stopBlockComputation();
     yield [
-      s3.PeeringService.pullBlocks(),
+      require('duniter-crawler').duniter.methods.pullBlocks(s3),
       s3.startBlockComputation()
     ];
     yield s3.expectJSON('/blockchain/current', { number: 15 });
diff --git a/test/integration/crosschain-test.js b/test/integration/crosschain-test.js
index d4df3e43d7d84d2a1b96f3eab51dc81723f56fde..2f08617818e8f8eaf646e006f92addb0a9d12441 100644
--- a/test/integration/crosschain-test.js
+++ b/test/integration/crosschain-test.js
@@ -5,7 +5,7 @@ const _ = require('underscore');
 const assert = require('assert');
 const should = require('should');
 const rp        = require('request-promise');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const commit    = require('./tools/commit');
 const toolbox = require('./tools/toolbox');
 const user   = require('./tools/user');
@@ -21,7 +21,6 @@ describe("Crosschain transactions", function() {
     httpLogs: true,
     forksize: 3,
     dt: 1, ud0: 120, rootoffset: 10,
-    parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
     sigQty: 1
   };
 
diff --git a/test/integration/forwarding.js b/test/integration/forwarding.js
index b6ada196caa15f2270d97b98486eb578dcfa9b96..cdbf5c7d84bbce4f2ced553461061881e55e1ee4 100644
--- a/test/integration/forwarding.js
+++ b/test/integration/forwarding.js
@@ -7,20 +7,18 @@ const co     = require('co');
 const node   = require('./tools/node');
 const user   = require('./tools/user');
 const jspckg = require('../../package');
-const limiter = require('../../app/lib/system/limiter');
-
-limiter.noLimit();
 
 const MEMORY_MODE = true;
+require('duniter-bma').duniter.methods.noLimit(); // Disables the HTTP limiter
 
 describe("Forwarding", function() {
 
   describe("Nodes", function() {
 
-    const common = { currency: 'bb', ipv4: '127.0.0.1', remoteipv4: '127.0.0.1', upnp: false, participate: false, rootoffset: 0, sigQty: 1 };
+    const common = { currency: 'bb', ipv4: '127.0.0.1', remoteipv4: '127.0.0.1', rootoffset: 0, sigQty: 1 };
 
-    const node1 = node({ name: 'db_1', memory: MEMORY_MODE }, _({ httplogs: false, port: 9600, remoteport: 9600, salt: 'abc', passwd: 'abc', routing: true }).extend(common));
-    const node2 = node({ name: 'db_2', memory: MEMORY_MODE }, _({ httplogs: false, port: 9601, remoteport: 9601, salt: 'abc', passwd: 'def', routing: true }).extend(common));
+    const node1 = node({ name: 'db_1', memory: MEMORY_MODE }, _({ httplogs: false, port: 9600, remoteport: 9600, pair: { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'} }).extend(common));
+    const node2 = node({ name: 'db_2', memory: MEMORY_MODE }, _({ httplogs: false, port: 9601, remoteport: 9601, pair: { pub: 'G2CBgZBPLe6FSFUgpx2Jf1Aqsgta6iib3vmDRA1yLiqU', sec: '58LDg8QLmF5pv6Dn9h7X4yFKfMTdP8fdAiWVcyDoTRJu454fwRihCLULH4MW37zncsg4ruoTGJPZneWk22QmG1w4'} }).extend(common));
 
     const cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, node1);
     const tac = user('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, node1);
diff --git a/test/integration/http_api.js b/test/integration/http_api.js
index ec403a4a811e66280bd57d489fba039f4e2264e9..820593a1b4885de85f17f41fbd0ca0bf397f6710 100644
--- a/test/integration/http_api.js
+++ b/test/integration/http_api.js
@@ -4,22 +4,22 @@ const co        = require('co');
 const _         = require('underscore');
 const should    = require('should');
 const assert    = require('assert');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const http      = require('./tools/http');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
 const ws        = require('ws');
 
-const server = ucoin({
-  memory: true
-}, {
+const server = duniter(
+  '/bb11',
+  true,
+  {
   ipv4: '127.0.0.1',
   port: '7777',
   currency: 'bb',
   httpLogs: true,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1,
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -54,7 +54,7 @@ describe("HTTP API", function() {
 
   function makeBlockAndPost(theServer) {
     return function() {
-      return theServer.doMakeNextBlock()
+      return require('duniter-prover').duniter.methods.generateAndProveTheNext(theServer)
         .then(postBlock(theServer));
     };
   }
diff --git a/test/integration/identity-absorption.js b/test/integration/identity-absorption.js
index a6e0fc36b12f4bacd37282f82a6da0fa47f1c2a9..3150b9b703ca6dbca725915e77c4cd7a9140aca0 100644
--- a/test/integration/identity-absorption.js
+++ b/test/integration/identity-absorption.js
@@ -2,8 +2,8 @@
 
 const _         = require('underscore');
 const co        = require('co');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -18,14 +18,13 @@ const commonConf = {
   forksize: 3,
   xpercent: 0.9,
   msValidity: 10000,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb12'
-}, _.extend({
+const s1 = duniter(
+  '/bb12',
+  MEMORY_MODE,
+  _.extend({
   port: '4450',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -33,10 +32,10 @@ const s1 = ucoin({
   }
 }, commonConf));
 
-const s2 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb12'
-}, _.extend({
+const s2 = duniter(
+  '/bb12',
+  MEMORY_MODE,
+  _.extend({
   port: '4451',
   pair: {
     pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
diff --git a/test/integration/identity-clean-test.js b/test/integration/identity-clean-test.js
index dc108c43e434ac22ff6f66d9eae70dd346a6f29f..2ee0fc13607cffd016d48c9a51eba339fe756fc7 100644
--- a/test/integration/identity-clean-test.js
+++ b/test/integration/identity-clean-test.js
@@ -2,8 +2,8 @@
 
 const _         = require('underscore');
 const co        = require('co');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -19,14 +19,13 @@ const commonConf = {
   forksize: 3,
   xpercent: 0.9,
   msValidity: 10000,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb12'
-}, _.extend({
+const s1 = duniter(
+  '/bb12',
+  MEMORY_MODE,
+  _.extend({
   port: '7733',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/identity-expiry.js b/test/integration/identity-expiry.js
index 6a0a67191ce180ac42e2328796a6407d9432b57f..c43753cdde9af2c46181a768936c6a22e23fdfb9 100644
--- a/test/integration/identity-expiry.js
+++ b/test/integration/identity-expiry.js
@@ -4,7 +4,7 @@ const _         = require('underscore');
 const co        = require('co');
 const should    = require('should');
 const duniter   = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
@@ -23,14 +23,13 @@ const commonConf = {
   xpercent: 0.9,
   msValidity: 10000,
   idtyWindow: 1, // 1 second of duration
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = duniter({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '8560',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/identity-kicking.js b/test/integration/identity-kicking.js
index 0ebaec32ea11f5f4985c8520f589c5a8c40a7e3a..45f7f4b044d36f8080ed50283e3fda1293eb1963 100644
--- a/test/integration/identity-kicking.js
+++ b/test/integration/identity-kicking.js
@@ -4,7 +4,7 @@ const _         = require('underscore');
 const co        = require('co');
 const should    = require('should');
 const duniter   = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
@@ -22,14 +22,13 @@ const commonConf = {
   xpercent: 0.9,
   sigValidity: 1600, // 1600 second of duration
   msValidity: 3600, // 3600 second of duration
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = duniter({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '8561',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/identity-same-pubkey.js b/test/integration/identity-same-pubkey.js
index 2614ab0fdffa2e7d8e7efa660f7f64a1dfaa9090..f64e9fc778a27e010082ed1bee15971c6e6ecb0c 100644
--- a/test/integration/identity-same-pubkey.js
+++ b/test/integration/identity-same-pubkey.js
@@ -2,12 +2,10 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
-const until     = require('./tools/until');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const s1 = toolbox.server({
   pair: {
diff --git a/test/integration/identity-test.js b/test/integration/identity-test.js
index bb0a217a2375e3cb7bca51087345453b870d5c2d..72e8aa705e45197513b68017d1c46ca518d505ac 100644
--- a/test/integration/identity-test.js
+++ b/test/integration/identity-test.js
@@ -3,16 +3,13 @@
 const _         = require('underscore');
 const co        = require('co');
 const should    = require('should');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
-const limiter = require('../../app/lib/system/limiter');
-
-limiter.noLimit();
 
 const expectAnswer   = httpTest.expectAnswer;
 
@@ -24,14 +21,13 @@ const commonConf = {
   forksize: 3,
   xpercent: 0.9,
   msValidity: 10000,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '7799',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -345,7 +341,7 @@ describe("Identities collision", function() {
       res.should.have.property('pubkey').equal('DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV');
       res.should.have.property('uid').equal('tic');
       res.should.have.property('sigDate').be.a.Number;
-      res.should.have.property('memberships').length(1); // We no more conserve the memberships in sandbox
+      res.should.have.property('memberships').length(2);
       // Renew membership, not written
       res.memberships[0].should.have.property('version').equal(constants.DOCUMENTS_VERSION);
       res.memberships[0].should.have.property('currency').equal('bb');
diff --git a/test/integration/lookup.js b/test/integration/lookup.js
index 6dc0a0e3d7b0079d068cb2db7bb557c07bcee6aa..84fdf26c17f03c0063c8ea1858bc1a5141d82413 100644
--- a/test/integration/lookup.js
+++ b/test/integration/lookup.js
@@ -2,8 +2,8 @@
 
 const _         = require('underscore');
 const co        = require('co');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -14,10 +14,10 @@ const commonConf = {
   currency: 'bb'
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb12'
-}, _.extend({
+const s1 = duniter(
+  'bb12',
+  MEMORY_MODE,
+  _.extend({
   port: '4452',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/network.js b/test/integration/network.js
index 585511c99cc201f60266e4da640e2df83d15ce18..4e67e182c1d2aea80b2b71fffb7bc80277ff08bd 100644
--- a/test/integration/network.js
+++ b/test/integration/network.js
@@ -16,7 +16,6 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
@@ -24,13 +23,14 @@ const s1 = node({
   memory: MEMORY_MODE,
   name: 'bb33'
 }, _.extend({
+  ipv4: '127.0.0.1',
   port: '20501',
   remoteport: '20501',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 0, ud0: 120
 }, commonConf));
 
diff --git a/test/integration/peer-outdated.js b/test/integration/peer-outdated.js
index eb13c91c021f0438aa966d0c9e5f87c53207549d..d0897a161c9bdb1870aed1bda520e5d6077b4947 100644
--- a/test/integration/peer-outdated.js
+++ b/test/integration/peer-outdated.js
@@ -5,7 +5,7 @@ const Q         = require('q');
 const should    = require('should');
 const es        = require('event-stream');
 const _         = require('underscore');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
 const until     = require('./tools/until');
@@ -44,11 +44,7 @@ describe("Peer document expiry", function() {
       const bmaAPI = yield bma(server);
       yield bmaAPI.openConnections();
       server.bma = bmaAPI;
-      server
-        .pipe(server.router()) // The router asks for multicasting of documents
-        .pipe(multicaster())
-        .pipe(server.router());
-      return server.start();
+      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
     }), Q());
 
     // Server 1
diff --git a/test/integration/peerings.js b/test/integration/peerings.js
index b4d3047482d596948038234abe61bb2abb517906..b8aa83f37ca984ccf28c60a9a0b6a5fd92684810 100644
--- a/test/integration/peerings.js
+++ b/test/integration/peerings.js
@@ -4,22 +4,20 @@ const co        = require('co');
 const Q         = require('q');
 const _         = require('underscore');
 const should    = require('should');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const constants = require('../../app/lib/constants');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
 const sync      = require('./tools/sync');
-const contacter  = require('../../app/lib/contacter');
+const contacter  = require('duniter-crawler').duniter.methods.contacter;
 const until     = require('./tools/until');
 const multicaster = require('../../app/lib/streams/multicaster');
 const Peer = require('../../app/lib/entity/peer');
 
 const expectJSON     = httpTest.expectJSON;
-const expectAnswer   = httpTest.expectAnswer;
-const expectHttpCode = httpTest.expectHttpCode;
 
 const MEMORY_MODE = true;
 const commonConf = {
@@ -28,14 +26,13 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb_net1'
-}, _.extend({
+const s1 = duniter(
+  'bb_net1',
+  MEMORY_MODE,
+  _.extend({
   port: '7784',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -43,10 +40,10 @@ const s1 = ucoin({
   }
 }, commonConf));
 
-const s2 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb_net2'
-}, _.extend({
+const s2 = duniter(
+  'bb_net2',
+  MEMORY_MODE,
+  _.extend({
   port: '7785',
   pair: {
     pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
@@ -54,10 +51,10 @@ const s2 = ucoin({
   }
 }, commonConf));
 
-const s3 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb_net3'
-}, _.extend({
+const s3 = duniter(
+  'bb_net3',
+  MEMORY_MODE,
+  _.extend({
   port: '7786',
   pair: {
     pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
@@ -91,14 +88,8 @@ describe("Network", function() {
               return bmaAPI.openConnections()
                 .then(() => {
                   server.bma = bmaAPI;
-                  server
-                    .pipe(server.router()) // The router asks for multicasting of documents
-                    .pipe(multicaster())
-                    .pipe(server.router());
+                  require('../../app/modules/router').duniter.methods.routeToNetwork(server);
                 });
-            })
-            .then(function(){
-              return server.start();
             });
         });
     }, Q())
@@ -141,21 +132,22 @@ describe("Network", function() {
           yield sync(2, 2, s1, s2);
           yield s2.recomputeSelfPeer();
           yield s2.bma.openConnections();
+          yield new Promise((resolve) => setTimeout(resolve, 1000));
           yield [
             until(s2, 'block', 2),
             until(s3, 'block', 2),
             commitS1()
               .then(commitS1)
           ];
-          yield commitS3();
           yield [
             until(s1, 'block', 1),
-            until(s2, 'block', 1)
+            until(s2, 'block', 1),
+            commitS3()
           ];
-          yield commitS2();
           yield [
             until(s1, 'block', 1),
-            until(s3, 'block', 1)
+            until(s3, 'block', 1),
+            commitS2()
           ];
         });
       })
diff --git a/test/integration/peers-same-pubkey.js b/test/integration/peers-same-pubkey.js
index 6861ddba306e78f4a6dfddf0aaf5f354b237022f..1d9f0ad5482ac09e120def34182cb3838200125b 100644
--- a/test/integration/peers-same-pubkey.js
+++ b/test/integration/peers-same-pubkey.js
@@ -4,7 +4,7 @@ const co        = require('co');
 const Q         = require('q');
 const _         = require('underscore');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
 const sync      = require('./tools/sync');
@@ -40,11 +40,7 @@ describe("Peer document", function() {
       const bmaAPI = yield bma(server);
       yield bmaAPI.openConnections();
       server.bma = bmaAPI;
-      server
-        .pipe(server.router()) // The router asks for multicasting of documents
-        .pipe(multicaster())
-        .pipe(server.router());
-      return server.start();
+      require('../../app/modules/router').duniter.methods.routeToNetwork(server);
     }), Q());
 
     // Server 1
diff --git a/test/integration/proof-of-work.js b/test/integration/proof-of-work.js
index 472ddd4e18fb7112eb54a6d0540f375c78044445..5c092a195bd5e810e47fcba8119d7e1a9b7945c8 100644
--- a/test/integration/proof-of-work.js
+++ b/test/integration/proof-of-work.js
@@ -3,9 +3,10 @@
 const co        = require('co');
 const should    = require('should');
 const toolbox   = require('./tools/toolbox');
+const Block = require('../../app/lib/entity/block');
 const constants = require('../../app/lib/constants');
-const keyring   = require('../../app/lib/crypto/keyring');
-const blockProver = require('../../app/lib/computation/blockProver');
+const logger = require('../../app/lib/logger')();
+const blockProver = require('duniter-prover').duniter.methods.blockProver;
 
 /***
 conf.medianTimeBlocks
@@ -18,32 +19,26 @@ keyring from Key
 const intermediateProofs = [];
 
 const prover = blockProver({
-  push: (data) => intermediateProofs.push(data)
-});
-
-prover.setConfDAL({
-    cpu: 1.0 // 80%
+  push: (data) => intermediateProofs.push(data),
+  conf: {
+    cpu: 1.0, // use 100% of the CPU
+    pair: {
+      pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+      sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+    }
   },
-  null,
-  keyring.Key(
-  'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-  '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-));
+  logger,
+  lib: { constants, Block }
+});
 
 const now = 1474382274 * 1000;
 const MUST_START_WITH_A_ZERO = 16;
 const MUST_START_WITH_TWO_ZEROS = 32;
 
-const BACKUP_HANDICAP = constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP;
-
 constants.CORES_MAXIMUM_USE_IN_PARALLEL = 1; // For simple tests. Can be changed to test multiple cores.
 
 describe("Proof-of-work", function() {
 
-  before(() => {
-    constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP = 8;
-  });
-
   it('should be able to find an easy PoW', () => co(function*() {
     let block = yield prover.prove({
       issuer: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -55,20 +50,6 @@ describe("Proof-of-work", function() {
     intermediateProofs[intermediateProofs.length - 1].pow.should.have.property('hash').equal(block.hash);
   }));
 
-  it('should be possible to make the prover make us wait until we trigger it again', () => co(function*() {
-    let waitPromise = prover.waitForNewAsking();
-    return Promise.all([
-      waitPromise,
-      co(function*() {
-        yield new Promise((resolve) => setTimeout(resolve, 10));
-        yield prover.prove({
-          issuer: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-          number: 2
-        }, MUST_START_WITH_A_ZERO, now);
-      })
-    ]);
-  }));
-
   // Too randomly successing test
   // it('should be able to cancel a proof-of-work on other PoW receival', () => co(function*() {
   //   const now = 1474464489;
@@ -124,63 +105,61 @@ describe("Proof-of-work", function() {
   //   yield s2.expectJSON('/blockchain/current', { number: 3 });
   // }));
 
-  it('should be able to cancel a waiting on other PoW receival', () => co(function*() {
-    const now = 1474464481;
-    const res = yield toolbox.simpleNetworkOf2NodesAnd2Users({
-      participate: true,
-      percentRot: 1,
-      powMin: 35
-    }), s1 = res.s1, s2 = res.s2;
-    yield Promise.all([
-      s1.commit({ time: now }),
-      // We wait until both nodes received the new block
-      s1.until('block', 1),
-      s2.until('block', 1)
-    ]);
-    yield s1.expectJSON('/blockchain/current', { number: 0 });
-    yield s2.expectJSON('/blockchain/current', { number: 0 });
-    yield Promise.all([
-      s2.commit({ time: now }),
-      // We wait until both nodes received the new block
-      s1.until('block', 1),
-      s2.until('block', 1)
-    ]);
-    yield s1.expectJSON('/blockchain/current', { number: 1 });
-    yield s2.expectJSON('/blockchain/current', { number: 1 });
-    yield Promise.all([
-      s1.commit({ time: now }),
-      // We wait until both nodes received the new block
-      s1.until('block', 1),
-      s2.until('block', 1)
-    ]);
-    yield s1.expectJSON('/blockchain/current', { number: 2, issuersCount: 1 });
-    yield s2.expectJSON('/blockchain/current', { number: 2, issuersCount: 1 });
-    yield Promise.all([
-      s2.commit({ time: now }),
-      // We wait until both nodes received the new block
-      s1.until('block', 1),
-      s2.until('block', 1)
-    ]);
-    yield s1.expectJSON('/blockchain/current', { number: 3, issuersCount: 2 });
-    yield s2.expectJSON('/blockchain/current', { number: 3, issuersCount: 2 });
-    // yield s2.expectJSON('/blockchain/difficulties', { number: 3, issuersCount: 2 });
-    yield Promise.all([
-
-      new Promise((resolve) => {
-        s1.startBlockComputation();
-        s2.startBlockComputation();
-        resolve();
-      }),
-
-      // We wait until both nodes received the new block
-      s1.until('block', 2),
-      s2.until('block', 2)
-    ]);
-    yield s1.expectJSON('/blockchain/current', { number: 5 });
-    yield s2.expectJSON('/blockchain/current', { number: 5 });
-  }));
-
-  after(() => {
-    constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP = BACKUP_HANDICAP;
-  });
+  // TODO: re-enable when algorithm is better
+  // it('should be able to cancel a waiting on other PoW receival', () => co(function*() {
+  //   const now = 1474464481;
+  //   const res = yield toolbox.simpleNetworkOf2NodesAnd2Users({
+  //     powSecurityRetryDelay: 10 * 60 * 1000,
+  //     powMaxHandicap: 8,
+  //     percentRot: 1,
+  //     powMin: 35
+  //   }), s1 = res.s1, s2 = res.s2;
+  //   yield Promise.all([
+  //     s1.commit({ time: now }),
+  //     // We wait until both nodes received the new block
+  //     s1.until('block', 1),
+  //     s2.until('block', 1)
+  //   ]);
+  //   yield s1.expectJSON('/blockchain/current', { number: 0 });
+  //   yield s2.expectJSON('/blockchain/current', { number: 0 });
+  //   yield Promise.all([
+  //     s2.commit({ time: now }),
+  //     // We wait until both nodes received the new block
+  //     s1.until('block', 1),
+  //     s2.until('block', 1)
+  //   ]);
+  //   yield s1.expectJSON('/blockchain/current', { number: 1 });
+  //   yield s2.expectJSON('/blockchain/current', { number: 1 });
+  //   yield Promise.all([
+  //     s1.commit({ time: now }),
+  //     // We wait until both nodes received the new block
+  //     s1.until('block', 1),
+  //     s2.until('block', 1)
+  //   ]);
+  //   yield s1.expectJSON('/blockchain/current', { number: 2, issuersCount: 1 });
+  //   yield s2.expectJSON('/blockchain/current', { number: 2, issuersCount: 1 });
+  //   yield Promise.all([
+  //     s2.commit({ time: now }),
+  //     // We wait until both nodes received the new block
+  //     s1.until('block', 1),
+  //     s2.until('block', 1)
+  //   ]);
+  //   yield s1.expectJSON('/blockchain/current', { number: 3, issuersCount: 2 });
+  //   yield s2.expectJSON('/blockchain/current', { number: 3, issuersCount: 2 });
+  //   // yield s2.expectJSON('/blockchain/difficulties', { number: 3, issuersCount: 2 });
+  //   yield Promise.all([
+  //
+  //     new Promise((resolve) => {
+  //       s1.startBlockComputation();
+  //       s2.startBlockComputation();
+  //       resolve();
+  //     }),
+  //
+  //     // We wait until both nodes received the new block
+  //     s1.until('block', 2),
+  //     s2.until('block', 2)
+  //   ]);
+  //   yield s1.expectJSON('/blockchain/current', { number: 5 });
+  //   yield s2.expectJSON('/blockchain/current', { number: 5 });
+  // }));
 });
diff --git a/test/integration/revocation-test.js b/test/integration/revocation-test.js
index b6f0dc8ea7d7af9541926783fde981eb7165e1ed..61d39215c6137759fed7c6ae9aa82313d84b7497 100644
--- a/test/integration/revocation-test.js
+++ b/test/integration/revocation-test.js
@@ -4,14 +4,11 @@ const _         = require('underscore');
 const co        = require('co');
 const should    = require('should');
 const duniter   = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
 const commit    = require('./tools/commit');
-const limiter = require('../../app/lib/system/limiter');
-
-limiter.noLimit();
 
 const expectAnswer  = httpTest.expectAnswer;
 
@@ -23,14 +20,13 @@ const commonConf = {
   forksize: 3,
   xpercent: 0.9,
   msValidity: 10000,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = duniter({
-  memory: MEMORY_MODE,
-  name: 'bb12'
-}, _.extend({
+const s1 = duniter(
+  '/bb12',
+  MEMORY_MODE,
+  _.extend({
   port: '9964',
   pair: {
     pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
@@ -38,10 +34,10 @@ const s1 = duniter({
   }
 }, commonConf));
 
-const s2 = duniter({
-  memory: MEMORY_MODE,
-  name: 'bb13'
-}, _.extend({
+const s2 = duniter(
+  '/bb13',
+  MEMORY_MODE,
+  _.extend({
   port: '9965',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
diff --git a/test/integration/scenarios/malformed-documents.js b/test/integration/scenarios/malformed-documents.js
index c817c4c7f10a0c2fd4dd9ea6c0a4242bd30b20c2..792f79a88ffbb860011874d22dbfd2504e4fac17 100644
--- a/test/integration/scenarios/malformed-documents.js
+++ b/test/integration/scenarios/malformed-documents.js
@@ -1,11 +1,10 @@
 "use strict";
-var wallet = require('../tools/wallet');
 
-module.exports = function(node1) {
+const request = require('request');
 
-  var w1 = wallet('abc', '123', node1);
+module.exports = function(node1) {
 
-  var malformedTransaction = "Version: 2\n" +
+  const malformedTransaction = "Version: 2\n" +
     "Type: Transaction\n" +
     "Currency: null\n" +
     "Issuers:\n" +
@@ -14,7 +13,25 @@ module.exports = function(node1) {
     "0:T:1536:539CB0E60CD5F55CF1BE96F067E73BF55C052112:1.0\n" +
     "Outputs:Comment: mon comments\n";
 
+
+  function sendRaw (raw) {
+    return function(done) {
+      post('/tx/process', {
+        "transaction": raw
+      }, done);
+    }
+  }
+
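+  // POSTs form-encoded data to the node's BMA HTTP interface and passes (err, res, body) to the callback.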
+  function post(uri, data, done) {
+    const postReq = request.post({
+      "uri": 'http://' + [node1.server.conf.remoteipv4, node1.server.conf.remoteport].join(':') + uri,
+      "timeout": 1000*10
+    }, function (err, res, body) {
+      done(err, res, body);
+    });
+    postReq.form(data);
+  }
   return [
-    w1.sendRaw(malformedTransaction)
+    sendRaw(malformedTransaction)
   ];
 };
diff --git a/test/integration/server-import-export.js b/test/integration/server-import-export.js
index c0372d89ea18daf3d5672c35b7f42a5cb045bf95..d00adeb6ec00c798fefb2ff0f544946dd020b8e6 100644
--- a/test/integration/server-import-export.js
+++ b/test/integration/server-import-export.js
@@ -6,7 +6,7 @@ const co = require('co');
 const unzip = require('unzip');
 const toolbox = require('../integration/tools/toolbox');
 const user    = require('../integration/tools/user');
-const bma     = require('../../app/lib/streams/bma');
+const bma     = require('duniter-bma').duniter.methods.bma;
 
 const serverConfig = {
   memory: false,
diff --git a/test/integration/server-sandbox.js b/test/integration/server-sandbox.js
index 4f423316cdffc8306a53d2fd1ea85ceda2e03c6a..e6dc5d9e1dbf485892a0d20ee9de6f9ae2e9a942 100644
--- a/test/integration/server-sandbox.js
+++ b/test/integration/server-sandbox.js
@@ -2,15 +2,11 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 const constants = require('../../app/lib/constants');
-const limiter   = require('../../app/lib/system/limiter');
-
-limiter.noLimit();
 
 const s1 = toolbox.server({
   idtyWindow: 10,
diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js
index 7e0ba5d449587758f1205178d069c1bb8f086d83..8ee4b148f9b33d4e973b6430a2e7f7d75ba0c8ed 100644
--- a/test/integration/start_generate_blocks.js
+++ b/test/integration/start_generate_blocks.js
@@ -2,8 +2,8 @@
 
 const co        = require('co');
 const _         = require('underscore');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter   = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const rp        = require('request-promise');
 const httpTest  = require('./tools/http');
@@ -11,7 +11,7 @@ const commit    = require('./tools/commit');
 const until     = require('./tools/until');
 const multicaster = require('../../app/lib/streams/multicaster');
 const Peer = require('../../app/lib/entity/peer');
-const contacter  = require('../../app/lib/contacter');
+const contacter  = require('duniter-crawler').duniter.methods.contacter;
 const sync      = require('./tools/sync');
 
 const expectJSON     = httpTest.expectJSON;
@@ -26,30 +26,28 @@ const commonConf = {
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb7'
-}, _.extend({
+const s1 = duniter(
+  '/bb7',
+  MEMORY_MODE,
+  _.extend({
   port: '7790',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  powDelay: 1,
-  participate: true // TODO: to remove when startGeneration will be an explicit call
+  powDelay: 1
 }, commonConf));
 
-const s2 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb7_2'
-}, _.extend({
+const s2 = duniter(
+  '/bb7_2',
+  MEMORY_MODE,
+  _.extend({
   port: '7791',
   pair: {
     pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
     sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
   },
-  powDelay: 1,
-  participate: true // TODO: to remove when startGeneration will be an explicit call
+  powDelay: 1
 }, commonConf));
 
 const cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 });
@@ -72,11 +70,12 @@ describe("Generation", function() {
         yield server.initWithDAL();
         server.bma = yield bma(server);
         yield server.bma.openConnections();
-        server
-          .pipe(server.router()) // The router asks for multicasting of documents
-          .pipe(multicaster())
-          .pipe(server.router());
-        yield server.start();
+        require('../../app/modules/router').duniter.methods.routeToNetwork(server);
+        yield server.PeeringService.generateSelfPeer(server.conf, 0);
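+        // Block generation is now handled by the duniter-prover module, exposed through the start/stop wrappers below.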
+        const prover = require('duniter-prover').duniter.methods.prover(server);
+        server.startBlockComputation = () => prover.startService();
+        server.stopBlockComputation = () => prover.stopService();
       }
       nodeS1 = contacter('127.0.0.1', s1.conf.port);
       nodeS2 = contacter('127.0.0.1', s2.conf.port);
diff --git a/test/integration/tests.js b/test/integration/tests.js
index 9b23de8cbf81e8714655b2b737949d80980e0bce..7e81633d69fad4d70629a53942c9bc1b514aab7b 100644
--- a/test/integration/tests.js
+++ b/test/integration/tests.js
@@ -4,18 +4,15 @@ const co = require('co');
 const _ = require('underscore');
 const should = require('should');
 const assert = require('assert');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const constants = require('../../app/lib/constants');
 const node   = require('./tools/node');
-const ucoin     = require('../../index');
+const duniter   = require('../../index');
 const user   = require('./tools/user');
 const jspckg = require('../../package');
 const commit    = require('./tools/commit');
 const httpTest  = require('./tools/http');
 const rp        = require('request-promise');
-const limiter = require('../../app/lib/system/limiter');
-
-limiter.noLimit();
 
 const expectAnswer   = httpTest.expectAnswer;
 const MEMORY_MODE = true;
@@ -24,8 +21,8 @@ describe("Integration", function() {
 
   describe("Node 1", function() {
 
-    const node1 = node('db1', { currency: 'bb', ipv4: 'localhost', port: 9999, remoteipv4: 'localhost', remoteport: 9999, upnp: false, httplogs: false,
-      participate: false, rootoffset: 0,
+    const node1 = node('db1', { currency: 'bb', ipv4: 'localhost', port: 9999, remoteipv4: 'localhost', remoteport: 9999, httplogs: false,
+      rootoffset: 0,
       sigQty: 1,
       pair: {
         pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
@@ -172,10 +169,14 @@ describe("Integration", function() {
 
   describe("Testing leavers", function(){
 
-    const node3 = ucoin({ name: 'db3', memory: MEMORY_MODE }, {
-      currency: 'dd', ipv4: 'localhost', port: 9997, remoteipv4: 'localhost', remoteport: 9997, upnp: false, httplogs: false,
-      salt: 'abc', passwd: 'abc', participate: false, rootoffset: 0,
-      sigQty: 1, sigPeriod: 0
+    const node3 = duniter('/db3', MEMORY_MODE, {
+      currency: 'dd', ipv4: 'localhost', port: 9997, remoteipv4: 'localhost', remoteport: 9997, httplogs: false,
+      rootoffset: 0,
+      sigQty: 1, sigPeriod: 0,
+      pair: {
+        pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+        sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+      }
     });
 
     const cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: node3 });
diff --git a/test/integration/tools/commit.js b/test/integration/tools/commit.js
index fd06c8b2cc6ae0036d52a573aa29cd964efc56c6..1385e44b40fc2a396bdf275226f2cecfcce2b0b8 100644
--- a/test/integration/tools/commit.js
+++ b/test/integration/tools/commit.js
@@ -12,7 +12,7 @@ module.exports = function makeBlockAndPost(theServer, extraProps) {
       manualValues = _.extend(manualValues, extraProps);
     }
     return co(function *() {
-      let proven = yield theServer.doMakeNextBlock(manualValues);
+      let proven = yield require('duniter-prover').duniter.methods.generateAndProveTheNext(theServer, null, null, manualValues);
       return postBlock(theServer)(proven);
     });
   };
diff --git a/test/integration/tools/node.js b/test/integration/tools/node.js
index c15aa92122479acf5c9ab73c061c4db0097b540b..3d90d172d652d4ebc795cba2b3a9f1ee092e5366 100644
--- a/test/integration/tools/node.js
+++ b/test/integration/tools/node.js
@@ -6,83 +6,22 @@ var _ = require('underscore');
 var async  = require('async');
 var request  = require('request');
 var rules = require('../../../app/lib/rules');
-var contacter = require('../../../app/lib/contacter');
-var ucoin  = require('../../../index');
+var contacter = require('duniter-crawler').duniter.methods.contacter;
+var duniter  = require('../../../index');
 var multicaster = require('../../../app/lib/streams/multicaster');
 var Configuration = require('../../../app/lib/entity/configuration');
 var Peer          = require('../../../app/lib/entity/peer');
 var user   = require('./user');
 var http   = require('./http');
-const bma = require('../../../app/lib/streams/bma');
-
-var MEMORY_MODE = true;
+const bma = require('duniter-bma').duniter.methods.bma;
 
 module.exports = function (dbName, options) {
   return new Node(dbName, options);
 };
 
-let AUTO_PORT = 10200;
-
 module.exports.statics = {
-
-  newBasicTxNode: (testSuite) => () => {
-    getTxNode(testSuite);
-  },
-
-  newBasicTxNodeWithOldDatabase: (testSuite) => () => {
-    getTxNode(testSuite, (node) => co(function*() {
-      yield node.server.dal.txsDAL.exec('UPDATE txs SET recipients = "[]";');
-    }));
-  }
 };
 
-function getTxNode(testSuite, afterBeforeHook){
-
-  let port = ++AUTO_PORT;
-  const now = 1481800000;
-
-  var node2 = new Node({ name: "db_" + port, memory: MEMORY_MODE }, { currency: 'cc', ipv4: 'localhost', port: port, remoteipv4: 'localhost', remoteport: port, upnp: false, httplogs: false,
-    pair: {
-      pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
-      sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
-    },
-    forksize: 3,
-    participate: false, rootoffset: 10,
-    sigQty: 1, dt: 1, ud0: 120
-  });
-
-  var tic = user('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, node2);
-  var toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, node2);
-
-  before(() => co(function*() {
-    yield node2.startTesting();
-    // Self certifications
-    yield tic.createIdentity();
-    yield toc.createIdentity();
-    // Certification;
-    yield tic.cert(toc);
-    yield toc.cert(tic);
-    yield tic.join();
-    yield toc.join();
-    yield node2.commitP({ time: now });
-    yield node2.commitP({ time: now + 10 });
-    yield node2.commitP({ time: now + 10 });
-    yield tic.sendP(51, toc);
-
-    if (afterBeforeHook) {
-      yield afterBeforeHook(node2);
-    }
-  }));
-
-  after(node2.after());
-
-  node2.rp = (uri) => rp('http://127.0.0.1:' + port + uri, { json: true });
-
-  node2.expectHttp = (uri, callback) => () => http.expectAnswer(node2.rp(uri), callback);
-
-  testSuite(node2);
-}
-
 var UNTIL_TIMEOUT = 115000;
 
 function Node (dbName, options) {
@@ -141,9 +80,9 @@ function Node (dbName, options) {
             block: function(callback){
               co(function *() {
                 try {
-                  const block2 = yield that.server.BlockchainService.generateNext(params);
+                  const block2 = yield require('duniter-prover').duniter.methods.generateTheNextBlock(that.server, params);
                   const trial2 = yield that.server.getBcContext().getIssuerPersonalizedDifficulty(that.server.keyPair.publicKey);
-                  const block = yield that.server.BlockchainService.makeNextBlock(block2, trial2, params);
+                  const block = yield require('duniter-prover').duniter.methods.generateAndProveTheNext(that.server, block2, trial2, params);
                   callback(null, block);
                 } catch (e) {
                   callback(e);
@@ -186,58 +125,61 @@ function Node (dbName, options) {
         function (server, next){
           // Launching server
           that.server = server;
-          co(function*(){
-            try {
-              yield that.server.start();
-              if (server.conf.routing) {
-                server
-                  .pipe(server.router()) // The router asks for multicasting of documents
-                  .pipe(multicaster());
-              }
-              started = true;
-              next();
-            } catch (e) {
-              next(e);
-            }
-          });
+          started = true;
+          next();
         },
         function (next) {
           that.http = contacter(options.remoteipv4, options.remoteport);
           next();
         }
       ], function(err) {
-        err ? reject(err) : resolve(that.server);
+        err ? reject(err) : resolve();
         done && done(err);
       });
-    })
-      .then((server) => co(function*() {
-        const bmapi = yield bma(server, [{
-          ip: server.conf.ipv4,
-          port: server.conf.port
-        }], true);
-        return bmapi.openConnections();
-      }));
+    });
   };
 
   function service(callback) {
     return function () {
-      var cbArgs = arguments;
-      var dbConf = typeof dbName == 'object' ? dbName : { name: dbName, memory: true };
-      var server = ucoin(dbConf, Configuration.statics.complete(options));
-
-      // Initialize server (db connection, ...)
-      return co(function*(){
-        try {
-          yield server.initWithDAL();
-          //cbArgs.length--;
-          cbArgs[cbArgs.length++] = server;
-          //cbArgs[cbArgs.length++] = server.conf;
-          callback(null, server);
-        } catch (err) {
-          server.disconnect();
-          throw err;
+      const stack = duniter.statics.simpleStack();
+      for (const name of ['duniter-keypair', 'duniter-bma']) {
+        stack.registerDependency(require(name), name);
+      }
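+      // Inline test module: it overrides the loaded configuration with the test options and adds an 'execute' CLI command that hands the started server back to the callback.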
+      stack.registerDependency({
+        duniter: {
+          config: {
+            onLoading: (conf, program) => co(function*() {
+              options.port = options.port || 8999;
+              options.ipv4 = options.ipv4 || "127.0.0.1";
+              options.ipv6 = options.ipv6 || null;
+              options.remotehost = options.remotehost || null;
+              options.remoteipv4 = options.remoteipv4 || null;
+              options.remoteipv6 = options.remoteipv6 || null;
+              options.remoteport = options.remoteport || 8999;
+              const overConf = Configuration.statics.complete(options);
+              _.extend(conf, overConf);
+            })
+          },
+          service: {
+            process: (server) => _.extend(server, {
+              startService: () => {
+                logger.debug('Server service started!');
+              }
+            })
+          },
+          cli: [{
+            name: 'execute',
+            desc: 'Unit Test execution',
+            onDatabaseExecute: (server, conf, program, params, startServices) => co(function*() {
+              yield startServices();
+              callback(null, server);
+              yield new Promise((res) => null); // never resolves: keeps the test services running
+            })
+          }]
         }
-      });
+      }, 'duniter-automated-test');
+      stack.executeStack(['', '', '--mdb', dbName, '--memory', 'execute']);
     };
   }
 
diff --git a/test/integration/tools/toolbox.js b/test/integration/tools/toolbox.js
index a6b606f0fb831d9dee3160ecea58e42ff80693eb..fb5f56a83d64af6c10de5d84a77207e6a12381bd 100644
--- a/test/integration/tools/toolbox.js
+++ b/test/integration/tools/toolbox.js
@@ -12,11 +12,14 @@ const until       = require('../tools/until');
 const Peer        = require('../../../app/lib/entity/peer');
 const Identity    = require('../../../app/lib/entity/identity');
 const Block       = require('../../../app/lib/entity/block');
-const bma         = require('../../../app/lib/streams/bma');
+const bma         = require('duniter-bma').duniter.methods.bma;
 const multicaster = require('../../../app/lib/streams/multicaster');
 const network     = require('../../../app/lib/system/network');
-const dtos        = require('../../../app/lib/streams/dtos');
+const dtos        = require('duniter-bma').duniter.methods.dtos;
 const duniter     = require('../../../index');
+const logger      = require('../../../app/lib/logger')('toolbox');
+
+require('duniter-bma').duniter.methods.noLimit(); // Disables the HTTP limiter
 
 const MEMORY_MODE = true;
 const CURRENCY_NAME = 'duniter_unit_test_currency';
@@ -50,8 +53,8 @@ module.exports = {
     yield tac.join();
 
     // Each server forwards to each other
-    s1.pipe(s1.router()).pipe(multicaster());
-    s2.pipe(s2.router()).pipe(multicaster());
+    require('../../../app/modules/router').duniter.methods.routeToNetwork(s1);
+    require('../../../app/modules/router').duniter.methods.routeToNetwork(s2);
 
     return { s1, s2, cat, tac };
   }),
@@ -125,7 +128,7 @@ module.exports = {
       const fakeServer = yield network.createServersAndListen("Fake Duniter Server", [{
         ip: host,
         port: port
-      }], NO_HTTP_LOGS, NO_STATIC_PATH, (app, httpMethods) => {
+      }], NO_HTTP_LOGS, logger, NO_STATIC_PATH, (app, httpMethods) => {
 
         // Mock BMA method for sync mocking
         httpMethods.httpGET('/network/peering', () => {
@@ -182,13 +185,12 @@ module.exports = {
       currency: conf.currency || CURRENCY_NAME,
       httpLogs: true,
       forksize: 3,
-      parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
       sigQty: 1
     };
-    const server = duniter({
-      memory: conf.memory !== undefined ? conf.memory : MEMORY_MODE,
-      name: conf.homename || 'dev_unit_tests'
-    }, _.extend(conf, commonConf));
+    const server = duniter(
+      '~/.config/duniter/' + (conf.homename || 'dev_unit_tests'),
+      conf.memory !== undefined ? conf.memory : MEMORY_MODE,
+      _.extend(conf, commonConf));
 
     server.port = port;
     server.host = HOST;
@@ -240,7 +242,7 @@ module.exports = {
     });
 
     server.makeNext = (overrideProps) => co(function*() {
-      const block = yield server.doMakeNextBlock(overrideProps || {});
+      const block = yield require('duniter-prover').duniter.methods.generateAndProveTheNext(server, null, null, overrideProps || {});
       return Block.statics.fromJSON(block);
     });
 
@@ -284,13 +286,22 @@
       const bmaAPI = yield bma(server);
       yield bmaAPI.openConnections();
       server.bma = bmaAPI;
-      server
-        .pipe(server.router()) // The router asks for multicasting of documents
-        .pipe(multicaster())
-        .pipe(server.router());
-      return server.start();
+      require('../../../app/modules/router').duniter.methods.routeToNetwork(server);
     });
 
+    let prover;
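+    // The duniter-prover service is created lazily on the first startBlockComputation() call.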
+    server.startBlockComputation = () => {
+      if (!prover) {
+        prover = require('duniter-prover').duniter.methods.prover(server);
+        server.permaProver = prover.permaProver;
+        server.pipe(prover);
+      }
+      prover.startService();
+    };
+    // server.startBlockComputation = () => prover.startService();
+    server.stopBlockComputation = () => prover.stopService();
+
     return server;
   }
 };
diff --git a/test/integration/tools/user.js b/test/integration/tools/user.js
index 1dbf8bc0b4aa59f028447bdbdb07d4492743a7ab..2f8d4db5d9a4569895194b84133e421fc6f3b23d 100644
--- a/test/integration/tools/user.js
+++ b/test/integration/tools/user.js
@@ -4,12 +4,11 @@ const Q		    = require('q');
 const _ = require('underscore');
 const async		= require('async');
 const request	= require('request');
-const contacter = require('../../../app/lib/contacter');
-const ucp     = require('../../../app/lib/ucp/buid');
+const contacter = require('duniter-crawler').duniter.methods.contacter;
+const ucp     = require('duniter-common').buid;
 const parsers = require('../../../app/lib/streams/parsers');
-const keyring	= require('../../../app/lib/crypto/keyring');
-const rawer		= require('../../../app/lib/ucp/rawer');
-const base58	= require('../../../app/lib/crypto/base58');
+const keyring	= require('duniter-common').keyring;
+const rawer		= require('duniter-common').rawer;
 const constants = require('../../../app/lib/constants');
 const Identity = require('../../../app/lib/entity/identity');
 const Certification = require('../../../app/lib/entity/certification');
@@ -18,8 +17,8 @@ const Revocation = require('../../../app/lib/entity/revocation');
 const Peer = require('../../../app/lib/entity/peer');
 const Transaction = require('../../../app/lib/entity/transaction');
 
-module.exports = function (uid, salt, passwd, url) {
-  return new User(uid, salt, passwd, url);
+module.exports = function (uid, options, node) {
+  return new User(uid, options, node);
 };
 
 function User (uid, options, node) {
@@ -36,25 +35,7 @@ function User (uid, options, node) {
   }
 
   function init(done) {
-    if (options.salt && options.passwd) {
-      async.waterfall([
-        function (next) {
-          co(function*(){
-            try {
-              const pair = yield keyring.scryptKeyPair(options.salt, options.passwd);
-              next(null, pair);
-            } catch (e) {
-              next(e);
-            }
-          });
-        },
-        function (pair, next) {
-          pub = that.pub = pair.publicKey;
-          sec = that.sec = pair.secretKey;
-          next();
-        }
-      ], done);
-    } else if (options.pub && options.sec) {
+    if (options.pub && options.sec) {
       pub = that.pub = options.pub;
       sec = that.sec = options.sec;
       done();
@@ -275,7 +256,7 @@ function User (uid, options, node) {
     let outputs = [{
       qty: amount,
       base: commonbase,
-      lock: 'SIG(' + recipient.pub + ')'
+      lock: 'SIG(' + (recipient.pub || recipient) + ')'
     }];
     if (inputSum - amount > 0) {
       // Rest back to issuer
diff --git a/test/integration/tools/wallet.js b/test/integration/tools/wallet.js
deleted file mode 100644
index 6ca076ae5070a164abace59872da977547f4cc70..0000000000000000000000000000000000000000
--- a/test/integration/tools/wallet.js
+++ /dev/null
@@ -1,47 +0,0 @@
-"use strict";
-var async		= require('async');
-var request	= require('request');
-var crypto	= require('../../../app/lib/crypto/keyring');
-var rawer		= require('../../../app/lib/ucp/rawer');
-var base58	= require('../../../app/lib/crypto/base58');
-
-module.exports = function (salt, passwd, node) {
-	return new Wallet(salt, passwd, node);
-};
-
-function Wallet (salt, passwd, node) {
-
-  var that = this;
-  var pub, sec;
-
-  function init(done) {
-    async.waterfall([
-      function(next) {
-        crypto.getKeyPair(salt, passwd, next);
-      },
-      function(pair, next) {
-        pub = that.pub = base58.encode(pair.publicKey);
-        sec = pair.secretKey;
-        next();
-      }
-    ], done);
-  }
-
-  this.sendRaw = function (raw) {
-    return function(done) {
-      post('/tx/process', {
-        "transaction": raw
-      }, done);
-    }
-  };
-
-  function post(uri, data, done) {
-    var postReq = request.post({
-      "uri": 'http://' + [node.server.conf.remoteipv4, node.server.conf.remoteport].join(':') + uri,
-      "timeout": 1000*10
-    }, function (err, res, body) {
-      done(err, res, body);
-    });
-    postReq.form(data);
-  }
-}
diff --git a/test/integration/transactions-chaining.js b/test/integration/transactions-chaining.js
index f8db07405ebc03c846141e352760170de5366117..de951f88485cb129d05fd9451957d51e2c60c7fa 100644
--- a/test/integration/transactions-chaining.js
+++ b/test/integration/transactions-chaining.js
@@ -5,15 +5,12 @@ const _ = require('underscore');
 const should = require('should');
 const assert = require('assert');
 const constants = require('../../app/lib/constants');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const toolbox   = require('./tools/toolbox');
 const node   = require('./tools/node');
 const user   = require('./tools/user');
 const unit   = require('./tools/unit');
 const http   = require('./tools/http');
-const limiter = require('../../app/lib/system/limiter');
-
-limiter.noLimit();
 
 describe("Transaction chaining", function() {
 
@@ -25,7 +22,7 @@ describe("Transaction chaining", function() {
       sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
     },
     dt: 3600,
-    ud0: 120,
+    ud0: 1200,
     c: 0.1
   });
 
@@ -48,10 +45,10 @@ describe("Transaction chaining", function() {
 
   describe("Sources", function(){
 
-    it('it should exist block#2 with UD of 120', () => s1.expect('/blockchain/block/2', (block) => {
+    it('it should exist block#2 with UD of 1200', () => s1.expect('/blockchain/block/2', (block) => {
       should.exists(block);
       assert.equal(block.number, 2);
-      assert.equal(block.dividend, 120);
+      assert.equal(block.dividend, 1200);
     }));
   });
 
@@ -61,9 +58,9 @@ describe("Transaction chaining", function() {
       // Current state
       let current = yield s1.get('/blockchain/current');
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
-      let tx1 = yield toc.prepareITX(104, tic); // Rest = 120 - 104 = 16
-      let tx2 = yield toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 16, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
-        comment: 'also take the remaining 16 units',
+      let tx1 = yield toc.prepareITX(1040, tic); // Rest = 1200 - 1040 = 160
+      let tx2 = yield toc.prepareUTX(tx1, ['SIG(0)'], [{ qty: 160, base: 0, lock: 'SIG(' + tic.pub + ')' }], {
+        comment: 'also take the remaining 160 units',
         blockstamp: [current.number, current.hash].join('-'),
         theseOutputsStart: 1
       });
@@ -74,11 +71,11 @@ describe("Transaction chaining", function() {
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(1);
       yield s1.commit({ time: now + 7210 }); // TX1 commited
-      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // The 16 remaining units
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2); // The UD + 104 units sent by toc
+      (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // The 160 remaining units
+      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2); // The UD + 1040 units sent by toc
       yield s1.commit({ time: now + 7210 }); // TX2 commited
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
-      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 104 + 16 units sent by toc
+      (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // The UD + 1040 + 160 units sent by toc
       constants.TRANSACTION_MAX_TRIES = tmp;
     }));
   });
diff --git a/test/integration/transactions-test.js b/test/integration/transactions-test.js
index 10a6f3164b58452b1ebb17a1702feaaa313c93e1..e435fbd12d3fb62490103d3822a5df5274a81ba0 100644
--- a/test/integration/transactions-test.js
+++ b/test/integration/transactions-test.js
@@ -5,15 +5,13 @@ const _ = require('underscore');
 const should = require('should');
 const assert = require('assert');
 const constants = require('../../app/lib/constants');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const toolbox   = require('./tools/toolbox');
 const node   = require('./tools/node');
 const user   = require('./tools/user');
 const unit   = require('./tools/unit');
 const http   = require('./tools/http');
-const limiter = require('../../app/lib/system/limiter');
 
-limiter.noLimit();
 
 describe("Testing transactions", function() {
 
@@ -25,7 +23,7 @@ describe("Testing transactions", function() {
       sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
     },
     dt: 3600,
-    ud0: 120
+    ud0: 1200
   });
 
   const tic = user('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
@@ -51,7 +49,7 @@ describe("Testing transactions", function() {
     yield s1.commit({
       time: now + 7210
     });
-    yield tic.sendP(51, toc);
+    yield tic.sendP(510, toc);
     yield s1.expect('/tx/history/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', (res) => {
       res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
       res.should.have.property('history').property('pending').length(1);
@@ -64,32 +62,32 @@ describe("Testing transactions", function() {
 
   describe("Sources", function(){
 
-    it('it should exist block#2 with UD of 120', () => s1.expect('/blockchain/block/2', (block) => {
+    it('it should exist block#2 with UD of 1200', () => s1.expect('/blockchain/block/2', (block) => {
       should.exists(block);
       assert.equal(block.number, 2);
-      assert.equal(block.dividend, 120);
+      assert.equal(block.dividend, 1200);
     }));
 
-    it('tic should be able to send 51 to toc', () => s1.expect('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', (res) => {
+    it('tic should be able to send 510 to toc', () => s1.expect('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', (res) => {
       should.exists(res);
       assert.equal(res.sources.length, 1);
       const txSrc = _.findWhere(res.sources, { type: 'T' });
-      assert.equal(txSrc.amount, 69);
+      assert.equal(txSrc.amount, 690);
     }));
 
-    it('toc should have 151 of sources', () => s1.expect('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', (res) => {
+    it('toc should have 1510 of sources', () => s1.expect('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', (res) => {
       should.exists(res);
       assert.equal(res.sources.length, 2);
       const txRes = _.findWhere(res.sources, { type: 'T' });
       const duRes = _.filter(res.sources, { type: 'D' });
       assert.equal(txRes.type, 'T');
-      assert.equal(txRes.amount, 51);
+      assert.equal(txRes.amount, 510);
       assert.equal(duRes[0].type, 'D');
-      assert.equal(duRes[0].amount, 120);
+      assert.equal(duRes[0].amount, 1200);
     }));
 
-    it('toc should be able to send 80 to tic', () => co(function *() {
-      let tx1 = yield toc.prepareITX(171, tic);
+    it('toc should be able to send 800 to tic', () => co(function *() {
+      let tx1 = yield toc.prepareITX(1710, tic);
       yield toc.sendTX(tx1);
       yield s1.commit();
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
@@ -111,18 +109,18 @@ describe("Testing transactions", function() {
       yield s1.commit();
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
-      let tx1 = yield toc.prepareITX(121, tic);
+      let tx1 = yield toc.prepareITX(1201, tic);
       yield toc.sendTX(tx1);
       yield s1.commit();
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(4);
       // Now cat has all the money...
       let current = yield s1.get('/blockchain/current');
-      let tx2 = yield tic.prepareUTX(tx1, ['SIG(2)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
-      let tx3 = yield tic.prepareUTX(tx1, ['SIG(1)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
-      let tx4 = yield tic.prepareUTX(tx1, ['SIG(0)'], [{ qty: 121, base: 0, lock: 'XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB)' }], { comment: 'ok', blockstamp: [current.number, current.hash].join('-') });
-      let tx5 = yield tic.prepareUTX(tx1, ['XHX(2)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
-      let tx6 = yield tic.prepareUTX(tx1, ['XHX(4)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx2 = yield tic.prepareUTX(tx1, ['SIG(2)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx3 = yield tic.prepareUTX(tx1, ['SIG(1)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx4 = yield tic.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1201, base: 0, lock: 'XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB)' }], { comment: 'ok', blockstamp: [current.number, current.hash].join('-') });
+      let tx5 = yield tic.prepareUTX(tx1, ['XHX(2)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx6 = yield tic.prepareUTX(tx1, ['XHX(4)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
       yield unit.shouldFail(toc.sendTX(tx2), 'Wrong unlocker in transaction');
       yield unit.shouldFail(toc.sendTX(tx3), 'Wrong unlocker in transaction');
       yield unit.shouldNotFail(toc.sendTX(tx4));
@@ -131,8 +129,8 @@ describe("Testing transactions", function() {
       yield s1.commit(); // TX4 committed
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0); // The tx was not sent to a pubkey, but locked with an XHX! So toc has nothing more than before.
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
-      let tx7 = yield tic.prepareUTX(tx4, ['XHX(2872767826647264)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong1', blockstamp: [current.number, current.hash].join('-') });
-      let tx8 = yield tic.prepareUTX(tx4, ['XHX(1872767826647264)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'okk', blockstamp: [current.number, current.hash].join('-') }); // tic unlocks the XHX locked amount, and gives it to toc!
+      let tx7 = yield tic.prepareUTX(tx4, ['XHX(2872767826647264)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong1', blockstamp: [current.number, current.hash].join('-') });
+      let tx8 = yield tic.prepareUTX(tx4, ['XHX(1872767826647264)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'okk', blockstamp: [current.number, current.hash].join('-') }); // tic unlocks the XHX locked amount, and gives it to toc!
       yield unit.shouldFail(toc.sendTX(tx7), 'Wrong unlocker in transaction');
       yield unit.shouldNotFail(toc.sendTX(tx8));
       yield s1.commit(); // TX8 committed
@@ -143,24 +141,24 @@ describe("Testing transactions", function() {
     it('with MULTISIG', () => co(function *() {
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
-      let tx1 = yield toc.prepareITX(121, tic);
+      let tx1 = yield toc.prepareITX(1201, tic);
       yield toc.sendTX(tx1);
       yield s1.commit();
       let current = yield s1.get('/blockchain/current');
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(4);
       // The funding transaction that can be reverted by its issuer (tic here) or consumed by toc if he knows X for H(X)
-      let tx2 = yield tic.prepareUTX(tx1, ['SIG(0)'], [{ qty: 121, base: 0, lock: '(XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB) && SIG(' + toc.pub + ')) || (SIG(' + tic.pub + ') && SIG(' + toc.pub + '))'  }], { comment: 'cross1', blockstamp: [current.number, current.hash].join('-') });
+      let tx2 = yield tic.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1201, base: 0, lock: '(XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB) && SIG(' + toc.pub + ')) || (SIG(' + tic.pub + ') && SIG(' + toc.pub + '))'  }], { comment: 'cross1', blockstamp: [current.number, current.hash].join('-') });
       yield unit.shouldNotFail(toc.sendTX(tx2));
       yield s1.commit(); // TX2 committed
       (yield s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // toc is also present in the target of tx2
       (yield s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(4); // As well as tic
-      let tx3 = yield tic.prepareUTX(tx2, ['XHX(1872767826647264) SIG(0)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
-      let tx4 = yield toc.prepareUTX(tx2, ['XHX(1872767826647264) SIG(0)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'ok', blockstamp: [current.number, current.hash].join('-') });
-      let tx5 = yield tic.prepareMTX(tx2, toc, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'multi OK', blockstamp: [current.number, current.hash].join('-') });
-      let tx6 = yield toc.prepareMTX(tx2, tic, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'multi WRONG', blockstamp: [current.number, current.hash].join('-') });
+      let tx3 = yield tic.prepareUTX(tx2, ['XHX(1872767826647264) SIG(0)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx4 = yield toc.prepareUTX(tx2, ['XHX(1872767826647264) SIG(0)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'ok', blockstamp: [current.number, current.hash].join('-') });
+      let tx5 = yield tic.prepareMTX(tx2, toc, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'multi OK', blockstamp: [current.number, current.hash].join('-') });
+      let tx6 = yield toc.prepareMTX(tx2, tic, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'multi WRONG', blockstamp: [current.number, current.hash].join('-') });
       // nLocktime
-      let tx7 = yield tic.prepareMTX(tx2, toc, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 121, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong locktime', locktime: 100, blockstamp: [current.number, current.hash].join('-') });
+      let tx7 = yield tic.prepareMTX(tx2, toc, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 1201, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong locktime', locktime: 100, blockstamp: [current.number, current.hash].join('-') });
       yield unit.shouldFail(toc.sendTX(tx3), 'Wrong unlocker in transaction');
       yield unit.shouldNotFail(toc.sendTX(tx4));
       yield unit.shouldNotFail(toc.sendTX(tx5));
diff --git a/test/integration/v0.4-dividend.js b/test/integration/v0.4-dividend.js
index 5525f95cec496161685354d5f3fbe330a011448d..4cd821459504f1bcd9bd84c5589f701cf4cf21ad 100644
--- a/test/integration/v0.4-dividend.js
+++ b/test/integration/v0.4-dividend.js
@@ -2,12 +2,10 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
-const until     = require('./tools/until');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const s1 = toolbox.server({
   c: 0.1,
@@ -23,14 +21,12 @@ const cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', s
 const tac = user('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, { server: s1 });
 const tic = user('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
 
-let now;
+const now = 1484000000;
 
 describe("Protocol 0.4 Dividend", function() {
 
   before(() => co(function*() {
 
-    now = Math.round(new Date().getTime() / 1000);
-
     yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections());
 
     yield cat.createIdentity();
diff --git a/test/integration/v0.4-times.js b/test/integration/v0.4-times.js
index 6cfadd166db440d34a129024c19072236997b820..c9f7cc61f48e011909d73dede3c87b9fdda725f1 100644
--- a/test/integration/v0.4-times.js
+++ b/test/integration/v0.4-times.js
@@ -2,12 +2,9 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
-const user      = require('./tools/user');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const commit    = require('./tools/commit');
-const until     = require('./tools/until');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const conf = {
   avgGenTime: 5000,
diff --git a/test/integration/v0.5-identity-blockstamp.js b/test/integration/v0.5-identity-blockstamp.js
index 2d44d33b8dcc6baa9888d413f26ef4690daac576..3cda2091019a473e2d7e411ff3fe985afc1ff277 100644
--- a/test/integration/v0.5-identity-blockstamp.js
+++ b/test/integration/v0.5-identity-blockstamp.js
@@ -2,11 +2,9 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const constants = require('../../app/lib/constants');
-const limiter   = require('../../app/lib/system/limiter');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const conf = {
   avgGenTime: 5000,
@@ -21,7 +19,6 @@ describe("Protocol 0.5 Identity blockstamp", function() {
 
   before(() => co(function*() {
 
-    limiter.noLimit();
     const res1 = yield toolbox.simpleNodeWith2Users(conf);
     const res2 = yield toolbox.simpleNodeWith2otherUsers(conf);
     s1 = res1.s1;
diff --git a/test/integration/v0.5-transactions.js b/test/integration/v0.5-transactions.js
index eebdf23024c36da0388085ab6b8e4f533bb54d87..9a3ac591602b23261ef35644c1662576678b0c2e 100644
--- a/test/integration/v0.5-transactions.js
+++ b/test/integration/v0.5-transactions.js
@@ -2,11 +2,9 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const constants = require('../../app/lib/constants');
-const limiter   = require('../../app/lib/system/limiter');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const conf = {
   dt: 30,
@@ -22,7 +20,6 @@ describe("Protocol 0.5 Transaction version", function() {
 
   before(() => co(function*() {
 
-    limiter.noLimit();
     const res1 = yield toolbox.simpleNodeWith2Users(conf);
     s1 = res1.s1;
     const cat = res1.cat;
diff --git a/test/integration/v0.6-difficulties.js b/test/integration/v0.6-difficulties.js
index 733cc374c35d28ca1f42443efc802506ea48a693..64f98bef6c75053e74f8ffeac23e480aa16a19e5 100644
--- a/test/integration/v0.6-difficulties.js
+++ b/test/integration/v0.6-difficulties.js
@@ -2,11 +2,9 @@
 
 const co        = require('co');
 const should    = require('should');
-const bma       = require('../../app/lib/streams/bma');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const constants = require('../../app/lib/constants');
-const limiter   = require('../../app/lib/system/limiter');
 const toolbox   = require('./tools/toolbox');
-const multicaster = require('../../app/lib/streams/multicaster');
 
 const conf = {
   avgGenTime: 5000,
@@ -21,7 +19,6 @@ describe("Protocol 0.6 Difficulties", function() {
 
   before(() => co(function*() {
 
-    limiter.noLimit();
     const res = yield toolbox.simpleNetworkOf2NodesAnd2Users(conf);
     s1 = res.s1;
     s2 = res.s2;
diff --git a/test/integration/v1.0-modules-api.js b/test/integration/v1.0-modules-api.js
new file mode 100644
index 0000000000000000000000000000000000000000..a480a8faae2359a2352747012c84def68994c504
--- /dev/null
+++ b/test/integration/v1.0-modules-api.js
@@ -0,0 +1,297 @@
+"use strict";
+
+const co      = require('co');
+const _       = require('underscore');
+const should  = require('should');
+const util    = require('util');
+const stream  = require('stream');
+const duniter = require('../../index');
+const parsers = require('../../app/lib/streams/parsers/index');
+const querablep = require('querablep');
+
+describe("v1.0 Module API", () => {
+
+  it('should be able to execute `hello` command', () => co(function*() {
+
+    const sStack = duniter.statics.simpleStack();
+    const aStack = duniter.statics.autoStack();
+
+    const helloDependency = {
+      duniter: {
+        cliOptions: [
+          { value: '--opt1', desc: 'The option 1. Enabled or not' },
+          { value: '--option2 <value>', desc: 'The option 2. Requires an argument, parsed as integer.', parser: parseInt }
+        ],
+        cli: [{
+          name: 'hello',
+          desc: 'Returns a "Hello, world" string after the configuration phase.',
+          onConfiguredExecute: (server, conf, program, params) => co(function*(){
+            return "Hello, " + params[0] + ". You successfully sent arg '" + params[1] + "' along with opt1 = " + program.opt1 + " and option2 = " + program.option2 + ".";
+          })
+        }]
+      }
+    };
+
+    sStack.registerDependency(helloDependency, 'duniter-hello');
+    sStack.registerDependency(helloDependency, 'duniter-hello'); // Try to load it 2 times, should not throw an error
+    sStack.registerDependency(require('duniter-keypair'), 'duniter-keypair');
+    aStack.registerDependency(helloDependency, 'duniter-hello');
+
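+    // What the assertions below rely on, given the cliOptions declared above: '--opt1' is a bare
+    // flag (true when passed, undefined otherwise), while '--option2 <value>' goes through
+    // parseInt, so the non-numeric 'd' ends up as NaN.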
+    (yield sStack.executeStack(['node', 'index.js', '--memory', 'hello', 'World', 'TEST', '--opt1', '--option2', '5'])).should.equal('Hello, World. You successfully sent arg \'TEST\' along with opt1 = true and option2 = 5.');
+    (yield aStack.executeStack(['node', 'index.js', '--memory', 'hello', 'Zorld', 'ESSE', '--option2', 'd'])).should.equal('Hello, Zorld. You successfully sent arg \'ESSE\' along with opt1 = undefined and option2 = NaN.');
+  }));
+
+  /***********************
+   * CONFIGURATION HOOKS
+   **********************/
+
+  describe("Configuration hooks", () => {
+
+    let stack;
+    function run() {
+      const args = Array.from(arguments);
+      return stack.executeStack(['node', 'index.js', '--mdb', 'modules_api_tests'].concat(args));
+    }
+
+    before(() => co(function*() {
+
+      stack = duniter.statics.simpleStack();
+      const configurationDependency = {
+        duniter: {
+          cliOptions: [
+            { value: '--supersalt <salt>', desc: 'A crypto salt.' },
+            { value: '--superpasswd <passwd>', desc: 'A crypto password.' }
+          ],
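+          // Hooks as exercised by the tests below: onLoading runs every time the configuration is
+          // loaded, beforeSave runs just before it is persisted; this is why 'superpasswd' is
+          // available at runtime but never written to disk.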
+          config: {
+            onLoading: (conf, program) => co(function*(){
+
+              // Always adds a parameter named "superkey"
+              conf.superkey = { pub: 'publicPart', sec: 'secretPart' };
+              // Optionally adds a supersalt if given as an option
+              if (program.supersalt) {
+                conf.supersalt = program.supersalt;
+              }
+              // Optionally adds a superpasswd if given as an option
+              if (program.superpasswd) {
+                conf.superpasswd = program.superpasswd;
+              }
+            }),
+            beforeSave: (conf, program) => co(function*(){
+
+              // We never want to store "superpasswd"
+              delete conf.superpasswd;
+            })
+          }
+        }
+      };
+      const returnConfDependency = {
+        duniter: {
+          cli: [{
+            name: 'gimme-conf',
+            desc: 'Returns the configuration object.',
+            onDatabaseExecute: (server, conf, program, params, startServices, stopServices) => co(function*() {
+              // Gimme the conf!
+              return conf;
+            })
+          }],
+        }
+      };
+
+      stack.registerDependency(require('duniter-keypair'), 'duniter-keypair');
+      stack.registerDependency(configurationDependency, 'duniter-configuration');
+      stack.registerDependency(returnConfDependency, 'duniter-gimme-conf');
+    }));
+
+    it('verify that we get the CLI options', () => co(function*() {
+      const conf = yield run('gimme-conf', '--supersalt', 'NaCl');
+      conf.should.have.property('supersalt').equal('NaCl');
+    }));
+
+    it('verify that we get the saved options', () => co(function*() {
+      let conf;
+
+      // We make an initial reset
+      yield run('reset', 'config');
+      conf = yield run('gimme-conf');
+      conf.should.have.property('superkey'); // Always loaded
+      conf.should.not.have.property('supersalt');
+
+      // Nothing should have changed
+      conf = yield run('gimme-conf');
+      conf.should.have.property('superkey'); // Always loaded
+      conf.should.not.have.property('supersalt');
+
+      // Now we try to save the parameters
+      yield run('config', '--supersalt', 'NaCl2', '--superpasswd', 'megapasswd');
+      conf = yield run('gimme-conf');
+      conf.should.have.property('superkey'); // Always loaded
+      conf.should.have.property('supersalt').equal('NaCl2');
+      conf.should.not.have.property('superpasswd');
+
+      // Yet we can still get all the options by passing them explicitly on the command line
+      conf = yield run('gimme-conf', '--superpasswd', 'megapasswd2');
+      conf.should.have.property('superkey');
+      conf.should.have.property('supersalt').equal('NaCl2');
+      conf.should.have.property('superpasswd').equal('megapasswd2');
+    }));
+  });
+
+  /***********************
+   *  SERVICE START/STOP
+   **********************/
+
+  describe("Service triggers", () => {
+
+    let stack;
+    let fakeI;
+    let fakeP;
+    let fakeO;
+
+    function run() {
+      const args = Array.from(arguments);
+      return stack.executeStack(['node', 'index.js', '--memory'].concat(args));
+    }
+
+    before(() => co(function*() {
+
+      stack = duniter.statics.simpleStack();
+      fakeI = new FakeStream((that, data) => {
+        // Note: we never pass here
+        if (typeof data == "string") {
+          that.push(data);
+        }
+      });
+      fakeP = new FakeStream((that, data) => {
+        if (typeof data == "object" && data.type == "transaction") {
+          const tx = parsers.parseTransaction.syncWrite(data.doc);
+          that.push(tx);
+        }
+      });
+      fakeO = new FakeStream((that, data, enc, done) => {
+        if (data.issuers) {
+          that.resolveData();
+        }
+        done && done();
+      });
+      // The fake output exposes a dedicated promise that resolves once data is received, for our tests
+      fakeO.outputed = querablep(new Promise((res) => fakeO.resolveData = res));
+      const dummyStartServiceDependency = {
+        duniter: {
+          cli: [{
+            name: 'hello-service',
+            desc: 'Says hello to the world, at service phase. And feeds INPUT with a transaction.',
+            onDatabaseExecute: (duniterServer, conf, program, programArgs, startServices, stopServices) => co(function*(){
+              yield startServices();
+              fakeI.push("Version: 10\n" +
+                "Type: Transaction\n" +
+                "Currency: test_net\n" +
+                "Blockstamp: 3-2A27BD040B16B7AF59DDD88890E616987F4DD28AA47B9ABDBBEE46257B88E945\n" +
+                "Locktime: 0\n" +
+                "Issuers:\n" +
+                "HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk\n" +
+                "Inputs:\n" +
+                "100000:0:D:HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:3428\n" +
+                "Unlocks:\n" +
+                "0:SIG(0)\n" +
+                "Outputs:\n" +
+                "1000:0:SIG(yGKRRB18B4eaZQdksWBZubea4VJKFSSpii2okemP7x1)\n" +
+                "99000:0:SIG(HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk)\n" +
+                "Comment: reessai\n" +
+                "P6MxJ/2SdkvNDyIyWuOkTz3MUwsgsfo70j+rpWeQWcm6GdvKQsbplB8482Ar1HMz2q0h5V3tfMqjCuAeWVQ+Ag==\n");
+              yield fakeO.outputed;
+              return fakeO.outputed;
+            })
+          }],
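+          // Service wiring declared to the stack: documents flow input -> process -> output, so
+          // the fakes let the test observe a raw transaction travelling through the whole stream.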
+          service: {
+            input: () => fakeI,
+            process: () => fakeP,
+            output: () => fakeO
+          }
+        }
+      };
+      const dummyStopServiceDependency = {
+        duniter: {
+          cli: [{
+            name: 'bye-service',
+            desc: 'Says goodbye to the world, at service phase.',
+            onDatabaseExecute: (duniterServer, conf, program, programArgs, startServices, stopServices) => co(function*(){
+              yield stopServices();
+              return Promise.resolve();
+            })
+          }],
+          service: {
+            input: () => fakeI,
+            process: () => fakeP,
+            output: () => fakeO
+          }
+        }
+      };
+
+      stack.registerDependency(require('duniter-keypair'), 'duniter-keypair');
+      stack.registerDependency(require('duniter-bma'), 'duniter-bma');
+      stack.registerDependency(dummyStartServiceDependency, 'duniter-dummy-start');
+      stack.registerDependency(dummyStopServiceDependency, 'duniter-dummy-stop');
+    }));
+
+    it('verify that services are started', () => co(function*() {
+      fakeI.started.isResolved().should.equal(false);
+      fakeP.started.isResolved().should.equal(false);
+      fakeO.started.isResolved().should.equal(false);
+      fakeI.stopped.isResolved().should.equal(false);
+      fakeP.stopped.isResolved().should.equal(false);
+      fakeO.stopped.isResolved().should.equal(false);
+      yield run('hello-service');
+      fakeO.outputed.isResolved().should.equal(true); // The transaction has successfully gone through the whole stream
+      fakeI.started.isResolved().should.equal(true);
+      fakeP.started.isResolved().should.equal(true);
+      fakeO.started.isResolved().should.equal(true);
+      fakeI.stopped.isResolved().should.equal(false);
+      fakeP.stopped.isResolved().should.equal(false);
+      fakeO.stopped.isResolved().should.equal(false);
+    }));
+
+    it('verify that services are stopped', () => co(function*() {
+      fakeI.stopped.isResolved().should.equal(false);
+      fakeP.stopped.isResolved().should.equal(false);
+      fakeO.stopped.isResolved().should.equal(false);
+      fakeI.started.isResolved().should.equal(true);
+      fakeP.started.isResolved().should.equal(true);
+      fakeO.started.isResolved().should.equal(true);
+      yield run('bye-service');
+      fakeI.started.isResolved().should.equal(false);
+      fakeP.started.isResolved().should.equal(false);
+      fakeO.started.isResolved().should.equal(false);
+      fakeI.stopped.isResolved().should.equal(true);
+      fakeP.stopped.isResolved().should.equal(true);
+      fakeO.stopped.isResolved().should.equal(true);
+    }));
+  });
+
+});
+
+
+function FakeStream(onWrite) {
+
+  const that = this;
+  stream.Transform.call(this, { objectMode: true });
+
+  let resolveStart = () => null;
+  let resolveStop  = () => null;
+
+  this._write = onWrite.bind(this, that);
+
+  this.started = querablep(new Promise(res => resolveStart = res));
+  this.stopped = querablep(new Promise(res => resolveStop  = res));
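+  // started/stopped are querable promises (querablep), so the tests above can poll isResolved()
+  // to know whether the stack has called startService()/stopService() on this stream.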
+
+  this.startService = () => co(function*() {
+    resolveStart();
+    that.stopped = querablep(new Promise(res => resolveStop = res));
+  });
+
+  this.stopService = () => co(function*() {
+    resolveStop();
+    that.started = querablep(new Promise(res => resolveStart = res));
+  });
+}
+
+util.inherits(FakeStream, stream.Transform);
diff --git a/test/integration/v1.0-source-garbaging.js b/test/integration/v1.0-source-garbaging.js
new file mode 100644
index 0000000000000000000000000000000000000000..f38410e35e66ec59299068437ae4c43cf655783e
--- /dev/null
+++ b/test/integration/v1.0-source-garbaging.js
@@ -0,0 +1,173 @@
+"use strict";
+
+const co        = require('co');
+const should    = require('should');
+const bma       = require('duniter-bma').duniter.methods.bma;
+const constants = require('../../app/lib/constants');
+const toolbox   = require('./tools/toolbox');
+
+const conf = {
+  ud0: 9995,
+  c: .99,
+  dt: 300,
+  avgGenTime: 5000,
+  medianTimeBlocks: 1 // The medianTime always equals previous block's medianTime
+};
+
+const now = 1480000000;
+
+constants.CORES_MAXIMUM_USE_IN_PARALLEL = 1;
+constants.NB_DIGITS_UD = 4;
+
+let s1, cat, tac;
+
+describe("Protocol 1.0 Source Garbaging", function() {
+
+  /*****
+   * DESCRIPTION
+   * -----------
+   *
+   * Any account holding less than 100 units of money (at the current base) sees its money garbage-collected, i.e. destroyed.
+   *
+   * This measure avoids uncontrolled growth of the database caused by users spending very small amounts of money
+   * to random addresses, and it also eventually destroys very old money (dozens of years old).
+   */
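+  // Illustrative sketch only (not the server's actual implementation): an account gets emptied
+  // when its whole balance falls below 100 units at the current base, e.g.
+  //   const balance = sources.reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0);
+  //   const garbaged = balance < 100 * Math.pow(10, currentUnitBase);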
+
+  before(() => co(function*() {
+
+    const res1 = yield toolbox.simpleNodeWith2Users(conf);
+    s1 = res1.s1;
+    cat = res1.cat; // HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd
+    tac = res1.tac; // 2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc
+    yield s1.commit({ time: now });
+    yield s1.commit({ time: now + 300 });
+  }));
+
+  it('cat should have no source initially', () => co(function*() {
+    yield s1.expectThat('/tx/sources/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (json) => {
+      json.sources.should.have.length(0);
+    });
+  }));
+
+  it('cat should have a Dividend, as well as tac', () => co(function*() {
+    yield s1.commit({ time: now + 300 });
+    yield s1.expectThat('/tx/sources/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'D', noffset: 2, identifier: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', amount: 9995, base: 0 }
+      ]);
+    });
+  }));
+
+  it('should be able to send money to tac with no losses', () => co(function*() {
+    yield cat.sendP(3000, tac);
+    yield s1.commit({ time: now + 300 });
+    yield s1.expectThat('/tx/sources/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 1, identifier: '693C54130D1D393767347F657D074FA471E0844FC1CF35A6FDEAC68849737A01', amount: 6995, base: 0 }
+      ]);
+    });
+    yield s1.expectThat('/tx/sources/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'D', noffset: 2, identifier: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', amount: 9995, base: 0 },
+        { type: 'T', noffset: 0, identifier: '693C54130D1D393767347F657D074FA471E0844FC1CF35A6FDEAC68849737A01', amount: 3000, base: 0 }
+      ]);
+    });
+  }));
+
+  it('should be able to send money to tac with still no losses', () => co(function*() {
+    yield cat.sendP(5495, tac);
+    yield s1.commit({ time: now + 300 });
+    yield s1.expectThat('/tx/sources/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 1, identifier: '1E47AF2308490CD7480CD509F3D031B9F1E0DEE9E40FEC9CF9462CEE412C0710', amount: 1500, base: 0 }
+      ]);
+    });
+    yield s1.expectThat('/tx/sources/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'D', noffset: 2, identifier: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', amount: 9995, base: 0 },
+        { type: 'T', noffset: 0, identifier: '693C54130D1D393767347F657D074FA471E0844FC1CF35A6FDEAC68849737A01', amount: 3000, base: 0 },
+        { type: 'T', noffset: 0, identifier: '1E47AF2308490CD7480CD509F3D031B9F1E0DEE9E40FEC9CF9462CEE412C0710', amount: 5495, base: 0 }
+      ]);
+    });
+  }));
+
+  it('should be able to lose money by sending 1,99,100,999,1000,300+700 units to random accounts', () => co(function*() {
+    yield cat.sendP(1, '6EQoFVnFf2xpaRzieNTXmAKU6XkDHYrvgorJ8ppMFa8b');
+    yield s1.commit({ time: now + 300 });
+    yield cat.sendP(99, '2EvWF9XM6TY3zUDjwi3qfGRW5zhN11TXcUDXdgK2XK41');
+    yield s1.commit({ time: now + 300 });
+    yield cat.sendP(100, 'DPFgnVSB14QnYFjKNhbFRYLxroSmaXZ53TzgFZBcCxbF');
+    yield s1.commit({ time: now + 300 });
+    yield cat.sendP(999, '4WmQWq4NuJtu6mzFDKkmmu6Cm6BZvgoY4b4MMDMwVvu7');
+    yield s1.commit({ time: now + 300 });
+    yield cat.sendP(300, '7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2');
+    yield tac.sendP(700, '7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2');
+    yield s1.commit({ time: now + 900 });
+    // Has spent all its money, + 1 unit destroyed
+    yield s1.expectThat('/tx/sources/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (json) => {
+      json.sources.should.deepEqual([]);
+    });
+    // Has seen 1 unit destroyed
+    yield s1.expectThat('/tx/sources/6EQoFVnFf2xpaRzieNTXmAKU6XkDHYrvgorJ8ppMFa8b', (json) => {
+      json.sources.should.deepEqual([]);
+    });
+    // Has seen 99 units destroyed
+    yield s1.expectThat('/tx/sources/2EvWF9XM6TY3zUDjwi3qfGRW5zhN11TXcUDXdgK2XK41', (json) => {
+      json.sources.should.deepEqual([]);
+    });
+    // Has just enough on the account (100 units)
+    yield s1.expectThat('/tx/sources/DPFgnVSB14QnYFjKNhbFRYLxroSmaXZ53TzgFZBcCxbF', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 0, identifier: '5218AA814F5AE71BF9ECF2DC86D8E8D85968F98E220D2E12DB6AAEFD2CD9EEE0', amount: 100, base: 0 }
+      ]);
+    });
+    // Has way enough on the account (999 units)
+    yield s1.expectThat('/tx/sources/4WmQWq4NuJtu6mzFDKkmmu6Cm6BZvgoY4b4MMDMwVvu7', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 0, identifier: 'F603AD88714A83A0B3C68BA14E311C55CD81F609C033B18501BAE1C8A21CB174', amount: 999, base: 0 }
+      ]);
+    });
+    // Has way enough on the account (300 + 700 units)
+    yield s1.expectThat('/tx/sources/7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 0, identifier: 'C6FBF49423B6B629DEDB3CA1F0CD2BDE756C3FD5CFA009A52218A8098E18B9D4', amount: 300, base: 0 },
+        { type: 'T', noffset: 0, identifier: 'CE69CC143D8725ECB6D666B8194907DFCA8F2FD3242271F9DA16CA6B37290BA1', amount: 700, base: 0 }
+      ]);
+    });
+  }));
+
+  it('should have lost some money with unitBase bumped from 0 to 1', () => co(function*() {
+    yield s1.commit({ time: now + 900 });
+    // No longer has enough on the account (100x10^0 < 100x10^1)
+    yield s1.expectThat('/tx/sources/DPFgnVSB14QnYFjKNhbFRYLxroSmaXZ53TzgFZBcCxbF', (json) => {
+      json.sources.should.deepEqual([]);
+    });
+    // Does NOT have enough on the account (999x10^0 = 99.9x10^1 < 100x10^1)
+    yield s1.expectThat('/tx/sources/4WmQWq4NuJtu6mzFDKkmmu6Cm6BZvgoY4b4MMDMwVvu7', (json) => {
+      json.sources.should.deepEqual([]);
+    });
+    // Has enough on the account (300x10^0 + 700x10^0 = 1000x10^0 = 100x10^1)
+    yield s1.expectThat('/tx/sources/7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 0, identifier: 'C6FBF49423B6B629DEDB3CA1F0CD2BDE756C3FD5CFA009A52218A8098E18B9D4', amount: 300, base: 0 },
+        { type: 'T', noffset: 0, identifier: 'CE69CC143D8725ECB6D666B8194907DFCA8F2FD3242271F9DA16CA6B37290BA1', amount: 700, base: 0 }
+      ]);
+    });
+    yield s1.commit({ time: now + 1800 });
+    // Has enough on the account (300x10^0 + 700x10^0 = 1000x10^0 = 100x10^1)
+    yield s1.expectThat('/tx/sources/7kMAi8wttYKPK5QSfCwoDriNTcCTWKzTbuSjsLsjGJX2', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'T', noffset: 0, identifier: 'C6FBF49423B6B629DEDB3CA1F0CD2BDE756C3FD5CFA009A52218A8098E18B9D4', amount: 300, base: 0 },
+        { type: 'T', noffset: 0, identifier: 'CE69CC143D8725ECB6D666B8194907DFCA8F2FD3242271F9DA16CA6B37290BA1', amount: 700, base: 0 }
+      ]);
+    });
+    yield s1.commit({ time: now + 3600 });
+    yield s1.expectThat('/tx/sources/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (json) => {
+      json.sources.should.deepEqual([
+        { type: 'D', noffset: 10, identifier: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', amount: 1980, base: 1 },
+        { type: 'D', noffset: 11, identifier: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', amount: 4901, base: 1 },
+        { type: 'D', noffset: 12, identifier: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', amount: 1263, base: 2 }
+      ]);
+    });
+  }));
+});
diff --git a/test/integration/wotb.js b/test/integration/wotb.js
index 4aa0e4f599d126907996d4f7c4593f0ae4e246b6..98581d58d20df3592878ce70c3b31a543f4c36be 100644
--- a/test/integration/wotb.js
+++ b/test/integration/wotb.js
@@ -3,8 +3,8 @@
 const co        = require('co');
 const should    = require('should');
 const _         = require('underscore');
-const ucoin     = require('../../index');
-const bma       = require('../../app/lib/streams/bma');
+const duniter     = require('../../index');
+const bma       = require('duniter-bma').duniter.methods.bma;
 const user      = require('./tools/user');
 const commit    = require('./tools/commit');
 
@@ -14,47 +14,46 @@ const commonConf = {
   currency: 'bb',
   httpLogs: true,
   forksize: 3,
-  parcatipate: false, // TODO: to remove when startGeneration will be an explicit call
   sigQty: 1
 };
 
-const s1 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s1 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '9337',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 1, ud0: 120
 }, commonConf));
 
-const s2 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb41'
-}, _.extend({
+const s2 = duniter(
+  '/bb41',
+  MEMORY_MODE,
+  _.extend({
   port: '9338',
   pair: {
     pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
     sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 1, ud0: 120,
   msValidity: 400 // Memberships expire after 400 second delay
 }, commonConf));
 
-const s3 = ucoin({
-  memory: MEMORY_MODE,
-  name: 'bb11'
-}, _.extend({
+const s3 = duniter(
+  '/bb11',
+  MEMORY_MODE,
+  _.extend({
   port: '9339',
   pair: {
     pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
     sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
   },
-  participate: false, rootoffset: 10,
+  rootoffset: 10,
   sigQty: 1, dt: 1, ud0: 120,
   sigValidity: 1400, sigPeriod: 0
 }, commonConf));