diff --git a/.gitignore b/.gitignore
index 8d7604780e96118702e1c892434d7d75dddd1b9c..b88a3dfca8d904c9c179e1d9f3e75aba5d5470e4 100644
--- a/.gitignore
+++ b/.gitignore
@@ -32,6 +32,7 @@ test/blockchain/*.js
 test/blockchain/*.js.map
 test/blockchain/lib/*.js
 test/blockchain/lib/*.js.map
+server.js*
 app/cli.js*
 app/lib/*.js*
 app/lib/blockchain/*.js
diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts
index 7965bb326be8555fa8b720016cd94954ad60a3bb..094bcc692f4e1125de7f546f3ebaf7212c477091 100644
--- a/app/lib/dto/ConfDTO.ts
+++ b/app/lib/dto/ConfDTO.ts
@@ -44,14 +44,17 @@ export class ConfDTO {
     public msWindow: number,
     public sigWindow: number,
     public swichOnTimeAheadBy: number,
-    public pair: Keypair | null,
+    public pair: Keypair,
     public remoteport: number,
     public remotehost: string,
     public remoteipv4: string,
     public remoteipv6: string,
+    public port: number,
+    public ipv4: string,
+    public ipv6: string,
 ) {}
 
   static mock() {
-    return new ConfDTO("", "", [], [], 0, 0, 0.6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, 0, false, 0, 0, 0, 0, 0, null, 0, "", "", "")
+    return new ConfDTO("", "", [], [], 0, 0, 0.6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, 0, false, 0, 0, 0, 0, 0, { pub:'', sec:'' }, 0, "", "", "", 0, "", "")
   }
 }
\ No newline at end of file
diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts
index 4ed3cbcae296efd52af981461c58c92fdcd80cbd..3846c4b8b0aee27b91743164c4574506e03ad7c8 100644
--- a/app/lib/system/directory.ts
+++ b/app/lib/system/directory.ts
@@ -1,5 +1,6 @@
 import {SQLiteDriver} from "../dal/drivers/SQLiteDriver"
 import {CFSCore} from "../dal/fileDALs/CFSCore"
+import {WoTBObject} from "../wot"
 
 const opts = require('optimist').argv;
 const path = require('path');
@@ -29,7 +30,6 @@ const dir = module.exports = {
 
   getHomeFS: async (isMemory:boolean, theHome:string) => {
     const home = theHome || dir.getHome();
-    await someDelayFix();
     const params:any = {
       home: home
     };
@@ -45,10 +45,9 @@ const dir = module.exports = {
   getHomeParams: async (isMemory:boolean, theHome:string) => {
     const params:any = await dir.getHomeFS(isMemory, theHome)
     const home = params.home;
-    await someDelayFix()
     if (isMemory) {
       params.dbf = () => new SQLiteDriver(':memory:');
-      params.wotb = require('../wot').WoTBObject.memoryInstance();
+      params.wotb = WoTBObject.memoryInstance();
     } else {
       const sqlitePath = path.join(home, dir.DUNITER_DB_NAME + '.db');
       params.dbf = () => new SQLiteDriver(sqlitePath);
@@ -57,7 +56,7 @@ const dir = module.exports = {
       if (!existsFile) {
         fs.closeSync(fs.openSync(wotbFilePath, 'w'));
       }
-      params.wotb = require('../wot').WoTBObject.fileInstance(wotbFilePath);
+      params.wotb = WoTBObject.fileInstance(wotbFilePath);
     }
     return params;
   },
@@ -67,8 +66,3 @@ const dir = module.exports = {
     return fsHandler.makeTree('');
   }
 }
-
-const someDelayFix = () => new Promise((resolve) => {
-  setTimeout(resolve, 100);
-})
-
diff --git a/app/lib/wot.ts b/app/lib/wot.ts
index bcfafa8fd1932298aaa9e4b27169669aeedcb093..d91199b6f1fc3e2be32f996a77a918b9e81b4066 100644
--- a/app/lib/wot.ts
+++ b/app/lib/wot.ts
@@ -2,7 +2,7 @@ const wotb = require('wotb');
 
 export interface WoTBInterface {
   fileInstance: (filepath:string) => any
-  memoryInstance: (filepath:string) => any
+  memoryInstance: () => any
   setVerbose: (verbose:boolean) => void
 }
 
diff --git a/doc/contribute-french.md b/doc/contribute-french.md
index c3c639d858b7cf585219860ae8f911812716bf6f..f07f873de34705cfb6eda7ca48deaca17a9ff82a 100644
--- a/doc/contribute-french.md
+++ b/doc/contribute-french.md
@@ -1013,7 +1013,7 @@ Pius cliquez sur "Find". Vous obtiendrez le résultat suivant :
 
 <img src="https://forum.duniter.org/uploads/default/original/1X/48f80f1e07828edab1601f4414f605b995143ddd.png" width="471" height="227">
 
-Double-cliquez sur le résultat trouvé, et vous serez alors amené au fichier `server.js`, ligne 75. Ajoutez-y un point d'arrêt :
+Double-cliquez sur le résultat trouvé, et vous serez alors amené au fichier `server.ts`, ligne 75. Ajoutez-y un point d'arrêt :
 
 <img src="https://forum.duniter.org/uploads/default/original/1X/789c7fbb457d3f780316a2cf164ed45f82d0c701.png" width="448" height="94">
 
diff --git a/index.js b/index.js
index 25d912679cab483a1bed04a02c3a475dca6db12a..0b6ed6a4f5d4b45966a65c5d1806edeef28c6d44 100644
--- a/index.js
+++ b/index.js
@@ -5,7 +5,7 @@ const path = require('path');
 const util = require('util');
 const stream = require('stream');
 const _ = require('underscore');
-const Server = require('./server');
+const Server = require('./server').Server
 const directory = require('./app/lib/system/directory');
 const constants = require('./app/lib/constants');
 const CLI = require('./app/cli').ExecuteCommand
diff --git a/server.js b/server.js
index 16b2c655979f65f7dab6b8f48d170c0b7a5d3c45..143f5d22ed0c3996e1972cdb55a1a9df3c0b1723 100644
--- a/server.js
+++ b/server.js
@@ -1,472 +1,503 @@
 "use strict";
-const stream      = require('stream');
-const util        = require('util');
-const path        = require('path');
-const co          = require('co');
-const _           = require('underscore');
-const archiver    = require('archiver');
-const unzip       = require('unzip2');
-const fs          = require('fs');
-const daemonize   = require("daemonize2")
-const parsers     = require('duniter-common').parsers;
-const constants   = require('./app/lib/constants');
-const FileDAL     = require('./app/lib/dal/fileDAL').FileDAL
-const jsonpckg    = require('./package.json');
-const keyring      = require('duniter-common').keyring;
-const directory   = require('./app/lib/system/directory');
-const rawer       = require('duniter-common').rawer;
-const SQLBlockchain   = require('./app/lib/blockchain/SqlBlockchain').SQLBlockchain
-const DuniterBlockchain = require('./app/lib/blockchain/DuniterBlockchain').DuniterBlockchain
-
-function Server (home, memoryOnly, overrideConf) {
-
-  stream.Duplex.call(this, { objectMode: true });
-
-  const paramsP = directory.getHomeParams(memoryOnly, home);
-  const logger = require('./app/lib/logger').NewLogger('server');
-  const that = this;
-  that.home = home;
-  that.conf = null;
-  that.dal = null;
-  that.version = jsonpckg.version;
-  that.logger = logger;
-
-  that.MerkleService       = require("./app/lib/helpers/merkle");
-  that.IdentityService     = new (require('./app/service/IdentityService').IdentityService)()
-  that.MembershipService   = new (require('./app/service/MembershipService').MembershipService)()
-  that.PeeringService      = new (require('./app/service/PeeringService').PeeringService)(that)
-  that.BlockchainService   = new (require('./app/service/BlockchainService').BlockchainService)(that)
-  that.TransactionsService = new (require('./app/service/TransactionsService').TransactionService)()
-
-  // Create document mapping
-  const documentsMapping = {
-    'identity':      { action: (obj) => that.IdentityService.submitIdentity(obj),                                 parser: parsers.parseIdentity },
-    'certification': { action: (obj) => that.IdentityService.submitCertification(obj),                            parser: parsers.parseCertification},
-    'revocation':    { action: (obj) => that.IdentityService.submitRevocation(obj),                               parser: parsers.parseRevocation },
-    'membership':    { action: (obj) => that.MembershipService.submitMembership(obj),                             parser: parsers.parseMembership },
-    'peer':          { action: (obj) => that.PeeringService.submitP(obj),                                         parser: parsers.parsePeer },
-    'transaction':   { action: (obj) => that.TransactionsService.processTx(obj),                                  parser: parsers.parseTransaction },
-    'block':         { action: (obj) => that.BlockchainService.submitBlock(obj, true, constants.NO_FORK_ALLOWED), parser: parsers.parseBlock }
-  };
-
-  // Unused, but made mandatory by Duplex interface
-  this._read = () => null;
-
-  this._write = (obj, enc, writeDone) => that.submit(obj, false, () => writeDone);
-
-  /**
-   * Facade method to control what is pushed to the stream (we don't want it to be closed)
-   * @param obj An object to be pushed to the stream.
-   */
-  this.streamPush = (obj) => {
-    if (obj) {
-      that.push(obj);
-    }
-  };
-
-  this.getBcContext = () => this.BlockchainService.getContext();
-
-  this.plugFileSystem = () => co(function *() {
-    logger.debug('Plugging file system...');
-    const params = yield paramsP;
-    that.dal = new FileDAL(params);
-    yield that.onPluggedFSHook()
-  });
-
-  this.unplugFileSystem = () => co(function *() {
-    logger.debug('Unplugging file system...');
-    yield that.dal.close();
-  });
-
-  this.loadConf = (useDefaultConf) => co(function *() {
-    logger.debug('Loading conf...');
-    that.conf = yield that.dal.loadConf(overrideConf, useDefaultConf);
-    // Default values
-    const defaultValues = {
-      remoteipv6:         that.conf.ipv6,
-      remoteport:         that.conf.port,
-      c:                  constants.CONTRACT.DEFAULT.C,
-      dt:                 constants.CONTRACT.DEFAULT.DT,
-      ud0:                constants.CONTRACT.DEFAULT.UD0,
-      stepMax:            constants.CONTRACT.DEFAULT.STEPMAX,
-      sigPeriod:          constants.CONTRACT.DEFAULT.SIGPERIOD,
-      msPeriod:           constants.CONTRACT.DEFAULT.MSPERIOD,
-      sigStock:           constants.CONTRACT.DEFAULT.SIGSTOCK,
-      sigWindow:          constants.CONTRACT.DEFAULT.SIGWINDOW,
-      sigValidity:        constants.CONTRACT.DEFAULT.SIGVALIDITY,
-      msValidity:         constants.CONTRACT.DEFAULT.MSVALIDITY,
-      sigQty:             constants.CONTRACT.DEFAULT.SIGQTY,
-      idtyWindow:         constants.CONTRACT.DEFAULT.IDTYWINDOW,
-      msWindow:           constants.CONTRACT.DEFAULT.MSWINDOW,
-      xpercent:           constants.CONTRACT.DEFAULT.X_PERCENT,
-      percentRot:         constants.CONTRACT.DEFAULT.PERCENTROT,
-      powDelay:           constants.CONTRACT.DEFAULT.POWDELAY,
-      avgGenTime:         constants.CONTRACT.DEFAULT.AVGGENTIME,
-      dtDiffEval:         constants.CONTRACT.DEFAULT.DTDIFFEVAL,
-      medianTimeBlocks:   constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS,
-      rootoffset:         0,
-      forksize:           constants.BRANCHES.DEFAULT_WINDOW_SIZE
-    };
-    _.keys(defaultValues).forEach(function(key){
-      if (that.conf[key] == undefined) {
-        that.conf[key] = defaultValues[key];
-      }
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+    return new (P || (P = Promise))(function (resolve, reject) {
+        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+        function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
+        step((generator = generator.apply(thisArg, _arguments || [])).next());
     });
-    // 1.3.X: the msPeriod = msWindow
-    that.conf.msPeriod = that.conf.msPeriod || that.conf.msWindow
-    // Default keypair
-    if (!that.conf.pair || !that.conf.pair.pub || !that.conf.pair.sec) {
-      // Create a random key
-      that.conf.pair = keyring.randomKey().json()
-    }
-    // Extract key pair
-    that.keyPair = keyring.Key(that.conf.pair.pub, that.conf.pair.sec);
-    that.sign = that.keyPair.sign;
-    // Blockchain object
-    that.blockchain = new DuniterBlockchain(new SQLBlockchain(that.dal), that.dal);
-    // Update services
-    [that.IdentityService, that.MembershipService, that.PeeringService, that.BlockchainService, that.TransactionsService].map((service) => {
-      service.setConfDAL(that.conf, that.dal, that.keyPair);
-    });
-    return that.conf;
-  });
-
-  this.initWithDAL = () => co(function *() {
-    yield that.plugFileSystem();
-    yield that.loadConf();
-    yield that.initDAL();
-    return that;
-  });
-
-  this.submit = (obj, isInnerWrite, done) => {
-    return co(function *() {
-      if (!obj.documentType) {
-        throw 'Document type not given';
-      }
-      try {
-        const action = documentsMapping[obj.documentType].action;
-        let res;
-        if (typeof action == 'function') {
-          // Handle the incoming object
-          res = yield action(obj);
-        } else {
-          throw 'Unknown document type \'' + obj.documentType + '\'';
-        }
-        if (res) {
-          // Only emit valid documents
-          that.emit(obj.documentType, _.clone(res));
-          that.streamPush(_.clone(res));
-        }
-        if (done) {
-          isInnerWrite ? done(null, res) : done();
-        }
-        return res;
-      } catch (err) {
-        if (err && !err.uerr) {
-          // Unhandled error, display it
-          logger.debug('Document write error: ', err);
-        }
-        if (done) {
-          isInnerWrite ? done(err, null) : done();
-        } else {
-          throw err;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const IdentityService_1 = require("./app/service/IdentityService");
+const MembershipService_1 = require("./app/service/MembershipService");
+const PeeringService_1 = require("./app/service/PeeringService");
+const BlockchainService_1 = require("./app/service/BlockchainService");
+const TransactionsService_1 = require("./app/service/TransactionsService");
+const ConfDTO_1 = require("./app/lib/dto/ConfDTO");
+const fileDAL_1 = require("./app/lib/dal/fileDAL");
+const DuniterBlockchain_1 = require("./app/lib/blockchain/DuniterBlockchain");
+const SqlBlockchain_1 = require("./app/lib/blockchain/SqlBlockchain");
+const stream = require("stream");
+const path = require('path');
+const _ = require('underscore');
+const archiver = require('archiver');
+const unzip = require('unzip2');
+const fs = require('fs');
+const daemonize = require("daemonize2");
+const parsers = require('duniter-common').parsers;
+const constants = require('./app/lib/constants');
+const jsonpckg = require('./package.json');
+const keyring = require('duniter-common').keyring;
+const directory = require('./app/lib/system/directory');
+const rawer = require('duniter-common').rawer;
+const logger = require('./app/lib/logger').NewLogger('server');
+class Server extends stream.Duplex {
+    constructor(home, memoryOnly, overrideConf) {
+        super({ objectMode: true });
+        this.overrideConf = overrideConf;
+        this.home = home;
+        this.conf = ConfDTO_1.ConfDTO.mock();
+        this.version = jsonpckg.version;
+        this.logger = logger;
+        this.rawer = rawer;
+        this.paramsP = directory.getHomeParams(memoryOnly, home);
+        this.MerkleService = require("./app/lib/helpers/merkle");
+        this.IdentityService = new IdentityService_1.IdentityService();
+        this.MembershipService = new MembershipService_1.MembershipService();
+        this.PeeringService = new PeeringService_1.PeeringService(this);
+        this.BlockchainService = new BlockchainService_1.BlockchainService(this);
+        this.TransactionsService = new TransactionsService_1.TransactionService();
+        // Create document mapping
+        this.documentsMapping = {
+            'identity': { action: (obj) => this.IdentityService.submitIdentity(obj), parser: parsers.parseIdentity },
+            'certification': { action: (obj) => this.IdentityService.submitCertification(obj), parser: parsers.parseCertification },
+            'revocation': { action: (obj) => this.IdentityService.submitRevocation(obj), parser: parsers.parseRevocation },
+            'membership': { action: (obj) => this.MembershipService.submitMembership(obj), parser: parsers.parseMembership },
+            'peer': { action: (obj) => this.PeeringService.submitP(obj), parser: parsers.parsePeer },
+            'transaction': { action: (obj) => this.TransactionsService.processTx(obj), parser: parsers.parseTransaction },
+            'block': { action: (obj) => this.BlockchainService.submitBlock(obj, true, constants.NO_FORK_ALLOWED), parser: parsers.parseBlock }
+        };
+    }
+    // Unused, but made mandatory by Duplex interface
+    _read() { }
+    _write(obj, enc, writeDone) {
+        return this.submit(obj, false, () => writeDone);
+    }
+    /**
+     * Facade method to control what is pushed to the stream (we don't want it to be closed)
+     * @param obj An object to be pushed to the stream.
+     */
+    streamPush(obj) {
+        if (obj) {
+            this.push(obj);
         }
-      }
-    });
-  };
-
-  this.submitP = (obj, isInnerWrite) => this.submit(obj, isInnerWrite)
-
-  this.initDAL = (conf) => co(function*() {
-    yield that.dal.init(conf);
-    // Maintenance
-    let head_1 = yield that.dal.bindexDAL.head(1);
-    if (head_1) {
-      // Case 1: b_index < block
-      yield that.dal.blockDAL.exec('DELETE FROM block WHERE NOT fork AND number > ' + head_1.number);
-      // Case 2: b_index > block
-      const current = yield that.dal.blockDAL.getCurrent();
-      const nbBlocksToRevert = (head_1.number - current.number);
-      for (let i = 0; i < nbBlocksToRevert; i++) {
-        yield that.revert();
-      }
-    }
-  });
-
-  this.recomputeSelfPeer = () => that.PeeringService.generateSelfPeer(that.conf, 0);
-
-  this.getCountOfSelfMadePoW = () => this.BlockchainService.getCountOfSelfMadePoW();
-  this.isServerMember = () => this.BlockchainService.isMember();
-
-  this.checkConfig = () => co(function*() {
-    if (!that.conf.pair) {
-      throw new Error('No keypair was given.');
-    }
-  });
-
-  this.resetHome = () => co(function *() {
-    const params = yield paramsP;
-    const myFS = params.fs;
-    const rootPath = params.home;
-    const existsDir = yield myFS.exists(rootPath);
-    if (existsDir) {
-      yield myFS.removeTree(rootPath);
-    }
-  });
-
-  this.resetAll = (done) => co(function*() {
-    yield that.resetDataHook()
-    yield that.resetConfigHook()
-    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE, 'export.zip', 'import.zip', 'conf'];
-    const dirs  = ['blocks', 'blockchain', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
-    return resetFiles(files, dirs, done);
-  });
-
-  this.resetData = (done) => co(function*(){
-    yield that.resetDataHook()
-    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE];
-    const dirs  = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
-    yield resetFiles(files, dirs, done);
-  });
-
-  this.resetConf = (done) => co(function*() {
-    yield that.resetConfigHook()
-    const files = ['conf'];
-    const dirs  = [];
-    return resetFiles(files, dirs, done);
-  });
-
-  this.resetStats = (done) => {
-    const files = ['stats'];
-    const dirs  = ['ud_history'];
-    return resetFiles(files, dirs, done);
-  };
-
-  this.resetPeers = (done) => {
-    return that.dal.resetPeers(done);
-  };
-
-  this.exportAllDataAsZIP = () => co(function *() {
-    const params = yield paramsP;
-    const rootPath = params.home;
-    const myFS = params.fs;
-    const archive = archiver('zip');
-    if (yield myFS.exists(path.join(rootPath, 'indicators'))) {
-      archive.directory(path.join(rootPath, 'indicators'), '/indicators', undefined, { name: 'indicators'});
-    }
-    const files = ['duniter.db', 'stats.json', 'wotb.bin'];
-    for (const file of files) {
-      if (yield myFS.exists(path.join(rootPath, file))) {
-        archive.file(path.join(rootPath, file), { name: file });
-      }
-    }
-    archive.finalize();
-    return archive;
-  });
-
-  this.importAllDataFromZIP = (zipFile) => co(function *() {
-    const params = yield paramsP;
-    yield that.resetData();
-    const output = unzip.Extract({ path: params.home });
-    fs.createReadStream(zipFile).pipe(output);
-    return new Promise((resolve, reject) => {
-      output.on('error', reject);
-      output.on('close', resolve);
-    });
-  });
-
-  this.cleanDBData = () => co(function *() {
-    yield that.dal.cleanCaches();
-    that.dal.wotb.resetWoT();
-    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log'];
-    const dirs  = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
-    return resetFiles(files, dirs);
-  });
-
-  function resetFiles(files, dirs, done) {
-    return co(function *() {
-      try {
-        const params = yield paramsP;
-        const myFS = params.fs;
-        const rootPath = params.home;
-        for (const fName of files) {
-          // JSON file?
-          const existsJSON = yield myFS.exists(rootPath + '/' + fName + '.json');
-          if (existsJSON) {
-            const theFilePath = rootPath + '/' + fName + '.json';
-            yield myFS.remove(theFilePath);
-            if (yield myFS.exists(theFilePath)) {
-              throw Error('Failed to delete file "' + theFilePath + '"');
+    }
+    getBcContext() {
+        return this.BlockchainService.getContext();
+    }
+    plugFileSystem() {
+        return __awaiter(this, void 0, void 0, function* () {
+            logger.debug('Plugging file system...');
+            const params = yield this.paramsP;
+            this.dal = new fileDAL_1.FileDAL(params);
+            yield this.onPluggedFSHook();
+        });
+    }
+    unplugFileSystem() {
+        return __awaiter(this, void 0, void 0, function* () {
+            logger.debug('Unplugging file system...');
+            yield this.dal.close();
+        });
+    }
+    loadConf(useDefaultConf = false) {
+        return __awaiter(this, void 0, void 0, function* () {
+            logger.debug('Loading conf...');
+            this.conf = yield this.dal.loadConf(this.overrideConf, useDefaultConf);
+            // Default values
+            this.conf.remoteipv6 = this.conf.remoteipv6 === undefined ? this.conf.ipv6 : this.conf.remoteipv6;
+            this.conf.remoteport = this.conf.remoteport === undefined ? this.conf.port : this.conf.remoteport;
+            this.conf.c = this.conf.c === undefined ? constants.CONTRACT.DEFAULT.C : this.conf.c;
+            this.conf.dt = this.conf.dt === undefined ? constants.CONTRACT.DEFAULT.DT : this.conf.dt;
+            this.conf.ud0 = this.conf.ud0 === undefined ? constants.CONTRACT.DEFAULT.UD0 : this.conf.ud0;
+            this.conf.stepMax = this.conf.stepMax === undefined ? constants.CONTRACT.DEFAULT.STEPMAX : this.conf.stepMax;
+            this.conf.sigPeriod = this.conf.sigPeriod === undefined ? constants.CONTRACT.DEFAULT.SIGPERIOD : this.conf.sigPeriod;
+            this.conf.msPeriod = this.conf.msPeriod === undefined ? constants.CONTRACT.DEFAULT.MSPERIOD : this.conf.msPeriod;
+            this.conf.sigStock = this.conf.sigStock === undefined ? constants.CONTRACT.DEFAULT.SIGSTOCK : this.conf.sigStock;
+            this.conf.sigWindow = this.conf.sigWindow === undefined ? constants.CONTRACT.DEFAULT.SIGWINDOW : this.conf.sigWindow;
+            this.conf.sigValidity = this.conf.sigValidity === undefined ? constants.CONTRACT.DEFAULT.SIGVALIDITY : this.conf.sigValidity;
+            this.conf.msValidity = this.conf.msValidity === undefined ? constants.CONTRACT.DEFAULT.MSVALIDITY : this.conf.msValidity;
+            this.conf.sigQty = this.conf.sigQty === undefined ? constants.CONTRACT.DEFAULT.SIGQTY : this.conf.sigQty;
+            this.conf.idtyWindow = this.conf.idtyWindow === undefined ? constants.CONTRACT.DEFAULT.IDTYWINDOW : this.conf.idtyWindow;
+            this.conf.msWindow = this.conf.msWindow === undefined ? constants.CONTRACT.DEFAULT.MSWINDOW : this.conf.msWindow;
+            this.conf.xpercent = this.conf.xpercent === undefined ? constants.CONTRACT.DEFAULT.X_PERCENT : this.conf.xpercent;
+            this.conf.percentRot = this.conf.percentRot === undefined ? constants.CONTRACT.DEFAULT.PERCENTROT : this.conf.percentRot;
+            this.conf.powDelay = this.conf.powDelay === undefined ? constants.CONTRACT.DEFAULT.POWDELAY : this.conf.powDelay;
+            this.conf.avgGenTime = this.conf.avgGenTime === undefined ? constants.CONTRACT.DEFAULT.AVGGENTIME : this.conf.avgGenTime;
+            this.conf.dtDiffEval = this.conf.dtDiffEval === undefined ? constants.CONTRACT.DEFAULT.DTDIFFEVAL : this.conf.dtDiffEval;
+            this.conf.medianTimeBlocks = this.conf.medianTimeBlocks === undefined ? constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS : this.conf.medianTimeBlocks;
+            this.conf.rootoffset = this.conf.rootoffset === undefined ? 0 : this.conf.rootoffset;
+            this.conf.forksize = this.conf.forksize === undefined ? constants.BRANCHES.DEFAULT_WINDOW_SIZE : this.conf.forksize;
+            // 1.3.X: the msPeriod = msWindow
+            this.conf.msPeriod = this.conf.msPeriod === undefined ? this.conf.msWindow : this.conf.msPeriod;
+            // Default keypair
+            if (!this.conf.pair || !this.conf.pair.pub || !this.conf.pair.sec) {
+                // Create a random key
+                this.conf.pair = keyring.randomKey().json();
+            }
+            // Extract key pair
+            this.keyPair = keyring.Key(this.conf.pair.pub, this.conf.pair.sec);
+            this.sign = this.keyPair.sign;
+            // Blockchain object
+            this.blockchain = new DuniterBlockchain_1.DuniterBlockchain(new SqlBlockchain_1.SQLBlockchain(this.dal), this.dal);
+            // Update services
+            this.IdentityService.setConfDAL(this.conf, this.dal);
+            this.MembershipService.setConfDAL(this.conf, this.dal);
+            this.PeeringService.setConfDAL(this.conf, this.dal, this.keyPair);
+            this.BlockchainService.setConfDAL(this.conf, this.dal, this.keyPair);
+            this.TransactionsService.setConfDAL(this.conf, this.dal);
+            return this.conf;
+        });
+    }
+    initWithDAL() {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.plugFileSystem();
+            yield this.loadConf();
+            yield this.initDAL();
+            return this;
+        });
+    }
+    submit(obj, isInnerWrite = false, done = null) {
+        return __awaiter(this, void 0, void 0, function* () {
+            if (!obj.documentType) {
+                throw 'Document type not given';
             }
-          } else {
-            // Normal file?
-            const normalFile = path.join(rootPath, fName);
-            const existsFile = yield myFS.exists(normalFile);
-            if (existsFile) {
-              yield myFS.remove(normalFile);
-              if (yield myFS.exists(normalFile)) {
-                throw Error('Failed to delete file "' + normalFile + '"');
-              }
+            try {
+                const action = this.documentsMapping[obj.documentType].action;
+                let res;
+                if (typeof action == 'function') {
+                    // Handle the incoming object
+                    res = yield action(obj);
+                }
+                else {
+                    throw 'Unknown document type \'' + obj.documentType + '\'';
+                }
+                if (res) {
+                    // Only emit valid documents
+                    this.emit(obj.documentType, _.clone(res));
+                    this.streamPush(_.clone(res));
+                }
+                if (done) {
+                    isInnerWrite ? done(null, res) : done();
+                }
+                return res;
             }
-          }
+            catch (err) {
+                if (err && !err.uerr) {
+                    // Unhandled error, display it
+                    logger.debug('Document write error: ', err);
+                }
+                if (done) {
+                    isInnerWrite ? done(err, null) : done();
+                }
+                else {
+                    throw err;
+                }
+            }
+        });
+    }
+    submitP(obj, isInnerWrite) {
+        return this.submit(obj, isInnerWrite);
+    }
+    initDAL(conf = null) {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.dal.init(this.conf);
+            // Maintenance
+            let head_1 = yield this.dal.bindexDAL.head(1);
+            if (head_1) {
+                // Case 1: b_index < block
+                yield this.dal.blockDAL.exec('DELETE FROM block WHERE NOT fork AND number > ' + head_1.number);
+                // Case 2: b_index > block
+                const current = yield this.dal.blockDAL.getCurrent();
+                const nbBlocksToRevert = (head_1.number - current.number);
+                for (let i = 0; i < nbBlocksToRevert; i++) {
+                    yield this.revert();
+                }
+            }
+        });
+    }
+    recomputeSelfPeer() {
+        return this.PeeringService.generateSelfPeer(this.conf, 0);
+    }
+    getCountOfSelfMadePoW() {
+        return this.BlockchainService.getCountOfSelfMadePoW();
+    }
+    isServerMember() {
+        return this.BlockchainService.isMember();
+    }
+    checkConfig() {
+        if (!this.conf.pair) {
+            throw new Error('No keypair was given.');
         }
-        for (const dirName of dirs) {
-          const existsDir = yield myFS.exists(rootPath + '/' + dirName);
-          if (existsDir) {
-            yield myFS.removeTree(rootPath + '/' + dirName);
-            if (yield myFS.exists(rootPath + '/' + dirName)) {
-              throw Error('Failed to delete folder "' + rootPath + '/' + dirName + '"');
+    }
+    resetHome() {
+        return __awaiter(this, void 0, void 0, function* () {
+            const params = yield this.paramsP;
+            const myFS = params.fs;
+            const rootPath = params.home;
+            const existsDir = yield myFS.exists(rootPath);
+            if (existsDir) {
+                yield myFS.removeTree(rootPath);
             }
-          }
+        });
+    }
+    resetAll(done) {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.resetDataHook();
+            yield this.resetConfigHook();
+            const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE, 'export.zip', 'import.zip', 'conf'];
+            const dirs = ['blocks', 'blockchain', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
+            return this.resetFiles(files, dirs, done);
+        });
+    }
+    resetData(done = null) {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.resetDataHook();
+            const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE];
+            const dirs = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
+            yield this.resetFiles(files, dirs, done);
+        });
+    }
+    resetConf(done) {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.resetConfigHook();
+            const files = ['conf'];
+            const dirs = [];
+            return this.resetFiles(files, dirs, done);
+        });
+    }
+    resetStats(done) {
+        const files = ['stats'];
+        const dirs = ['ud_history'];
+        return this.resetFiles(files, dirs, done);
+    }
+    resetPeers(done) {
+        return this.dal.resetPeers();
+    }
+    exportAllDataAsZIP() {
+        return __awaiter(this, void 0, void 0, function* () {
+            const params = yield this.paramsP;
+            const rootPath = params.home;
+            const myFS = params.fs;
+            const archive = archiver('zip');
+            if (yield myFS.exists(path.join(rootPath, 'indicators'))) {
+                archive.directory(path.join(rootPath, 'indicators'), '/indicators', undefined, { name: 'indicators' });
+            }
+            const files = ['duniter.db', 'stats.json', 'wotb.bin'];
+            for (const file of files) {
+                if (yield myFS.exists(path.join(rootPath, file))) {
+                    archive.file(path.join(rootPath, file), { name: file });
+                }
+            }
+            archive.finalize();
+            return archive;
+        });
+    }
+    importAllDataFromZIP(zipFile) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const params = yield this.paramsP;
+            yield this.resetData();
+            const output = unzip.Extract({ path: params.home });
+            fs.createReadStream(zipFile).pipe(output);
+            return new Promise((resolve, reject) => {
+                output.on('error', reject);
+                output.on('close', resolve);
+            });
+        });
+    }
+    cleanDBData() {
+        return __awaiter(this, void 0, void 0, function* () {
+            yield this.dal.cleanCaches();
+            this.dal.wotb.resetWoT();
+            const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log'];
+            const dirs = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
+            return this.resetFiles(files, dirs);
+        });
+    }
+    resetFiles(files, dirs, done = null) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const params = yield this.paramsP;
+                const myFS = params.fs;
+                const rootPath = params.home;
+                for (const fName of files) {
+                    // JSON file?
+                    const existsJSON = yield myFS.exists(rootPath + '/' + fName + '.json');
+                    if (existsJSON) {
+                        const theFilePath = rootPath + '/' + fName + '.json';
+                        yield myFS.remove(theFilePath);
+                        if (yield myFS.exists(theFilePath)) {
+                            throw Error('Failed to delete file "' + theFilePath + '"');
+                        }
+                    }
+                    else {
+                        // Normal file?
+                        const normalFile = path.join(rootPath, fName);
+                        const existsFile = yield myFS.exists(normalFile);
+                        if (existsFile) {
+                            yield myFS.remove(normalFile);
+                            if (yield myFS.exists(normalFile)) {
+                                throw Error('Failed to delete file "' + normalFile + '"');
+                            }
+                        }
+                    }
+                }
+                for (const dirName of dirs) {
+                    const existsDir = yield myFS.exists(rootPath + '/' + dirName);
+                    if (existsDir) {
+                        yield myFS.removeTree(rootPath + '/' + dirName);
+                        if (yield myFS.exists(rootPath + '/' + dirName)) {
+                            throw Error('Failed to delete folder "' + rootPath + '/' + dirName + '"');
+                        }
+                    }
+                }
+                done && done();
+            }
+            catch (e) {
+                done && done(e);
+                throw e;
+            }
+        });
+    }
+    disconnect() {
+        return Promise.resolve(this.dal && this.dal.close());
+    }
+    revert() {
+        return this.BlockchainService.revertCurrentBlock();
+    }
+    revertTo(number) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const current = yield this.BlockchainService.current();
+            for (let i = 0, count = current.number - number; i < count; i++) {
+                yield this.BlockchainService.revertCurrentBlock();
+            }
+            if (current.number <= number) {
+                logger.warn('Already reached');
+            }
+        });
+    }
+    reapplyTo(number) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const current = yield this.BlockchainService.current();
+            if (current.number == number) {
+                logger.warn('Already reached');
+            }
+            else {
+                for (let i = 0, count = number - current.number; i < count; i++) {
+                    yield this.BlockchainService.applyNextAvailableFork();
+                }
+            }
+        });
+    }
+    singleWritePromise(obj) {
+        return this.submit(obj);
+    }
+    writeRaw(raw, type) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const parser = this.documentsMapping[type] && this.documentsMapping[type].parser;
+            const obj = parser.syncWrite(raw, logger);
+            return yield this.singleWritePromise(obj);
+        });
+    }
+    /*****************
+     * DAEMONIZATION
+     ****************/
+    /**
+     * Get the daemon handle. Optionally give arguments to launch a new daemon.
+     * @param overrideCommand The new command to launch.
+     * @param insteadOfCmd The current command to be replaced by `overrideCommand` command.
+     * @returns {*} The daemon handle.
+     */
+    getDaemon(overrideCommand, insteadOfCmd) {
+        const mainModule = process.argv[1];
+        const cwd = path.resolve(mainModule, '../..');
+        const argv = this.getCommand(overrideCommand, insteadOfCmd);
+        return daemonize.setup({
+            main: mainModule,
+            name: directory.INSTANCE_NAME,
+            pidfile: path.join(directory.INSTANCE_HOME, "app.pid"),
+            argv,
+            cwd
+        });
+    }
+    /**
+     * Return the current script's full command arguments except the first two (which are the node executable + js file).
+     * If the two optional `cmd` and `insteadOfCmd` parameters are given, replace `insteadOfCmd`'s value by `cmd` in
+     * the script arguments.
+     *
+     *   Ex:
+     *     * process.argv: ['/usr/bin/node', '/opt/duniter/sources/bin/duniter', 'restart', '--mdb', 'g1']
+     *
+     *     Then `getCommand('direct_start', 'restart')` will return:
+     *
+     *     * ['direct_start', '--mdb', 'g1']
+     *
+     *     This new array is what will be given to a *fork* of current script, resulting in a new process with:
+     *
+     *     * process.argv: ['/usr/bin/node', '/opt/duniter/sources/bin/duniter', 'direct_start', '--mdb', 'g1']
+     *
+     * @param cmd
+     * @param insteadOfCmd
+     * @returns {*}
+     */
+    getCommand(cmd, insteadOfCmd) {
+        if (insteadOfCmd) {
+            // Return the same command args, except the command `insteadOfCmd` which is replaced by `cmd`
+            return process.argv.slice(2).map((arg) => {
+                if (arg == insteadOfCmd) {
+                    return cmd;
+                }
+                else {
+                    return arg;
+                }
+            });
         }
-        done && done();
-    } catch(e) {
-          done && done(e);
-          throw e;
-      }
-    });
-  }
-
-  this.disconnect = () => Promise.resolve(that.dal && that.dal.close());
-
-  this.revert = () => this.BlockchainService.revertCurrentBlock();
-
-  this.revertTo = (number) => co(function *() {
-    const current = yield that.BlockchainService.current();
-    for (let i = 0, count = current.number - number; i < count; i++) {
-      yield that.BlockchainService.revertCurrentBlock();
-    }
-    if (current.number <= number) {
-      logger.warn('Already reached');
-    }
-  });
-
-  this.reapplyTo = (number) => co(function *() {
-    const current = yield that.BlockchainService.current();
-    if (current.number == number) {
-      logger.warn('Already reached');
-    } else {
-      for (let i = 0, count = number - current.number; i < count; i++) {
-        yield that.BlockchainService.applyNextAvailableFork();
-      }
-    }
-  });
-
-  this.singleWritePromise = (obj) => that.submit(obj);
-
-  this.rawer = rawer;
-
-  this.writeRaw = (raw, type) => co(function *() {
-    const parser = documentsMapping[type] && documentsMapping[type].parser;
-    const obj = parser.syncWrite(raw, logger);
-    return yield that.singleWritePromise(obj);
-  });
-
-  /*****************
-   * DAEMONIZATION
-   ****************/
-
-  /**
-   * Get the daemon handle. Eventually give arguments to launch a new daemon.
-   * @param overrideCommand The new command to launch.
-   * @param insteadOfCmd The current command to be replaced by `overrideCommand` command.
-   * @returns {*} The daemon handle.
-   */
-  this.getDaemon = function getDaemon(overrideCommand, insteadOfCmd) {
-    const mainModule = process.argv[1]
-    const cwd = path.resolve(mainModule, '../..')
-    const argv = getCommand(overrideCommand, insteadOfCmd)
-    return daemonize.setup({
-      main: mainModule,
-      name: directory.INSTANCE_NAME,
-      pidfile: path.join(directory.INSTANCE_HOME, "app.pid"),
-      argv,
-      cwd
-    });
-  }
-
-  /**
-   * Return current script full command arguments except the two firsts (which are node executable + js file).
-   * If the two optional `cmd` and `insteadOfCmd` parameters are given, replace `insteadOfCmd`'s value by `cmd` in
-   * the script arguments.
-   *
-   *   Ex:
-   *     * process.argv: ['/usr/bin/node', '/opt/duniter/sources/bin/duniter', 'restart', '--mdb', 'g1']
-   *
-   *     Then `getCommand('direct_start', 'restart') will return:
-   *
-   *     * ['direct_start', '--mdb', 'g1']
-   *
-   *     This new array is what will be given to a *fork* of current script, resulting in a new process with:
-   *
-   *     * process.argv: ['/usr/bin/node', '/opt/duniter/sources/bin/duniter', 'direct_start', '--mdb', 'g1']
-   *
-   * @param cmd
-   * @param insteadOfCmd
-   * @returns {*}
-   */
-  function getCommand(cmd, insteadOfCmd) {
-    if (insteadOfCmd) {
-      // Return the same command args, except the command `insteadOfCmd` which is replaced by `cmd`
-      return process.argv.slice(2).map((arg) => {
-        if (arg == insteadOfCmd) {
-          return cmd
-        } else {
-          return arg
+        else {
+            // Return the exact same args (generally for stop/status commands)
+            return process.argv.slice(2);
         }
-      })
-    } else {
-      // Return the exact same args (generally for stop/status commands)
-      return process.argv.slice(2)
-    }
-  }
-
-  /**
-   * Retrieve the last linesQuantity lines from the log file.
-   * @param linesQuantity
-   */
-  this.getLastLogLines = (linesQuantity) => this.dal.getLogContent(linesQuantity);
-
-  /*****************
-   * MODULES PLUGS
-   ****************/
-
-  /**
-   * Default endpoint. To be overriden by a module to specify another endpoint value (for ex. BMA).
-   */
-  this.getMainEndpoint = () => Promise.resolve('DEFAULT_ENDPOINT')
-
-  /**
-   * Default WoT incoming data for new block. To be overriden by a module.
-   */
-  this.generatorGetJoinData = () => Promise.resolve({})
-
-  /**
-   * Default WoT incoming certifications for new block, filtering wrong certs. To be overriden by a module.
-   */
-  this.generatorComputeNewCerts = () => Promise.resolve({})
-
-  /**
-   * Default WoT transforming method for certs => links. To be overriden by a module.
-   */
-  this.generatorNewCertsToLinks = () => Promise.resolve({})
-
-  /**
-   * Default hook on file system plugging. To be overriden by module system.
-   */
-  this.onPluggedFSHook = () => Promise.resolve({})
-
-  /**
-   * Default hook on data reset. To be overriden by module system.
-   */
-  this.resetDataHook = () => Promise.resolve({})
-
-  /**
-   * Default hook on data reset. To be overriden by module system.
-   */
-  this.resetConfigHook = () => Promise.resolve({})
+    }
+    /**
+     * Retrieve the last linesQuantity lines from the log file.
+     * @param linesQuantity
+     */
+    getLastLogLines(linesQuantity) {
+        return this.dal.getLogContent(linesQuantity);
+    }
+    /*****************
+     * MODULES PLUGS
+     ****************/
+    /**
+     * Default endpoint. To be overridden by a module to specify another endpoint value (e.g. BMA).
+     */
+    getMainEndpoint() {
+        return Promise.resolve('DEFAULT_ENDPOINT');
+    }
+    /**
+     * Default WoT incoming data for new block. To be overridden by a module.
+     */
+    generatorGetJoinData() {
+        return Promise.resolve({});
+    }
+    /**
+     * Default WoT incoming certifications for new block, filtering wrong certs. To be overridden by a module.
+     */
+    generatorComputeNewCerts() {
+        return Promise.resolve({});
+    }
+    /**
+     * Default WoT transforming method for certs => links. To be overridden by a module.
+     */
+    generatorNewCertsToLinks() {
+        return Promise.resolve({});
+    }
+    /**
+     * Default hook on file system plugging. To be overridden by the module system.
+     */
+    onPluggedFSHook() {
+        return Promise.resolve({});
+    }
+    /**
+     * Default hook on data reset. To be overridden by the module system.
+     */
+    resetDataHook() {
+        return Promise.resolve({});
+    }
+    /**
+     * Default hook on config reset. To be overridden by the module system.
+     */
+    resetConfigHook() {
+        return Promise.resolve({});
+    }
 }
-
-util.inherits(Server, stream.Duplex);
-
-module.exports = Server;
+exports.Server = Server;
+//# sourceMappingURL=server.js.map
\ No newline at end of file
diff --git a/server.ts b/server.ts
new file mode 100644
index 0000000000000000000000000000000000000000..dc5a7e472b29b275306423ee0f6c1a436a26bf9e
--- /dev/null
+++ b/server.ts
@@ -0,0 +1,528 @@
+import {IdentityService} from "./app/service/IdentityService"
+import {MembershipService} from "./app/service/MembershipService"
+import {PeeringService} from "./app/service/PeeringService"
+import {BlockchainService} from "./app/service/BlockchainService"
+import {TransactionService} from "./app/service/TransactionsService"
+import {ConfDTO} from "./app/lib/dto/ConfDTO"
+import {FileDAL} from "./app/lib/dal/fileDAL"
+import {DuniterBlockchain} from "./app/lib/blockchain/DuniterBlockchain"
+import {SQLBlockchain} from "./app/lib/blockchain/SqlBlockchain"
+import * as stream from "stream"
+
+interface HookableServer {
+  getMainEndpoint: (...args:any[]) => Promise<any>
+  generatorGetJoinData: (...args:any[]) => Promise<any>
+  generatorComputeNewCerts: (...args:any[]) => Promise<any>
+  generatorNewCertsToLinks: (...args:any[]) => Promise<any>
+  onPluggedFSHook: (...args:any[]) => Promise<any>
+  resetDataHook: (...args:any[]) => Promise<any>
+  resetConfigHook: (...args:any[]) => Promise<any>
+}
+
+const path        = require('path');
+const _           = require('underscore');
+const archiver    = require('archiver');
+const unzip       = require('unzip2');
+const fs          = require('fs');
+const daemonize   = require("daemonize2")
+const parsers     = require('duniter-common').parsers;
+const constants   = require('./app/lib/constants');
+const jsonpckg    = require('./package.json');
+const keyring      = require('duniter-common').keyring;
+const directory   = require('./app/lib/system/directory');
+const rawer       = require('duniter-common').rawer;
+const logger      = require('./app/lib/logger').NewLogger('server');
+
+export class Server extends stream.Duplex implements HookableServer {
+
+  private paramsP:Promise<any>|null
+  conf:ConfDTO
+  dal:FileDAL
+
+  documentsMapping:any
+  home:string
+  version:number
+  logger:any
+  rawer:any
+  keyPair:any
+  sign:any
+  blockchain:any
+
+  MerkleService:(req:any, merkle:any, valueCoroutine:any) => any
+  IdentityService:IdentityService
+  MembershipService:MembershipService
+  PeeringService:PeeringService
+  BlockchainService:BlockchainService
+  TransactionsService:TransactionService
+
+  constructor(home:string, memoryOnly:boolean, private overrideConf:any) {
+    super({ objectMode: true })
+
+    this.home = home;
+    this.conf = ConfDTO.mock()
+    this.version = jsonpckg.version;
+    this.logger = logger;
+    this.rawer = rawer;
+
+    this.paramsP = directory.getHomeParams(memoryOnly, home)
+
+    this.MerkleService       = require("./app/lib/helpers/merkle");
+    this.IdentityService     = new IdentityService()
+    this.MembershipService   = new MembershipService()
+    this.PeeringService      = new PeeringService(this)
+    this.BlockchainService   = new BlockchainService(this)
+    this.TransactionsService = new TransactionService()
+
+    // Create document mapping
+    this.documentsMapping = {
+      'identity':      { action: (obj:any) => this.IdentityService.submitIdentity(obj),                                 parser: parsers.parseIdentity },
+      'certification': { action: (obj:any) => this.IdentityService.submitCertification(obj),                            parser: parsers.parseCertification},
+      'revocation':    { action: (obj:any) => this.IdentityService.submitRevocation(obj),                               parser: parsers.parseRevocation },
+      'membership':    { action: (obj:any) => this.MembershipService.submitMembership(obj),                             parser: parsers.parseMembership },
+      'peer':          { action: (obj:any) => this.PeeringService.submitP(obj),                                         parser: parsers.parsePeer },
+      'transaction':   { action: (obj:any) => this.TransactionsService.processTx(obj),                                  parser: parsers.parseTransaction },
+      'block':         { action: (obj:any) => this.BlockchainService.submitBlock(obj, true, constants.NO_FORK_ALLOWED), parser: parsers.parseBlock }
+    }
+  }
+
+  // Unused, but made mandatory by Duplex interface
+  _read() {}
+
+  _write(obj:any, enc:any, writeDone:any) {
+    return this.submit(obj, false, () => writeDone)
+  }
+
+  /**
+   * Facade method to control what is pushed to the stream (we don't want it to be closed)
+   * @param obj An object to be pushed to the stream.
+   */
+  streamPush(obj:any) {
+    if (obj) {
+      this.push(obj);
+    }
+  }
+
+  getBcContext() {
+    return this.BlockchainService.getContext()
+  }
+
+  async plugFileSystem() {
+    logger.debug('Plugging file system...');
+    const params = await this.paramsP
+    this.dal = new FileDAL(params)
+    await this.onPluggedFSHook()
+  }
+
+  async unplugFileSystem() {
+    logger.debug('Unplugging file system...');
+    await this.dal.close()
+  }
+
+  async loadConf(useDefaultConf:any = false) {
+    logger.debug('Loading conf...');
+    this.conf = await this.dal.loadConf(this.overrideConf, useDefaultConf)
+    // Default values
+    this.conf.remoteipv6       = this.conf.remoteipv6 === undefined ?        this.conf.ipv6                               : this.conf.remoteipv6
+    this.conf.remoteport       = this.conf.remoteport === undefined ?        this.conf.port                               : this.conf.remoteport
+    this.conf.c                = this.conf.c === undefined ?                 constants.CONTRACT.DEFAULT.C                 : this.conf.c
+    this.conf.dt               = this.conf.dt === undefined ?                constants.CONTRACT.DEFAULT.DT                : this.conf.dt
+    this.conf.ud0              = this.conf.ud0 === undefined ?               constants.CONTRACT.DEFAULT.UD0               : this.conf.ud0
+    this.conf.stepMax          = this.conf.stepMax === undefined ?           constants.CONTRACT.DEFAULT.STEPMAX           : this.conf.stepMax
+    this.conf.sigPeriod        = this.conf.sigPeriod === undefined ?         constants.CONTRACT.DEFAULT.SIGPERIOD         : this.conf.sigPeriod
+    this.conf.msPeriod         = this.conf.msPeriod === undefined ?          constants.CONTRACT.DEFAULT.MSPERIOD          : this.conf.msPeriod
+    this.conf.sigStock         = this.conf.sigStock === undefined ?          constants.CONTRACT.DEFAULT.SIGSTOCK          : this.conf.sigStock
+    this.conf.sigWindow        = this.conf.sigWindow === undefined ?         constants.CONTRACT.DEFAULT.SIGWINDOW         : this.conf.sigWindow
+    this.conf.sigValidity      = this.conf.sigValidity === undefined ?       constants.CONTRACT.DEFAULT.SIGVALIDITY       : this.conf.sigValidity
+    this.conf.msValidity       = this.conf.msValidity === undefined ?        constants.CONTRACT.DEFAULT.MSVALIDITY        : this.conf.msValidity
+    this.conf.sigQty           = this.conf.sigQty === undefined ?            constants.CONTRACT.DEFAULT.SIGQTY            : this.conf.sigQty
+    this.conf.idtyWindow       = this.conf.idtyWindow === undefined ?        constants.CONTRACT.DEFAULT.IDTYWINDOW        : this.conf.idtyWindow
+    this.conf.msWindow         = this.conf.msWindow === undefined ?          constants.CONTRACT.DEFAULT.MSWINDOW          : this.conf.msWindow
+    this.conf.xpercent         = this.conf.xpercent === undefined ?          constants.CONTRACT.DEFAULT.X_PERCENT         : this.conf.xpercent
+    this.conf.percentRot       = this.conf.percentRot === undefined ?        constants.CONTRACT.DEFAULT.PERCENTROT        : this.conf.percentRot
+    this.conf.powDelay         = this.conf.powDelay === undefined ?          constants.CONTRACT.DEFAULT.POWDELAY          : this.conf.powDelay
+    this.conf.avgGenTime       = this.conf.avgGenTime === undefined ?        constants.CONTRACT.DEFAULT.AVGGENTIME        : this.conf.avgGenTime
+    this.conf.dtDiffEval       = this.conf.dtDiffEval === undefined ?        constants.CONTRACT.DEFAULT.DTDIFFEVAL        : this.conf.dtDiffEval
+    this.conf.medianTimeBlocks = this.conf.medianTimeBlocks === undefined ?  constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS  : this.conf.medianTimeBlocks
+    this.conf.rootoffset       = this.conf.rootoffset === undefined ?        0                                            : this.conf.rootoffset
+    this.conf.forksize         = this.conf.forksize === undefined ?          constants.BRANCHES.DEFAULT_WINDOW_SIZE       : this.conf.forksize
+    // 1.3.X: the msPeriod = msWindow
+    this.conf.msPeriod         = this.conf.msPeriod === undefined ?          this.conf.msWindow                           : this.conf.msPeriod
+    // Default keypair
+    if (!this.conf.pair || !this.conf.pair.pub || !this.conf.pair.sec) {
+      // Create a random key
+      this.conf.pair = keyring.randomKey().json()
+    }
+    // Extract key pair
+    this.keyPair = keyring.Key(this.conf.pair.pub, this.conf.pair.sec);
+    this.sign = this.keyPair.sign;
+    // Blockchain object
+    this.blockchain = new DuniterBlockchain(new SQLBlockchain(this.dal), this.dal);
+    // Update services
+    this.IdentityService.setConfDAL(this.conf, this.dal)
+    this.MembershipService.setConfDAL(this.conf, this.dal)
+    this.PeeringService.setConfDAL(this.conf, this.dal, this.keyPair)
+    this.BlockchainService.setConfDAL(this.conf, this.dal, this.keyPair)
+    this.TransactionsService.setConfDAL(this.conf, this.dal)
+    return this.conf;
+  }
+
+  async initWithDAL() {
+    await this.plugFileSystem()
+    await this.loadConf()
+    await this.initDAL()
+    return this;
+  }
+
+  async submit(obj:any, isInnerWrite:boolean = false, done:any|null = null) {
+    if (!obj.documentType) {
+      throw 'Document type not given';
+    }
+    try {
+      const action = this.documentsMapping[obj.documentType].action;
+      let res;
+      if (typeof action == 'function') {
+        // Handle the incoming object
+        res = await action(obj)
+      } else {
+        throw 'Unknown document type \'' + obj.documentType + '\'';
+      }
+      if (res) {
+        // Only emit valid documents
+        this.emit(obj.documentType, _.clone(res));
+        this.streamPush(_.clone(res));
+      }
+      if (done) {
+        isInnerWrite ? done(null, res) : done();
+      }
+      return res;
+    } catch (err) {
+      if (err && !err.uerr) {
+        // Unhandled error, display it
+        logger.debug('Document write error: ', err);
+      }
+      if (done) {
+        isInnerWrite ? done(err, null) : done();
+      } else {
+        throw err;
+      }
+    }
+  }
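+
+  /*
+   * Illustrative usage sketch (the `server` and `doc` names are assumptions for the
+   * example; the event wiring relies on the `this.emit` call above):
+   *
+   *   const res = await server.submit(doc)       // doc.documentType selects the mapped action
+   *   server.on(doc.documentType, d => { ... })  // accepted documents are re-emitted and streamed
+   */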
+
+  submitP(obj:any, isInnerWrite:boolean) {
+    return this.submit(obj, isInnerWrite)
+  }
+
+  async initDAL(conf:ConfDTO|null = null) {
+    await this.dal.init(this.conf)
+    // Maintenance
+    let head_1 = await this.dal.bindexDAL.head(1);
+    if (head_1) {
+      // Case 1: b_index < block
+      await this.dal.blockDAL.exec('DELETE FROM block WHERE NOT fork AND number > ' + head_1.number);
+      // Case 2: b_index > block
+      const current = await this.dal.blockDAL.getCurrent();
+      const nbBlocksToRevert = (head_1.number - current.number);
+      for (let i = 0; i < nbBlocksToRevert; i++) {
+        await this.revert();
+      }
+    }
+  }
+
+  recomputeSelfPeer() {
+    return this.PeeringService.generateSelfPeer(this.conf, 0)
+  }
+
+  getCountOfSelfMadePoW() {
+    return this.BlockchainService.getCountOfSelfMadePoW()
+  }
+  
+  isServerMember() {
+    return this.BlockchainService.isMember()
+  }
+
+  checkConfig() {
+    if (!this.conf.pair) {
+      throw new Error('No keypair was given.');
+    }
+  }
+
+  async resetHome() {
+    const params = await this.paramsP;
+    const myFS = params.fs;
+    const rootPath = params.home;
+    const existsDir = await myFS.exists(rootPath);
+    if (existsDir) {
+      await myFS.removeTree(rootPath);
+    }
+  }
+
+  async resetAll(done:any) {
+    await this.resetDataHook()
+    await this.resetConfigHook()
+    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE, 'export.zip', 'import.zip', 'conf'];
+    const dirs  = ['blocks', 'blockchain', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
+    return this.resetFiles(files, dirs, done);
+  }
+
+  async resetData(done:any = null) {
+    await this.resetDataHook()
+    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE];
+    const dirs  = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
+    await this.resetFiles(files, dirs, done);
+  }
+
+  async resetConf(done:any) {
+    await this.resetConfigHook()
+    const files = ['conf'];
+    const dirs:string[]  = [];
+    return this.resetFiles(files, dirs, done);
+  }
+
+  resetStats(done:any) {
+    const files = ['stats'];
+    const dirs  = ['ud_history'];
+    return this.resetFiles(files, dirs, done);
+  }
+
+  resetPeers(done:any) {
+    return this.dal.resetPeers()
+  }
+
+  async exportAllDataAsZIP() {
+    const params = await this.paramsP
+    const rootPath = params.home;
+    const myFS = params.fs;
+    const archive = archiver('zip');
+    if (await myFS.exists(path.join(rootPath, 'indicators'))) {
+      archive.directory(path.join(rootPath, 'indicators'), '/indicators', undefined, { name: 'indicators'});
+    }
+    const files = ['duniter.db', 'stats.json', 'wotb.bin'];
+    for (const file of files) {
+      if (await myFS.exists(path.join(rootPath, file))) {
+        archive.file(path.join(rootPath, file), { name: file });
+      }
+    }
+    archive.finalize();
+    return archive;
+  }
+
+  async importAllDataFromZIP(zipFile:any) {
+    const params = await this.paramsP
+    await this.resetData()
+    const output = unzip.Extract({ path: params.home });
+    fs.createReadStream(zipFile).pipe(output);
+    return new Promise((resolve, reject) => {
+      output.on('error', reject);
+      output.on('close', resolve);
+    })
+  }
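+
+  /*
+   * Illustrative round-trip sketch (the `server` name and the target path are assumptions):
+   *
+   *   const zipStream = await server.exportAllDataAsZIP()
+   *   zipStream.pipe(fs.createWriteStream('/tmp/duniter-export.zip'))  // hypothetical path
+   *   // ...later, possibly on another instance:
+   *   await server.importAllDataFromZIP('/tmp/duniter-export.zip')     // resets data, then extracts
+   */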
+
+  async cleanDBData() {
+    await this.dal.cleanCaches();
+    this.dal.wotb.resetWoT();
+    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log'];
+    const dirs  = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
+    return this.resetFiles(files, dirs);
+  }
+
+  private async resetFiles(files:string[], dirs:string[], done:any = null) {
+    try {
+      const params = await this.paramsP;
+      const myFS = params.fs;
+      const rootPath = params.home;
+      for (const fName of files) {
+        // JSON file?
+        const existsJSON = await myFS.exists(rootPath + '/' + fName + '.json');
+        if (existsJSON) {
+          const theFilePath = rootPath + '/' + fName + '.json';
+          await myFS.remove(theFilePath);
+          if (await myFS.exists(theFilePath)) {
+            throw Error('Failed to delete file "' + theFilePath + '"');
+          }
+        } else {
+          // Normal file?
+          const normalFile = path.join(rootPath, fName);
+          const existsFile = await myFS.exists(normalFile);
+          if (existsFile) {
+            await myFS.remove(normalFile);
+            if (await myFS.exists(normalFile)) {
+              throw Error('Failed to delete file "' + normalFile + '"');
+            }
+          }
+        }
+      }
+      for (const dirName of dirs) {
+        const existsDir = await myFS.exists(rootPath + '/' + dirName);
+        if (existsDir) {
+          await myFS.removeTree(rootPath + '/' + dirName);
+          if (await myFS.exists(rootPath + '/' + dirName)) {
+            throw Error('Failed to delete folder "' + rootPath + '/' + dirName + '"');
+          }
+        }
+      }
+      done && done();
+    } catch(e) {
+      done && done(e);
+      throw e;
+    }
+  }
+
+  disconnect() {
+    return Promise.resolve(this.dal && this.dal.close())
+  }
+
+  revert() {
+    return this.BlockchainService.revertCurrentBlock()
+  }
+
+  async revertTo(number:number) {
+    const current = await this.BlockchainService.current();
+    if (current.number <= number) {
+      logger.warn('Already reached');
+    } else {
+      for (let i = 0, count = current.number - number; i < count; i++) {
+        await this.BlockchainService.revertCurrentBlock()
+      }
+    }
+  }
+
+  async reapplyTo(number:number) {
+    const current = await this.BlockchainService.current();
+    if (current.number == number) {
+      logger.warn('Already reached');
+    } else {
+      for (let i = 0, count = number - current.number; i < count; i++) {
+        await this.BlockchainService.applyNextAvailableFork();
+      }
+    }
+  }
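+
+  /*
+   * Illustrative sketch (the `server` name and block numbers are made up for the example):
+   * the two methods above are symmetric, one reverting blocks, the other re-applying forks.
+   *
+   *   await server.revertTo(50000)    // revert the current HEAD down to block #50000
+   *   await server.reapplyTo(50010)   // re-apply available fork blocks up to #50010
+   */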
+
+  singleWritePromise(obj:any) {
+    return this.submit(obj)
+  }
+
+  async writeRaw(raw:string, type:string) {
+    const parser = this.documentsMapping[type] && this.documentsMapping[type].parser;
+    const obj = parser.syncWrite(raw, logger);
+    return await this.singleWritePromise(obj);
+  }
+
+  /*****************
+   * DAEMONIZATION
+   ****************/
+
+  /**
+   * Get the daemon handle. Optionally takes arguments to launch the daemon with a modified command.
+   * @param overrideCommand The new command to launch.
+   * @param insteadOfCmd The current command to be replaced by the `overrideCommand` command.
+   * @returns {*} The daemon handle.
+   */
+  getDaemon(overrideCommand:string, insteadOfCmd:string) {
+    const mainModule = process.argv[1]
+    const cwd = path.resolve(mainModule, '../..')
+    const argv = this.getCommand(overrideCommand, insteadOfCmd)
+    return daemonize.setup({
+      main: mainModule,
+      name: directory.INSTANCE_NAME,
+      pidfile: path.join(directory.INSTANCE_HOME, "app.pid"),
+      argv,
+      cwd
+    });
+  }
+
+  /**
+   * Return the current script's full command arguments, except the first two (the node executable and the JS file).
+   * If the two optional `cmd` and `insteadOfCmd` parameters are given, replace `insteadOfCmd` with `cmd` in
+   * the script arguments.
+   *
+   *   Ex:
+   *     * process.argv: ['/usr/bin/node', '/opt/duniter/sources/bin/duniter', 'restart', '--mdb', 'g1']
+   *
+   *     Then `getCommand('direct_start', 'restart')` will return:
+   *
+   *     * ['direct_start', '--mdb', 'g1']
+   *
+   *     This new array is what will be given to a *fork* of the current script, resulting in a new process with:
+   *
+   *     * process.argv: ['/usr/bin/node', '/opt/duniter/sources/bin/duniter', 'direct_start', '--mdb', 'g1']
+   *
+   * @param cmd
+   * @param insteadOfCmd
+   * @returns {*}
+   */
+  private getCommand(cmd:string, insteadOfCmd:string) {
+    if (insteadOfCmd) {
+      // Return the same command args, except that `insteadOfCmd` is replaced by `cmd`
+      return process.argv.slice(2).map((arg) => {
+        if (arg == insteadOfCmd) {
+          return cmd
+        } else {
+          return arg
+        }
+      })
+    } else {
+      // Return the exact same args (generally for stop/status commands)
+      return process.argv.slice(2)
+    }
+  }
+
+  /**
+   * Retrieve the last `linesQuantity` lines from the log file.
+   * @param linesQuantity The number of lines to retrieve.
+   */
+  getLastLogLines(linesQuantity:number) {
+    return this.dal.getLogContent(linesQuantity)
+  }
+
+  /*****************
+   * MODULES PLUGS
+   ****************/
+
+  /**
+   * Default endpoint. To be overridden by a module to specify another endpoint value (e.g. BMA).
+   */
+  getMainEndpoint() {
+    return Promise.resolve('DEFAULT_ENDPOINT')
+  }
+
+  /**
+   * Default WoT incoming data for a new block. To be overridden by a module.
+   */
+  generatorGetJoinData() {
+    return Promise.resolve({})
+  }
+
+  /**
+   * Default WoT incoming certifications for a new block, filtering out invalid certifications. To be overridden by a module.
+   */
+  generatorComputeNewCerts() {
+    return Promise.resolve({})
+  }
+
+  /**
+   * Default WoT method for transforming certifications into links. To be overridden by a module.
+   */
+  generatorNewCertsToLinks() {
+    return Promise.resolve({})
+  }
+
+  /**
+   * Default hook on file system plugging. To be overridden by the module system.
+   */
+  onPluggedFSHook() {
+    return Promise.resolve({})
+  }
+
+  /**
+   * Default hook on data reset. To be overridden by the module system.
+   */
+  resetDataHook() {
+    return Promise.resolve({})
+  }
+
+  /**
+   * Default hook on configuration reset. To be overridden by the module system.
+   */
+  resetConfigHook() {
+    return Promise.resolve({})
+  }
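+
+  /*
+   * Illustrative sketch (the wiring is an assumption): a module is expected to replace
+   * these hooks on the server instance, e.g. a network module providing its endpoint:
+   *
+   *   server.getMainEndpoint = () => Promise.resolve('SOME_MODULE_ENDPOINT')  // hypothetical value
+   */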
+}
\ No newline at end of file
diff --git a/tsconfig.json b/tsconfig.json
index 69700a1ba0d361e92000bacffa30c87d9f5f892d..cb18c28f5f5a9efaf873956191ade38cb90277d2 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -10,6 +10,7 @@
     "noImplicitReturns": true
   },
   "include": [
+    "server.ts",
     "app",
     "bin",
     "test"