diff --git a/app/lib/dal/drivers/SQLiteDriver.ts b/app/lib/dal/drivers/SQLiteDriver.ts
index c0ce0cba05c0f77167a91633af840d5ce737146d..1b2c3896d7867e8ddee0b304e2684d13980f0e33 100644
--- a/app/lib/dal/drivers/SQLiteDriver.ts
+++ b/app/lib/dal/drivers/SQLiteDriver.ts
@@ -12,8 +12,8 @@
 // GNU Affero General Public License for more details.
 
 import {OtherConstants} from "../../other_constants"
+import {RealFS} from "../../system/directory"
 
-const qfs     = require('q-io/fs')
 const sqlite3 = require("sqlite3").verbose()
 
 const MEMORY_PATH = ':memory:'
@@ -82,7 +82,7 @@ export class SQLiteDriver {
     this.logger.debug('Removing SQLite database...')
     await this.closeConnection()
     if (this.path !== MEMORY_PATH) {
-      await qfs.remove(this.path)
+      await RealFS().fsUnlink(this.path)
     }
     this.logger.debug('Database removed')
   }
diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts
index f0a9103a5eb2c1d877a9929748b5cd10351c53ec..b5118d27cbc3109f816aa361128bc135da060855 100644
--- a/app/lib/dal/fileDAL.ts
+++ b/app/lib/dal/fileDAL.ts
@@ -39,6 +39,7 @@ import {IIndexDAL} from "./sqliteDAL/index/IIndexDAL"
 import {DataErrors} from "../common-libs/errors"
 import {BasicRevocableIdentity, IdentityDTO} from "../dto/IdentityDTO"
 import {BlockDAL} from "./sqliteDAL/BlockDAL"
+import {FileSystem} from "../system/directory"
 
 const fs      = require('fs')
 const path    = require('path')
@@ -50,7 +51,7 @@ const constants = require('../constants');
 
 export interface FileDALParams {
   home:string
-  fs:any
+  fs:FileSystem
   dbf:() => SQLiteDriver
   wotb:any
 }
diff --git a/app/lib/dal/fileDALs/CFSCore.ts b/app/lib/dal/fileDALs/CFSCore.ts
index 094ca53b2547edf6b5952121e3cc3c907077cb76..faa8c3a33be4ff1ba1a12882424499ed1bbb5f52 100644
--- a/app/lib/dal/fileDALs/CFSCore.ts
+++ b/app/lib/dal/fileDALs/CFSCore.ts
@@ -13,6 +13,8 @@
 
 "use strict";
 
+import {FileSystem} from "../../system/directory"
+
 const _ = require('underscore');
 const path = require('path');
 
@@ -24,7 +26,7 @@ export class CFSCore {
   private deletionFolderPromise: Promise<any> | null
   private createDeletionFolder: () => Promise<any> | null
 
-  constructor(private rootPath:string, private qfs:any) {
+  constructor(private rootPath:string, private qfs:FileSystem) {
     this.deletedFolder = path.join(rootPath, '.deleted')
     this.deletionFolderPromise = null
 
@@ -42,12 +44,12 @@ export class CFSCore {
    */
   async read(filePath:string): Promise<string | null> {
     try {
-      const isDeleted = await this.qfs.exists(path.join(this.deletedFolder, this.toRemoveFileName(filePath)));
+      const isDeleted = await this.qfs.fsExists(path.join(this.deletedFolder, this.toRemoveFileName(filePath)));
       if (isDeleted) {
         // A deleted file must be considered non-existant
         return null;
       }
-      return await this.qfs.read(path.join(this.rootPath, filePath));
+      return await this.qfs.fsReadFile(path.join(this.rootPath, filePath));
     } catch (e) {
       return null
     }
@@ -60,12 +62,12 @@ export class CFSCore {
    */
   async exists(filePath:string): Promise<boolean | null> {
     try {
-      const isDeleted = await this.qfs.exists(path.join(this.deletedFolder, this.toRemoveFileName(filePath)));
+      const isDeleted = await this.qfs.fsExists(path.join(this.deletedFolder, this.toRemoveFileName(filePath)));
       if (isDeleted) {
         // A deleted file must be considered non-existant
         return false;
       }
-      return await this.qfs.exists(path.join(this.rootPath, filePath))
+      return await this.qfs.fsExists(path.join(this.rootPath, filePath))
     } catch (e) {
       return null
     }
@@ -80,13 +82,13 @@ export class CFSCore {
   async list(ofPath:string): Promise<string[]> {
     const dirPath = path.normalize(ofPath);
     let files: string[] = [], folder = path.join(this.rootPath, dirPath);
-    const hasDir = await this.qfs.exists(folder);
+    const hasDir = await this.qfs.fsExists(folder);
     if (hasDir) {
-      files = files.concat(await this.qfs.list(folder));
+      files = files.concat(await this.qfs.fsList(folder));
     }
-    const hasDeletedFiles = await this.qfs.exists(this.deletedFolder);
+    const hasDeletedFiles = await this.qfs.fsExists(this.deletedFolder);
     if (hasDeletedFiles) {
-      const deletedFiles = await this.qfs.list(this.deletedFolder);
+      const deletedFiles = await this.qfs.fsList(this.deletedFolder);
       const deletedOfThisPath = deletedFiles.filter((f:string) => f.match(new RegExp('^' + this.toRemoveDirName(dirPath))));
       const locallyDeletedFiles = deletedOfThisPath.map((f:string) => f.replace(this.toRemoveDirName(dirPath), '')
         .replace(/^__/, ''));
@@ -102,7 +104,7 @@ export class CFSCore {
    * @param deep Wether to make a deep write or not.
    */
   async write(filePath:string, content:string, deep:boolean): Promise<void> {
-    return this.qfs.write(path.join(this.rootPath, filePath), content);
+    return this.qfs.fsWrite(path.join(this.rootPath, filePath), content);
   };
 
   /**
@@ -114,7 +116,7 @@ export class CFSCore {
   async remove(filePath:string, deep = false): Promise<void> {
     // Make a deep physical deletion
     // Root core: physical deletion
-    return this.qfs.remove(path.join(this.rootPath, filePath));
+    await this.qfs.fsUnlink(path.join(this.rootPath, filePath));
   }
 
   /**
@@ -138,9 +140,9 @@ export class CFSCore {
       let folder = this.rootPath;
       for (let i = 0, len = folders.length; i < len; i++) {
         folder = folder ? path.join(folder, folders[i]) : folders[i];
-        let exists = await this.qfs.exists(folder);
+        let exists = await this.qfs.fsExists(folder);
         if (!exists) {
-          await this.qfs.makeDirectory(folder);
+          await this.qfs.fsMakeDirectory(folder);
         }
       }
     } catch (e) {
diff --git a/app/lib/logger.ts b/app/lib/logger.ts
index 0fe2865b849f4ea81e889697798118c1ab5f3fd6..4ef6b1be18f1816527b0d79d50116874a83fce8c 100644
--- a/app/lib/logger.ts
+++ b/app/lib/logger.ts
@@ -11,11 +11,11 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 // GNU Affero General Public License for more details.
 
-"use strict";
+import {Directory} from "./system/directory"
+
 const moment = require('moment');
 const path = require('path');
 const winston = require('winston');
-const directory = require('../lib/system/directory');
 
 /***************
  * CALLBACK LOGGER
@@ -151,7 +151,7 @@ logger.unmute = () => {
 /**
  * Default logging path
  */
-logger.addHomeLogs(directory.INSTANCE_HOME)
+logger.addHomeLogs(Directory.INSTANCE_HOME)
 
 /**
 * Convenience function to get logger directly
diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts
index 1dffb23d07035b38568a353abb118d2ce9cf6d05..8f7a89f28f85a799441da4e11b05bb918bd74d53 100644
--- a/app/lib/system/directory.ts
+++ b/app/lib/system/directory.ts
@@ -14,6 +14,7 @@
 import {SQLiteDriver} from "../dal/drivers/SQLiteDriver"
 import {CFSCore} from "../dal/fileDALs/CFSCore"
 import {WoTBObject} from "../wot"
+import {FileDALParams} from "../dal/fileDAL"
 
 const opts = require('optimist').argv;
 const path = require('path');
@@ -31,7 +32,58 @@ const getUserHome = (directory:string|null = null) => (directory || DEFAULT_HOME
 
 const getDomain = (profile:string|null = null) => (profile || DEFAULT_DOMAIN);
 
-const dir = module.exports = {
+export interface FileSystem {
+  fsExists(file:string): Promise<boolean>
+  fsReadFile(file:string): Promise<string>
+  fsUnlink(file:string): Promise<void>
+  fsList(dir:string): Promise<string[]>
+  fsWrite(file:string, content:string): Promise<void>
+  fsMakeDirectory(dir:string): Promise<void>
+  fsRemoveTree(dir:string): Promise<void>
+}
+
+class QioFileSystem implements FileSystem {
+
+  constructor(private qio:any) {}
+
+  async fsExists(file:string) {
+    return this.qio.exists(file)
+  }
+
+  async fsReadFile(file:string) {
+    return this.qio.read(file)
+  }
+
+  async fsUnlink(file:string) {
+    return this.qio.remove(file)
+  }
+
+  async fsList(dir: string) {
+    return this.qio.list(dir)
+  }
+
+  fsWrite(file: string, content: string): Promise<void> {
+    return this.qio.write(file, content)
+  }
+
+  fsMakeDirectory(dir: string): Promise<void> {
+    return this.qio.makeTree(dir)
+  }
+
+  async fsRemoveTree(dir: string): Promise<void> {
+    return this.qio.removeTree(dir)
+  }
+}
+
+export const RealFS = (): FileSystem => {
+  return new QioFileSystem(qfs)
+}
+
+export const MockFS = (initialTree:{ [folder:string]: { [file:string]: string }} = {}): FileSystem => {
+  return new QioFileSystem(require('q-io/fs-mock')(initialTree))
+}
+
+export const Directory = {
 
   INSTANCE_NAME: getDomain(opts.mdb),
   INSTANCE_HOME: getHomePath(opts.mdb, opts.home),
@@ -42,38 +94,41 @@ const dir = module.exports = {
   getHome: (profile:string|null = null, directory:string|null = null) => getHomePath(profile, directory),
 
   getHomeFS: async (isMemory:boolean, theHome:string, makeTree = true) => {
-    const home = theHome || dir.getHome();
-    const params:any = {
-      home: home
-    };
-    if (isMemory) {
-      params.fs = require('q-io/fs-mock')({});
-    } else {
-      params.fs = qfs;
+    const home = theHome || Directory.getHome()
+    const params = {
+      home: home,
+      fs: isMemory ? MockFS() : RealFS()
     }
     if (makeTree) {
-      await params.fs.makeTree(home)
+      await params.fs.fsMakeDirectory(home)
     }
     return params;
   },
 
-  getHomeParams: async (isMemory:boolean, theHome:string) => {
-    const params:any = await dir.getHomeFS(isMemory, theHome)
+  getHomeParams: async (isMemory:boolean, theHome:string): Promise<FileDALParams> => {
+    const params = await Directory.getHomeFS(isMemory, theHome)
     const home = params.home;
+    let dbf: () => SQLiteDriver
+    let wotb: any
     if (isMemory) {
-      params.dbf = () => new SQLiteDriver(':memory:');
-      params.wotb = WoTBObject.memoryInstance();
+      dbf = () => new SQLiteDriver(':memory:');
+      wotb = WoTBObject.memoryInstance();
     } else {
-      const sqlitePath = path.join(home, dir.DUNITER_DB_NAME + '.db');
-      params.dbf = () => new SQLiteDriver(sqlitePath);
-      const wotbFilePath = path.join(home, dir.WOTB_FILE);
+      const sqlitePath = path.join(home, Directory.DUNITER_DB_NAME + '.db');
+      dbf = () => new SQLiteDriver(sqlitePath);
+      const wotbFilePath = path.join(home, Directory.WOTB_FILE);
       let existsFile = await qfs.exists(wotbFilePath)
       if (!existsFile) {
         fs.closeSync(fs.openSync(wotbFilePath, 'w'));
       }
-      params.wotb = WoTBObject.fileInstance(wotbFilePath);
+      wotb = WoTBObject.fileInstance(wotbFilePath);
+    }
+    return {
+      home: params.home,
+      fs: params.fs,
+      dbf,
+      wotb
     }
-    return params;
   },
 
   createHomeIfNotExists: async (fileSystem:any, theHome:string) => {
diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts
index f4a24d592b9838a2b9b74b13efca34b219794ac3..9fbff1423b4dec5f7cf07837b41b026a0ea39cc8 100644
--- a/app/modules/daemon.ts
+++ b/app/modules/daemon.ts
@@ -13,11 +13,8 @@
 
 import {ConfDTO} from "../lib/dto/ConfDTO"
 import {Server} from "../../server"
+import {Directory, RealFS} from "../lib/system/directory"
 
-"use strict";
-
-const qfs       = require('q-io/fs');
-const directory = require('../lib/system/directory');
 const constants = require('../lib/constants');
 const Tail      = require("tail").Tail
 
@@ -95,7 +92,7 @@ module.exports = {
       desc: 'Follow duniter logs.',
       logs: false,
       onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        printTailAndWatchFile(directory.INSTANCE_HOMELOG_FILE, constants.NB_INITIAL_LINES_TO_SHOW)
+        printTailAndWatchFile(Directory.INSTANCE_HOMELOG_FILE, constants.NB_INITIAL_LINES_TO_SHOW)
         // Never ending command
         return new Promise(res => null)
       }
@@ -149,8 +146,9 @@ function stopDaemon(daemon:any) {
 }
 
 async function printTailAndWatchFile(file:any, tailSize:number) {
-    if (await qfs.exists(file)) {
-      const content = await qfs.read(file)
+    const fs = RealFS()
+    if (await fs.fsExists(file)) {
+      const content = await fs.fsReadFile(file)
       const lines = content.split('\n')
       const from = Math.max(0, lines.length - tailSize)
       const lastLines = lines.slice(from).join('\n')
diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts
index 9bc46362a4fd7ec376dafdc93ab9ed25c505a368..4b3fa0127ae5be06215c62bf91ac3b406d7087f0 100644
--- a/app/modules/prover/lib/powCluster.ts
+++ b/app/modules/prover/lib/powCluster.ts
@@ -50,7 +50,7 @@ export class Master {
   onInfoCallback:any
   workersOnline:Promise<any>[]
 
-  constructor(private nbCores:number, logger:any, private dal?:FileDAL) {
+  constructor(private nbCores:number|null|undefined, logger:any, private dal?:FileDAL) {
     this.clusterId = clusterId++
     this.logger = logger || Master.defaultLogger()
     this.onInfoMessage = (message:any) => {
@@ -90,7 +90,8 @@ export class Master {
       execArgv: [] // Do not try to debug forks
     })
 
-    this.slaves = Array.from({ length: this.nbCores }).map((value, index) => {
+    const nbCores = this.nbCores !== undefined && this.nbCores !== null ? this.nbCores : 1
+    this.slaves = Array.from({ length: nbCores }).map((value, index) => {
       const nodejsWorker = cluster.fork()
       const worker = new PowWorker(nodejsWorker, message => {
         this.onWorkerMessage(index, message)
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index af1024409ad401bc0f0991215e52de8b3f0bdf82..aa5381a80d88fca94914e3ee5c54dbd47a2e3f1f 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -21,10 +21,10 @@ import {dos2unix} from "../../../lib/common-libs/dos2unix"
 import {rawer} from "../../../lib/common-libs/index"
 import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler"
 import {PowDAL} from "../../../lib/dal/fileDALs/PowDAL";
+import {Directory} from "../../../lib/system/directory"
 
 const moment = require('moment');
 const querablep = require('querablep');
-const directory = require('../../../lib/system/directory');
 
 export function createPowWorker() {
 
@@ -64,7 +64,7 @@ export function createPowWorker() {
           }
 
           if (message.value.rootPath) {
-            const params = await directory.getHomeFS(false, message.value.rootPath, false)
+            const params = await Directory.getHomeFS(false, message.value.rootPath, false)
             powDAL = new PowDAL(message.value.rootPath, params.fs)
           }
 
diff --git a/index.ts b/index.ts
index d28736977909a07a2705f2bb3a96c6418f2183a6..f944fabbfe04f12d07a82784201c4ec70cfc3189 100644
--- a/index.ts
+++ b/index.ts
@@ -24,10 +24,10 @@ import {ProverConstants} from "./app/modules/prover/lib/constants"
 import {ProxiesConf} from './app/lib/proxy';
 import {RouterDependency} from "./app/modules/router"
 import {OtherConstants} from "./app/lib/other_constants"
+import {Directory} from "./app/lib/system/directory"
 
 const path = require('path');
 const _ = require('underscore');
-const directory = require('./app/lib/system/directory');
 const constants = require('./app/lib/constants');
 const logger = require('./app/lib/logger').NewLogger('duniter');
 
@@ -292,7 +292,7 @@ class Stack {
     OtherConstants.SQL_TRACES = program.sqlTraces === true
     const dbName = program.mdb;
     const dbHome = program.home;
-    const home = directory.getHome(dbName, dbHome);
+    const home = Directory.getHome(dbName, dbHome);
 
     if (command.logs === false) {
       logger.mute();
diff --git a/package.json b/package.json
index cd615c07f65b73d2e9ee29a9bfa44a333b340564..b59b931e10049e4d9f291e619319af995a730176 100644
--- a/package.json
+++ b/package.json
@@ -84,7 +84,7 @@
     "node-pre-gyp": "0.6.34",
     "node-uuid": "1.4.8",
     "optimist": "0.6.1",
-    "q-io": "1.13.2",
+    "q-io": "^1.13.5",
     "querablep": "^0.1.0",
     "request": "2.81.0",
     "request-promise": "4.2.0",
diff --git a/server.ts b/server.ts
index df2dddd5de05c7daa20ec56cbd3281f08e4a0d23..c7153ddd53abbb0b05beb891c5365dfe811e3a40 100644
--- a/server.ts
+++ b/server.ts
@@ -17,7 +17,7 @@ import {PeeringService} from "./app/service/PeeringService"
 import {BlockchainService} from "./app/service/BlockchainService"
 import {TransactionService} from "./app/service/TransactionsService"
 import {ConfDTO} from "./app/lib/dto/ConfDTO"
-import {FileDAL} from "./app/lib/dal/fileDAL"
+import {FileDAL, FileDALParams} from "./app/lib/dal/fileDAL"
 import {DuniterBlockchain} from "./app/lib/blockchain/DuniterBlockchain"
 import {SQLBlockchain} from "./app/lib/blockchain/SqlBlockchain"
 import * as stream from "stream"
@@ -39,6 +39,7 @@ import {WS2PCluster} from "./app/modules/ws2p/lib/WS2PCluster"
 import {DBBlock} from "./app/lib/db/DBBlock"
 import {ProxiesConf} from './app/lib/proxy';
 import {DBPeer} from "./app/lib/dal/sqliteDAL/PeerDAL"
+import {Directory, FileSystem} from "./app/lib/system/directory"
 
 export interface HookableServer {
   generatorGetJoinData: (...args:any[]) => Promise<any>
@@ -57,12 +58,11 @@ const es          = require('event-stream');
 const daemonize   = require("daemonize2")
 const constants   = require('./app/lib/constants');
 const jsonpckg    = require('./package.json');
-const directory   = require('./app/lib/system/directory');
 const logger      = require('./app/lib/logger').NewLogger('server');
 
 export class Server extends stream.Duplex implements HookableServer {
 
-  private paramsP:Promise<any>|null
+  private paramsP:Promise<FileDALParams>
   private endpointsDefinitions:(()=>Promise<string>)[] = []
   private wrongEndpointsFilters:((endpoints:string[])=>Promise<string[]>)[] = []
   startService:()=>Promise<void>
@@ -95,7 +95,7 @@ export class Server extends stream.Duplex implements HookableServer {
     this.version = jsonpckg.version;
     this.logger = logger;
 
-    this.paramsP = directory.getHomeParams(memoryOnly, home)
+    this.paramsP = Directory.getHomeParams(memoryOnly, home)
 
     this.documentFIFO = new GlobalFifoPromise()
 
@@ -365,23 +365,23 @@ export class Server extends stream.Duplex implements HookableServer {
     const params = await this.paramsP;
     const myFS = params.fs;
     const rootPath = params.home;
-    const existsDir = await myFS.exists(rootPath);
+    const existsDir = await myFS.fsExists(rootPath);
     if (existsDir) {
-      await myFS.removeTree(rootPath);
+      await myFS.fsRemoveTree(rootPath);
     }
   }
 
   async resetAll(done:any = null) {
     await this.resetDataHook()
     await this.resetConfigHook()
-    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE, 'export.zip', 'import.zip', 'conf'];
+    const files = ['stats', 'cores', 'current', Directory.DUNITER_DB_NAME, Directory.DUNITER_DB_NAME + '.db', Directory.DUNITER_DB_NAME + '.log', Directory.WOTB_FILE, 'export.zip', 'import.zip', 'conf'];
     const dirs  = ['blocks', 'blockchain', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
     return this.resetFiles(files, dirs, done);
   }
 
   async resetData(done:any = null) {
     await this.resetDataHook()
-    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log', directory.WOTB_FILE];
+    const files = ['stats', 'cores', 'current', Directory.DUNITER_DB_NAME, Directory.DUNITER_DB_NAME + '.db', Directory.DUNITER_DB_NAME + '.log', Directory.WOTB_FILE];
     const dirs  = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
     await this.resetFiles(files, dirs, done);
   }
@@ -408,12 +408,12 @@ export class Server extends stream.Duplex implements HookableServer {
     const rootPath = params.home;
     const myFS = params.fs;
     const archive = archiver('zip');
-    if (await myFS.exists(path.join(rootPath, 'indicators'))) {
+    if (await myFS.fsExists(path.join(rootPath, 'indicators'))) {
       archive.directory(path.join(rootPath, 'indicators'), '/indicators', undefined, { name: 'indicators'});
     }
     const files = ['duniter.db', 'stats.json', 'wotb.bin'];
     for (const file of files) {
-      if (await myFS.exists(path.join(rootPath, file))) {
+      if (await myFS.fsExists(path.join(rootPath, file))) {
         archive.file(path.join(rootPath, file), { name: file });
       }
     }
@@ -435,7 +435,7 @@ export class Server extends stream.Duplex implements HookableServer {
   async cleanDBData() {
     await this.dal.cleanCaches();
     this.dal.wotb.resetWoT();
-    const files = ['stats', 'cores', 'current', directory.DUNITER_DB_NAME, directory.DUNITER_DB_NAME + '.db', directory.DUNITER_DB_NAME + '.log'];
+    const files = ['stats', 'cores', 'current', Directory.DUNITER_DB_NAME, Directory.DUNITER_DB_NAME + '.db', Directory.DUNITER_DB_NAME + '.log'];
     const dirs  = ['blocks', 'ud_history', 'branches', 'certs', 'txs', 'cores', 'sources', 'links', 'ms', 'identities', 'peers', 'indicators', 'leveldb'];
     return this.resetFiles(files, dirs);
   }
@@ -443,34 +443,34 @@ export class Server extends stream.Duplex implements HookableServer {
   private async resetFiles(files:string[], dirs:string[], done:any = null) {
     try {
       const params = await this.paramsP;
-      const myFS = params.fs;
+      const myFS:FileSystem = params.fs;
       const rootPath = params.home;
       for (const fName of files) {
         // JSON file?
-        const existsJSON = await myFS.exists(rootPath + '/' + fName + '.json');
+        const existsJSON = await myFS.fsExists(rootPath + '/' + fName + '.json');
         if (existsJSON) {
           const theFilePath = rootPath + '/' + fName + '.json';
-          await myFS.remove(theFilePath);
-          if (await myFS.exists(theFilePath)) {
+          await myFS.fsUnlink(theFilePath);
+          if (await myFS.fsExists(theFilePath)) {
             throw Error('Failed to delete file "' + theFilePath + '"');
           }
         } else {
           // Normal file?
           const normalFile = path.join(rootPath, fName);
-          const existsFile = await myFS.exists(normalFile);
+          const existsFile = await myFS.fsExists(normalFile);
           if (existsFile) {
-            await myFS.remove(normalFile);
-            if (await myFS.exists(normalFile)) {
+            await myFS.fsUnlink(normalFile);
+            if (await myFS.fsExists(normalFile)) {
               throw Error('Failed to delete file "' + normalFile + '"');
             }
           }
         }
       }
       for (const dirName of dirs) {
-        const existsDir = await myFS.exists(rootPath + '/' + dirName);
+        const existsDir = await myFS.fsExists(rootPath + '/' + dirName);
         if (existsDir) {
-          await myFS.removeTree(rootPath + '/' + dirName);
-          if (await myFS.exists(rootPath + '/' + dirName)) {
+          await myFS.fsRemoveTree(rootPath + '/' + dirName);
+          if (await myFS.fsExists(rootPath + '/' + dirName)) {
             throw Error('Failed to delete folder "' + rootPath + '/' + dirName + '"');
           }
         }
@@ -544,8 +544,8 @@ export class Server extends stream.Duplex implements HookableServer {
     const argv = this.getCommand(overrideCommand, insteadOfCmd)
     return daemonize.setup({
       main: mainModule,
-      name: directory.INSTANCE_NAME,
-      pidfile: path.join(directory.INSTANCE_HOME, "app.pid"),
+      name: Directory.INSTANCE_NAME,
+      pidfile: path.join(Directory.INSTANCE_HOME, "app.pid"),
       argv,
       cwd
     });
diff --git a/test/dal/dal.js b/test/dal/dal.js
index 44940fa4ca241274bf624a940c2d3db02ba7687e..f28674f7d6db41aaba53b3de7a07c19d11547606 100644
--- a/test/dal/dal.js
+++ b/test/dal/dal.js
@@ -17,7 +17,7 @@ var _ = require('underscore');
 var should = require('should');
 var assert = require('assert');
 var dal = require('../../app/lib/dal/fileDAL').FileDAL
-var dir = require('../../app/lib/system/directory');
+var dir = require('../../app/lib/system/directory').Directory
 var constants = require('../../app/lib/constants');
 var PeerDTO   = require('../../app/lib/dto/PeerDTO').PeerDTO
 
diff --git a/test/dal/source_dal.js b/test/dal/source_dal.js
index 08c243bcdefa51c5507ed5e9631aafc69201e957..528ac444fc8fee0abd1195702a9f7a8abbd61196 100644
--- a/test/dal/source_dal.js
+++ b/test/dal/source_dal.js
@@ -15,7 +15,7 @@
 const co = require('co');
 const should = require('should');
 const FileDAL = require('../../app/lib/dal/fileDAL').FileDAL
-const dir = require('../../app/lib/system/directory');
+const dir = require('../../app/lib/system/directory').Directory
 const indexer    = require('../../app/lib/indexer').Indexer
 
 let dal;
diff --git a/test/dal/triming.js b/test/dal/triming.js
index a684a0c0e0b66106b2b7bc5a1a2ba26daf9b0385..704567a45604de04a5bc149527c8f63eaec08e21 100644
--- a/test/dal/triming.js
+++ b/test/dal/triming.js
@@ -15,7 +15,7 @@
 const co = require('co');
 const should = require('should');
 const FileDAL = require('../../app/lib/dal/fileDAL').FileDAL
-const dir = require('../../app/lib/system/directory');
+const dir = require('../../app/lib/system/directory').Directory
 const indexer    = require('../../app/lib/indexer').Indexer
 const toolbox = require('../integration/tools/toolbox');
 
diff --git a/test/fast/cfs.js b/test/fast/cfs.js
index 2067d7b27217e1edca77a9da04c5fb79d28d5302..8dbdfa9bb189b7e3abfc6ed841b5bb9966a5789b 100644
--- a/test/fast/cfs.js
+++ b/test/fast/cfs.js
@@ -17,7 +17,7 @@ var assert = require('assert');
 var should = require('should');
 var co = require('co');
 var CFSCore = require('../../app/lib/dal/fileDALs/CFSCore').CFSCore;
-var mockFS = require('q-io/fs-mock')({
+const mockFS = require('../../app/lib/system/directory').MockFS({
   'B5_a': {
     "A.json": '{ "text": "Content of A from B5_a" }'
   },
diff --git a/yarn.lock b/yarn.lock
index 27b57764e92f118d2b9267a7f057a165ee4f1760..13d63d000fa4171fee31f5b4df5c3936f66e9747 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -3059,15 +3059,15 @@ punycode@^1.4.1:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/punycode/-/punycode-1.4.1.tgz#c0d5a63b2718800ad8e1eb0fa5269c84dd41845e"
 
-q-io@1.13.2:
-  version "1.13.2"
-  resolved "https://registry.yarnpkg.com/q-io/-/q-io-1.13.2.tgz#eea130d481ddb5e1aa1bc5a66855f7391d06f003"
+q-io@^1.13.5:
+  version "1.13.5"
+  resolved "https://registry.yarnpkg.com/q-io/-/q-io-1.13.5.tgz#6ac39deb5cfe0dc68436e6f8c33d0d7c3f471bb2"
   dependencies:
     collections "^0.2.0"
     mime "^1.2.11"
     mimeparse "^0.1.4"
     q "^1.0.1"
-    qs "^1.2.1"
+    qs "^6.4.0"
     url2 "^0.0.0"
 
 q@^1.0.1:
@@ -3078,11 +3078,7 @@ qs@6.4.0, qs@~6.4.0:
   version "6.4.0"
   resolved "https://registry.yarnpkg.com/qs/-/qs-6.4.0.tgz#13e26d28ad6b0ffaa91312cd3bf708ed351e7233"
 
-qs@^1.2.1:
-  version "1.2.2"
-  resolved "https://registry.yarnpkg.com/qs/-/qs-1.2.2.tgz#19b57ff24dc2a99ce1f8bdf6afcda59f8ef61f88"
-
-qs@^6.5.1, qs@~6.5.1:
+qs@^6.4.0, qs@^6.5.1, qs@~6.5.1:
   version "6.5.1"
   resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.1.tgz#349cdf6eef89ec45c12d7d5eb3fc0c870343a6d8"