diff --git a/app/lib/dal/drivers/SQLiteDriver.ts b/app/lib/dal/drivers/SQLiteDriver.ts
index d034f0a034b07261dde7cff4e10edb65896b2213..404371634614543f04bee34812528d961a5ee043 100644
--- a/app/lib/dal/drivers/SQLiteDriver.ts
+++ b/app/lib/dal/drivers/SQLiteDriver.ts
@@ -95,16 +95,16 @@ export class SQLiteDriver {
   }
 
   async destroyDatabase(): Promise<void> {
-    this.logger.debug("Removing SQLite database \"%s\"...", this.path);
+    this.logger.debug('Removing SQLite database "%s"...', this.path);
     await this.closeConnection();
     if (this.path !== MEMORY_PATH) {
       await RealFS().fsUnlink(this.path);
     }
-    this.logger.debug("Database \"%s\" removed", this.path);
+    this.logger.debug('Database "%s" removed', this.path);
   }
 
   get closed() {
-   return this.dbPromise
+    return this.dbPromise;
   }
 
   async closeConnection(): Promise<void> {
@@ -116,9 +116,9 @@ export class SQLiteDriver {
       db.open; // For an unknown reason, we need this line.
     }
     await new Promise((resolve, reject) => {
-      this.logger.debug("Closing SQLite database \"%s\"...", this.path);
+      this.logger.debug('Closing SQLite database "%s"...', this.path);
       db.on("close", () => {
-        this.logger.info("Database \"%s\" closed.", this.path);
+        this.logger.info('Database "%s" closed.', this.path);
         this.dbPromise = null;
         resolve();
       });
diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts
index 100d1cb51ba2e82c0337e9ce9c9c6b9e8123d4d7..2d7915db8d508ca3ab8014d3ef4129a454ff1cb8 100644
--- a/app/lib/dal/fileDAL.ts
+++ b/app/lib/dal/fileDAL.ts
@@ -13,12 +13,12 @@
 
 import * as fs from "fs";
 import * as path from "path";
-import {SQLiteDriver} from "./drivers/SQLiteDriver";
-import {ConfDAL} from "./fileDALs/ConfDAL";
-import {ConfDTO} from "../dto/ConfDTO";
-import {BlockDTO} from "../dto/BlockDTO";
-import {DBHead} from "../db/DBHead";
-import {DBIdentity, IdentityDAL} from "./sqliteDAL/IdentityDAL";
+import { SQLiteDriver } from "./drivers/SQLiteDriver";
+import { ConfDAL } from "./fileDALs/ConfDAL";
+import { ConfDTO } from "../dto/ConfDTO";
+import { BlockDTO } from "../dto/BlockDTO";
+import { DBHead } from "../db/DBHead";
+import { DBIdentity, IdentityDAL } from "./sqliteDAL/IdentityDAL";
 import {
   CindexEntry,
   FullCindexEntry,
@@ -31,55 +31,55 @@ import {
   SimpleUdEntryForWallet,
   SindexEntry,
 } from "../indexer";
-import {TransactionDTO} from "../dto/TransactionDTO";
-import {CertDAL, DBCert} from "./sqliteDAL/CertDAL";
-import {DBBlock} from "../db/DBBlock";
-import {DBMembership, MembershipDAL} from "./sqliteDAL/MembershipDAL";
-import {MerkleDTO} from "../dto/MerkleDTO";
-import {CommonConstants} from "../common-libs/constants";
-import {PowDAL} from "./fileDALs/PowDAL";
-import {Initiable} from "./sqliteDAL/Initiable";
-import {MetaDAL} from "./sqliteDAL/MetaDAL";
-import {DataErrors} from "../common-libs/errors";
-import {BasicRevocableIdentity, IdentityDTO} from "../dto/IdentityDTO";
-import {FileSystem} from "../system/directory";
-import {Wot} from "../../../neon/lib";
-import {IIndexDAO} from "./indexDAL/abstract/IIndexDAO";
-import {BIndexDAO} from "./indexDAL/abstract/BIndexDAO";
-import {MIndexDAO} from "./indexDAL/abstract/MIndexDAO";
-import {SIndexDAO} from "./indexDAL/abstract/SIndexDAO";
-import {CIndexDAO} from "./indexDAL/abstract/CIndexDAO";
-import {IdentityForRequirements} from "../../service/BlockchainService";
-import {BlockchainDAO} from "./indexDAL/abstract/BlockchainDAO";
-import {TxsDAO} from "./indexDAL/abstract/TxsDAO";
-import {WalletDAO} from "./indexDAL/abstract/WalletDAO";
-import {PeerDAO} from "./indexDAL/abstract/PeerDAO";
-import {DBTx} from "../db/DBTx";
-import {DBWallet} from "../db/DBWallet";
-import {Tristamp} from "../common/Tristamp";
-import {CFSCore} from "./fileDALs/CFSCore";
-import {Underscore} from "../common-libs/underscore";
-import {DBPeer} from "../db/DBPeer";
-import {MonitorFlushedIndex} from "../debug/MonitorFlushedIndex";
-import {cliprogram} from "../common-libs/programOptions";
-import {DividendDAO, UDSource} from "./indexDAL/abstract/DividendDAO";
-import {HttpSource, HttpUD} from "../../modules/bma/lib/dtos";
-import {GenericDAO} from "./indexDAL/abstract/GenericDAO";
-import {MonitorExecutionTime} from "../debug/MonitorExecutionTime";
-import {LevelDBDividend} from "./indexDAL/leveldb/LevelDBDividend";
-import {LevelDBBindex} from "./indexDAL/leveldb/LevelDBBindex";
-
-import {LevelUp} from "levelup";
-import {LevelDBBlockchain} from "./indexDAL/leveldb/LevelDBBlockchain";
-import {LevelDBSindex} from "./indexDAL/leveldb/LevelDBSindex";
-import {SqliteTransactions} from "./indexDAL/sqlite/SqliteTransactions";
-import {SqlitePeers} from "./indexDAL/sqlite/SqlitePeers";
-import {LevelDBWallet} from "./indexDAL/leveldb/LevelDBWallet";
-import {LevelDBCindex} from "./indexDAL/leveldb/LevelDBCindex";
-import {LevelDBIindex} from "./indexDAL/leveldb/LevelDBIindex";
-import {LevelDBMindex} from "./indexDAL/leveldb/LevelDBMindex";
-import {ConfDAO} from "./indexDAL/abstract/ConfDAO";
-import {ServerDAO} from "./server-dao";
+import { TransactionDTO } from "../dto/TransactionDTO";
+import { CertDAL, DBCert } from "./sqliteDAL/CertDAL";
+import { DBBlock } from "../db/DBBlock";
+import { DBMembership, MembershipDAL } from "./sqliteDAL/MembershipDAL";
+import { MerkleDTO } from "../dto/MerkleDTO";
+import { CommonConstants } from "../common-libs/constants";
+import { PowDAL } from "./fileDALs/PowDAL";
+import { Initiable } from "./sqliteDAL/Initiable";
+import { MetaDAL } from "./sqliteDAL/MetaDAL";
+import { DataErrors } from "../common-libs/errors";
+import { BasicRevocableIdentity, IdentityDTO } from "../dto/IdentityDTO";
+import { FileSystem } from "../system/directory";
+import { Wot } from "../../../neon/lib";
+import { IIndexDAO } from "./indexDAL/abstract/IIndexDAO";
+import { BIndexDAO } from "./indexDAL/abstract/BIndexDAO";
+import { MIndexDAO } from "./indexDAL/abstract/MIndexDAO";
+import { SIndexDAO } from "./indexDAL/abstract/SIndexDAO";
+import { CIndexDAO } from "./indexDAL/abstract/CIndexDAO";
+import { IdentityForRequirements } from "../../service/BlockchainService";
+import { BlockchainDAO } from "./indexDAL/abstract/BlockchainDAO";
+import { TxsDAO } from "./indexDAL/abstract/TxsDAO";
+import { WalletDAO } from "./indexDAL/abstract/WalletDAO";
+import { PeerDAO } from "./indexDAL/abstract/PeerDAO";
+import { DBTx } from "../db/DBTx";
+import { DBWallet } from "../db/DBWallet";
+import { Tristamp } from "../common/Tristamp";
+import { CFSCore } from "./fileDALs/CFSCore";
+import { Underscore } from "../common-libs/underscore";
+import { DBPeer } from "../db/DBPeer";
+import { MonitorFlushedIndex } from "../debug/MonitorFlushedIndex";
+import { cliprogram } from "../common-libs/programOptions";
+import { DividendDAO, UDSource } from "./indexDAL/abstract/DividendDAO";
+import { HttpSource, HttpUD } from "../../modules/bma/lib/dtos";
+import { GenericDAO } from "./indexDAL/abstract/GenericDAO";
+import { MonitorExecutionTime } from "../debug/MonitorExecutionTime";
+import { LevelDBDividend } from "./indexDAL/leveldb/LevelDBDividend";
+import { LevelDBBindex } from "./indexDAL/leveldb/LevelDBBindex";
+
+import { LevelUp } from "levelup";
+import { LevelDBBlockchain } from "./indexDAL/leveldb/LevelDBBlockchain";
+import { LevelDBSindex } from "./indexDAL/leveldb/LevelDBSindex";
+import { SqliteTransactions } from "./indexDAL/sqlite/SqliteTransactions";
+import { SqlitePeers } from "./indexDAL/sqlite/SqlitePeers";
+import { LevelDBWallet } from "./indexDAL/leveldb/LevelDBWallet";
+import { LevelDBCindex } from "./indexDAL/leveldb/LevelDBCindex";
+import { LevelDBIindex } from "./indexDAL/leveldb/LevelDBIindex";
+import { LevelDBMindex } from "./indexDAL/leveldb/LevelDBMindex";
+import { ConfDAO } from "./indexDAL/abstract/ConfDAO";
+import { ServerDAO } from "./server-dao";
 
 const readline = require("readline");
 const indexer = require("../indexer").Indexer;
@@ -150,7 +150,8 @@ export class FileDAL implements ServerDAO {
     this.powDAL = new PowDAL(this.rootPath, params.fs);
     this.confDAL = new ConfDAL(this.rootPath, params.fs);
     this.metaDAL = new (require("./sqliteDAL/MetaDAL").MetaDAL)(
-      this.sqliteDriver, getSqliteDB
+      this.sqliteDriver,
+      getSqliteDB
     );
     this.idtyDAL = new (require("./sqliteDAL/IdentityDAL").IdentityDAL)(
       this.sqliteDriver
@@ -1357,26 +1358,26 @@ export class FileDAL implements ServerDAO {
    * @private
    */
   private async mapToDBTxs(
-      txs: TransactionDTO[],
-      block_number: number,
-      medianTime: number
+    txs: TransactionDTO[],
+    block_number: number,
+    medianTime: number
   ): Promise<DBTx[]> {
     return Promise.all(
-        txs.map(async (tx) => {
-          const sp = tx.blockstamp.split("-", 2);
-          const basedBlock = (await this.getAbsoluteBlockByNumberAndHash(
-              parseInt(sp[0]),
-              sp[1]
-          )) as DBBlock;
-          tx.blockstampTime = basedBlock.medianTime;
-          const txEntity = TransactionDTO.fromJSONObject(tx);
-          if (!txEntity.hash) txEntity.computeAllHashes();
-          const dbTx = DBTx.fromTransactionDTO(txEntity);
-          dbTx.written = true;
-          dbTx.block_number = block_number;
-          dbTx.time = medianTime;
-          return dbTx;
-        })
+      txs.map(async (tx) => {
+        const sp = tx.blockstamp.split("-", 2);
+        const basedBlock = (await this.getAbsoluteBlockByNumberAndHash(
+          parseInt(sp[0]),
+          sp[1]
+        )) as DBBlock;
+        tx.blockstampTime = basedBlock.medianTime;
+        const txEntity = TransactionDTO.fromJSONObject(tx);
+        if (!txEntity.hash) txEntity.computeAllHashes();
+        const dbTx = DBTx.fromTransactionDTO(txEntity);
+        dbTx.written = true;
+        dbTx.block_number = block_number;
+        dbTx.time = medianTime;
+        return dbTx;
+      })
     );
   }
 
@@ -1392,9 +1393,9 @@ export class FileDAL implements ServerDAO {
   }
 
   async insertTxsInFiles(
-      txs: TransactionDTO[],
-      block_number: number,
-      medianTime: number
+    txs: TransactionDTO[],
+    block_number: number,
+    medianTime: number
   ): Promise<DBTx[]> {
     if (!txs.length) return [];
     const dbTxs = await this.mapToDBTxs(txs, block_number, medianTime);
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts
index f54665843753d63737a7a79afcab510eb2cff06e..756d367c5df96461a44543f3b6def81e66b3cf24 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts
@@ -65,12 +65,12 @@ export class SqliteTable<T> {
    * Allow to migrate the table
    */
   generateUpgradeSql(): string {
-    return '';
+    return "";
   }
 
   generateCreateIndexes() {
     return this.keys()
-      .filter(key => this.fields[key]?.indexed)
+      .filter((key) => this.fields[key]?.indexed)
       .map((fieldName) => {
         return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${fieldName} ON ${this.name} (${fieldName});\n`;
       })
@@ -205,7 +205,10 @@ export class SqliteTable<T> {
 
   async countBy(fieldName: keyof T, fieldValue: any): Promise<number> {
     return ((
-        await this.driver.sqlRead(`SELECT COUNT(*) as max FROM ${this.name} WHERE ${fieldName} = ?`, [fieldValue])
+      await this.driver.sqlRead(
+        `SELECT COUNT(*) as max FROM ${this.name} WHERE ${fieldName} = ?`,
+        [fieldValue]
+      )
     )[0] as any).max;
   }
 
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
index 8bc6cb8c6ce34559f69c5f80320d6bcf7c675e4d..ffe7d944bb1ef0a968aaad1f082f24d0aeaae6a0 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
@@ -18,24 +18,51 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
       "txs",
       {
         hash: new SqlNotNullableFieldDefinition("VARCHAR", true, 70),
-        block_number: new SqlNullableFieldDefinition("INT", true /*need by getTxHistoryByPubkeyBetweenBlocks() */),
+        block_number: new SqlNullableFieldDefinition(
+          "INT",
+          true /* needed by getTxHistoryByPubkeyBetweenBlocks() */
+        ),
         locktime: new SqlNullableFieldDefinition("INT", false),
         version: new SqlNullableFieldDefinition("INT", false),
         currency: new SqlNullableFieldDefinition("VARCHAR", false, 10),
         comment: new SqlNullableFieldDefinition("TEXT", false),
-        blockstamp: new SqlNullableFieldDefinition("VARCHAR", true /* need by getWrittenOn() */, 100),
-        blockstampTime: new SqlNullableFieldDefinition("INT", true /* need by trimExpiredNonWrittenTxs() */),
-        time: new SqlNullableFieldDefinition("INT", true /*need by getTxHistoryByPubkeyBetweenTimes() */ ),
+        blockstamp: new SqlNullableFieldDefinition(
+          "VARCHAR",
+          true /* needed by getWrittenOn() */,
+          100
+        ),
+        blockstampTime: new SqlNullableFieldDefinition(
+          "INT",
+          true /* needed by trimExpiredNonWrittenTxs() */
+        ),
+        time: new SqlNullableFieldDefinition(
+          "INT",
+          true /* needed by getTxHistoryByPubkeyBetweenTimes() */
+        ),
         inputs: new SqlNullableFieldDefinition("JSON", false),
         unlocks: new SqlNullableFieldDefinition("JSON", false),
         outputs: new SqlNullableFieldDefinition("JSON", false),
-        issuer: new SqlNullableFieldDefinition("VARCHAR", true, 50), /* computed column - need by getTxHistoryXxx() */
+        issuer: new SqlNullableFieldDefinition(
+          "VARCHAR",
+          true,
+          50
+        ) /* computed column - needed by getTxHistoryXxx() */,
         issuers: new SqlNullableFieldDefinition("JSON", false),
         signatures: new SqlNullableFieldDefinition("JSON", false),
-        recipient: new SqlNullableFieldDefinition("VARCHAR", true, 50), /* computed column - need by getTxHistoryXxx() */
+        recipient: new SqlNullableFieldDefinition(
+          "VARCHAR",
+          true,
+          50
+        ) /* computed column - needed by getTxHistoryXxx() */,
         recipients: new SqlNullableFieldDefinition("JSON", false),
-        written: new SqlNotNullableFieldDefinition("BOOLEAN", true /* need by getTxHistoryMempool() */),
-        removed: new SqlNotNullableFieldDefinition("BOOLEAN", true /* need by getSandboxTs() */),
+        written: new SqlNotNullableFieldDefinition(
+          "BOOLEAN",
+          true /* needed by getTxHistoryMempool() */
+        ),
+        removed: new SqlNotNullableFieldDefinition(
+          "BOOLEAN",
+          true /* needed by getSandboxTs() */
+        ),
         received: new SqlNullableFieldDefinition("BOOLEAN", false),
         output_base: new SqlNullableFieldDefinition("INT", false),
         output_amount: new SqlNullableFieldDefinition("INT", false),
@@ -93,7 +120,7 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
   @MonitorExecutionTime()
   async saveBatch(records: DBTx[]): Promise<void> {
     if (records.length) {
-      await this.removeByHashBatch(records.map(t => t.hash));
+      await this.removeByHashBatch(records.map((t) => t.hash));
       await this.insertBatch(records);
     }
   }
@@ -172,8 +199,18 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
     to: number
   ): Promise<{ sent: DBTx[]; received: DBTx[] }> {
     return {
-      sent: await this.getLinkedWithIssuerByRange('block_number', pubkey, from, to),
-      received: await this.getLinkedWithRecipientByRange('block_number', pubkey, from, to),
+      sent: await this.getLinkedWithIssuerByRange(
+        "block_number",
+        pubkey,
+        from,
+        to
+      ),
+      received: await this.getLinkedWithRecipientByRange(
+        "block_number",
+        pubkey,
+        from,
+        to
+      ),
     };
   }
 
@@ -183,8 +220,13 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
     to: number
   ): Promise<{ sent: DBTx[]; received: DBTx[] }> {
     return {
-      sent: await this.getLinkedWithIssuerByRange('time', pubkey, from, to),
-      received: await this.getLinkedWithRecipientByRange('time', pubkey, from, to)
+      sent: await this.getLinkedWithIssuerByRange("time", pubkey, from, to),
+      received: await this.getLinkedWithRecipientByRange(
+        "time",
+        pubkey,
+        from,
+        to
+      ),
     };
   }
 
@@ -198,7 +240,8 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
   }
 
   getLinkedWithIssuer(pubkey: string): Promise<DBTx[]> {
-    return this.findEntities(`SELECT * FROM txs 
+    return this.findEntities(
+      `SELECT * FROM txs 
         WHERE written 
         AND (
             issuer = ?
@@ -208,8 +251,14 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
     );
   }
 
-  getLinkedWithIssuerByRange(rangeFieldName: keyof DBTx, pubkey: string, from: number, to: number): Promise<DBTx[]> {
-    return this.findEntities(`SELECT * FROM txs 
+  getLinkedWithIssuerByRange(
+    rangeFieldName: keyof DBTx,
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
         WHERE written 
         AND (
           issuer = ?
@@ -217,12 +266,13 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
         )
         AND ${rangeFieldName} >= ? 
         AND ${rangeFieldName} <= ?`,
-        [pubkey, `%${pubkey}%`, from, to]
+      [pubkey, `%${pubkey}%`, from, to]
     );
   }
 
   getLinkedWithRecipient(pubkey: string): Promise<DBTx[]> {
-    return this.findEntities(`SELECT * FROM txs 
+    return this.findEntities(
+      `SELECT * FROM txs 
         WHERE written 
         AND (
             recipient = ?
@@ -232,8 +282,14 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
     );
   }
 
-  getLinkedWithRecipientByRange(rangeColumnName: string, pubkey: string, from: number, to: number): Promise<DBTx[]> {
-    return this.findEntities(`SELECT * FROM txs 
+  getLinkedWithRecipientByRange(
+    rangeColumnName: string,
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
         WHERE written 
         AND (
             recipient = ?
@@ -241,12 +297,13 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
         )
         AND ${rangeColumnName} >= ? 
         AND ${rangeColumnName} <= ?`,
-        [pubkey, pubkey, `%${pubkey}%`, from, to]
+      [pubkey, pubkey, `%${pubkey}%`, from, to]
     );
   }
 
   getPendingWithIssuer(pubkey: string): Promise<DBTx[]> {
-    return this.findEntities(`SELECT * FROM txs 
+    return this.findEntities(
+      `SELECT * FROM txs 
         WHERE NOT written
         AND (
             issuer = ? 
@@ -257,7 +314,8 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
   }
 
   getPendingWithRecipient(pubkey: string): Promise<DBTx[]> {
-    return this.findEntities(`SELECT * FROM txs 
+    return this.findEntities(
+      `SELECT * FROM txs 
         WHERE NOT written 
         AND (
             recipient = ?
@@ -268,7 +326,7 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
   }
 
   async existsByHash(hash: string): Promise<boolean> {
-    return (await this.countBy('hash', hash)) > 0;
+    return (await this.countBy("hash", hash)) > 0;
   }
 
   async getTX(hash: string): Promise<DBTx> {
@@ -298,7 +356,10 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
     // Delete by slice of 500 items (because SQLite IN operator is limited)
     while (i < hashArray.length - 1) {
       const slice = hashArray.slice(i, i + 500);
-      await this.driver.sqlWrite(`DELETE FROM txs WHERE hash IN (${slice.map(_ => '?')})`, slice);
+      await this.driver.sqlWrite(
+        `DELETE FROM txs WHERE hash IN (${slice.map((_) => "?")})`,
+        slice
+      );
       i += 500;
     }
   }
diff --git a/app/lib/dal/sqliteDAL/MetaDAL.ts b/app/lib/dal/sqliteDAL/MetaDAL.ts
index 4f4672c8d29e53362701289cb6a12ac156c4d0f6..ecb5c7a573bca8ddf2244a442d60738cea0c5ed1 100644
--- a/app/lib/dal/sqliteDAL/MetaDAL.ts
+++ b/app/lib/dal/sqliteDAL/MetaDAL.ts
@@ -16,10 +16,10 @@ import { SQLiteDriver } from "../drivers/SQLiteDriver";
 import { ConfDTO } from "../../dto/ConfDTO";
 import { TransactionDTO } from "../../dto/TransactionDTO";
 import { IdentityDAL } from "./IdentityDAL";
-import {SqliteTransactions} from "../indexDAL/sqlite/SqliteTransactions";
-import {Directory} from "../../system/directory";
+import { SqliteTransactions } from "../indexDAL/sqlite/SqliteTransactions";
+import { Directory } from "../../system/directory";
 
-const constants = require('../../constants');
+const constants = require("../../constants");
 const logger = require("../../logger").NewLogger("metaDAL");
 
 export interface DBMeta {
@@ -30,8 +30,10 @@ export interface DBMeta {
 export class MetaDAL extends AbstractSQLite<DBMeta> {
   driverCopy: SQLiteDriver;
 
-  constructor(driver: SQLiteDriver,
-              private getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
+  constructor(
+    driver: SQLiteDriver,
+    private getSqliteDB: (dbName: string) => Promise<SQLiteDriver>
+  ) {
     super(
       driver,
       "meta",
@@ -193,20 +195,18 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
     25: async () => {},
 
     // Drop old table 'txs' (replaced by a file 'txs.db')
-    26: async() => {
-      await this.exec("BEGIN;" +
-          "DROP TABLE IF EXISTS txs;" +
-          "COMMIT;")
+    26: async () => {
+      await this.exec("BEGIN;" + "DROP TABLE IF EXISTS txs;" + "COMMIT;");
     },
 
     // Add columns 'issuer' and 'recipient' in transaction table - see issue #1442
-    27: async() => {
+    27: async () => {
       const txsDriver = await this.getSqliteDB("txs.db");
       const txsDAL = new MetaDAL(txsDriver, this.getSqliteDB);
 
       // Drop unused indices
       await txsDAL.exec(
-          "BEGIN;" +
+        "BEGIN;" +
           "DROP INDEX IF EXISTS idx_txs_locktime;" +
           "DROP INDEX IF EXISTS idx_txs_version;" +
           "DROP INDEX IF EXISTS idx_txs_currency;" +
@@ -224,17 +224,16 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
       try {
         await txsDAL.exec(
           "BEGIN;" +
-          "ALTER TABLE txs ADD COLUMN issuer VARCHAR(50) NULL;" +
-          "ALTER TABLE txs ADD COLUMN recipient VARCHAR(50) NULL;" +
-          "UOPDATE txs SET issuer = SUBSTR(issuers, 2, LENGTH(issuers) - 4) WHERE issuer IS NULL AND issuers NOT LIKE '%,%';" +
-          "UOPDATE txs SET recipient = SUBSTR(recipients, 2, LENGTH(recipients) - 4) WHERE recipient IS NULL AND recipients NOT LIKE '%,%';" +
-          "COMMIT;"
+            "ALTER TABLE txs ADD COLUMN issuer VARCHAR(50) NULL;" +
+            "ALTER TABLE txs ADD COLUMN recipient VARCHAR(50) NULL;" +
+            "UPDATE txs SET issuer = SUBSTR(issuers, 2, LENGTH(issuers) - 4) WHERE issuer IS NULL AND issuers NOT LIKE '%,%';" +
+            "UPDATE txs SET recipient = SUBSTR(recipients, 2, LENGTH(recipients) - 4) WHERE recipient IS NULL AND recipients NOT LIKE '%,%';" +
+            "COMMIT;"
         );
-      }
-      catch(err) {
+      } catch (err) {
         // Silent: if column already exists
       }
-    }
+    },
   };
 
   async init() {
@@ -271,7 +270,11 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
   async upgradeDatabase(conf: ConfDTO) {
     let version = await this.getVersion();
     while (this.migrations[version]) {
-      logger.trace(`Upgrade database... (patch ${version}/${constants.CURRENT_DB_VERSION - 1})`);
+      logger.trace(
+        `Upgrade database... (patch ${version}/${
+          constants.CURRENT_DB_VERSION - 1
+        })`
+      );
 
       await this.executeMigration(this.migrations[version], conf);
       // Version increment
@@ -286,7 +289,7 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
 
   async getVersion() {
     try {
-      const {version} = await this.getRow();
+      const { version } = await this.getRow();
       return version;
     } catch (e) {
       // Insert zero, as first version
diff --git a/app/lib/db/DBTx.ts b/app/lib/db/DBTx.ts
index bcb1954decbbc06e7639d84be1a177a7ae96a719..b238ac831cdf85433e5db66cf6997cc010249f30 100644
--- a/app/lib/db/DBTx.ts
+++ b/app/lib/db/DBTx.ts
@@ -48,11 +48,12 @@ export class DBTx {
     dbTx.output_amount = tx.output_amount;
 
     // Computed columns (unique issuer and/or recipient)
-    dbTx.issuer = (dbTx.issuers.length === 1) ? dbTx.issuers[0] : null;
-    const recipients = !dbTx.issuer ? dbTx.recipients : dbTx.recipients.filter(r => r !== dbTx.issuer);
-    dbTx.recipient = (recipients.length === 1) ? recipients[0] : null;
+    dbTx.issuer = dbTx.issuers.length === 1 ? dbTx.issuers[0] : null;
+    const recipients = !dbTx.issuer
+      ? dbTx.recipients
+      : dbTx.recipients.filter((r) => r !== dbTx.issuer);
+    dbTx.recipient = recipients.length === 1 ? recipients[0] : null;
 
     return dbTx;
   }
-
 }
diff --git a/app/lib/dto/PeerDTO.ts b/app/lib/dto/PeerDTO.ts
index 62e70afdf35260284e01f08e40bcd054875a78f8..7dae1e7b84b08469de0f54a84af87553fefb7ba8 100644
--- a/app/lib/dto/PeerDTO.ts
+++ b/app/lib/dto/PeerDTO.ts
@@ -381,6 +381,6 @@ export class PeerDTO implements Cloneable {
   }
 
   static isBMA(endpoint: string) {
-    return endpoint && !!endpoint.match(/^(BASIC_MERKLED_API|BMAS)/) || false;
+    return (endpoint && !!endpoint.match(/^(BASIC_MERKLED_API|BMAS)/)) || false;
   }
 }
diff --git a/app/lib/dto/TransactionDTO.ts b/app/lib/dto/TransactionDTO.ts
index 618488046ac509dcd55666a6fe5784213f96e506..e107470334ad275bfbb5132273e8d8daef2d1cd4 100644
--- a/app/lib/dto/TransactionDTO.ts
+++ b/app/lib/dto/TransactionDTO.ts
@@ -14,7 +14,7 @@
 import { hashf } from "../common";
 import { Cloneable } from "./Cloneable";
 import { verify } from "../../../neon/lib";
-import {CommonConstants} from "../common-libs/constants";
+import { CommonConstants } from "../common-libs/constants";
 
 export interface BaseDTO {
   base: number;
@@ -168,10 +168,15 @@ export class TransactionDTO implements Cloneable {
     return this.outputs.reduce((res, output) => {
       let match: any;
       const recipients: string[] = [];
-      while (output && (match = CommonConstants.TRANSACTION.OUTPUT_CONDITION_SIG_PUBKEY.exec(output)) !== null) {
+      while (
+        output &&
+        (match = CommonConstants.TRANSACTION.OUTPUT_CONDITION_SIG_PUBKEY.exec(
+          output
+        )) !== null
+      ) {
         const pub = match[1] as string;
         if (!res.includes(pub) && !recipients.includes(pub)) {
-          recipients.push(pub)
+          recipients.push(pub);
         }
         output = output.substring(match.index + match[0].length);
       }
diff --git a/app/modules/bma/index.ts b/app/modules/bma/index.ts
index 73f4f5d5f7706a31b64665e575824e9fcd50a851..92d66a56b93d42bbfa48f0413ee1f31d3fdb6196 100644
--- a/app/modules/bma/index.ts
+++ b/app/modules/bma/index.ts
@@ -212,9 +212,7 @@ export const BmaDependency = {
           }
         }
         if (!conf.nobma) {
-          server.addEndpointsDefinitions(async () =>
-            getEndpoint(conf)
-          );
+          server.addEndpointsDefinitions(async () => getEndpoint(conf));
           server.addWrongEndpointFilter((endpoints: string[]) =>
             getWrongEndpoints(endpoints, server.conf.pair.pub)
           );
@@ -246,10 +244,14 @@ async function getWrongEndpoints(endpoints: string[], selfPubkey: string) {
       .map(async (ep: string) => {
         const peer = PeerDTO.fromJSONObject({ endpoints: [ep] });
         try {
-          const protocol = ep.startsWith("BMAS") || peer.getPort() == 443 ? "https" : "http";
-          const answer = await rp(protocol + "://" + peer.getURL() + "/network/peering", {
-            json: true,
-          });
+          const protocol =
+            ep.startsWith("BMAS") || peer.getPort() == 443 ? "https" : "http";
+          const answer = await rp(
+            protocol + "://" + peer.getURL() + "/network/peering",
+            {
+              json: true,
+            }
+          );
           if (!answer || answer.pubkey != selfPubkey) {
             throw Error("Not same pubkey as local instance");
           }
@@ -257,7 +259,7 @@ async function getWrongEndpoints(endpoints: string[], selfPubkey: string) {
           wrongs.push(ep);
         }
       })
-    );
+  );
   return wrongs;
 }
 
diff --git a/app/modules/crawler/index.ts b/app/modules/crawler/index.ts
index 7257de8c439007da6776aa0479bd846863b7e207..0ae24bd3661b5d108fe1d6593159d13c90078a11 100644
--- a/app/modules/crawler/index.ts
+++ b/app/modules/crawler/index.ts
@@ -309,7 +309,11 @@ export const CrawlerDependency = {
                 ? [
                     {
                       endpoints: [
-                        [fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API", fromHost, fromPort].join(" "),
+                        [
+                          fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API",
+                          fromHost,
+                          fromPort,
+                        ].join(" "),
                       ],
                     },
                   ]
@@ -358,7 +362,11 @@ export const CrawlerDependency = {
           const { host, port } = extractHostPort(from);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             const fromHost = peer.getHostPreferDNS();
             const fromPort = peer.getPort();
@@ -405,7 +413,11 @@ export const CrawlerDependency = {
           const { host, port } = extractHostPort(from);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             const fromHost = peer.getHostPreferDNS();
             const fromPort = peer.getPort();
@@ -459,7 +471,11 @@ export const CrawlerDependency = {
           const { host: toHost, port: toPort } = extractHostPort(target);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             logger.info("Looking at %s...", source);
             try {
@@ -508,7 +524,9 @@ export const CrawlerDependency = {
           const { host, port } = extractHostPort(source);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API"].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API"].join(" "),
+              ],
             });
             logger.info("Looking at %s...", source);
             try {
@@ -752,7 +770,11 @@ export const CrawlerDependency = {
                 ? [
                     {
                       endpoints: [
-                        [fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API", fromHost, fromPort].join(" "),
+                        [
+                          fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API",
+                          fromHost,
+                          fromPort,
+                        ].join(" "),
                       ],
                     },
                   ]
diff --git a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts
index e3f931b632116bb583b801fbf3638db2926f6aac..0dff93c71b2f92fe9c1a3eaf3aafc3e8b8d4c3eb 100644
--- a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts
+++ b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts
@@ -150,7 +150,7 @@ export class RemoteSynchronizer extends AbstractSynchronizer {
       const host = access.host;
       const port = access.port;
       const path = access.path;
-      logger.info(`Connecting to address ${host}:${port}${path||''}...`);
+      logger.info(`Connecting to address ${host}:${port}${path || ""}...`);
 
       // If we know this is a WS2P connection, don't try BMA
       if (access.isWS2P !== true) {
@@ -158,16 +158,18 @@ export class RemoteSynchronizer extends AbstractSynchronizer {
           const contacter = await connect(
             PeerDTO.fromJSONObject({
               endpoints: [
-                [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") +
-                (path ? (' ' + path) : '')
+                [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ) + (path ? " " + path : ""),
               ],
             }),
             3000
           );
           peering = await contacter.getPeer();
           api = new BMARemoteContacter(contacter);
-          endpoint = [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") +
-            (path ? (' ' + path) : '');
+          endpoint =
+            [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") +
+            (path ? " " + path : "");
         } catch (e) {}
       }
 
@@ -176,7 +178,7 @@ export class RemoteSynchronizer extends AbstractSynchronizer {
         const pair = new Key(keypair.pub, keypair.sec);
         const connection = WS2PConnection.newConnectionToAddress(
           1,
-          `ws://${host}:${port}${path || ''}`,
+          `ws://${host}:${port}${path || ""}`,
           new (class SyncMessageHandler implements WS2PMessageHandler {
             async answerToRequest(
               json: any,
diff --git a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts
index 9abc78f271857c6420c73085376f43e5efa915ec..242ecf2b1ea0fef3d543b7e43e4eeb2ee2e5a7b3 100644
--- a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts
+++ b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts
@@ -90,7 +90,6 @@ export class GlobalIndexStream extends Duplex {
   private numberOfChunksToDownload: number;
   private memToCopyDone = false;
 
-
   constructor(
     private conf: ConfDTO,
     private dal: FileDAL,
@@ -442,25 +441,25 @@ export class GlobalIndexStream extends Duplex {
       // if cautious, use a save (insert or update)
       if (this.cautious) {
         await Promise.all(
-            blocks.map((block) =>
-                this.dal.saveTxsInFiles(
-                    block.transactions,
-                    block.number,
-                    block.medianTime
-                )
+          blocks.map((block) =>
+            this.dal.saveTxsInFiles(
+              block.transactions,
+              block.number,
+              block.medianTime
             )
+          )
         );
       }
       // If not cautious: use insert only
       else {
         await Promise.all(
-            blocks.map((block) =>
-                this.dal.insertTxsInFiles(
-                    block.transactions,
-                    block.number,
-                    block.medianTime
-                )
+          blocks.map((block) =>
+            this.dal.insertTxsInFiles(
+              block.transactions,
+              block.number,
+              block.medianTime
             )
+          )
         );
       }
     }
diff --git a/app/modules/prover/index.ts b/app/modules/prover/index.ts
index 2d4a3d1ddcfffe4a6a3ff74af3223f46854675fb..80336f87478ce87c35f6e482722d3af601d214cb 100644
--- a/app/modules/prover/index.ts
+++ b/app/modules/prover/index.ts
@@ -351,7 +351,13 @@ function proveAndSend(
               next();
             } else {
               const peer = PeerDTO.fromJSONObject({
-                endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")],
+                endpoints: [
+                  [
+                    port == "443" ? "BMAS" : "BASIC_MERKLED_API",
+                    host,
+                    port,
+                  ].join(" "),
+                ],
               });
               program.show && console.log(proven.getRawSigned());
               logger.info("Posted block " + proven.getRawSigned());
diff --git a/app/service/PeeringService.ts b/app/service/PeeringService.ts
index aa8a7ca60ffca207ff4c863d54a6ed920bec025e..daec3e5899f338a9203fc0650cd3c980816870bb 100755
--- a/app/service/PeeringService.ts
+++ b/app/service/PeeringService.ts
@@ -76,10 +76,9 @@ export class PeeringService {
   async mirrorBMAEndpoints() {
     const localPeer = await this.peer();
     const localEndpoints = await this.server.getEndpoints();
-    return this.getOtherEndpoints(
-      localPeer.endpoints,
-      localEndpoints
-    ).filter(PeerDTO.isBMA);
+    return this.getOtherEndpoints(localPeer.endpoints, localEndpoints).filter(
+      PeerDTO.isBMA
+    );
   }
 
   checkPeerSignature(p: PeerDTO) {
@@ -363,8 +362,8 @@ export class PeeringService {
       const ws2pAccess = PeerDTO.fromJSONObject(p2).getFirstNonTorWS2P();
       if (ws2pAccess) {
         logger.info(
-          `WS2P access: ${ws2pAccess.host}:${ws2pAccess.port}`
-          + (ws2pAccess.path ? (" " + ws2pAccess.path) : "")
+          `WS2P access: ${ws2pAccess.host}:${ws2pAccess.port}` +
+            (ws2pAccess.path ? " " + ws2pAccess.path : "")
         );
       }
       logger.debug(