From a12818a763fc38b10634af3c96e44ecfa593f374 Mon Sep 17 00:00:00 2001
From: Benoit Lavenier <benoit.lavenier@e-is.pro>
Date: Tue, 6 Jun 2023 18:43:20 +0200
Subject: [PATCH] fix(dal): Remove unused GenericDAO.triggerInit()

fix(ts): refactor code for NodeJS 18 compliance (e.g. typed Promises and other lint errors)
enh(dal): add disableCheckConstraints() and enableCheckConstraints()
---
 app/cli.ts | 4 +-
 app/lib/common-libs/manual-promise.ts | 2 +-
 app/lib/common-libs/timeout-promise.ts | 4 +-
 app/lib/dal/drivers/LevelDBDriver.ts | 4 +-
 app/lib/dal/drivers/SQLiteDriver.ts | 8 +--
 app/lib/dal/fileDAL.ts | 47 ++++++++---------
 app/lib/dal/indexDAL/abstract/GenericDAO.ts | 5 --
 app/lib/dal/indexDAL/abstract/PeerDAO.ts | 5 --
 app/lib/dal/indexDAL/abstract/TxsDAO.ts | 4 ++
 app/lib/dal/indexDAL/abstract/WalletDAO.ts | 5 --
 .../dal/indexDAL/leveldb/LevelDBDividend.ts | 3 --
 app/lib/dal/indexDAL/leveldb/LevelDBTable.ts | 10 ++--
 app/lib/dal/indexDAL/sqlite/SqliteDividend.ts | 2 -
 app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts | 2 -
 app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts | 2 -
 app/lib/dal/indexDAL/sqlite/SqlitePeers.ts | 2 -
 app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts | 4 +-
 app/lib/dal/indexDAL/sqlite/SqliteTable.ts | 33 ++++++++----
 .../dal/indexDAL/sqlite/SqliteTransactions.ts | 4 +-
 app/lib/dal/indexDAL/sqlite/SqliteWallet.ts | 2 -
 app/lib/dto/ConfDTO.ts | 4 +-
 app/lib/dto/PeerDTO.ts | 2 +-
 app/lib/indexer.ts | 2 +-
 app/lib/streams/multicaster.ts | 2 +-
 app/lib/system/directory.ts | 4 +-
 app/lib/wizard.ts | 2 +-
 app/modules/bma/index.ts | 26 ++++++----
 app/modules/bma/lib/upnp.ts | 2 +-
 app/modules/config.ts | 5 +-
 app/modules/crawler/index.ts | 34 ++++++++++---
 app/modules/crawler/lib/req2fwd.ts | 2 +-
 app/modules/crawler/lib/sync.ts | 8 ++-
 .../crawler/lib/sync/RemoteSynchronizer.ts | 12 ++---
 .../crawler/lib/sync/v2/GlobalIndexStream.ts | 31 +++++-------
 .../crawler/lib/sync/v2/ValidatorStream.ts | 2 +-
 app/modules/daemon.ts | 8 +--
 app/modules/keypair/index.ts | 4 +-
 app/modules/keypair/lib/scrypt.ts | 37 +++++++-------
 app/modules/plugin.ts | 10 ++--
 app/modules/prover/index.ts | 12 +++--
 app/modules/prover/lib/permanentProver.ts | 50 ++++++++++---------
 app/modules/prover/lib/proof.ts | 4 +-
 app/modules/prover/lib/prover.ts | 2 +-
 app/modules/upnp-provider.ts | 6 +--
 app/modules/ws2p/lib/WS2PConnection.ts | 8 +--
 app/modules/ws2p/lib/WS2PServer.ts | 2 +-
 app/service/IdentityService.ts | 2 +-
 app/service/PeeringService.ts | 11 ++--
 server.ts | 12 +++--
 test/dal/basic-dal-tests.ts | 4 ++
 test/fast/modules/ws2p/single_write.ts | 4 +-
 test/fast/prover/prover-pow-1-cluster.ts | 2 +-
 .../fork-resolution/register-fork-blocks.ts | 2 +-
 test/integration/misc/http-api.ts | 2 +-
 .../proof-of-work/continuous-proof.ts | 6 +--
 test/integration/tools/test-until.ts | 2 +-
 test/integration/tools/toolbox.ts | 12 ++---
 57 files changed, 260 insertions(+), 232 deletions(-)

diff --git a/app/cli.ts b/app/cli.ts
index f0bb646c8..ca3e8db1c 100644
--- a/app/cli.ts
+++ b/app/cli.ts
@@ -34,10 +34,10 @@ export const ExecuteCommand = () => {
 // Callback for command rejection
 let onReject: any = () =>
- Promise.reject(Error("Uninitilized rejection throw"));
+ Promise.reject(Error("Uninitialized rejection throw"));
 // Command execution promise
- const currentCommand = new Promise((resolve, reject) => {
+ const currentCommand = new Promise<void>((resolve, reject) => {
 onResolve = resolve;
 onReject = reject;
 });
diff --git a/app/lib/common-libs/manual-promise.ts b/app/lib/common-libs/manual-promise.ts
index 
24d6382aa..2a7187ff8 100644 --- a/app/lib/common-libs/manual-promise.ts +++ b/app/lib/common-libs/manual-promise.ts @@ -14,7 +14,7 @@ export interface ManualPromise<T> extends Querable<T> { export function newManualPromise<T>() { let resolveCb: (data: T) => void = () => {}; let rejectCb: (error: Error) => void = () => {}; - const p = new Promise((res, rej) => { + const p = new Promise<T>((res, rej) => { resolveCb = res; rejectCb = rej; }); diff --git a/app/lib/common-libs/timeout-promise.ts b/app/lib/common-libs/timeout-promise.ts index eab7d91f9..08c6cce2b 100644 --- a/app/lib/common-libs/timeout-promise.ts +++ b/app/lib/common-libs/timeout-promise.ts @@ -12,7 +12,7 @@ // GNU Affero General Public License for more details. export function newRejectTimeoutPromise(timeout: number) { - return new Promise((res, rej) => { + return new Promise<void>((res, rej) => { setTimeout(rej, timeout); }); } @@ -21,7 +21,7 @@ export function newResolveTimeoutPromise<T>( timeout: number, value: T ): Promise<T> { - return new Promise((res) => { + return new Promise<T>((res) => { setTimeout(() => res(value), timeout); }); } diff --git a/app/lib/dal/drivers/LevelDBDriver.ts b/app/lib/dal/drivers/LevelDBDriver.ts index 7959c3f1e..0fd4c0344 100644 --- a/app/lib/dal/drivers/LevelDBDriver.ts +++ b/app/lib/dal/drivers/LevelDBDriver.ts @@ -20,7 +20,7 @@ import * as memdown from "memdown"; export const LevelDBDriver = { newMemoryInstance: (): Promise<LevelUp> => { const impl: any = memdown.default(); - return new Promise((res, rej) => { + return new Promise<LevelUp<any>>((res, rej) => { const db: LevelUp = levelup.default(impl, undefined, (err: Error) => { if (err) return rej(err); res(db); @@ -30,7 +30,7 @@ export const LevelDBDriver = { newFileInstance: (path: string): Promise<LevelUp> => { const impl: any = leveldown.default(path); - return new Promise((res, rej) => { + return new Promise<LevelUp<any>>((res, rej) => { const db: LevelUp = levelup.default(impl, undefined, (err: Error) => { if (err) return rej(err); res(db); diff --git a/app/lib/dal/drivers/SQLiteDriver.ts b/app/lib/dal/drivers/SQLiteDriver.ts index 404371634..0cc970038 100644 --- a/app/lib/dal/drivers/SQLiteDriver.ts +++ b/app/lib/dal/drivers/SQLiteDriver.ts @@ -41,7 +41,7 @@ export class SQLiteDriver { // Force case sensitiveness on LIKE operator const sql = "PRAGMA case_sensitive_like=ON"; - await new Promise<any>((resolve, reject) => + await new Promise<void>((resolve, reject) => sqlite.exec(sql, (err: any) => { if (err) return reject( @@ -103,8 +103,8 @@ export class SQLiteDriver { this.logger.debug('Database "%s" removed', this.path); } - get closed() { - return this.dbPromise; + isClosed() { + return !this.dbPromise; } async closeConnection(): Promise<void> { @@ -115,7 +115,7 @@ export class SQLiteDriver { if (process.platform === "win32") { db.open; // For an unknown reason, we need this line. 
} - await new Promise((resolve, reject) => { + await new Promise<void>((resolve, reject) => { this.logger.debug('Closing SQLite database "%s"...', this.path); db.on("close", () => { this.logger.info('Database "%s" closed.', this.path); diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts index 2d7915db8..2d04a6163 100644 --- a/app/lib/dal/fileDAL.ts +++ b/app/lib/dal/fileDAL.ts @@ -129,8 +129,7 @@ export class FileDAL implements ServerDAO { sindexDAL: SIndexDAO; cindexDAL: CIndexDAO; dividendDAL: DividendDAO; - newDals: { [k: string]: Initiable }; - private dals: (PeerDAO | WalletDAO | GenericDAO<any>)[]; + dals: { [k: string]: Initiable }; loadConfHook: (conf: ConfDTO) => Promise<void>; saveConfHook: (conf: ConfDTO) => Promise<ConfDTO>; @@ -174,7 +173,7 @@ export class FileDAL implements ServerDAO { this.cindexDAL = new LevelDBCindex(getLevelDB); this.dividendDAL = new LevelDBDividend(getLevelDB); - this.newDals = { + this.dals = { powDAL: this.powDAL, metaDAL: this.metaDAL, blockDAL: this.blockDAL, @@ -196,24 +195,9 @@ export class FileDAL implements ServerDAO { async init(conf: ConfDTO) { this.wotb = this.params.wotbf(); - this.dals = [ - this.blockDAL, - this.txsDAL, - this.peerDAL, - this.walletDAL, - this.bindexDAL, - this.mindexDAL, - this.iindexDAL, - this.sindexDAL, - this.cindexDAL, - this.dividendDAL, - ]; - for (const indexDAL of this.dals) { - indexDAL.triggerInit(); - } - const dalNames = Underscore.keys(this.newDals); + const dalNames = Underscore.keys(this.dals); for (const dalName of dalNames) { - const dal = this.newDals[dalName]; + const dal = this.dals[dalName]; await dal.init(); } logger.debug("Upgrade database..."); @@ -229,6 +213,23 @@ export class FileDAL implements ServerDAO { } } + generateUpgradeSql() { + // Make sure to always renable constraints (a.g. if the last sync failed, it can be still disabled) + return "PRAGMA ignore_check_constraints = true;"; + } + + async disableCheckConstraints() { + logger.info("Disabling database check constraints..."); + await this.metaDAL.exec("PRAGMA ignore_check_constraints = true;"); + await this.txsDAL.disableCheckConstraints(); + } + + async enableCheckConstraints() { + logger.info("Enabling database check constraints..."); + await this.metaDAL.exec("PRAGMA ignore_check_constraints = false;"); + await this.txsDAL.enableCheckConstraints(); + } + getDBVersion() { return this.metaDAL.getVersion(); } @@ -1583,14 +1584,14 @@ export class FileDAL implements ServerDAO { } async cleanCaches() { - await Underscore.values(this.newDals).map( + await Underscore.values(this.dals).map( (dal: Initiable) => dal.cleanCache && dal.cleanCache() ); } async close() { await Promise.all( - Underscore.values(this.newDals).map(async (dal: Initiable) => { + Underscore.values(this.dals).map(async (dal: Initiable) => { dal.cleanCache(); await dal.close(); }) @@ -1604,7 +1605,7 @@ export class FileDAL implements ServerDAO { } getLogContent(linesQuantity: number) { - return new Promise((resolve, reject) => { + return new Promise<string[]>((resolve, reject) => { try { let lines: string[] = [], i = 0; diff --git a/app/lib/dal/indexDAL/abstract/GenericDAO.ts b/app/lib/dal/indexDAL/abstract/GenericDAO.ts index b7df7ad28..f3f9651e6 100644 --- a/app/lib/dal/indexDAL/abstract/GenericDAO.ts +++ b/app/lib/dal/indexDAL/abstract/GenericDAO.ts @@ -1,11 +1,6 @@ import { Initiable } from "../../sqliteDAL/Initiable"; export interface GenericDAO<T> extends Initiable { - /** - * Trigger the initialization of the DAO. Called when the underlying DB is ready. 
- */ - triggerInit(): void; - /** * Make a generic find with some ordering. * @param criterion Criterion object, LokiJS's find object format. diff --git a/app/lib/dal/indexDAL/abstract/PeerDAO.ts b/app/lib/dal/indexDAL/abstract/PeerDAO.ts index 0432e2c15..b85f893e5 100644 --- a/app/lib/dal/indexDAL/abstract/PeerDAO.ts +++ b/app/lib/dal/indexDAL/abstract/PeerDAO.ts @@ -2,11 +2,6 @@ import { DBPeer } from "../../../db/DBPeer"; import { Initiable } from "../../sqliteDAL/Initiable"; export interface PeerDAO extends Initiable { - /** - * Trigger the initialization of the DAO. Called when the underlying DB is ready. - */ - triggerInit(): void; - listAll(): Promise<DBPeer[]>; withUPStatus(): Promise<DBPeer[]>; diff --git a/app/lib/dal/indexDAL/abstract/TxsDAO.ts b/app/lib/dal/indexDAL/abstract/TxsDAO.ts index 93854de3e..44a414484 100644 --- a/app/lib/dal/indexDAL/abstract/TxsDAO.ts +++ b/app/lib/dal/indexDAL/abstract/TxsDAO.ts @@ -4,6 +4,10 @@ import { SandBox } from "../../sqliteDAL/SandBox"; import { DBTx } from "../../../db/DBTx"; export interface TxsDAO extends GenericDAO<DBTx> { + disableCheckConstraints(): Promise<void>; + + enableCheckConstraints(): Promise<void>; + trimExpiredNonWrittenTxs(limitTime: number): Promise<void>; getAllPending(versionMin: number): Promise<DBTx[]>; diff --git a/app/lib/dal/indexDAL/abstract/WalletDAO.ts b/app/lib/dal/indexDAL/abstract/WalletDAO.ts index 3e076f2eb..00e047aba 100644 --- a/app/lib/dal/indexDAL/abstract/WalletDAO.ts +++ b/app/lib/dal/indexDAL/abstract/WalletDAO.ts @@ -2,11 +2,6 @@ import { Initiable } from "../../sqliteDAL/Initiable"; import { DBWallet } from "../../../db/DBWallet"; export interface WalletDAO extends Initiable { - /** - * Trigger the initialization of the DAO. Called when the underlying DB is ready. - */ - triggerInit(): void; - /** * Saves a wallet. 
* @param {DBWallet} wallet diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts b/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts index 4d1afc8e8..161ddca38 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts @@ -32,9 +32,6 @@ export class LevelDBDividend extends LevelDBTable<DividendEntry> */ cleanCache(): void {} - - triggerInit(): void {} - async init(): Promise<void> { await super.init(); this.indexForTrimming = new LevelDBTable<string[]>( diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts b/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts index c6b1f6de2..f9d018a11 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts @@ -12,8 +12,6 @@ export class LevelDBTable<T> { cleanCache(): void {} - triggerInit(): void {} - async close() { await this.db.close(); } @@ -74,7 +72,7 @@ export class LevelDBTable<T> { public async count(options?: AbstractIteratorOptions) { let count = 0; - await new Promise((res) => { + await new Promise<void>((res) => { this.db .createReadStream(options) .on("data", () => count++) @@ -87,7 +85,7 @@ export class LevelDBTable<T> { callback: (entry: T) => void, options?: AbstractIteratorOptions ) { - await new Promise((res) => { + await new Promise<void>((res) => { this.db .createReadStream(options) .on("data", (data) => callback(JSON.parse(String(data.value)))) @@ -99,7 +97,7 @@ export class LevelDBTable<T> { callback: (entry: { key: string; value: T }) => void, options?: AbstractIteratorOptions ) { - await new Promise((res) => { + await new Promise<void>((res) => { this.db .createReadStream(options) .on("data", (data) => @@ -117,7 +115,7 @@ export class LevelDBTable<T> { options?: AbstractIteratorOptions ) { const ops: Promise<void>[] = []; - await new Promise((res) => { + await new Promise<void>((res) => { this.db .createReadStream(options) .on("data", (data) => diff --git a/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts b/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts index 3152d1174..d64cc48b3 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts @@ -35,8 +35,6 @@ export class SqliteDividend extends SqliteTable<DividendEntry> cleanCache(): void {} - triggerInit(): void {} - /** * INSERT */ diff --git a/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts index d31696ed6..a50fdb041 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts @@ -44,8 +44,6 @@ export class SqliteIIndex extends SqliteTable<IindexEntry> cleanCache(): void {} - triggerInit(): void {} - /** * INSERT */ diff --git a/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts index 6fe3b6230..0d8b60d2b 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts @@ -74,8 +74,6 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> `); } - triggerInit(): void {} - /** * INSERT */ diff --git a/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts b/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts index 3b67bd9fe..50ce41093 100644 --- a/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts +++ b/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts @@ -95,8 +95,6 @@ export class SqlitePeers extends SqliteTable<DBPeer> implements PeerDAO { return peer; } - triggerInit(): void {} - withUPStatus(): Promise<DBPeer[]> { return this.findEntities("SELECT * FROM peers WHERE status = ?", 
["UP"]); } diff --git a/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts index c208fa380..9fdc1eb5f 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts @@ -46,8 +46,6 @@ export class SqliteSIndex extends SqliteTable<SindexEntry> cleanCache(): void {} - triggerInit(): void {} - /** * INSERT */ @@ -106,7 +104,7 @@ export class SqliteSIndex extends SqliteTable<SindexEntry> private async find(sql: string, params: any[]): Promise<SindexEntry[]> { return (await this.driver.sqlRead(sql, params)).map((r) => { return { - index: "CINDEX", + index: "SINDEX", op: r.op, written_on: r.written_on, writtenOn: r.writtenOn, diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts index 756d367c5..a0ee566e0 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts @@ -4,7 +4,7 @@ import { SqliteNodeIOManager } from "./SqliteNodeIOManager"; import { SQLiteDriver } from "../../drivers/SQLiteDriver"; export class SqliteTable<T> { - private readonly pdriver: Promise<SQLiteDriver>; + private readonly _driverPromise: Promise<SQLiteDriver>; protected driver: SqliteNodeIOManager<T>; protected constructor( @@ -14,11 +14,11 @@ export class SqliteTable<T> { }, getSqliteDB: (dbName: string) => Promise<SQLiteDriver> ) { - this.pdriver = getSqliteDB(`${name}.db`); + this._driverPromise = getSqliteDB(`${name}.db`); } async init(): Promise<void> { - this.driver = new SqliteNodeIOManager(await this.pdriver, "sindex"); + this.driver = new SqliteNodeIOManager(await this._driverPromise, this.name); await this.driver.sqlExec(` BEGIN; ${this.generateCreateTable()}; @@ -32,11 +32,20 @@ export class SqliteTable<T> { await this.driver.close(); } + async disableCheckConstraints(): Promise<void> { + await this.driver.sqlExec("PRAGMA ignore_check_constraints = true;"); + } + + async enableCheckConstraints(): Promise<void> { + await this.driver.sqlExec("PRAGMA ignore_check_constraints = false;"); + } + generateCreateTable() { let sql = `CREATE TABLE IF NOT EXISTS ${this.name} (`; const fields = this.keys() - .map((fieldName) => { - const f = this.fields[fieldName] as SqlFieldDefinition; + .map((key) => { + const fieldName = String(key); + const f = this.fields[key] as SqlFieldDefinition; switch (f.type) { case "BOOLEAN": return `\n${fieldName} BOOLEAN${f.nullable ? 
" NULL" : ""}`; @@ -72,7 +81,9 @@ export class SqliteTable<T> { return this.keys() .filter((key) => this.fields[key]?.indexed) .map((fieldName) => { - return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${fieldName} ON ${this.name} (${fieldName});\n`; + return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${String( + fieldName + )} ON ${this.name} (${String(fieldName)});\n`; }) .join(""); } @@ -93,12 +104,14 @@ export class SqliteTable<T> { ) { const valuesOfRecord = fieldsToUpdate .map( - (fieldName) => `${fieldName} = ${this.getFieldValue(fieldName, record)}` + (fieldName) => + `${String(fieldName)} = ${this.getFieldValue(fieldName, record)}` ) .join(","); const conditionsOfRecord = whereFields .map( - (fieldName) => `${fieldName} = ${this.getFieldValue(fieldName, record)}` + (fieldName) => + `${String(fieldName)} = ${this.getFieldValue(fieldName, record)}` ) .join(","); await driver.sqlWrite( @@ -206,7 +219,9 @@ export class SqliteTable<T> { async countBy(fieldName: keyof T, fieldValue: any): Promise<number> { return (( await this.driver.sqlRead( - `SELECT COUNT(*) as max FROM ${this.name} WHERE ${fieldName} = ?`, + `SELECT COUNT(*) as max FROM ${this.name} WHERE ${String( + fieldName + )} = ?`, [fieldValue] ) )[0] as any).max; diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts index 0298fd57c..f739136e5 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts @@ -357,7 +357,7 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { while (i < hashArray.length - 1) { const slice = hashArray.slice(i, i + 500); await this.driver.sqlWrite( - `DELETE FROM txs WHERE hash IN (${slice.map((_) => "?").join(", ")})`, + `DELETE FROM txs WHERE hash IN (${slice.map((_) => "?").join(",")})`, slice ); i += 500; @@ -368,8 +368,6 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { return this.driver.sqlWrite("DELETE FROM txs WHERE hash = ?", [hash]); } - triggerInit(): void {} - trimExpiredNonWrittenTxs(limitTime: number): Promise<void> { return this.driver.sqlWrite( "DELETE FROM txs WHERE NOT written AND blockstampTime <= ?", diff --git a/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts b/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts index 3b70811fe..a9f8fb031 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts @@ -23,8 +23,6 @@ export class SqliteWallet extends SqliteTable<DBWallet> implements WalletDAO { cleanCache(): void {} - triggerInit(): void {} - /** * INSERT */ diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts index 7d5d82474..6449d4edd 100644 --- a/app/lib/dto/ConfDTO.ts +++ b/app/lib/dto/ConfDTO.ts @@ -68,8 +68,8 @@ export interface CurrencyConfDTO { export interface KeypairConfDTO { pair: Keypair; oldPair: Keypair | null; - salt: string; - passwd: string; + salt?: string; + passwd?: string; } export interface NetworkConfDTO { diff --git a/app/lib/dto/PeerDTO.ts b/app/lib/dto/PeerDTO.ts index 62e70afdf..7dae1e7b8 100644 --- a/app/lib/dto/PeerDTO.ts +++ b/app/lib/dto/PeerDTO.ts @@ -381,6 +381,6 @@ export class PeerDTO implements Cloneable { } static isBMA(endpoint: string) { - return endpoint && !!endpoint.match(/^(BASIC_MERKLED_API|BMAS)/) || false; + return (endpoint && !!endpoint.match(/^(BASIC_MERKLED_API|BMAS)/)) || false; } } diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts index c691672f4..6a01b03df 100644 --- a/app/lib/indexer.ts +++ 
b/app/lib/indexer.ts @@ -2436,7 +2436,7 @@ export function reduceForDBTrimming<T extends { writtenOn: number }>( export function reduce<T>(records: T[]): T { return records.reduce((obj: T, record) => { - const keys = Object.keys(record) as (keyof T)[]; + const keys = Underscore.keys<T>(record); for (const k of keys) { if (record[k] !== undefined && record[k] !== null) { obj[k] = record[k]; diff --git a/app/lib/streams/multicaster.ts b/app/lib/streams/multicaster.ts index 5afd88cd1..2dd59e92d 100644 --- a/app/lib/streams/multicaster.ts +++ b/app/lib/streams/multicaster.ts @@ -267,7 +267,7 @@ export class Multicaster extends stream.Transform { if (!peer.isReachable()) { return Promise.resolve(); } - return new Promise((resolve, reject) => { + return new Promise<any>((resolve, reject) => { const postReq = request.post( { uri: protocol(peer.getPort()) + "://" + peer.getURL() + uri, diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts index b8fec8b43..3e8e445d6 100644 --- a/app/lib/system/directory.ts +++ b/app/lib/system/directory.ts @@ -95,8 +95,8 @@ class QioFileSystem implements FileSystem { } } else { // Use NodeJS streams for faster writing - let wstream = fs.createWriteStream(file); - await new Promise(async (res, rej) => { + const wstream = fs.createWriteStream(file); + await new Promise<void>(async (res, rej) => { // When done, return wstream.on("close", (err: any) => { if (err) return rej(err); diff --git a/app/lib/wizard.ts b/app/lib/wizard.ts index 1c38b02e1..3ef480391 100644 --- a/app/lib/wizard.ts +++ b/app/lib/wizard.ts @@ -33,7 +33,7 @@ export class Wizard { } function doTasks(todos: string[], conf: ConfDTO) { - return new Promise((res, rej) => { + return new Promise<void>((res, rej) => { async.forEachSeries( todos, function (task: any, callback: any) { diff --git a/app/modules/bma/index.ts b/app/modules/bma/index.ts index 73f4f5d5f..8a9372f76 100644 --- a/app/modules/bma/index.ts +++ b/app/modules/bma/index.ts @@ -89,7 +89,11 @@ export const BmaDependency = { }, config: { - onLoading: async (conf: NetworkConfDTO, program: any, logger: any) => { + onLoading: async ( + conf: Partial<NetworkConfDTO>, + program: any, + logger: any + ) => { // If the usage of BMA hasn't been defined yet if (conf.nobma === undefined) { // Do we have an existing BMA conf? @@ -186,7 +190,7 @@ export const BmaDependency = { } }, - beforeSave: async (conf: NetworkConfDTO, program: any) => { + beforeSave: async (conf: Partial<NetworkConfDTO>, program: any) => { if (!conf.ipv4) delete conf.ipv4; if (!conf.ipv6) delete conf.ipv6; if (!conf.remoteipv4) delete conf.remoteipv4; @@ -212,9 +216,7 @@ export const BmaDependency = { } } if (!conf.nobma) { - server.addEndpointsDefinitions(async () => - getEndpoint(conf) - ); + server.addEndpointsDefinitions(async () => getEndpoint(conf)); server.addWrongEndpointFilter((endpoints: string[]) => getWrongEndpoints(endpoints, server.conf.pair.pub) ); @@ -246,10 +248,14 @@ async function getWrongEndpoints(endpoints: string[], selfPubkey: string) { .map(async (ep: string) => { const peer = PeerDTO.fromJSONObject({ endpoints: [ep] }); try { - const protocol = ep.startsWith("BMAS") || peer.getPort() == 443 ? "https" : "http"; - const answer = await rp(protocol + "://" + peer.getURL() + "/network/peering", { - json: true, - }); + const protocol = + ep.startsWith("BMAS") || peer.getPort() == 443 ? 
"https" : "http"; + const answer = await rp( + protocol + "://" + peer.getURL() + "/network/peering", + { + json: true, + } + ); if (!answer || answer.pubkey != selfPubkey) { throw Error("Not same pubkey as local instance"); } @@ -257,7 +263,7 @@ async function getWrongEndpoints(endpoints: string[], selfPubkey: string) { wrongs.push(ep); } }) - ); + ); return wrongs; } diff --git a/app/modules/bma/lib/upnp.ts b/app/modules/bma/lib/upnp.ts index 3c5eecb98..50910d13b 100644 --- a/app/modules/bma/lib/upnp.ts +++ b/app/modules/bma/lib/upnp.ts @@ -35,7 +35,7 @@ export const Upnp = async function ( await Q.nbind(client.externalIp, client)(); } catch (err) { if (err && err.message == "timeout") { - throw 'No UPnP gateway found: your node won\'t be reachable from the Internet. Use --noupnp option to avoid this message.'; + throw "No UPnP gateway found: your node won't be reachable from the Internet. Use --noupnp option to avoid this message."; } throw err; } finally { diff --git a/app/modules/config.ts b/app/modules/config.ts index 6776cd045..f5e90b3e0 100644 --- a/app/modules/config.ts +++ b/app/modules/config.ts @@ -93,10 +93,9 @@ module.exports = { const aggregates = Underscore.uniq( lines .map((l) => l.match(/: (\[\w+\](\[\w+\])*)/)) - .filter((l) => l) - .map((l: string[]) => l[1]) + .filter((l) => !!l) + .map((l) => l && ((l[1] as unknown) as string)) ); - console.log(aggregates); const results = aggregates.map((a: string) => { return { name: a, diff --git a/app/modules/crawler/index.ts b/app/modules/crawler/index.ts index 7257de8c4..0ae24bd36 100644 --- a/app/modules/crawler/index.ts +++ b/app/modules/crawler/index.ts @@ -309,7 +309,11 @@ export const CrawlerDependency = { ? [ { endpoints: [ - [fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API", fromHost, fromPort].join(" "), + [ + fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API", + fromHost, + fromPort, + ].join(" "), ], }, ] @@ -358,7 +362,11 @@ export const CrawlerDependency = { const { host, port } = extractHostPort(from); try { const peer = PeerDTO.fromJSONObject({ - endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")], + endpoints: [ + [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join( + " " + ), + ], }); const fromHost = peer.getHostPreferDNS(); const fromPort = peer.getPort(); @@ -405,7 +413,11 @@ export const CrawlerDependency = { const { host, port } = extractHostPort(from); try { const peer = PeerDTO.fromJSONObject({ - endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")], + endpoints: [ + [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join( + " " + ), + ], }); const fromHost = peer.getHostPreferDNS(); const fromPort = peer.getPort(); @@ -459,7 +471,11 @@ export const CrawlerDependency = { const { host: toHost, port: toPort } = extractHostPort(target); try { const peer = PeerDTO.fromJSONObject({ - endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")], + endpoints: [ + [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join( + " " + ), + ], }); logger.info("Looking at %s...", source); try { @@ -508,7 +524,9 @@ export const CrawlerDependency = { const { host, port } = extractHostPort(source); try { const peer = PeerDTO.fromJSONObject({ - endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API"].join(" ")], + endpoints: [ + [port == "443" ? "BMAS" : "BASIC_MERKLED_API"].join(" "), + ], }); logger.info("Looking at %s...", source); try { @@ -752,7 +770,11 @@ export const CrawlerDependency = { ? 
[ { endpoints: [ - [fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API", fromHost, fromPort].join(" "), + [ + fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API", + fromHost, + fromPort, + ].join(" "), ], }, ] diff --git a/app/modules/crawler/lib/req2fwd.ts b/app/modules/crawler/lib/req2fwd.ts index 14bee1a63..befe13ec8 100644 --- a/app/modules/crawler/lib/req2fwd.ts +++ b/app/modules/crawler/lib/req2fwd.ts @@ -63,7 +63,7 @@ export const req2fwd = async ( for (const received of idty.pendingCerts) { const cid = [received.from, iid].join("-"); if (!certs[cid]) { - await new Promise((res) => setTimeout(res, 300)); + await new Promise<void>((res) => setTimeout(res, 300)); certs[cid] = received; const rawCert = rawer.getOfficialCertification({ currency: "g1", diff --git a/app/modules/crawler/lib/sync.ts b/app/modules/crawler/lib/sync.ts index 69d073a23..bf58408e4 100644 --- a/app/modules/crawler/lib/sync.ts +++ b/app/modules/crawler/lib/sync.ts @@ -183,6 +183,9 @@ export class Synchroniser extends stream.Duplex { // We use cautious mode if it is asked, or not particulary asked but blockchain has been started const cautious = askedCautious === true || localNumber >= 0; + // TODO Disable constraints + //if (!cautious) await this.server.dal.disableCheckConstraints(); + const milestonesStream = new ValidatorStream( localNumber, to, @@ -211,7 +214,7 @@ export class Synchroniser extends stream.Duplex { this.watcher ); - await new Promise((res, rej) => { + await new Promise<void>((res, rej) => { milestonesStream .pipe(download) .pipe(localIndexer) @@ -241,6 +244,9 @@ export class Synchroniser extends stream.Duplex { await this.syncStrategy.syncPeers(fullSync, to); } + // TODO Enable constraints + //if (!cautious) await this.server.dal.enableCheckConstraints(); + const syncDuration = Date.now() - syncStartTime; this.watcher.end(syncDuration); this.push({ sync: true }); diff --git a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts index fa3c1099d..028172c49 100644 --- a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts +++ b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts @@ -155,19 +155,17 @@ export class RemoteSynchronizer extends AbstractSynchronizer { // If we know this is a WS2P connection, don't try BMA if (access.isWS2P !== true) { try { + endpoint = + [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") + + (path ? " " + path : ""); const contacter = await connect( PeerDTO.fromJSONObject({ - endpoints: [ - [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") + - (path ? (' ' + path) : '') - ], + endpoints: [endpoint], }), 3000 ); peering = await contacter.getPeer(); api = new BMARemoteContacter(contacter); - endpoint = [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") + - (path ? 
(' ' + path) : ''); } catch (e) {} } @@ -176,7 +174,7 @@ export class RemoteSynchronizer extends AbstractSynchronizer { const pair = new Key(keypair.pub, keypair.sec); const connection = WS2PConnection.newConnectionToAddress( 1, - `ws://${host}:${port}${path || ''}`, + `ws://${host}:${port}${path || ""}`, new (class SyncMessageHandler implements WS2PMessageHandler { async answerToRequest( json: any, diff --git a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts index 9abc78f27..120578daa 100644 --- a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts +++ b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts @@ -90,7 +90,6 @@ export class GlobalIndexStream extends Duplex { private numberOfChunksToDownload: number; private memToCopyDone = false; - constructor( private conf: ConfDTO, private dal: FileDAL, @@ -125,12 +124,6 @@ export class GlobalIndexStream extends Duplex { })(); } - private async injectLoki<T, K extends keyof T>(dal: T, f: K, obj: T[K]) { - // this.mapInjection[f] = dal[f] - // dal[f] = obj - // await (obj as any).triggerInit() - } - readChunk(i: number) {} _read(size: number) { @@ -442,25 +435,25 @@ export class GlobalIndexStream extends Duplex { // if cautious, use a save (insert or update) if (this.cautious) { await Promise.all( - blocks.map((block) => - this.dal.saveTxsInFiles( - block.transactions, - block.number, - block.medianTime - ) + blocks.map((block) => + this.dal.saveTxsInFiles( + block.transactions, + block.number, + block.medianTime ) + ) ); } // If not cautious: use insert only else { await Promise.all( - blocks.map((block) => - this.dal.insertTxsInFiles( - block.transactions, - block.number, - block.medianTime - ) + blocks.map((block) => + this.dal.insertTxsInFiles( + block.transactions, + block.number, + block.medianTime ) + ) ); } } diff --git a/app/modules/crawler/lib/sync/v2/ValidatorStream.ts b/app/modules/crawler/lib/sync/v2/ValidatorStream.ts index 9f2802eb5..26322bac6 100644 --- a/app/modules/crawler/lib/sync/v2/ValidatorStream.ts +++ b/app/modules/crawler/lib/sync/v2/ValidatorStream.ts @@ -74,7 +74,7 @@ export class ValidatorStream extends Readable { } } catch (e) { failures++; - await new Promise((res) => setTimeout(res, 3000)); + await new Promise<void>((res) => setTimeout(res, 3000)); if (failures >= 15) { NewLogger().error( "Could not get a validation from remote blockchain after %s trials. 
Stopping sync.", diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts index 7604e6043..984133fd8 100644 --- a/app/modules/daemon.ts +++ b/app/modules/daemon.ts @@ -129,7 +129,7 @@ module.exports = { constants.NB_INITIAL_LINES_TO_SHOW ); // Never ending command - return new Promise((res) => null); + return new Promise<void>((res) => null); }, }, { @@ -159,7 +159,7 @@ module.exports = { logger.info(">> Server ready!"); - return new Promise(() => null); // Never ending + return new Promise<void>(() => null); // Never ending }, }, ], @@ -173,7 +173,7 @@ function ServerService(server: Server) { } function startDaemon(daemon: any) { - return new Promise((resolve, reject) => + return new Promise<void>((resolve, reject) => daemon.start((err: any) => { if (err) return reject(err); resolve(); @@ -182,7 +182,7 @@ function startDaemon(daemon: any) { } function stopDaemon(daemon: any) { - return new Promise((resolve, reject) => + return new Promise<void>((resolve, reject) => daemon.stop((err: any) => { err && console.error(err); if (err) return reject(err); diff --git a/app/modules/keypair/index.ts b/app/modules/keypair/index.ts index a809f75fd..735b5b03b 100644 --- a/app/modules/keypair/index.ts +++ b/app/modules/keypair/index.ts @@ -156,7 +156,7 @@ export const KeypairDependency = { }, beforeSave: async ( - conf: KeypairConfDTO, + conf: Partial<KeypairConfDTO>, program: any, logger: any, confDAL: any @@ -172,7 +172,7 @@ export const KeypairDependency = { // We save the key in a separate file const keyring = - 'pub: "' + conf.pair.pub + '"\n' + 'sec: "' + conf.pair.sec + '"'; + 'pub: "' + conf.pair?.pub + '"\n' + 'sec: "' + conf.pair?.sec + '"'; await confDAL.coreFS.write("keyring.yml", keyring); // We never want to store salt, password or keypair in the conf.json file diff --git a/app/modules/keypair/lib/scrypt.ts b/app/modules/keypair/lib/scrypt.ts index 3135ecad6..8e35f8d32 100644 --- a/app/modules/keypair/lib/scrypt.ts +++ b/app/modules/keypair/lib/scrypt.ts @@ -32,24 +32,25 @@ export const Scrypt = async ( r = 16, p = 1 ) => { - const res: { pub: string; sec: string } = await new Promise( - (resolve, reject) => { - crypto.scrypt( - key, - salt, - SEED_LENGTH, - { N, r, p }, - (err: any, seed: Buffer) => { - if (err) return reject(err); - const pair = KeyPairBuilder.fromSeed(seed); - resolve({ - pub: pair.getPublicKey(), - sec: seedToSecretKey(seed), - }); - } - ); - } - ); + const res: { pub: string; sec: string } = await new Promise<{ + pub: string; + sec: string; + }>((resolve, reject) => { + crypto.scrypt( + key, + salt, + SEED_LENGTH, + { N, r, p }, + (err: any, seed: Buffer) => { + if (err) return reject(err); + const pair = KeyPairBuilder.fromSeed(seed); + resolve({ + pub: pair.getPublicKey(), + sec: seedToSecretKey(seed), + }); + } + ); + }); return res; }; diff --git a/app/modules/plugin.ts b/app/modules/plugin.ts index 06c60ff58..83d279339 100644 --- a/app/modules/plugin.ts +++ b/app/modules/plugin.ts @@ -14,7 +14,7 @@ import { ConfDTO } from "../lib/dto/ConfDTO"; import { Server } from "../../server"; -"use strict"; +("use strict"); const fs = require("fs"); const path = require("path"); @@ -86,7 +86,7 @@ function npmInstall( npm: string | null = null, cwd: string | null = null ) { - return new Promise((res, rej) => { + return new Promise<void>((res, rej) => { const node = getNode(); npm = npm || getNPM(); cwd = cwd || getCWD(); @@ -117,7 +117,7 @@ function npmRemove( npm: string | null = null, cwd: string | null = null ) { - return new Promise((res, rej) => { + return new 
Promise<void>((res, rej) => { const node = getNode(); npm = npm || getNPM(); cwd = cwd || getCWD(); @@ -167,7 +167,7 @@ async function checkNPMAccess() { } async function getNPMAccess() { - const hasAccessToPackageJSON = await new Promise((res) => { + const hasAccessToPackageJSON = await new Promise<boolean>((res) => { fs.access( path.join(__dirname, "/../../package.json"), fs.constants.R_OK | fs.constants.W_OK, @@ -176,7 +176,7 @@ async function getNPMAccess() { } ); }); - const hasAccessToNodeModules = await new Promise((res) => { + const hasAccessToNodeModules = await new Promise<boolean>((res) => { fs.access( path.join(__dirname, "/../../node_modules"), fs.constants.R_OK | fs.constants.W_OK, diff --git a/app/modules/prover/index.ts b/app/modules/prover/index.ts index 2d4a3d1dd..05cf362dc 100644 --- a/app/modules/prover/index.ts +++ b/app/modules/prover/index.ts @@ -50,7 +50,7 @@ export const ProverDependency = { conf.powSecurityRetryDelay = ProverConstants.POW_SECURITY_RETRY_DELAY; conf.powMaxHandicap = ProverConstants.POW_MAXIMUM_ACCEPTABLE_HANDICAP; }, - beforeSave: async (conf: ConfDTO) => { + beforeSave: async (conf: Partial<ConfDTO>) => { delete conf.powSecurityRetryDelay; delete conf.powMaxHandicap; }, @@ -244,7 +244,7 @@ function generateAndSend( getGenerationMethod: any ) { const logger = server.logger; - return new Promise((resolve, reject) => { + return new Promise<any>((resolve, reject) => { if (!program.submitLocal) { if (!program.submitHost) { throw "Option --submit-host is required."; @@ -351,7 +351,13 @@ function proveAndSend( next(); } else { const peer = PeerDTO.fromJSONObject({ - endpoints: [[port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ")], + endpoints: [ + [ + port == "443" ? "BMAS" : "BASIC_MERKLED_API", + host, + port, + ].join(" "), + ], }); program.show && console.log(proven.getRawSigned()); logger.info("Posted block " + proven.getRawSigned()); diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts index bf52ee7d7..aaf574c96 100644 --- a/app/modules/prover/lib/permanentProver.ts +++ b/app/modules/prover/lib/permanentProver.ts @@ -20,6 +20,7 @@ import { parsers } from "../../../lib/common-libs/parsers/index"; import { Server } from "../../../../server"; import { Querable, querablep } from "../../../lib/common-libs/querable"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; export class PermanentProver { logger: any; @@ -29,12 +30,11 @@ export class PermanentProver { loops: number; private permanencePromise: Querable<void> | null = null; - - private blockchainChangedResolver: any = null; - private promiseOfWaitingBetween2BlocksOfOurs: any = null; - private lastComputedBlock: any = null; - private resolveContinuePromise: any = null; - private continuePromise: any = null; + private blockchainChangedResolver: ((value: void) => void) | null = null; + private promiseOfWaitingBetween2BlocksOfOurs: Promise<void> | null = null; + private lastComputedBlock: BlockDTO | null = null; + private resolveContinuePromise: ((value: boolean) => void) | null = null; + private continuePromise: Promise<boolean> | null = null; constructor(private server: Server) { this.logger = server.logger; @@ -44,7 +44,7 @@ export class PermanentProver { // Promises triggering the prooving lopp this.resolveContinuePromise = null; - this.continuePromise = new Promise( + this.continuePromise = new Promise<boolean>( (resolve) => (this.resolveContinuePromise = resolve) ); @@ -55,13 +55,13 @@ export class PermanentProver { if 
(!this.permanencePromise || this.permanencePromise.isFulfilled()) { this.startPermanence(); } - this.resolveContinuePromise(true); + this.resolveContinuePromise && this.resolveContinuePromise(true); } async startPermanence() { - let permanenceResolve = () => {}; + let permanenceResolve: (value: void) => void = () => {}; this.permanencePromise = querablep( - new Promise((res) => { + new Promise<void>((res) => { permanenceResolve = res; }) ); @@ -99,8 +99,8 @@ export class PermanentProver { this.checkTrialIsNotTooHigh(trial, current, selfPubkey); const lastIssuedByUs = current.issuer == selfPubkey; if (lastIssuedByUs && !this.promiseOfWaitingBetween2BlocksOfOurs) { - this.promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => - setTimeout(resolve, theConf.powDelay) + this.promiseOfWaitingBetween2BlocksOfOurs = new Promise<void>( + (resolve) => setTimeout(resolve, theConf.powDelay) ); this.logger.warn( "Waiting " + @@ -126,15 +126,15 @@ export class PermanentProver { let cancelAlreadyTriggered = false; // The canceller - (async () => { + setTimeout(async () => { // If the blockchain changes - await new Promise( + await new Promise<void>( (resolve) => (this.blockchainChangedResolver = resolve) ); cancelAlreadyTriggered = true; // Then cancel the generation await this.prover.cancel(); - })(); + }); let unsignedBlock = null, trial2 = 0; @@ -165,11 +165,13 @@ export class PermanentProver { ); } try { - const obj = parsers.parseBlock.syncWrite( - dos2unix(this.lastComputedBlock.getRawSigned()) - ); + const obj = + this.lastComputedBlock && + parsers.parseBlock.syncWrite( + dos2unix(this.lastComputedBlock.getRawSigned()) + ); await this.server.writeBlock(obj); - await new Promise((res) => { + await new Promise<void>((res) => { this.server.once("bcEvent", () => res()); }); } catch (err) { @@ -201,7 +203,7 @@ export class PermanentProver { await Promise.race( waitingRaces.concat([ // The blockchain has changed! 
We or someone else found a proof, we must make a gnu one - new Promise( + new Promise<void>( (resolve) => (this.blockchainChangedResolver = () => { this.logger.warn("Blockchain changed!"); @@ -210,7 +212,7 @@ export class PermanentProver { ), // Security: if nothing happens for a while, trigger the whole process again - new Promise((resolve) => + new Promise<void>((resolve) => setTimeout(() => { if (!raceDone) { this.logger.warn( @@ -251,10 +253,10 @@ export class PermanentProver { } } - async stopEveryting() { + async stopEverything() { // First: avoid continuing the main loop - this.resolveContinuePromise(true); - this.continuePromise = new Promise( + this.resolveContinuePromise && this.resolveContinuePromise(true); + this.continuePromise = new Promise<boolean>( (resolve) => (this.resolveContinuePromise = resolve) ); // Second: stop any started proof diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts index f24d30571..5c242b58e 100644 --- a/app/modules/prover/lib/proof.ts +++ b/app/modules/prover/lib/proof.ts @@ -53,7 +53,7 @@ export function createPowWorker() { process.exit(ExitCodes.OK); }); - process.on("message", async (message) => { + process.on("message", async (message: any) => { switch (message.command) { case "newPoW": (async () => { @@ -383,7 +383,7 @@ export function createPowWorker() { } function pSend(stuff: any) { - return new Promise(function (resolve, reject) { + return new Promise<void>(function (resolve, reject) { if (process.send) { process.send(stuff, function (error: any) { !error && resolve(); diff --git a/app/modules/prover/lib/prover.ts b/app/modules/prover/lib/prover.ts index c1adcbdea..dbdce3328 100644 --- a/app/modules/prover/lib/prover.ts +++ b/app/modules/prover/lib/prover.ts @@ -45,6 +45,6 @@ export class Prover extends stream.Transform { } async stopService() { - await this.permaProver.stopEveryting(); + await this.permaProver.stopEverything(); } } diff --git a/app/modules/upnp-provider.ts b/app/modules/upnp-provider.ts index 776453de1..6783bb937 100644 --- a/app/modules/upnp-provider.ts +++ b/app/modules/upnp-provider.ts @@ -43,7 +43,7 @@ export class UpnpProvider { async checkUPnPisAvailable() { try { - await new Promise((resolve, reject) => { + await new Promise<void>((resolve, reject) => { this.client.externalIp((err: any, res: any) => { if (err || !res) { reject(); @@ -179,7 +179,7 @@ export class UpnpProvider { } static async getUPnPMappings(client: any): Promise<any> { - return new Promise((resolve, reject) => { + return new Promise<any>((resolve, reject) => { client.getMappings((err: any, res: any) => { if (err) { reject(err); @@ -200,7 +200,7 @@ export class UpnpProvider { let keys = Underscore.keys(netInterfaces); let res = []; for (const name of keys) { - let addresses = netInterfaces[name]; + let addresses = netInterfaces[name] || []; for (const addr of addresses) { if (!family || addr.family == family) { res.push({ diff --git a/app/modules/ws2p/lib/WS2PConnection.ts b/app/modules/ws2p/lib/WS2PConnection.ts index e186327f1..4fd39fd37 100644 --- a/app/modules/ws2p/lib/WS2PConnection.ts +++ b/app/modules/ws2p/lib/WS2PConnection.ts @@ -528,7 +528,7 @@ export class WS2PConnection { try { await Promise.race([ connectionTimeout, - new Promise((resolve, reject) => { + new Promise<void>((resolve, reject) => { (async () => { await this.onWsOpened; try { @@ -737,7 +737,7 @@ export class WS2PConnection { async request(body: WS2PRequest) { await this.connectAsInitiator(); const uuid = nuuid.v4(); - return new Promise((resolve, 
reject) => { + return new Promise<void>((resolve, reject) => { this.nbRequestsCount++; this.ws.send( JSON.stringify({ @@ -761,7 +761,7 @@ export class WS2PConnection { extras, Promise.race([ // The answer - new Promise((res, rej) => { + new Promise<void>((res, rej) => { extras.resolve = res; extras.reject = () => { this.errorDetected(WS2P_ERR.REQUEST_FAILED); @@ -827,7 +827,7 @@ export class WS2PConnection { async pushData(type: WS2P_PUSH, key: string, data: any) { await this.connectAsInitiator(); - return new Promise((resolve, reject) => { + return new Promise<void>((resolve, reject) => { this.nbPushsToRemoteCount++; try { this.ws.send( diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts index 5ebf238f6..fa66c2462 100644 --- a/app/modules/ws2p/lib/WS2PServer.ts +++ b/app/modules/ws2p/lib/WS2PServer.ts @@ -288,7 +288,7 @@ export class WS2PServer extends events.EventEmitter { async close() { await Promise.all(this.connections.map((c) => c.close())); - return new Promise((res, rej) => { + return new Promise<void>((res, rej) => { this.wss.close((err: any) => { if (err) return rej(err); res(); diff --git a/app/service/IdentityService.ts b/app/service/IdentityService.ts index f3173f885..9501db0bb 100644 --- a/app/service/IdentityService.ts +++ b/app/service/IdentityService.ts @@ -30,7 +30,7 @@ import { MindexEntry } from "../lib/indexer"; import { DataErrors } from "../lib/common-libs/errors"; import { Tristamp } from "../lib/common/Tristamp"; -"use strict"; +("use strict"); const constants = require("../lib/constants"); const BY_ABSORPTION = true; diff --git a/app/service/PeeringService.ts b/app/service/PeeringService.ts index aa8a7ca60..daec3e589 100755 --- a/app/service/PeeringService.ts +++ b/app/service/PeeringService.ts @@ -76,10 +76,9 @@ export class PeeringService { async mirrorBMAEndpoints() { const localPeer = await this.peer(); const localEndpoints = await this.server.getEndpoints(); - return this.getOtherEndpoints( - localPeer.endpoints, - localEndpoints - ).filter(PeerDTO.isBMA); + return this.getOtherEndpoints(localPeer.endpoints, localEndpoints).filter( + PeerDTO.isBMA + ); } checkPeerSignature(p: PeerDTO) { @@ -363,8 +362,8 @@ export class PeeringService { const ws2pAccess = PeerDTO.fromJSONObject(p2).getFirstNonTorWS2P(); if (ws2pAccess) { logger.info( - `WS2P access: ${ws2pAccess.host}:${ws2pAccess.port}` - + (ws2pAccess.path ? (" " + ws2pAccess.path) : "") + `WS2P access: ${ws2pAccess.host}:${ws2pAccess.port}` + + (ws2pAccess.path ? 
" " + ws2pAccess.path : "") ); } logger.debug( diff --git a/server.ts b/server.ts index d5c4b7e5d..97f067ffa 100644 --- a/server.ts +++ b/server.ts @@ -167,9 +167,9 @@ export class Server extends stream.Duplex implements HookableServer { async getSQLiteDB(dbName: string, home: string) { // Check in cach (useful to avoid migration task to create a new driver on the same DB file) let driver: SQLiteDriver = this.sqliteDBs[dbName]; - if (!driver || driver.closed) { + if (!driver || driver.isClosed()) { driver = await Directory.getHomeDB(this.memoryOnly, dbName, home); - this.sqliteDBs[dbName] = driver; + if (!this.memoryOnly) this.sqliteDBs[dbName] = driver; } return driver; } @@ -179,7 +179,7 @@ export class Server extends stream.Duplex implements HookableServer { let driver: LevelUp = this.levelDBs[dbName]; if (!driver || driver.isClosed()) { driver = await Directory.getHomeLevelDB(this.memoryOnly, dbName, home); - this.levelDBs[dbName] = driver; + if (!this.memoryOnly) this.levelDBs[dbName] = driver; } return driver; } @@ -514,6 +514,12 @@ export class Server extends stream.Duplex implements HookableServer { if (this.dal) { await this.dal.close() } + await Promise.all(Object.values(this.sqliteDBs) + .filter(db => db && !db.isClosed()) + .map(db => db.closeConnection())); + await Promise.all(Object.values(this.levelDBs) + .filter(db => db && !db.isClosed()) + .map(db => db.close())); } revert() { diff --git a/test/dal/basic-dal-tests.ts b/test/dal/basic-dal-tests.ts index ccc4ffd26..6696c74c3 100644 --- a/test/dal/basic-dal-tests.ts +++ b/test/dal/basic-dal-tests.ts @@ -113,6 +113,10 @@ describe("DAL", function(){ return fileDAL.saveConf({ currency: "meta_brouzouf" } as any); }) + after(() => { + return fileDAL.close(); + }) + it('should have last DB version', async () => { let version = await fileDAL.getDBVersion(); should.exist(version); diff --git a/test/fast/modules/ws2p/single_write.ts b/test/fast/modules/ws2p/single_write.ts index 31d8d0905..667aedcc5 100644 --- a/test/fast/modules/ws2p/single_write.ts +++ b/test/fast/modules/ws2p/single_write.ts @@ -26,7 +26,7 @@ describe('WS2P Single Write limiter', () => { const source = new Readable() const protection = new WS2PSingleWriteStream(PROTECTION_DURATION) let nbDocs = 0 - await new Promise(res => { + await new Promise<void>(res => { source .pipe(protection) .pipe(es.mapSync(() => { @@ -56,4 +56,4 @@ class Readable extends stream.Readable { async _read() { } -} \ No newline at end of file +} diff --git a/test/fast/prover/prover-pow-1-cluster.ts b/test/fast/prover/prover-pow-1-cluster.ts index ea00618c0..3560bf861 100644 --- a/test/fast/prover/prover-pow-1-cluster.ts +++ b/test/fast/prover/prover-pow-1-cluster.ts @@ -111,7 +111,7 @@ describe('PoW Cluster', () => { } } }) - await new Promise(res => { + await new Promise<void>(res => { master.onInfoMessage = () => res() }) await master.cancelWork() diff --git a/test/integration/fork-resolution/register-fork-blocks.ts b/test/integration/fork-resolution/register-fork-blocks.ts index d448373d5..920d6f21b 100644 --- a/test/integration/fork-resolution/register-fork-blocks.ts +++ b/test/integration/fork-resolution/register-fork-blocks.ts @@ -176,7 +176,7 @@ describe("Fork blocks", function() { await s2.sharePeeringWith(s1) await s2.writeBlock(b4a) const b3c = await s3.commit({ time: now + 33 }) - await new Promise((res, rej) => { + await new Promise<void>((res, rej) => { const event = CommonConstants.DocumentError s2.on(event, (e:any) => { try { diff --git a/test/integration/misc/http-api.ts 
b/test/integration/misc/http-api.ts index 6ec11146c..1dcb732f6 100644 --- a/test/integration/misc/http-api.ts +++ b/test/integration/misc/http-api.ts @@ -350,7 +350,7 @@ function postBlock(server2:TestingServer) { }) .then(async (result:HttpBlock) => { const numberToReach = block.number - await new Promise((res) => { + await new Promise<void>((res) => { const interval = setInterval(async () => { const current = await server2.dal.getCurrentBlockOrNull() if (current && current.number == numberToReach) { diff --git a/test/integration/proof-of-work/continuous-proof.ts b/test/integration/proof-of-work/continuous-proof.ts index 1db5b4f28..63fba46cd 100644 --- a/test/integration/proof-of-work/continuous-proof.ts +++ b/test/integration/proof-of-work/continuous-proof.ts @@ -117,9 +117,9 @@ describe("Continous proof-of-work", function() { s2.startBlockComputation(); await s2.until('block', 15); await s2.stopBlockComputation(); - await [ + await Promise.all([ CrawlerDependency.duniter.methods.pullBlocks(s3._server), - new Promise(res => { + new Promise<void>(res => { s3.pipe(es.mapSync((e:any) => { if (e.number === 15) { res() @@ -129,7 +129,7 @@ describe("Continous proof-of-work", function() { }), s3.startBlockComputation() - ]; + ]); const current = await s3.get('/blockchain/current') await s3.stopBlockComputation(); current.number.should.be.aboveOrEqual(14) diff --git a/test/integration/tools/test-until.ts b/test/integration/tools/test-until.ts index 4c05899ea..eff7ba46b 100644 --- a/test/integration/tools/test-until.ts +++ b/test/integration/tools/test-until.ts @@ -18,7 +18,7 @@ const UNTIL_TIMEOUT = 115000; export function until(server:TestingServer, eventName:string, count:number) { let counted = 0; const max = count == undefined ? 1 : count; - return new Promise(function (resolve, reject) { + return new Promise<void>(function (resolve, reject) { let finished = false; server._server.on(eventName, function () { counted++; diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts index 5dd1d545b..5c3b65dcf 100644 --- a/test/integration/tools/toolbox.ts +++ b/test/integration/tools/toolbox.ts @@ -260,7 +260,7 @@ export const NewTestingServer = (conf:any) => { } export const serverWaitBlock = async (server:Server, number:number) => { - await new Promise((res) => { + await new Promise<void>((res) => { const interval = setInterval(async () => { const current = await server.dal.getCurrentBlockOrNull() if (current && current.number == number) { @@ -276,7 +276,7 @@ export const waitToHaveBlock = async (server:Server, number:number) => { } export const waitForkResolution = async (server:Server, number:number) => { - await new Promise(res => { + await new Promise<void>(res => { server.pipe(es.mapSync((e:any) => { if (e.bcEvent === 'switched' && e.block.number === number) { res() @@ -288,7 +288,7 @@ export const waitForkResolution = async (server:Server, number:number) => { } export const waitForkWS2PConnection = async (server:Server, pubkey:string) => { - await new Promise(res => { + await new Promise<void>(res => { server.pipe(es.mapSync((e:any) => { if (e.ws2p === 'connected' && e.to.pubkey === pubkey) { res() @@ -300,7 +300,7 @@ export const waitForkWS2PConnection = async (server:Server, pubkey:string) => { } export const waitForkWS2PDisconnection = async (server:Server, pubkey:string) => { - await new Promise(res => { + await new Promise<void>((res) => { server.pipe(es.mapSync((e:any) => { if (e.ws2p === 'disconnected' && e.peer.pub === pubkey) { res() @@ -473,7 +473,7 @@ 
export class TestingServer { } push(chunk: any, encoding?: string) { - return this.server.push(chunk, encoding) + return this.server.push(chunk, encoding as BufferEncoding) } pipe(writable:stream.Writable) { @@ -562,7 +562,7 @@ export class TestingServer { async commitWaitError(options:any, expectedError:string) { const results = await Promise.all([ - new Promise(res => { + new Promise<void>((res) => { this.server.pipe(es.mapSync((e:any) => { if (e.blockResolutionError === expectedError) { res() -- GitLab