diff --git a/app/lib/constants.ts b/app/lib/constants.ts index fc3d9a200082bb5ef05fa086d4df0422eb1d3c8f..243c2bf4dbc9126adca7b4f8002370d19c3e75bd 100644 --- a/app/lib/constants.ts +++ b/app/lib/constants.ts @@ -218,7 +218,7 @@ module.exports = { PEER: CommonConstants.PEER, - CURRENT_DB_VERSION: 26, + CURRENT_DB_VERSION: 27, // Should be set to 'max(keys(MetaDAL.migrations)) + 1' NETWORK: { MAX_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS: 10, diff --git a/app/lib/dal/drivers/SQLiteDriver.ts b/app/lib/dal/drivers/SQLiteDriver.ts index 5a8b0bb068129563c43142329b44216a8d26838c..2fe0faf7816383af5affe04de93bae7345329682 100644 --- a/app/lib/dal/drivers/SQLiteDriver.ts +++ b/app/lib/dal/drivers/SQLiteDriver.ts @@ -103,6 +103,10 @@ export class SQLiteDriver { this.logger.debug("Database removed"); } + get closed() { + return !this.dbPromise; + } + async closeConnection(): Promise<void> { if (!this.dbPromise) { return; diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts index b20aaf2b56bff4339950a546d147f39938db6a99..53ca4838473c9e46f59ace6b13300929422e4457 100644 --- a/app/lib/dal/fileDAL.ts +++ b/app/lib/dal/fileDAL.ts @@ -151,7 +151,7 @@ export class FileDAL implements ServerDAO { this.powDAL = new PowDAL(this.rootPath, params.fs); this.confDAL = new ConfDAL(this.rootPath, params.fs); this.metaDAL = new (require("./sqliteDAL/MetaDAL").MetaDAL)( - this.sqliteDriver + this.sqliteDriver, getSqliteDB ); this.idtyDAL = new (require("./sqliteDAL/IdentityDAL").IdentityDAL)( this.sqliteDriver diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts index dcd909b299658e2eae8442ad462b7d1099d4ac2a..52aa803f9060d41eaf8a0b73bbe2170e47d694b8 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts @@ -22,6 +22,7 @@ export class SqliteTable<T> { await this.driver.sqlExec(` BEGIN; ${this.generateCreateTable()}; + ${this.generateUpgradeSql()}; ${this.generateCreateIndexes()}; COMMIT; `); @@ -60,6 +61,13 @@ export class SqliteTable<T> { return sql; } + /** + * Hook allowing a subclass to inject table migration SQL, + * executed in the same transaction as CREATE TABLE (empty by default). + */ + generateUpgradeSql(): string { + return ''; + } + generateCreateIndexes() { return this.keys() .map((fieldName) => { @@ -194,6 +202,12 @@ export class SqliteTable<T> { )[0] as any).max; } + async countBy(fieldName: keyof T, fieldValue: any): Promise<number> { + return (( + await this.driver.sqlRead(`SELECT COUNT(*) as count FROM ${this.name} WHERE ${fieldName} = ?`, [fieldValue]) + )[0] as any).count; + } + /** * Debugging function: allows to make a hot copy of an SQLite database to a new file, even if the source is in-memory. * @param {string} path The path where to write the copy.
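A note on the `SqliteTable` additions above: `init()` now splices the result of `generateUpgradeSql()` between the `CREATE TABLE` and `CREATE INDEX` statements of a single `BEGIN;…COMMIT;` batch, so a subclass can ship schema upgrades together with its table definition, while `countBy()` gives subclasses a cheap aggregate (see `existsByHash()` further down). A minimal sketch of an override, assuming the `(name, fields, getSqliteDB)` constructor wiring used by the concrete `Sqlite*` tables; the `widgets` table, its column and the `SqlFieldDefinition` import path are illustrative only:

```ts
import { SqliteTable } from "./SqliteTable";
import { SqlNullableFieldDefinition } from "./SqlFieldDefinition"; // assumed location
import { SQLiteDriver } from "../../drivers/SQLiteDriver";

// Hypothetical table, NOT part of this patch: it only illustrates the upgrade hook.
export class SqliteWidgets extends SqliteTable<{ label: string | null }> {
  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
    super(
      "widgets",
      { label: new SqlNullableFieldDefinition("VARCHAR", true, 50) },
      getSqliteDB
    );
  }

  generateUpgradeSql(): string {
    // This runs inside init()'s BEGIN/COMMIT on every startup, so the
    // statements must be idempotent: an "IF NOT EXISTS" guard is safe to
    // re-run, while a bare ALTER TABLE ... ADD COLUMN would fail on the
    // second start (which is why migration 26 below wraps its ALTERs in a
    // try/catch instead of using this hook).
    return `CREATE INDEX IF NOT EXISTS idx_widgets_label ON widgets (label);`;
  }
}
```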
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts index 429f9aba028e16630e9b97053da13c65c2f9334f..28f36b9aee3619efceb475f4a758513c50771458 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts @@ -29,8 +29,10 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { inputs: new SqlNullableFieldDefinition("JSON", false), unlocks: new SqlNullableFieldDefinition("JSON", false), outputs: new SqlNullableFieldDefinition("JSON", false), + issuer: new SqlNullableFieldDefinition("VARCHAR", true, 50), /* computed column - needed by getTxHistoryXxx() */ issuers: new SqlNullableFieldDefinition("JSON", false), signatures: new SqlNullableFieldDefinition("JSON", false), + recipient: new SqlNullableFieldDefinition("VARCHAR", true, 50), /* computed column - needed by getTxHistoryXxx() */ recipients: new SqlNullableFieldDefinition("JSON", false), written: new SqlNotNullableFieldDefinition("BOOLEAN", true), removed: new SqlNotNullableFieldDefinition("BOOLEAN", true), @@ -78,16 +80,25 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { @MonitorExecutionTime() async insert(record: DBTx): Promise<void> { + this.onBeforeInsert(record); await this.insertInTable(this.driver, record); } @MonitorExecutionTime() async insertBatch(records: DBTx[]): Promise<void> { if (records.length) { + records.forEach(r => this.onBeforeInsert(r)); return this.insertBatchInTable(this.driver, records); } } + onBeforeInsert(dbTx: DBTx) { + // Compute the unique issuer/recipient (needed to speed up the tx history queries) + dbTx.issuer = (dbTx.issuers.length === 1) ? dbTx.issuers[0] : null; + const recipients = !dbTx.issuer ? dbTx.recipients : dbTx.recipients.filter(r => r !== dbTx.issuer); + dbTx.recipient = (recipients.length === 1) ?
recipients[0] : null; + } + sandbox: SandBox<{ issuers: string[]; output_base: number; @@ -99,12 +110,12 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { block_number: number, time: number ): Promise<DBTx> { - const dbTx = await this.getTX(tx.hash); + const exists = await this.existsByHash(tx.hash); const theDBTx = DBTx.fromTransactionDTO(tx); theDBTx.written = true; theDBTx.block_number = block_number; theDBTx.time = time; - if (!dbTx) { + if (!exists) { await this.insert(theDBTx); } else { await this.update( @@ -114,7 +125,7 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { ["hash"] ); } - return dbTx; + return theDBTx; } async addPending(dbTx: DBTx): Promise<DBTx> { @@ -148,28 +159,12 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { } async getTxHistoryByPubkey(pubkey: string) { - const history: { - sent: DBTx[]; - received: DBTx[]; - sending: DBTx[]; - pending: DBTx[]; - } = { - sent: [], - received: [], - sending: [], - pending: [], + return { + sent: await this.getLinkedWithIssuer(pubkey), + received: await this.getLinkedWithRecipient(pubkey), + sending: await this.getPendingWithIssuer(pubkey), + pending: await this.getPendingWithRecipient(pubkey), }; - const res = await Promise.all([ - this.getLinkedWithIssuer(pubkey), - this.getLinkedWithRecipient(pubkey), - this.getPendingWithIssuer(pubkey), - this.getPendingWithRecipient(pubkey), - ]); - history.sent = res[0] || []; - history.received = res[1] || []; - history.sending = res[2] || []; - history.pending = res[3] || []; - return history; } async getTxHistoryByPubkeyBetweenBlocks( @@ -177,26 +172,10 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { from: number, to: number ): Promise<{ sent: DBTx[]; received: DBTx[] }> { - const history: { - sent: DBTx[]; - received: DBTx[]; - } = { - sent: [], - received: [], + return { + sent: await this.getLinkedWithIssuerByRange('block_number', pubkey, from, to), + received: await this.getLinkedWithRecipientByRange('block_number', pubkey, from, to), }; - const res = await Promise.all([ - this.findEntities( - "SELECT * FROM txs WHERE written AND issuers LIKE ? AND block_number >= ? AND block_number <= ?", - [`%${pubkey}%`, from, to] - ), - this.findEntities( - "SELECT * FROM txs WHERE written AND recipients LIKE ? AND block_number >= ? AND block_number <= ?", - [`%${pubkey}%`, from, to] - ), - ]); - history.sent = res[0] || []; - history.received = res[1] || []; - return history; } async getTxHistoryByPubkeyBetweenTimes( @@ -204,75 +183,95 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { from: number, to: number ): Promise<{ sent: DBTx[]; received: DBTx[] }> { - const history: { - sent: DBTx[]; - received: DBTx[]; - } = { - sent: [], - received: [], + return { + sent: await this.getLinkedWithIssuerByRange('blockstampTime', pubkey, from, to), + received: await this.getLinkedWithRecipientByRange('blockstampTime', pubkey, from, to) }; - const res = await Promise.all([ - this.findEntities( - "SELECT * FROM txs WHERE written AND issuers LIKE ? AND time >= ? AND time <= ?", - [`%${pubkey}%`, from, to] - ), - this.findEntities( - "SELECT * FROM txs WHERE written AND recipients LIKE ? AND time >= ? 
AND time <= ?", - [`%${pubkey}%`, from, to] - ), - ]); - history.sent = res[0] || []; - history.received = res[1] || []; - return history; } async getTxHistoryMempool( pubkey: string ): Promise<{ sending: DBTx[]; pending: DBTx[] }> { - const history: { - sending: DBTx[]; - pending: DBTx[]; - } = { - sending: [], - pending: [], + return { + sending: await this.getPendingWithIssuer(pubkey), + pending: await this.getPendingWithRecipient(pubkey), }; - const res = await Promise.all([ - this.getPendingWithIssuer(pubkey), - this.getPendingWithRecipient(pubkey), - ]); - history.sending = res[0] || []; - history.pending = res[1] || []; - return history; } getLinkedWithIssuer(pubkey: string): Promise<DBTx[]> { - return this.findEntities( - "SELECT * FROM txs WHERE written AND issuers LIKE ?", - [`%${pubkey}%`] + return this.findEntities(`SELECT * FROM txs + WHERE written + AND ( + issuer = ? + OR (issuer IS NULL AND issuers LIKE ?) + )`, + [pubkey, `%${pubkey}%`] + ); + } + + getLinkedWithIssuerByRange(rangeFieldName: keyof DBTx, pubkey: string, from: number, to: number): Promise<DBTx[]> { + return this.findEntities(`SELECT * FROM txs + WHERE written + AND ( + issuer = ? + OR (issuer IS NULL AND issuers LIKE ?) + ) + AND ${rangeFieldName} >= ? + AND ${rangeFieldName} <= ?`, + [pubkey, `%${pubkey}%`, from, to] ); } getLinkedWithRecipient(pubkey: string): Promise<DBTx[]> { - return this.findEntities( - "SELECT * FROM txs WHERE written AND recipients LIKE ?", - [`%${pubkey}%`] + return this.findEntities(`SELECT * FROM txs + WHERE written + AND ( + recipient = ? + OR (recipient IS NULL AND issuer <> ? AND recipients LIKE ? ) + )`, + [pubkey, pubkey, `%${pubkey}%`] + ); + } + + getLinkedWithRecipientByRange(rangeColumnName: string, pubkey: string, from: number, to: number): Promise<DBTx[]> { + return this.findEntities(`SELECT * FROM txs + WHERE written + AND ( + recipient = ? + OR (recipient IS NULL AND issuer <> ? AND recipients LIKE ? ) + ) + AND ${rangeColumnName} >= ? + AND ${rangeColumnName} <= ?`, + [pubkey, pubkey, `%${pubkey}%`, from, to] ); } getPendingWithIssuer(pubkey: string): Promise<DBTx[]> { - return this.findEntities( - "SELECT * FROM txs WHERE NOT written AND issuers LIKE ?", - [`%${pubkey}%`] + return this.findEntities(`SELECT * FROM txs + WHERE NOT written + AND ( + issuer = ? + OR (issuer IS NULL AND issuers LIKE ?) + )`, + [pubkey, `%${pubkey}%`] ); } getPendingWithRecipient(pubkey: string): Promise<DBTx[]> { - return this.findEntities( - "SELECT * FROM txs WHERE NOT written AND recipients LIKE ?", - [`%${pubkey}%`] + return this.findEntities(`SELECT * FROM txs + WHERE NOT written + AND ( + recipient = ? + OR (recipient IS NULL AND issuer <> ? AND recipients LIKE ?) 
+ ) `, + [pubkey, pubkey, `%${pubkey}%`] ); } + async existsByHash(hash: string): Promise<boolean> { + return (await this.countBy('hash', hash)) > 0; + } + async getTX(hash: string): Promise<DBTx> { return ( await this.findEntities("SELECT * FROM txs WHERE hash = ?", [hash]) diff --git a/app/lib/dal/sqliteDAL/MetaDAL.ts b/app/lib/dal/sqliteDAL/MetaDAL.ts index 5660bb53567c97a5eaf4fea2411affdf385fb506..b74e6dd82736bdebb44fdcca7ded9ab8d7ad6470 100644 --- a/app/lib/dal/sqliteDAL/MetaDAL.ts +++ b/app/lib/dal/sqliteDAL/MetaDAL.ts @@ -16,7 +16,10 @@ import { SQLiteDriver } from "../drivers/SQLiteDriver"; import { ConfDTO } from "../../dto/ConfDTO"; import { TransactionDTO } from "../../dto/TransactionDTO"; import { IdentityDAL } from "./IdentityDAL"; +import {SqliteTransactions} from "../indexDAL/sqlite/SqliteTransactions"; +import {Directory} from "../../system/directory"; +const constants = require('../../constants'); const logger = require("../../logger").NewLogger("metaDAL"); export interface DBMeta { @@ -27,7 +30,8 @@ export interface DBMeta { export class MetaDAL extends AbstractSQLite<DBMeta> { driverCopy: SQLiteDriver; - constructor(driver: SQLiteDriver) { + constructor(driver: SQLiteDriver, + private getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) { super( driver, "meta", @@ -88,29 +92,6 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { ");" + "CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);" + "CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);" + - // Same, but for Transactions - "CREATE TABLE IF NOT EXISTS txs (" + - "hash CHAR(64) NOT NULL," + - "block_number INTEGER," + - "locktime INTEGER NOT NULL," + - "version INTEGER NOT NULL," + - "currency VARCHAR(50) NOT NULL," + - "comment VARCHAR(255) NOT NULL," + - "time DATETIME," + - "inputs TEXT NOT NULL," + - "unlocks TEXT NOT NULL," + - "outputs TEXT NOT NULL," + - "issuers TEXT NOT NULL," + - "signatures TEXT NOT NULL," + - "recipients TEXT NOT NULL," + - "written BOOLEAN NOT NULL," + - "removed BOOLEAN NOT NULL," + - "PRIMARY KEY (hash)" + - ");" + - "CREATE INDEX IF NOT EXISTS idx_txs_issuers ON txs (issuers);" + - "CREATE INDEX IF NOT EXISTS idx_txs_written ON txs (written);" + - "CREATE INDEX IF NOT EXISTS idx_txs_removed ON txs (removed);" + - "CREATE INDEX IF NOT EXISTS idx_txs_hash ON txs (hash);" + "COMMIT;", // Test @@ -118,15 +99,13 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { "BEGIN;" + "CREATE VIEW IF NOT EXISTS identities_pending AS SELECT * FROM idty WHERE NOT written;" + "CREATE VIEW IF NOT EXISTS certifications_pending AS SELECT * FROM cert WHERE NOT written;" + - "CREATE VIEW IF NOT EXISTS transactions_pending AS SELECT * FROM txs WHERE NOT written;" + - "CREATE VIEW IF NOT EXISTS transactions_desc AS SELECT * FROM txs ORDER BY time DESC;" + "CREATE VIEW IF NOT EXISTS forks AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE fork ORDER BY number DESC;" + "CREATE VIEW IF NOT EXISTS blockchain AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE NOT fork ORDER BY number DESC;" + "CREATE VIEW IF NOT EXISTS network AS select i.uid, (last_try - first_down) / 1000 as down_delay_in_sec, p.* from peer p LEFT JOIN idty i on i.pubkey = p.pubkey ORDER by down_delay_in_sec;" + "COMMIT;", // New `receveid` column - 2: "BEGIN; ALTER TABLE txs ADD COLUMN received INTEGER NULL; COMMIT;", + 2: async () => {}, // Update wrong recipients field (was not filled in) 3: 
async () => {}, @@ -140,12 +119,8 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { 6: "BEGIN; ALTER TABLE idty ADD COLUMN expired INTEGER NULL; COMMIT;", 7: "BEGIN; ALTER TABLE cert ADD COLUMN expired INTEGER NULL; COMMIT;", 8: "BEGIN; ALTER TABLE membership ADD COLUMN expired INTEGER NULL; COMMIT;", - 9: - "BEGIN;" + - "ALTER TABLE txs ADD COLUMN output_base INTEGER NULL;" + - "ALTER TABLE txs ADD COLUMN output_amount INTEGER NULL;" + - "COMMIT;", - 10: "BEGIN; ALTER TABLE txs ADD COLUMN blockstamp VARCHAR(200) NULL; COMMIT;", + 9: async () => {}, + 10: async () => {}, 11: "BEGIN;" + "ALTER TABLE block ADD COLUMN issuersFrame INTEGER NULL;" + @@ -153,14 +128,12 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { "ALTER TABLE block ADD COLUMN issuersCount INTEGER NULL;" + "COMMIT;", 12: async () => { - let blockDAL = new MetaDAL(this.driverCopy); + let blockDAL = new MetaDAL(this.driverCopy, this.getSqliteDB); await blockDAL.exec("ALTER TABLE block ADD COLUMN len INTEGER NULL;"); - await blockDAL.exec("ALTER TABLE txs ADD COLUMN len INTEGER NULL;"); }, - 13: "BEGIN; ALTER TABLE txs ADD COLUMN blockstampTime INTEGER NULL; COMMIT;", + 13: async () => {}, 14: "BEGIN; " + - "CREATE VIEW IF NOT EXISTS sandbox_txs AS SELECT * FROM txs WHERE NOT written AND NOT removed ORDER BY output_base DESC, output_amount DESC;" + "CREATE VIEW IF NOT EXISTS sandbox_idty AS SELECT " + "I.*, " + "I.hash, " + @@ -190,16 +163,8 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { }, 16: async () => {}, - - 17: async () => { - // This migration is now obsolete - }, - - 18: - "BEGIN;" + - // Add a `massReeval` column - // 'ALTER TABLE b_index ADD COLUMN massReeval VARCHAR(100) NOT NULL DEFAULT \'0\';' + - "COMMIT;", + 17: async () => {}, + 18: async () => {}, 19: "BEGIN;" + @@ -207,69 +172,67 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { "ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0;" + "COMMIT;", - /** - * Feeds the table of wallets with balances - */ + // Feeds the table of wallets with balances 20: async () => {}, - 21: async (conf: ConfDTO) => {}, + 21: async () => {}, // Replay the wallet table feeding, because of a potential bug 22: () => { return this.migrations[20](); }, - 23: "BEGIN;" + "COMMIT;", + 23: async () => {}, /** * Feeds the m_index.chainable_on correctly */ - 24: async (conf: ConfDTO) => {}, + 24: async () => {}, - /** - * Wrong transaction storage - */ - 25: async () => { - const txsDAL: any = new MetaDAL(this.driverCopy); - const wrongTXS = await txsDAL.query( - "SELECT * FROM txs WHERE outputs LIKE ? 
OR inputs LIKE ?", - ["%amount%", "%amount%"] + // Wrong transaction storage + 25: async () => {}, + + // Add columns 'issuer' and 'recipient' in transaction table - see issue #1442 + 26: async() => { + // Drop old table 'txs' (replaced by a file 'txs.db') + await this.exec("BEGIN;" + + "DROP TABLE IF EXISTS txs;" + + "COMMIT;") + + // Migrate txs.db + const txsDriver = await this.getSqliteDB("txs.db"); + const txsDAL = new MetaDAL(txsDriver, this.getSqliteDB); + + // Drop unused indices + await txsDAL.exec( + "BEGIN;" + + "DROP INDEX IF EXISTS idx_txs_locktime;" + + "DROP INDEX IF EXISTS idx_txs_version;" + + "DROP INDEX IF EXISTS idx_txs_currency;" + + "DROP INDEX IF EXISTS idx_txs_comment;" + + "DROP INDEX IF EXISTS idx_txs_signatures;" + + "DROP INDEX IF EXISTS idx_txs_received;" + + "DROP INDEX IF EXISTS idx_txs_output_base;" + + "DROP INDEX IF EXISTS idx_txs_output_amount;" + + "CREATE INDEX IF NOT EXISTS idx_txs_recipients ON txs (recipients);" + + "COMMIT;" ); - let i = 1; - for (const tx of wrongTXS) { - logger.info( - "Updating incorrect transaction %s/%s.", - i, - wrongTXS.length - ); - i++; - const dto = TransactionDTO.fromJSONObject(tx); - dto.outputs = dto.outputs.map((o) => { - if (typeof o === "object") { - return TransactionDTO.outputObj2Str(o); - } - return o; - }); - dto.inputs = dto.inputs.map((o) => { - if (typeof o === "object") { - return TransactionDTO.inputObj2Str(o); - } - return o; - }); + + // Add new columns 'issuer' and 'recipient' + try { await txsDAL.exec( - "UPDATE txs SET " + - "outputs = '" + - JSON.stringify(dto.outputs) + - "', " + - "inputs = '" + - JSON.stringify(dto.inputs) + - "' " + - "WHERE hash = '" + - tx.hash + - "'" + "BEGIN;" + + "ALTER TABLE txs ADD COLUMN issuer VARCHAR(50) NULL;" + + "ALTER TABLE txs ADD COLUMN recipient VARCHAR(50) NULL;" + + "UOPDATE txs SET issuer = SUBSTR(issuers, 2, LENGTH(issuers) - 4) WHERE issuer IS NULL AND issuers NOT LIKE '%,%';" + + "UOPDATE txs SET recipient = SUBSTR(recipients, 2, LENGTH(recipients) - 4) WHERE recipient IS NULL AND recipients NOT LIKE '%,%';" + + "COMMIT;" ); } - }, + catch(err) { + // Silent: if column already exists + } + } }; async init() { @@ -299,17 +262,19 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { await migration(conf); } } catch (e) { - logger.warn("An error occured during DB migration, continue.", e); + logger.warn("An error occurred during DB migration, continue.", e); } } async upgradeDatabase(conf: ConfDTO) { let version = await this.getVersion(); while (this.migrations[version]) { + logger.trace(`Upgrade database... 
(patch ${version}/${constants.CURRENT_DB_VERSION - 1})`); + await this.executeMigration(this.migrations[version], conf); - // Automated increment - await this.exec("UPDATE meta SET version = version + 1"); + // Version increment version++; + await this.exec("UPDATE meta SET version = " + version); } } @@ -319,9 +284,10 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { async getVersion() { try { - const row = await this.getRow(); - return row.version; + const {version} = await this.getRow(); + return version; } catch (e) { + // Insert version 0 as the initial version await this.exec("INSERT INTO " + this.table + " VALUES (1,0);"); return 0; } diff --git a/app/lib/db/DBTx.ts b/app/lib/db/DBTx.ts index 1f0fd0e64b4e2c173b8d69a2aa8c0082c81fb1cb..4b74b845768501f9f52ee0be7b2cb653d498700c 100644 --- a/app/lib/db/DBTx.ts +++ b/app/lib/db/DBTx.ts @@ -24,6 +24,9 @@ export class DBTx { written_on: string; writtenOn: number; + issuer: string | null; // Computed + recipient: string | null; // Computed + static fromTransactionDTO(tx: TransactionDTO) { const dbTx = new DBTx(); dbTx.hash = tx.hash; diff --git a/app/modules/bma/lib/controllers/transactions.ts b/app/modules/bma/lib/controllers/transactions.ts index 37ab7b2afa1644683122dc7c9ebbed6db7baa993..de6dd2236c6e74ec9cbb4b9f4d5d695dd56182cc 100644 --- a/app/modules/bma/lib/controllers/transactions.ts +++ b/app/modules/bma/lib/controllers/transactions.ts @@ -28,6 +28,10 @@ import { DBTx } from "../../../../lib/db/DBTx"; const http2raw = require("../http2raw"); export class TransactionBinding extends AbstractController { + get medianTimeOffset(): number { + return (this.conf.avgGenTime * this.conf.medianTimeBlocks) / 2; + } + async parseTransaction(req: any): Promise<HttpTransactionPending> { const res = await this.pushEntity( req, @@ -106,10 +110,11 @@ export class TransactionBinding extends AbstractController { const pubkey = await ParametersService.getPubkeyP(req); const from = await ParametersService.getFromP(req); const to = await ParametersService.getToP(req); + const medianTimeOffset = this.medianTimeOffset || 0; // Need to convert time into medianTime, because the GVA module uses median_time const history = await this.server.dal.getTxHistoryByPubkeyBetweenTimes( pubkey, - +from, - +to + +from - medianTimeOffset, + +to - medianTimeOffset ); return this.toHttpTxHistory(pubkey, history); } diff --git a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts index f173627cc4a47fedbb42f6dfac25892c8236706e..e00e9ef2f71cc73d66db9665ab045ca0165748cf 100644 --- a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts +++ b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts @@ -150,7 +150,7 @@ export class RemoteSynchronizer extends AbstractSynchronizer { const host = access.host; const port = access.port; const path = access.path; - logger.info(`Connecting to address ${host} :${port}...`); + logger.info(`Connecting to address ${host}:${port}${path||''}...`); // If we know this is a WS2P connection, don't try BMA if (access.isWS2P !== true) { @@ -158,21 +158,16 @@ const contacter = await connect( PeerDTO.fromJSONObject({ endpoints: [ - `BASIC_MERKLED_API ${host} ${port}${ - (path && " " + path) || "" - }`, + `BASIC_MERKLED_API ${host} ${port}` + + ((path && ' ' + path) || ''), ], }), 3000 ); peering = await contacter.getPeer(); api = new BMARemoteContacter(contacter); - endpoint = - "BASIC_MERKLED_API " + - host + - " " + - port + - ((path && " " + path) ||
""); + endpoint = `BASIC_MERKLED_API ${host} ${port}` + + ((path && ' ' + path) || ''); } catch (e) {} } @@ -181,7 +176,7 @@ export class RemoteSynchronizer extends AbstractSynchronizer { const pair = new Key(keypair.pub, keypair.sec); const connection = WS2PConnection.newConnectionToAddress( 1, - `ws://${host}:${port}${(path && " " + path) || ""}`, + `ws://${host}:${port}${path || ''}`, new (class SyncMessageHandler implements WS2PMessageHandler { async answerToRequest( json: any, diff --git a/server.ts b/server.ts index 76c6004500c1d19e20ae05584587ccc50c81e3db..d5c4b7e5d30a25d83bfa321c83c52afc8afd2cbd 100644 --- a/server.ts +++ b/server.ts @@ -83,6 +83,8 @@ export class Server extends stream.Duplex implements HookableServer { keyPair:any sign:any blockchain:any + sqliteDBs: {[path: string]: SQLiteDriver} = {}; + levelDBs: {[path: string]: LevelUp} = {}; MerkleService:(req:any, merkle:any, valueCoroutine:any) => any IdentityService:IdentityService @@ -156,14 +158,32 @@ export class Server extends stream.Duplex implements HookableServer { async plugFileSystem() { logger.debug('Plugging file system...'); const params = await this.paramsP - this.dal = new FileDAL(params, async (dbName: string): Promise<SQLiteDriver> => { - return Directory.getHomeDB(this.memoryOnly, dbName, params.home) - }, async (dbName: string): Promise<LevelUp> => { - return Directory.getHomeLevelDB(this.memoryOnly, dbName, params.home) - }, ) + this.dal = new FileDAL(params, + (dbName: string) => this.getSQLiteDB(dbName, params.home), + (dbName: string) => this.getLevelDB(dbName, params.home)) await this.onPluggedFSHook() } + async getSQLiteDB(dbName: string, home: string) { + // Check in cach (useful to avoid migration task to create a new driver on the same DB file) + let driver: SQLiteDriver = this.sqliteDBs[dbName]; + if (!driver || driver.closed) { + driver = await Directory.getHomeDB(this.memoryOnly, dbName, home); + this.sqliteDBs[dbName] = driver; + } + return driver; + } + + async getLevelDB(dbName: string, home: string) { + // Check in cach (useful to avoid migration task to create a new driver on the same DB file) + let driver: LevelUp = this.levelDBs[dbName]; + if (!driver || driver.isClosed()) { + driver = await Directory.getHomeLevelDB(this.memoryOnly, dbName, home); + this.levelDBs[dbName] = driver; + } + return driver; + } + async unplugFileSystem() { logger.debug('Unplugging file system...'); await this.dal.close() diff --git a/test/dal/basic-dal-tests.ts b/test/dal/basic-dal-tests.ts index 3692640125fed0f48cdd96f1cb9945fd1985a809..ccc4ffd264927a573939c862e8688d930d80ffd4 100644 --- a/test/dal/basic-dal-tests.ts +++ b/test/dal/basic-dal-tests.ts @@ -113,7 +113,7 @@ describe("DAL", function(){ return fileDAL.saveConf({ currency: "meta_brouzouf" } as any); }) - it('should have DB version 21', async () => { + it('should have last DB version', async () => { let version = await fileDAL.getDBVersion(); should.exist(version); version.should.equal(constants.CURRENT_DB_VERSION); diff --git a/test/integration/transactions/transactions-history.ts b/test/integration/transactions/transactions-history.ts new file mode 100644 index 0000000000000000000000000000000000000000..1eb345f5eea807b3137b07a592c44e0ead8d4a28 --- /dev/null +++ b/test/integration/transactions/transactions-history.ts @@ -0,0 +1,173 @@ +// Source file from duniter: Crypto-currency software to manage libre currency such as Äž1 +// Copyright (C) 2018 Cedric Moreau <cem.moreau@gmail.com> +// +// This program is free software: you can redistribute it 
and/or modify +// it under the terms of the GNU Affero General Public License as published by +// the Free Software Foundation, either version 3 of the License, or +// (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. + +import {TestUser} from "../tools/TestUser" +import {CommonConstants} from "../../../app/lib/common-libs/constants" +import {NewTestingServer, TestingServer} from "../tools/toolbox" +import {HttpBlock, HttpTxHistory} from "../../../app/modules/bma/lib/dtos" +import {Underscore} from "../../../app/lib/common-libs/underscore"; + +const should = require('should'); + +let s1:TestingServer, cat1:TestUser, tac1:TestUser + +describe("Transactions history", function() { + + const now = 1500000000 + const conf = { + udTime0: now, + dt: 30, + avgGenTime: 5000, + medianTimeBlocks: 2 + }; + + before(async () => { + + s1 = NewTestingServer(Underscore.extend({ + currency: 'currency_one', + pair: { + pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', + sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' + } + }, conf)); + + cat1 = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 }); + tac1 = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, { server: s1 }); + + await s1.prepareForNetwork(); + + const now = parseInt(String(Date.now() / 1000)) + + // Publishing identities + await cat1.createIdentity(); + await tac1.createIdentity(); + await cat1.cert(tac1); + await tac1.cert(cat1); + await cat1.join(); + await tac1.join(); + await s1.commit(); + await s1.commit({ + time: now + conf.avgGenTime + }); + await s1.commit(); + await cat1.sendMoney(20, tac1); + }) + + after(() => { + return Promise.all([ + s1.closeCluster() + ]) + }) + + it('sending transactions should exist in /tx/history/:pubkey/pending', () => s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/pending', (res:HttpTxHistory) => { + res.history.should.have.property('sending').length(1); + res.history.should.have.property('pending').length(0); + })); + + it('pending transactions should exist in /tx/history/:pubkey/pending', () => s1.expect('/tx/history/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc/pending', (res:HttpTxHistory) => { + res.history.should.have.property('sending').length(0); + res.history.should.have.property('pending').length(1); + })); + + it('sent and received transactions should exist', async () => { + await s1.commit(); + + // cat1 pending should be empty + await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/pending', (res:HttpTxHistory) => { + res.history.should.have.property('sending').length(0); + res.history.should.have.property('pending').length(0); + }); + // cat1 sent should have one element + await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(1); + res.history.should.have.property('received').length(0); + }); + // tac1 sending should be empty + await
s1.expect('/tx/history/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc/pending', (res:HttpTxHistory) => { + res.history.should.have.property('sending').length(0); + res.history.should.have.property('pending').length(0); + }); + // tac1 received should have one element + await s1.expect('/tx/history/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(0); + res.history.should.have.property('received').length(1); + }); + }) + + it('get transactions by blocks slice', async () => { + + const firstBlock = await s1.commit(); + + // cat1 sent should have one element + await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/blocks/0/' + firstBlock.number, (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(1); + res.history.should.have.property('received').length(0); + }); + + // Add a pending TX from tac1 -> cat1 + await s1.commit({ + time: firstBlock.time + conf.avgGenTime + }); + await tac1.sendMoney(10, cat1); + const secondBlock = await s1.commit(); + + // Should not appear in sliced history + await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/blocks/0/' + firstBlock.number, (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(1); + res.history.should.have.property('received').length(0); + }); + await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/blocks/' + (firstBlock.number + 1) + '/' + secondBlock.number, (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(0); + res.history.should.have.property('received').length(1); + }); + + // Whole history + await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(1); + res.history.should.have.property('received').length(1); + }); + }) + + it('get transactions by times slice', async () => { + + const medianTimeOffset = conf.avgGenTime * conf.medianTimeBlocks / 2; + const firstBlock = await s1.commit(); + const startTime = firstBlock.medianTime + medianTimeOffset; + + // Should not have TX yet + await s1.expect(`/tx/history/${cat1.pub}/times/${startTime}/${startTime + conf.avgGenTime - 1}`, (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(0); + res.history.should.have.property('received').length(0); + }); + + // Add a pending TX from tac1 -> cat1 + await tac1.sendMoney(10, cat1); + const secondBlock = await s1.commit({ + time: firstBlock.time + conf.avgGenTime + }); + should(secondBlock).property('time').greaterThan(firstBlock.time); + const secondTime = secondBlock.medianTime + medianTimeOffset; + + // Previous range (before TX) should still be empty + await s1.expect(`/tx/history/${cat1.pub}/times/${startTime}/${secondTime - 1}`, (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(0); + res.history.should.have.property('received').length(0); + }); + + // Should appear in next range + await s1.expect(`/tx/history/${cat1.pub}/times/${secondTime}/${secondTime + conf.avgGenTime}`, (res:HttpTxHistory) => { + res.history.should.have.property('sent').length(0); + res.history.should.have.property('received').length(1); + }); + }) +})
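Two closing sketches. First, the computed-column rule applied by `onBeforeInsert()` above, restated as a standalone function for illustration: `issuer` is only filled for single-issuer transactions, and `recipient` only when the outputs reduce to a single non-issuer pubkey, so a change output going back to the issuer is not counted as a reception; whenever a column stays `NULL`, the history queries fall back to the old `LIKE '%pubkey%'` scan.

```ts
// Standalone restatement of SqliteTransactions.onBeforeInsert(), for illustration.
function computeIssuerRecipient(
  issuers: string[],
  recipients: string[]
): { issuer: string | null; recipient: string | null } {
  // The single issuer, or null for multi-issuer transactions
  const issuer = issuers.length === 1 ? issuers[0] : null;
  // Ignore the issuer's own change output when looking for the recipient
  const others = !issuer ? recipients : recipients.filter((r) => r !== issuer);
  const recipient = others.length === 1 ? others[0] : null;
  return { issuer, recipient };
}

// cat pays tac; the change output returns to cat:
computeIssuerRecipient(["catPub"], ["tacPub", "catPub"]);
// -> { issuer: "catPub", recipient: "tacPub" }

// Several distinct recipients: cannot be reduced, queries fall back to LIKE:
computeIssuerRecipient(["catPub"], ["tacPub", "ticPub", "catPub"]);
// -> { issuer: "catPub", recipient: null }
```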
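Second, the time-window conversion behind the new `medianTimeOffset` getter in `TransactionBinding`: written transactions are indexed by `blockstampTime` (a median time), while callers of `/tx/history/:pubkey/times/:from/:to` pass wall-clock bounds. Median time trails block time by roughly `avgGenTime * medianTimeBlocks / 2` seconds, hence the subtraction. A numeric check, using the conf values of the new integration test (the helper function is illustrative only):

```ts
// Conf values from transactions-history.ts
const avgGenTime = 5000;
const medianTimeBlocks = 2;

// Same formula as TransactionBinding.medianTimeOffset
const medianTimeOffset = (avgGenTime * medianTimeBlocks) / 2; // = 5000

// A caller asking for [from, to] in wall-clock time is actually served the
// median-time window stored in the txs table:
function toMedianTimeRange(from: number, to: number): [number, number] {
  return [from - medianTimeOffset, to - medianTimeOffset];
}

toMedianTimeRange(1500010000, 1500015000); // -> [1500005000, 1500010000]
```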