diff --git a/.eslintcache b/.eslintcache index 8f3e8b2b87aeeeafefd1daf5c0bbf8569fd875a1..cc575baefb0ef0c5b33cef11e140325aa2511ca5 100644 --- a/.eslintcache +++ b/.eslintcache @@ -1 +1 @@ -{"/var/www/html/duniter/app/lib/common-libs/index.ts":{"size":959,"mtime":1587206548928,"hashOfConfig":"1m0359q","results":{"filePath":"/var/www/html/duniter/app/lib/common-libs/index.ts","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0}},"/var/www/html/duniter/app/lib/common-libs/moment.ts":{"size":736,"mtime":1587206548928,"hashOfConfig":"1m0359q","results":{"filePath":"/var/www/html/duniter/app/lib/common-libs/moment.ts","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0,"output":"// Source file from duniter: Crypto-currency software to manage libre currency such as Ğ1\n// Copyright (C) 2018 Cedric Moreau <cem.moreau@gmail.com>\n//\n// This program is free software: you can redistribute it and/or modify\n// it under the terms of the GNU Affero General Public License as published by\n// the Free Software Foundation, either version 3 of the License, or\n// (at your option) any later version.\n//\n// This program is distributed in the hope that it will be useful,\n// but WITHOUT ANY WARRANTY; without even the implied warranty of\n// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the\n// GNU Affero General Public License for more details.\n\nconst _moment_ = require(\"moment\")\n\nexport const moment = _moment_\n"}}} \ No newline at end of file +{"/var/www/html/duniter/app/lib/common-libs/index.ts":{"size":959,"mtime":1587206548928,"hashOfConfig":"1m0359q","results":{"filePath":"/var/www/html/duniter/app/lib/common-libs/index.ts","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0}},"/var/www/html/duniter/app/lib/common-libs/moment.ts":{"size":739,"mtime":1587286756270,"hashOfConfig":"1m0359q","results":{"filePath":"/var/www/html/duniter/app/lib/common-libs/moment.ts","messages":[],"errorCount":0,"warningCount":0,"fixableErrorCount":0,"fixableWarningCount":0}}} \ No newline at end of file diff --git a/app/lib/blockchain/DuniterBlockchain.ts b/app/lib/blockchain/DuniterBlockchain.ts index 6e8955c17438d3f67e698c91b82bd272284b9dff..691a57128a07991d7929d879f9379ab286d8629e 100644 --- a/app/lib/blockchain/DuniterBlockchain.ts +++ b/app/lib/blockchain/DuniterBlockchain.ts @@ -19,37 +19,40 @@ import { Indexer, MindexEntry, SimpleSindexEntryForWallet, - SimpleUdEntryForWallet -} from "../indexer" -import {ConfDTO} from "../dto/ConfDTO" -import {BlockDTO} from "../dto/BlockDTO" -import {DBHead} from "../db/DBHead" -import {DBBlock} from "../db/DBBlock" -import {CHECK} from "../rules/index" -import {RevocationDTO} from "../dto/RevocationDTO" -import {IdentityDTO} from "../dto/IdentityDTO" -import {CertificationDTO} from "../dto/CertificationDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {TransactionDTO} from "../dto/TransactionDTO" -import {CommonConstants} from "../common-libs/constants" -import {FileDAL} from "../dal/fileDAL" -import {NewLogger} from "../logger" -import {DBTx} from "../db/DBTx" -import {Underscore} from "../common-libs/underscore" -import {OtherConstants} from "../other_constants" -import {MonitorExecutionTime} from "../debug/MonitorExecutionTime" -import {Wot} from "dubp-wot-rs" -import { Directory } from "../system/directory" + SimpleUdEntryForWallet, +} from "../indexer"; +import { ConfDTO } from "../dto/ConfDTO"; +import { BlockDTO } from 
"../dto/BlockDTO"; +import { DBHead } from "../db/DBHead"; +import { DBBlock } from "../db/DBBlock"; +import { CHECK } from "../rules/index"; +import { RevocationDTO } from "../dto/RevocationDTO"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { CertificationDTO } from "../dto/CertificationDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { CommonConstants } from "../common-libs/constants"; +import { FileDAL } from "../dal/fileDAL"; +import { NewLogger } from "../logger"; +import { DBTx } from "../db/DBTx"; +import { Underscore } from "../common-libs/underscore"; +import { OtherConstants } from "../other_constants"; +import { MonitorExecutionTime } from "../debug/MonitorExecutionTime"; +import { Wot } from "dubp-wot-rs"; +import { Directory } from "../system/directory"; export class DuniterBlockchain { - - static async checkBlock(block:BlockDTO, withPoWAndSignature:boolean, conf: ConfDTO, dal:FileDAL) { - const index = Indexer.localIndex(block, conf) + static async checkBlock( + block: BlockDTO, + withPoWAndSignature: boolean, + conf: ConfDTO, + dal: FileDAL + ) { + const index = Indexer.localIndex(block, conf); if (withPoWAndSignature) { - await CHECK.ASYNC.ALL_LOCAL(block, conf, index) - } - else { - await CHECK.ASYNC.ALL_LOCAL_BUT_POW(block, conf, index) + await CHECK.ASYNC.ALL_LOCAL(block, conf, index); + } else { + await CHECK.ASYNC.ALL_LOCAL_BUT_POW(block, conf, index); } const HEAD = await Indexer.completeGlobalScope(block, conf, index, dal); const HEAD_1 = await dal.bindexDAL.head(1); @@ -58,112 +61,150 @@ export class DuniterBlockchain { const sindex = Indexer.sindex(index); const cindex = Indexer.cindex(index); // BR_G49 - if (Indexer.ruleVersion(HEAD, HEAD_1) === false) throw Error('ruleVersion'); + if (Indexer.ruleVersion(HEAD, HEAD_1) === false) throw Error("ruleVersion"); // BR_G50 - if (Indexer.ruleBlockSize(HEAD) === false) throw Error('ruleBlockSize'); + if (Indexer.ruleBlockSize(HEAD) === false) throw Error("ruleBlockSize"); // BR_G98 - if (Indexer.ruleCurrency(block, HEAD) === false) throw Error('ruleCurrency'); + if (Indexer.ruleCurrency(block, HEAD) === false) + throw Error("ruleCurrency"); // BR_G51 if (Indexer.ruleNumber(block, HEAD) === false) { - throw Error('ruleNumber') + throw Error("ruleNumber"); } // BR_G52 - if (Indexer.rulePreviousHash(block, HEAD) === false) throw Error('rulePreviousHash'); + if (Indexer.rulePreviousHash(block, HEAD) === false) + throw Error("rulePreviousHash"); // BR_G53 - if (Indexer.rulePreviousIssuer(block, HEAD) === false) throw Error('rulePreviousIssuer'); + if (Indexer.rulePreviousIssuer(block, HEAD) === false) + throw Error("rulePreviousIssuer"); // BR_G101 - if (Indexer.ruleIssuerIsMember(HEAD) === false) throw Error('ruleIssuerIsMember'); + if (Indexer.ruleIssuerIsMember(HEAD) === false) + throw Error("ruleIssuerIsMember"); // BR_G54 - if (Indexer.ruleIssuersCount(block, HEAD) === false) throw Error('ruleIssuersCount'); + if (Indexer.ruleIssuersCount(block, HEAD) === false) + throw Error("ruleIssuersCount"); // BR_G55 - if (Indexer.ruleIssuersFrame(block, HEAD) === false) throw Error('ruleIssuersFrame'); + if (Indexer.ruleIssuersFrame(block, HEAD) === false) + throw Error("ruleIssuersFrame"); // BR_G56 - if (Indexer.ruleIssuersFrameVar(block, HEAD) === false) throw Error('ruleIssuersFrameVar'); + if (Indexer.ruleIssuersFrameVar(block, HEAD) === false) + throw Error("ruleIssuersFrameVar"); // BR_G57 if (Indexer.ruleMedianTime(block, HEAD) === 
false) { - throw Error('ruleMedianTime') + throw Error("ruleMedianTime"); } // BR_G58 - if (Indexer.ruleDividend(block, HEAD) === false) throw Error('ruleDividend'); + if (Indexer.ruleDividend(block, HEAD) === false) + throw Error("ruleDividend"); // BR_G59 - if (Indexer.ruleUnitBase(block, HEAD) === false) throw Error('ruleUnitBase'); + if (Indexer.ruleUnitBase(block, HEAD) === false) + throw Error("ruleUnitBase"); // BR_G60 - if (Indexer.ruleMembersCount(block, HEAD) === false) throw Error('ruleMembersCount'); + if (Indexer.ruleMembersCount(block, HEAD) === false) + throw Error("ruleMembersCount"); // BR_G61 - if (Indexer.rulePowMin(block, HEAD) === false) throw Error('rulePowMin'); + if (Indexer.rulePowMin(block, HEAD) === false) throw Error("rulePowMin"); if (withPoWAndSignature) { // BR_G62 - if (Indexer.ruleProofOfWork(HEAD) === false) throw Error('ruleProofOfWork'); + if (Indexer.ruleProofOfWork(HEAD) === false) + throw Error("ruleProofOfWork"); } // BR_G63 - if (Indexer.ruleIdentityWritability(iindex, conf) === false) throw Error('ruleIdentityWritability'); + if (Indexer.ruleIdentityWritability(iindex, conf) === false) + throw Error("ruleIdentityWritability"); // BR_G64 - if (Indexer.ruleMembershipWritability(mindex, conf) === false) throw Error('ruleMembershipWritability'); + if (Indexer.ruleMembershipWritability(mindex, conf) === false) + throw Error("ruleMembershipWritability"); // BR_G108 - if (Indexer.ruleMembershipPeriod(mindex) === false) throw Error('ruleMembershipPeriod'); + if (Indexer.ruleMembershipPeriod(mindex) === false) + throw Error("ruleMembershipPeriod"); // BR_G65 - if (Indexer.ruleCertificationWritability(cindex, conf) === false) throw Error('ruleCertificationWritability'); + if (Indexer.ruleCertificationWritability(cindex, conf) === false) + throw Error("ruleCertificationWritability"); // BR_G66 - if (Indexer.ruleCertificationStock(cindex, conf) === false) throw Error('ruleCertificationStock'); + if (Indexer.ruleCertificationStock(cindex, conf) === false) + throw Error("ruleCertificationStock"); // BR_G67 - if (Indexer.ruleCertificationPeriod(cindex) === false) throw Error('ruleCertificationPeriod'); + if (Indexer.ruleCertificationPeriod(cindex) === false) + throw Error("ruleCertificationPeriod"); // BR_G68 - if (Indexer.ruleCertificationFromMember(HEAD, cindex) === false) throw Error('ruleCertificationFromMember'); + if (Indexer.ruleCertificationFromMember(HEAD, cindex) === false) + throw Error("ruleCertificationFromMember"); // BR_G69 - if (Indexer.ruleCertificationToMemberOrNewcomer(cindex) === false) throw Error('ruleCertificationToMemberOrNewcomer'); + if (Indexer.ruleCertificationToMemberOrNewcomer(cindex) === false) + throw Error("ruleCertificationToMemberOrNewcomer"); // BR_G70 - if (Indexer.ruleCertificationToLeaver(cindex) === false) throw Error('ruleCertificationToLeaver'); + if (Indexer.ruleCertificationToLeaver(cindex) === false) + throw Error("ruleCertificationToLeaver"); // BR_G71 if (Indexer.ruleCertificationReplay(cindex) === false) { - throw Error('ruleCertificationReplay') + throw Error("ruleCertificationReplay"); } // BR_G72 - if (Indexer.ruleCertificationSignature(cindex) === false) throw Error('ruleCertificationSignature'); + if (Indexer.ruleCertificationSignature(cindex) === false) + throw Error("ruleCertificationSignature"); // BR_G73 - if (Indexer.ruleIdentityUIDUnicity(iindex) === false) throw Error('ruleIdentityUIDUnicity'); + if (Indexer.ruleIdentityUIDUnicity(iindex) === false) + throw Error("ruleIdentityUIDUnicity"); // BR_G74 - if 
(Indexer.ruleIdentityPubkeyUnicity(iindex) === false) throw Error('ruleIdentityPubkeyUnicity'); + if (Indexer.ruleIdentityPubkeyUnicity(iindex) === false) + throw Error("ruleIdentityPubkeyUnicity"); // BR_G75 - if (Indexer.ruleMembershipSuccession(mindex) === false) throw Error('ruleMembershipSuccession'); + if (Indexer.ruleMembershipSuccession(mindex) === false) + throw Error("ruleMembershipSuccession"); // BR_G76 - if (Indexer.ruleMembershipDistance(HEAD, mindex) === false) throw Error('ruleMembershipDistance'); + if (Indexer.ruleMembershipDistance(HEAD, mindex) === false) + throw Error("ruleMembershipDistance"); // BR_G77 - if (Indexer.ruleMembershipOnRevoked(mindex) === false) throw Error('ruleMembershipOnRevoked'); + if (Indexer.ruleMembershipOnRevoked(mindex) === false) + throw Error("ruleMembershipOnRevoked"); // BR_G78 - if (Indexer.ruleMembershipJoinsTwice(mindex) === false) throw Error('ruleMembershipJoinsTwice'); + if (Indexer.ruleMembershipJoinsTwice(mindex) === false) + throw Error("ruleMembershipJoinsTwice"); // BR_G79 - if (Indexer.ruleMembershipEnoughCerts(mindex) === false) throw Error('ruleMembershipEnoughCerts'); + if (Indexer.ruleMembershipEnoughCerts(mindex) === false) + throw Error("ruleMembershipEnoughCerts"); // BR_G80 - if (Indexer.ruleMembershipLeaverIsMember(mindex) === false) throw Error('ruleMembershipLeaverIsMember'); + if (Indexer.ruleMembershipLeaverIsMember(mindex) === false) + throw Error("ruleMembershipLeaverIsMember"); // BR_G81 if (Indexer.ruleMembershipActiveIsMember(mindex) === false) { - throw Error('ruleMembershipActiveIsMember') + throw Error("ruleMembershipActiveIsMember"); } // BR_G82 - if (Indexer.ruleMembershipRevokedIsMember(mindex) === false) throw Error('ruleMembershipRevokedIsMember'); + if (Indexer.ruleMembershipRevokedIsMember(mindex) === false) + throw Error("ruleMembershipRevokedIsMember"); // BR_G83 - if (Indexer.ruleMembershipRevokedSingleton(mindex) === false) throw Error('ruleMembershipRevokedSingleton'); + if (Indexer.ruleMembershipRevokedSingleton(mindex) === false) + throw Error("ruleMembershipRevokedSingleton"); // BR_G84 - if (Indexer.ruleMembershipRevocationSignature(mindex) === false) throw Error('ruleMembershipRevocationSignature'); + if (Indexer.ruleMembershipRevocationSignature(mindex) === false) + throw Error("ruleMembershipRevocationSignature"); // BR_G85 - if (Indexer.ruleMembershipExcludedIsMember(iindex) === false) throw Error('ruleMembershipExcludedIsMember'); + if (Indexer.ruleMembershipExcludedIsMember(iindex) === false) + throw Error("ruleMembershipExcludedIsMember"); // BR_G86 if ((await Indexer.ruleToBeKickedArePresent(iindex, dal)) === false) { - throw Error('ruleToBeKickedArePresent') + throw Error("ruleToBeKickedArePresent"); } // BR_G103 - if (Indexer.ruleTxWritability(sindex) === false) throw Error('ruleTxWritability'); + if (Indexer.ruleTxWritability(sindex) === false) + throw Error("ruleTxWritability"); // BR_G87 - if (Indexer.ruleInputIsAvailable(sindex) === false) throw Error('ruleInputIsAvailable'); + if (Indexer.ruleInputIsAvailable(sindex) === false) + throw Error("ruleInputIsAvailable"); // BR_G88 - if (Indexer.ruleInputIsUnlocked(sindex) === false) throw Error('ruleInputIsUnlocked'); + if (Indexer.ruleInputIsUnlocked(sindex) === false) + throw Error("ruleInputIsUnlocked"); // BR_G89 - if (Indexer.ruleInputIsTimeUnlocked(sindex) === false) throw Error('ruleInputIsTimeUnlocked'); + if (Indexer.ruleInputIsTimeUnlocked(sindex) === false) + throw Error("ruleInputIsTimeUnlocked"); // BR_G90 - if 
(Indexer.ruleOutputBase(sindex, HEAD_1) === false) throw Error('ruleOutputBase'); + if (Indexer.ruleOutputBase(sindex, HEAD_1) === false) + throw Error("ruleOutputBase"); // Check document's coherence - const matchesList = (regexp:RegExp, list:string[]) => { + const matchesList = (regexp: RegExp, list: string[]) => { let i = 0; let found = ""; while (!found && i < list.length) { @@ -171,16 +212,16 @@ export class DuniterBlockchain { i++; } return found; - } + }; const isMember = await dal.isMember(block.issuer); if (!isMember) { if (block.number == 0) { - if (!matchesList(new RegExp('^' + block.issuer + ':'), block.joiners)) { - throw Error('Block not signed by the root members'); + if (!matchesList(new RegExp("^" + block.issuer + ":"), block.joiners)) { + throw Error("Block not signed by the root members"); } } else { - throw Error('Block must be signed by an existing member'); + throw Error("Block must be signed by an existing member"); } } @@ -188,22 +229,41 @@ export class DuniterBlockchain { // Check the local rules // Enrich with the global index // Check the global rules - return { index, HEAD } + return { index, HEAD }; } - static async pushTheBlock(obj:BlockDTO, index:IndexEntry[], HEAD:DBHead | null, conf:ConfDTO, dal:FileDAL, logger:any, trim = true) { + static async pushTheBlock( + obj: BlockDTO, + index: IndexEntry[], + HEAD: DBHead | null, + conf: ConfDTO, + dal: FileDAL, + logger: any, + trim = true + ) { const start = Date.now(); - const block = BlockDTO.fromJSONObject(obj) + const block = BlockDTO.fromJSONObject(obj); try { const currentBlock = await dal.getCurrentBlockOrNull(); block.fork = false; - const added = await this.saveBlockData(currentBlock, block, conf, dal, logger, index, HEAD, trim); - - logger.info('Block #' + block.number + ' added to the blockchain in %s ms', (Date.now() - start)); - - return BlockDTO.fromJSONObject(added) - } - catch(err) { + const added = await this.saveBlockData( + currentBlock, + block, + conf, + dal, + logger, + index, + HEAD, + trim + ); + + logger.info( + "Block #" + block.number + " added to the blockchain in %s ms", + Date.now() - start + ); + + return BlockDTO.fromJSONObject(added); + } catch (err) { throw err; } @@ -213,7 +273,16 @@ export class DuniterBlockchain { // await supra.recordIndex(index) } - static async saveBlockData(current:DBBlock|null, block:BlockDTO, conf:ConfDTO, dal:FileDAL, logger:any, index:IndexEntry[], HEAD:DBHead | null, trim: boolean) { + static async saveBlockData( + current: DBBlock | null, + block: BlockDTO, + conf: ConfDTO, + dal: FileDAL, + logger: any, + index: IndexEntry[], + HEAD: DBHead | null, + trim: boolean + ) { if (block.number == 0) { await this.saveParametersForRoot(block, conf, dal); } @@ -225,20 +294,20 @@ export class DuniterBlockchain { // Save indexes await dal.bindexDAL.insert(indexes.HEAD); - await dal.flushIndexes(indexes) + await dal.flushIndexes(indexes); // Create/Update nodes in wotb await this.updateMembers(block, dal); // Update the wallets' blances - await this.updateWallets(indexes.sindex, indexes.dividends, dal) + await this.updateWallets(indexes.sindex, indexes.dividends, dal); if (trim) { - await DuniterBlockchain.trimIndexes(dal, indexes.HEAD, conf) + await DuniterBlockchain.trimIndexes(dal, indexes.HEAD, conf); } - const dbb = DBBlock.fromBlockDTO(block) - this.updateBlocksComputedVars(current, dbb) + const dbb = DBBlock.fromBlockDTO(block); + this.updateBlocksComputedVars(current, dbb); // --> Update links await dal.updateWotbLinks(indexes.cindex); @@ -256,19 +325,23 
@@ export class DuniterBlockchain { // Saves the block (DAL) await dal.saveBlock(dbb); - + // Save wot file if (!dal.fs.isMemoryOnly()) { let wotbFilepath = await Directory.getWotbFilePath(dal.rootPath); dal.wotb.writeInFile(wotbFilepath); - } + } - return dbb + return dbb; } - static async saveParametersForRoot(block:BlockDTO, conf:ConfDTO, dal:FileDAL) { + static async saveParametersForRoot( + block: BlockDTO, + conf: ConfDTO, + dal: FileDAL + ) { if (block.parameters) { - const bconf = BlockDTO.getConf(block) + const bconf = BlockDTO.getConf(block); conf.c = bconf.c; conf.dt = bconf.dt; conf.ud0 = bconf.ud0; @@ -297,73 +370,128 @@ export class DuniterBlockchain { } @MonitorExecutionTime() - static async createNewcomers(iindex:IindexEntry[], dal:FileDAL, logger:any, instance?: Wot) { - const wotb = instance || dal.wotb + static async createNewcomers( + iindex: IindexEntry[], + dal: FileDAL, + logger: any, + instance?: Wot + ) { + const wotb = instance || dal.wotb; for (const i of iindex) { if (i.op == CommonConstants.IDX_CREATE) { - const entry = i as FullIindexEntry + const entry = i as FullIindexEntry; // Reserves a wotb ID entry.wotb_id = wotb.addNode(); - logger.trace('%s was affected wotb_id %s', entry.uid, entry.wotb_id); + logger.trace("%s was affected wotb_id %s", entry.uid, entry.wotb_id); // Remove from the sandbox any other identity with the same pubkey/uid, since it has now been reserved. - await dal.removeUnWrittenWithPubkey(entry.pub) - await dal.removeUnWrittenWithUID(entry.uid) + await dal.removeUnWrittenWithPubkey(entry.pub); + await dal.removeUnWrittenWithUID(entry.uid); } } } - static async updateMembers(block:BlockDTO, dal:FileDAL, instance?: Wot) { - const wotb = instance || dal.wotb + static async updateMembers(block: BlockDTO, dal: FileDAL, instance?: Wot) { + const wotb = instance || dal.wotb; // Joiners (come back) for (const inlineMS of block.joiners) { - let ms = MembershipDTO.fromInline(inlineMS) + let ms = MembershipDTO.fromInline(inlineMS); const idty = await dal.getWrittenIdtyByPubkeyForWotbID(ms.issuer); wotb.setEnabled(true, idty.wotb_id); - await dal.dividendDAL.setMember(true, ms.issuer) + await dal.dividendDAL.setMember(true, ms.issuer); } // Revoked for (const inlineRevocation of block.revoked) { - let revocation = RevocationDTO.fromInline(inlineRevocation) - await dal.revokeIdentity(revocation.pubkey) + let revocation = RevocationDTO.fromInline(inlineRevocation); + await dal.revokeIdentity(revocation.pubkey); } // Excluded for (const excluded of block.excluded) { const idty = await dal.getWrittenIdtyByPubkeyForWotbID(excluded); wotb.setEnabled(false, idty.wotb_id); - await dal.dividendDAL.setMember(false, excluded) + await dal.dividendDAL.setMember(false, excluded); } } - static async updateWallets(sindex:SimpleSindexEntryForWallet[], dividends:SimpleUdEntryForWallet[], aDal:any, reverse = false, at?: number) { - const differentConditions = Underscore.uniq(sindex.map((entry) => entry.conditions).concat(dividends.map(d => d.conditions))) + static async updateWallets( + sindex: SimpleSindexEntryForWallet[], + dividends: SimpleUdEntryForWallet[], + aDal: any, + reverse = false, + at?: number + ) { + const differentConditions = Underscore.uniq( + sindex + .map((entry) => entry.conditions) + .concat(dividends.map((d) => d.conditions)) + ); for (const conditions of differentConditions) { - const udsOfKey: BasedAmount[] = dividends.filter(d => d.conditions === conditions).map(d => ({ amount: d.amount, base: d.base })) - const creates: BasedAmount[] = 
sindex.filter(entry => entry.conditions === conditions && entry.op === CommonConstants.IDX_CREATE) - const updates: BasedAmount[] = sindex.filter(entry => entry.conditions === conditions && entry.op === CommonConstants.IDX_UPDATE) - const positives = creates.concat(udsOfKey).reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0) - const negatives = updates.reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0) - const wallet = await aDal.getWallet(conditions) - let variation = positives - negatives + const udsOfKey: BasedAmount[] = dividends + .filter((d) => d.conditions === conditions) + .map((d) => ({ amount: d.amount, base: d.base })); + const creates: BasedAmount[] = sindex.filter( + (entry) => + entry.conditions === conditions && + entry.op === CommonConstants.IDX_CREATE + ); + const updates: BasedAmount[] = sindex.filter( + (entry) => + entry.conditions === conditions && + entry.op === CommonConstants.IDX_UPDATE + ); + const positives = creates + .concat(udsOfKey) + .reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0); + const negatives = updates.reduce( + (sum, src) => sum + src.amount * Math.pow(10, src.base), + 0 + ); + const wallet = await aDal.getWallet(conditions); + let variation = positives - negatives; if (reverse) { // To do the opposite operations, for a reverted block - variation *= -1 + variation *= -1; } if (OtherConstants.TRACE_BALANCES) { - if (!OtherConstants.TRACE_PARTICULAR_BALANCE || wallet.conditions.match(new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE))) { - NewLogger().trace('Balance of %s: %s (%s %s %s) at #%s', wallet.conditions, wallet.balance + variation, wallet.balance, variation < 0 ? '-' : '+', Math.abs(variation), at) + if ( + !OtherConstants.TRACE_PARTICULAR_BALANCE || + wallet.conditions.match( + new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE) + ) + ) { + NewLogger().trace( + "Balance of %s: %s (%s %s %s) at #%s", + wallet.conditions, + wallet.balance + variation, + wallet.balance, + variation < 0 ? "-" : "+", + Math.abs(variation), + at + ); } } - wallet.balance += variation - if (OtherConstants.TRACE_PARTICULAR_BALANCE && wallet.conditions.match(new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE))) { - NewLogger().trace('>>>>>>>>> WALLET = ', (wallet.balance > 0 ? '+' : '') + wallet.balance) + wallet.balance += variation; + if ( + OtherConstants.TRACE_PARTICULAR_BALANCE && + wallet.conditions.match( + new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE) + ) + ) { + NewLogger().trace( + ">>>>>>>>> WALLET = ", + (wallet.balance > 0 ? 
"+" : "") + wallet.balance + ); } - await aDal.saveWallet(wallet) + await aDal.saveWallet(wallet); } } - static async revertBlock(number:number, hash:string, dal:FileDAL, block?: DBBlock) { - - const blockstamp = [number, hash].join('-'); + static async revertBlock( + number: number, + hash: string, + dal: FileDAL, + block?: DBBlock + ) { + const blockstamp = [number, hash].join("-"); // Revert links const writtenOn = await dal.cindexDAL.getWrittenOn(blockstamp); @@ -383,34 +511,42 @@ export class DuniterBlockchain { await this.undoMembersUpdate(blockstamp, dal); // Get the money movements to revert in the balance - const REVERSE_BALANCE = true - const sindexOfBlock = await dal.sindexDAL.getWrittenOnTxs(blockstamp) + const REVERSE_BALANCE = true; + const sindexOfBlock = await dal.sindexDAL.getWrittenOnTxs(blockstamp); await dal.bindexDAL.removeBlock(blockstamp); await dal.mindexDAL.removeBlock(blockstamp); await dal.iindexDAL.removeBlock(blockstamp); await dal.cindexDAL.removeBlock(blockstamp); await dal.sindexDAL.removeBlock(blockstamp); - const { createdUDsDestroyedByRevert, consumedUDsRecoveredByRevert } = await dal.dividendDAL.revertUDs(number) + const { + createdUDsDestroyedByRevert, + consumedUDsRecoveredByRevert, + } = await dal.dividendDAL.revertUDs(number); // Then: normal updates - const previousBlock = await dal.getFullBlockOf(number - 1) + const previousBlock = await dal.getFullBlockOf(number - 1); // Set the block as SIDE block (equivalent to removal from main branch) await dal.blockDAL.setSideBlock(number, previousBlock); // Update the dividends in our wallet - await this.updateWallets([], createdUDsDestroyedByRevert, dal, REVERSE_BALANCE) - await this.updateWallets([], consumedUDsRecoveredByRevert, dal) + await this.updateWallets( + [], + createdUDsDestroyedByRevert, + dal, + REVERSE_BALANCE + ); + await this.updateWallets([], consumedUDsRecoveredByRevert, dal); // Revert the balances variations for this block - await this.updateWallets(sindexOfBlock, [], dal, REVERSE_BALANCE) + await this.updateWallets(sindexOfBlock, [], dal, REVERSE_BALANCE); // Restore block's transaction as incoming transactions if (block) { - await this.undoDeleteTransactions(block, dal) + await this.undoDeleteTransactions(block, dal); } } - static async undoMembersUpdate(blockstamp:string, dal:FileDAL) { + static async undoMembersUpdate(blockstamp: string, dal: FileDAL) { const joiners = await dal.iindexDAL.getWrittenOn(blockstamp); for (const entry of joiners) { // Undo 'join' which can be either newcomers or comebackers @@ -418,7 +554,7 @@ export class DuniterBlockchain { if (entry.member === true && entry.op === CommonConstants.IDX_UPDATE) { const idty = await dal.getWrittenIdtyByPubkeyForWotbID(entry.pub); dal.wotb.setEnabled(false, idty.wotb_id); - await dal.dividendDAL.setMember(false, entry.pub) + await dal.dividendDAL.setMember(false, entry.pub); } } const newcomers = await dal.iindexDAL.getWrittenOn(blockstamp); @@ -427,11 +563,11 @@ export class DuniterBlockchain { // => equivalent to i_index.op = 'CREATE' if (entry.op === CommonConstants.IDX_CREATE) { // Does not matter which one it really was, we pop the last X identities - NewLogger().trace('removeNode') + NewLogger().trace("removeNode"); if (dal.wotb.getWoTSize() > 0) { dal.wotb.removeNode(); } - await dal.dividendDAL.deleteMember(entry.pub) + await dal.dividendDAL.deleteMember(entry.pub); } } const excluded = await dal.iindexDAL.getWrittenOn(blockstamp); @@ -441,16 +577,16 @@ export class DuniterBlockchain { if (entry.member === false && 
entry.op === CommonConstants.IDX_UPDATE) { const idty = await dal.getWrittenIdtyByPubkeyForWotbID(entry.pub); dal.wotb.setEnabled(true, idty.wotb_id); - await dal.dividendDAL.setMember(true, entry.pub) + await dal.dividendDAL.setMember(true, entry.pub); } } } - static async undoDeleteTransactions(block:DBBlock, dal:FileDAL) { + static async undoDeleteTransactions(block: DBBlock, dal: FileDAL) { for (const obj of block.transactions) { obj.currency = block.currency; - let tx = TransactionDTO.fromJSONObject(obj) - await dal.saveTransaction(DBTx.fromTransactionDTO(tx)) + let tx = TransactionDTO.fromJSONObject(obj); + await dal.saveTransaction(DBTx.fromTransactionDTO(tx)); } } @@ -460,9 +596,9 @@ export class DuniterBlockchain { * @param block Block in which are contained the certifications to remove from sandbox. * @param dal The DAL */ - static async removeCertificationsFromSandbox(block:BlockDTO, dal:FileDAL) { + static async removeCertificationsFromSandbox(block: BlockDTO, dal: FileDAL) { for (let inlineCert of block.certifications) { - let cert = CertificationDTO.fromInline(inlineCert) + let cert = CertificationDTO.fromInline(inlineCert); let idty = await dal.getWrittenIdtyByPubkeyForHashing(cert.to); await dal.deleteCert({ from: cert.from, @@ -478,87 +614,112 @@ export class DuniterBlockchain { * @param block Block in which are contained the certifications to remove from sandbox. * @param dal The DAL */ - static async removeMembershipsFromSandbox(block:BlockDTO, dal:FileDAL) { + static async removeMembershipsFromSandbox(block: BlockDTO, dal: FileDAL) { const mss = block.joiners.concat(block.actives).concat(block.leavers); for (const inlineMS of mss) { - let ms = MembershipDTO.fromInline(inlineMS) + let ms = MembershipDTO.fromInline(inlineMS); await dal.deleteMS({ issuer: ms.issuer, - signature: ms.signature + signature: ms.signature, }); } } - static async computeToBeRevoked(mindex:MindexEntry[], dal:FileDAL) { - const revocations = Underscore.filter(mindex, (entry:MindexEntry) => !!(entry.revoked_on)) + static async computeToBeRevoked(mindex: MindexEntry[], dal: FileDAL) { + const revocations = Underscore.filter( + mindex, + (entry: MindexEntry) => !!entry.revoked_on + ); for (const revoked of revocations) { - await dal.setRevoked(revoked.pub) + await dal.setRevoked(revoked.pub); } } - static async deleteTransactions(block:BlockDTO, dal:FileDAL) { + static async deleteTransactions(block: BlockDTO, dal: FileDAL) { for (const obj of block.transactions) { obj.currency = block.currency; - const tx = TransactionDTO.fromJSONObject(obj) + const tx = TransactionDTO.fromJSONObject(obj); const txHash = tx.getHash(); await dal.removeTxByHash(txHash); } } static updateBlocksComputedVars( - current:{ unitbase:number, monetaryMass:number }|null, - block:{ number:number, unitbase:number, dividend:number|null, membersCount:number, monetaryMass:number }): void { + current: { unitbase: number; monetaryMass: number } | null, + block: { + number: number; + unitbase: number; + dividend: number | null; + membersCount: number; + monetaryMass: number; + } + ): void { // Unit Base - block.unitbase = (block.dividend && block.unitbase) || (current && current.unitbase) || 0; + block.unitbase = + (block.dividend && block.unitbase) || (current && current.unitbase) || 0; // Monetary Mass update if (current) { - block.monetaryMass = (current.monetaryMass || 0) - + (block.dividend || 0) * Math.pow(10, block.unitbase || 0) * block.membersCount; + block.monetaryMass = + (current.monetaryMass || 0) + + (block.dividend || 
0) * + Math.pow(10, block.unitbase || 0) * + block.membersCount; } else { - block.monetaryMass = 0 + block.monetaryMass = 0; } // UD Time update if (block.number == 0) { block.dividend = null; - } - else if (!block.dividend) { + } else if (!block.dividend) { block.dividend = null; } } - static async pushSideBlock(obj:BlockDTO, dal:FileDAL, logger:any) { + static async pushSideBlock(obj: BlockDTO, dal: FileDAL, logger: any) { const start = Date.now(); - const block = DBBlock.fromBlockDTO(BlockDTO.fromJSONObject(obj)) + const block = DBBlock.fromBlockDTO(BlockDTO.fromJSONObject(obj)); block.fork = true; try { // Saves the block (DAL) block.wrong = false; await dal.saveSideBlockInFile(block); - logger.info('SIDE Block #%s-%s added to the blockchain in %s ms', block.number, block.hash.substr(0, 8), (Date.now() - start)); + logger.info( + "SIDE Block #%s-%s added to the blockchain in %s ms", + block.number, + block.hash.substr(0, 8), + Date.now() - start + ); return block; } catch (err) { throw err; } } - public static async trimIndexes(dal: FileDAL, HEAD: { number: number }, conf: ConfDTO) { + public static async trimIndexes( + dal: FileDAL, + HEAD: { number: number }, + conf: ConfDTO + ) { const TAIL = await dal.bindexDAL.tail(); - const MAX_BINDEX_SIZE = requiredBindexSizeForTail(TAIL, conf) - const currentSize = HEAD.number - TAIL.number + 1 + const MAX_BINDEX_SIZE = requiredBindexSizeForTail(TAIL, conf); + const currentSize = HEAD.number - TAIL.number + 1; if (currentSize > MAX_BINDEX_SIZE) { await dal.trimIndexes(HEAD.number - MAX_BINDEX_SIZE); } } } -export function requiredBindexSizeForTail(TAIL: { issuersCount: number, issuersFrame: number }, conf: { medianTimeBlocks: number, dtDiffEval: number, forksize: number }) { +export function requiredBindexSizeForTail( + TAIL: { issuersCount: number; issuersFrame: number }, + conf: { medianTimeBlocks: number; dtDiffEval: number; forksize: number } +) { const bindexSize = [ TAIL.issuersCount, TAIL.issuersFrame, conf.medianTimeBlocks, - conf.dtDiffEval + conf.dtDiffEval, ].reduce((max, value) => { return Math.max(max, value); }, 0); - return conf.forksize + bindexSize + return conf.forksize + bindexSize; } diff --git a/app/lib/blockchain/Switcher.ts b/app/lib/blockchain/Switcher.ts index 367fafd4ebe4fcd1ab3abd132df4ffdafed63d16..f600aeedfa7bde0e465df80714c2d3cc6d942250 100644 --- a/app/lib/blockchain/Switcher.ts +++ b/app/lib/blockchain/Switcher.ts @@ -11,75 +11,92 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {BlockDTO} from "../dto/BlockDTO" -import {Underscore} from "../common-libs/underscore" +import { BlockDTO } from "../dto/BlockDTO"; +import { Underscore } from "../common-libs/underscore"; export interface SwitchBlock { - - number:number - hash:string - previousHash:string - medianTime:number + number: number; + hash: string; + previousHash: string; + medianTime: number; } export interface SwitcherDao<T extends SwitchBlock> { - - getCurrent(): Promise<T|null> - getPotentials(numberStart:number, timeStart:number, maxNumber:number): Promise<T[]> - getBlockchainBlock(number:number, hash:string): Promise<T|null> - getAbsoluteBlockInForkWindow(number:number, hash:string): Promise<T|null> - revertTo(number:number): Promise<T[]> - addBlock(block:T): Promise<T> + getCurrent(): Promise<T | null>; + getPotentials( + numberStart: number, + timeStart: number, + maxNumber: number + ): Promise<T[]>; + getBlockchainBlock(number: number, hash: string): Promise<T | null>; + getAbsoluteBlockInForkWindow(number: number, hash: string): Promise<T | null>; + revertTo(number: number): Promise<T[]>; + addBlock(block: T): Promise<T>; } export class Switcher<T extends SwitchBlock> { - constructor( - private dao:SwitcherDao<T>, - private invalidForks:string[], - private avgGenTime:number, - private forkWindowSize:number, - private switchOnHeadAdvance:number, - private logger:any = undefined) {} + private dao: SwitcherDao<T>, + private invalidForks: string[], + private avgGenTime: number, + private forkWindowSize: number, + private switchOnHeadAdvance: number, + private logger: any = undefined + ) {} /** * Looks at known blocks in the sandbox and try to follow the longest resulting chain that has at least both 3 blocks of * advance and 3 * avgGenTime of medianTime advancce. */ async tryToFork() { - const current = await this.dao.getCurrent() + const current = await this.dao.getCurrent(); if (current) { - const numberStart = current.number + this.switchOnHeadAdvance - const timeStart = current.medianTime + this.switchOnHeadAdvance * this.avgGenTime + const numberStart = current.number + this.switchOnHeadAdvance; + const timeStart = + current.medianTime + this.switchOnHeadAdvance * this.avgGenTime; // Phase 1: find potential chains - const suites = await this.findPotentialSuites(numberStart, timeStart) + const suites = await this.findPotentialSuites(numberStart, timeStart); if (suites.length) { - this.logger && this.logger.info("Fork resolution: %s potential suite(s) found...", suites.length) + this.logger && + this.logger.info( + "Fork resolution: %s potential suite(s) found...", + suites.length + ); } // Phase 2: select the best chain - let longestChain:null|T[] = await this.findLongestChain(current, suites) + let longestChain: null | T[] = await this.findLongestChain( + current, + suites + ); // Phase 3: a best exist? if (longestChain) { - const chainHEAD = longestChain[longestChain.length - 1] + const chainHEAD = longestChain[longestChain.length - 1]; // apply it if it respects the 3-3 rule - if (chainHEAD.number >= numberStart && chainHEAD.medianTime >= timeStart) { - await this.switchOnChain(longestChain) - return await this.dao.getCurrent() + if ( + chainHEAD.number >= numberStart && + chainHEAD.medianTime >= timeStart + ) { + await this.switchOnChain(longestChain); + return await this.dao.getCurrent(); } } } - return null + return null; } /** * Find all the suites' HEAD that we could potentially fork on, in the current fork window. 
* @param current */ - async findPotentialSuitesHeads(current:{ number:number, medianTime:number }) { - const numberStart = current.number - this.forkWindowSize - const timeStart = current.medianTime - this.forkWindowSize * this.avgGenTime - const suites = await this.findPotentialSuites(numberStart, timeStart) - return suites.map(suite => suite[suite.length - 1]) + async findPotentialSuitesHeads(current: { + number: number; + medianTime: number; + }) { + const numberStart = current.number - this.forkWindowSize; + const timeStart = + current.medianTime - this.forkWindowSize * this.avgGenTime; + const suites = await this.findPotentialSuites(numberStart, timeStart); + return suites.map((suite) => suite[suite.length - 1]); } /** @@ -89,52 +106,86 @@ export class Switcher<T extends SwitchBlock> { * @param timeStart The minimum medianTime of a fork block. * @returns {SwitchBlock[][]} The suites found. */ - private async findPotentialSuites(numberStart:number, timeStart:number) { - const suites:T[][] = [] - const potentials:T[] = Underscore.sortBy(await this.dao.getPotentials(numberStart, timeStart, numberStart + this.forkWindowSize), element => -element.number) - const knownForkBlocks:{ [k:string]: boolean } = {} + private async findPotentialSuites(numberStart: number, timeStart: number) { + const suites: T[][] = []; + const potentials: T[] = Underscore.sortBy( + await this.dao.getPotentials( + numberStart, + timeStart, + numberStart + this.forkWindowSize + ), + (element) => -element.number + ); + const knownForkBlocks: { [k: string]: boolean } = {}; for (const candidate of potentials) { - knownForkBlocks[BlockDTO.fromJSONObject(candidate).blockstamp] = true + knownForkBlocks[BlockDTO.fromJSONObject(candidate).blockstamp] = true; } - const invalids: { [hash:string]: T } = {} + const invalids: { [hash: string]: T } = {}; if (potentials.length) { - this.logger && this.logger.info("Fork resolution: %s potential block(s) found...", potentials.length) + this.logger && + this.logger.info( + "Fork resolution: %s potential block(s) found...", + potentials.length + ); } for (const candidate of potentials) { - const suite:T[] = [] + const suite: T[] = []; // Do not process the block if it is already known as invalid (has no fork point with current blockchain or misses // some blocks) or is already contained in a valid chain. - if (!invalids[candidate.hash] && !Switcher.suitesContains(suites, candidate)) { + if ( + !invalids[candidate.hash] && + !Switcher.suitesContains(suites, candidate) + ) { // Tries to build up a full chain that is linked to current chain by a fork point. 
- let previous:T|null = candidate, commonRootFound = false - let previousNumber:number = previous.number - 1 - let previousHash:string = previous.previousHash - while (previous && previous.number > candidate.number - this.forkWindowSize) { - suite.push(previous) - previousNumber = previous.number - 1 - previousHash = previous.previousHash - previous = null - const previousBlockstamp = [previousNumber, previousHash].join('-') + let previous: T | null = candidate, + commonRootFound = false; + let previousNumber: number = previous.number - 1; + let previousHash: string = previous.previousHash; + while ( + previous && + previous.number > candidate.number - this.forkWindowSize + ) { + suite.push(previous); + previousNumber = previous.number - 1; + previousHash = previous.previousHash; + previous = null; + const previousBlockstamp = [previousNumber, previousHash].join("-"); // We try to look at blockchain if, of course, it is not already known as a fork block // Otherwise it cost a useless DB access if (!knownForkBlocks[previousBlockstamp]) { - previous = await this.dao.getBlockchainBlock(previousNumber, previousHash) + previous = await this.dao.getBlockchainBlock( + previousNumber, + previousHash + ); } if (previous) { // Stop the loop: common block has been found - previous = null - suites.push(suite) - commonRootFound = true + previous = null; + suites.push(suite); + commonRootFound = true; } else { // Have a look in sandboxes - previous = await this.dao.getAbsoluteBlockInForkWindow(previousNumber, previousHash) + previous = await this.dao.getAbsoluteBlockInForkWindow( + previousNumber, + previousHash + ); if (previous) { - knownForkBlocks[BlockDTO.fromJSONObject(previous).blockstamp] = true - const alreadyKnownInvalidBlock = this.invalidForks.indexOf([previous.number, previous.hash].join('-')) !== -1 + knownForkBlocks[ + BlockDTO.fromJSONObject(previous).blockstamp + ] = true; + const alreadyKnownInvalidBlock = + this.invalidForks.indexOf( + [previous.number, previous.hash].join("-") + ) !== -1; if (alreadyKnownInvalidBlock) { // Incorrect = not found - this.logger && this.logger.info("Fork resolution: block #%s-%s is known as incorrect. Skipping.", previous.number, previous.hash.substr(0, 8)) - previous = null + this.logger && + this.logger.info( + "Fork resolution: block #%s-%s is known as incorrect. Skipping.", + previous.number, + previous.hash.substr(0, 8) + ); + previous = null; } } } @@ -142,18 +193,32 @@ export class Switcher<T extends SwitchBlock> { // Forget about invalid blocks if (!commonRootFound) { if (!previous) { - this.logger && this.logger.debug("Suite -> %s-%s missing block#%s-%s", candidate.number, candidate.hash.substr(0, 8), previousNumber, previousHash.substr(0, 8)) + this.logger && + this.logger.debug( + "Suite -> %s-%s missing block#%s-%s", + candidate.number, + candidate.hash.substr(0, 8), + previousNumber, + previousHash.substr(0, 8) + ); for (const b of suite) { - invalids[b.hash] = b + invalids[b.hash] = b; } } else { // The chain would be too long, we could not revert correctly the chain. 
- this.logger && this.logger.debug("Suite #%s-%s -> %s-%s out of fork window", previousNumber, previousHash.substr(0, 8), candidate.number, candidate.hash.substr(0, 8)) + this.logger && + this.logger.debug( + "Suite #%s-%s -> %s-%s out of fork window", + previousNumber, + previousHash.substr(0, 8), + candidate.number, + candidate.hash.substr(0, 8) + ); } } } } - return suites + return suites; } /** @@ -163,66 +228,102 @@ export class Switcher<T extends SwitchBlock> { * @param {SwitchBlock[][]} suites * @returns {SwitchBlock[]} */ - private async findLongestChain(current:T, suites:T[][]) { + private async findLongestChain(current: T, suites: T[][]) { if (suites.length) { - this.logger && this.logger.info("Fork resolution: HEAD = block#%s", current.number) + this.logger && + this.logger.info("Fork resolution: HEAD = block#%s", current.number); } - let longestChain:null|T[] = null - let j = 0 + let longestChain: null | T[] = null; + let j = 0; for (const s of suites) { - j++ - s.reverse() + j++; + s.reverse(); // Revert current blockchain to fork point - const forkPoint = s[0].number - 1 - const forkHead = s[s.length - 1] - this.logger && this.logger.info("Fork resolution: suite %s/%s (-> #%s-%s) revert to fork point block#%s", j, suites.length, forkHead.number, forkHead.hash.substr(0, 6), forkPoint) - const reverted = await this.dao.revertTo(s[0].number - 1) + const forkPoint = s[0].number - 1; + const forkHead = s[s.length - 1]; + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s (-> #%s-%s) revert to fork point block#%s", + j, + suites.length, + forkHead.number, + forkHead.hash.substr(0, 6), + forkPoint + ); + const reverted = await this.dao.revertTo(s[0].number - 1); // Try to add a maximum of blocks - let added = true, i = 0, successfulBlocks:T[] = [] + let added = true, + i = 0, + successfulBlocks: T[] = []; while (added && i < s.length) { try { - await this.dao.addBlock(s[i]) - this.logger && this.logger.info("Fork resolution: suite %s/%s added block#%s-%s", j, suites.length, s[i].number, s[i].hash) - successfulBlocks.push(s[i]) + await this.dao.addBlock(s[i]); + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s added block#%s-%s", + j, + suites.length, + s[i].number, + s[i].hash + ); + successfulBlocks.push(s[i]); } catch (e) { - this.invalidForks.push([s[i].number, s[i].hash].join('-')) - this.logger && this.logger.info("Fork resolution: suite %s/%s REFUSED block#%s: %s", j, suites.length, s[0].number + i, e && e.message) - added = false + this.invalidForks.push([s[i].number, s[i].hash].join("-")); + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s REFUSED block#%s: %s", + j, + suites.length, + s[0].number + i, + e && e.message + ); + added = false; } - i++ + i++; } // Pop the successfuly added blocks if (successfulBlocks.length) { - for (const b of successfulBlocks) { - this.invalidForks.push([b.number, b.hash].join('-')) + for (const b of successfulBlocks) { + this.invalidForks.push([b.number, b.hash].join("-")); } - const addedToHeadLevel = successfulBlocks[successfulBlocks.length-1].number - current.number - this.logger && this.logger.info("Fork resolution: suite %s/%s reached HEAD + %s. Now rolling back.", j, suites.length, addedToHeadLevel) - await this.dao.revertTo(forkPoint) + const addedToHeadLevel = + successfulBlocks[successfulBlocks.length - 1].number - current.number; + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s reached HEAD + %s. 
Now rolling back.", + j, + suites.length, + addedToHeadLevel + ); + await this.dao.revertTo(forkPoint); } // Push back the initial blocks that were temporarily reverted - reverted.reverse() + reverted.reverse(); for (const b of reverted) { - await this.dao.addBlock(b) + await this.dao.addBlock(b); } // Remember the chain if it is the longest (highest HEAD) among tested chains - const longestHEAD = longestChain && longestChain[longestChain.length - 1] - const successHEAD = successfulBlocks && successfulBlocks[successfulBlocks.length - 1] - if ((!longestHEAD && successHEAD) || (longestHEAD && successHEAD && longestHEAD.number < successHEAD.number)) { - longestChain = successfulBlocks + const longestHEAD = longestChain && longestChain[longestChain.length - 1]; + const successHEAD = + successfulBlocks && successfulBlocks[successfulBlocks.length - 1]; + if ( + (!longestHEAD && successHEAD) || + (longestHEAD && successHEAD && longestHEAD.number < successHEAD.number) + ) { + longestChain = successfulBlocks; } } - return longestChain + return longestChain; } /** * Switch current blockchain on another chain, by poping top blocks and replacing them by new ones. * @param {SwitchBlock[]} chain */ - private async switchOnChain(chain:T[]) { - await this.dao.revertTo(chain[0].number - 1) + private async switchOnChain(chain: T[]) { + await this.dao.revertTo(chain[0].number - 1); for (const b of chain) { - await this.dao.addBlock(b) + await this.dao.addBlock(b); } } @@ -231,14 +332,14 @@ export class Switcher<T extends SwitchBlock> { * @param {SwitchBlock[][]} suites * @param {SwitchBlock} block */ - static suitesContains<T extends SwitchBlock>(suites:T[][], block:T) { + static suitesContains<T extends SwitchBlock>(suites: T[][], block: T) { for (const suite of suites) { for (const b of suite) { if (b.number === block.number && b.hash === block.hash) { - return true + return true; } } } - return false + return false; } } diff --git a/app/lib/common-libs/array-filter.ts b/app/lib/common-libs/array-filter.ts index 134aff471d712cbf6e20d8e62f4305eccb21691c..4c38092bca5bd14b4d3d1ef057c24224bad30557 100644 --- a/app/lib/common-libs/array-filter.ts +++ b/app/lib/common-libs/array-filter.ts @@ -1,3 +1,3 @@ export function uniqFilter<T>(value: T, index: number, self: T[]) { - return self.indexOf(value) === index -} \ No newline at end of file + return self.indexOf(value) === index; +} diff --git a/app/lib/common-libs/array-prune.ts b/app/lib/common-libs/array-prune.ts index 0e262d538bcf44bf7a6727066df6b58bfd073e97..42884cbb246b76a54876c3ce39690c12b5141b81 100644 --- a/app/lib/common-libs/array-prune.ts +++ b/app/lib/common-libs/array-prune.ts @@ -1,14 +1,14 @@ export function arrayPruneAll<T>(array: T[], value: T) { if (!array || array.length === 0) { - return + return; } - let index + let index; do { - index = array.indexOf(value) + index = array.indexOf(value); if (index !== -1) { - array.splice(index, 1) + array.splice(index, 1); } - } while (index !== -1) + } while (index !== -1); } /** @@ -17,13 +17,13 @@ export function arrayPruneAll<T>(array: T[], value: T) { * @param value The value we don't want to see in our copy array. 
*/ export function arrayPruneAllCopy<T>(original: T[], value: T) { - const array = original.slice() - let index + const array = original.slice(); + let index; do { - index = array.indexOf(value) + index = array.indexOf(value); if (index !== -1) { - array.splice(index, 1) + array.splice(index, 1); } - } while (index !== -1) - return array + } while (index !== -1); + return array; } diff --git a/app/lib/common-libs/buid.ts b/app/lib/common-libs/buid.ts index d5d2be517902f352e4bb6654832bc53f57482431..c9a1fc12e617a9631dedee88c9f4117d1c4c200f 100644 --- a/app/lib/common-libs/buid.ts +++ b/app/lib/common-libs/buid.ts @@ -14,36 +14,34 @@ "use strict"; const BLOCK_UID = /^(0|[1-9]\d{0,18})-[A-F0-9]{64}$/; -const buidFunctions:any = function(number:number, hash:string) { +const buidFunctions: any = function (number: number, hash: string) { if (arguments.length === 2) { - return [number, hash].join('-'); + return [number, hash].join("-"); } if (arguments[0]) { - return [arguments[0].number, arguments[0].hash].join('-'); + return [arguments[0].number, arguments[0].hash].join("-"); } - return '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855'; -} + return "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855"; +}; -buidFunctions.fromTS = (line:string) => { - const match = line.match(/TS:(.*)/) - return (match && match[1]) || "" -} -buidFunctions.fromIdty = (idty:any) => { - return buidFunctions(idty.ts_number, idty.ts_hash) -} +buidFunctions.fromTS = (line: string) => { + const match = line.match(/TS:(.*)/); + return (match && match[1]) || ""; +}; +buidFunctions.fromIdty = (idty: any) => { + return buidFunctions(idty.ts_number, idty.ts_hash); +}; export const Buid = { - format: { - - isBuid: (value:any) => { - return (typeof value === 'string') && value.match(BLOCK_UID) ? true : false; + isBuid: (value: any) => { + return typeof value === "string" && value.match(BLOCK_UID) ? 
true : false; }, - buid: buidFunctions + buid: buidFunctions, }, - getBlockstamp: (block:{ number:number, hash:string }) => { - return [block.number, block.hash].join('-') - } + getBlockstamp: (block: { number: number; hash: string }) => { + return [block.number, block.hash].join("-"); + }, }; diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts index c91a38579e0bc7b95ef7cf36056baceffc05c4ca..edd8b8ed5ba961e985f178f94dcda0e698c23a49 100755 --- a/app/lib/common-libs/constants.ts +++ b/app/lib/common-libs/constants.ts @@ -13,51 +13,60 @@ "use strict"; -const CURRENCY = "[a-zA-Z0-9-_ ]{2,50}" -const BASE58 = "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]+" -const PUBKEY = "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}" -const SIGNATURE = "[A-Za-z0-9+\\/=]{87,88}" -const USER_ID = "[A-Za-z0-9_-]{2,100}" -const INTEGER = "(0|[1-9]\\d{0,18})" -const FINGERPRINT = "[A-F0-9]{64}" -const BLOCK_VERSION = "(10|11|12)" -const TX_VERSION = "(10)" -const DIVIDEND = "[1-9][0-9]{0,5}" -const ZERO_OR_POSITIVE_INT = "0|[1-9][0-9]{0,18}" -const BLOCK_UID = "(" + ZERO_OR_POSITIVE_INT + ")-" + FINGERPRINT -const RELATIVE_INTEGER = "(0|-?[1-9]\\d{0,18})" -const FLOAT = "\\d+\.\\d+" -const POSITIVE_INT = "[1-9][0-9]{0,18}" -const TIMESTAMP = "[1-9][0-9]{0,18}" -const BOOLEAN = "[01]" -const WS2PID = "[0-9a-f]{8}" -const SOFTWARE = "[a-z0-9._-]{2,15}" -const SOFT_VERSION = "[0-9a-z._-]{2,15}" -const POW_PREFIX = "([1-9]|[1-9][0-9]|[1-8][0-9][0-9])" // 1-899 -const SPECIAL_BLOCK = '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855' -const META_TS = "META:TS:" + BLOCK_UID -const COMMENT = "[ a-zA-Z0-9-_:/;*\\[\\]()?!^\\+=@&~#{}|\\\\<>%.]{0,255}" +const CURRENCY = "[a-zA-Z0-9-_ ]{2,50}"; +const BASE58 = "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]+"; +const PUBKEY = + "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}"; +const SIGNATURE = "[A-Za-z0-9+\\/=]{87,88}"; +const USER_ID = "[A-Za-z0-9_-]{2,100}"; +const INTEGER = "(0|[1-9]\\d{0,18})"; +const FINGERPRINT = "[A-F0-9]{64}"; +const BLOCK_VERSION = "(10|11|12)"; +const TX_VERSION = "(10)"; +const DIVIDEND = "[1-9][0-9]{0,5}"; +const ZERO_OR_POSITIVE_INT = "0|[1-9][0-9]{0,18}"; +const BLOCK_UID = "(" + ZERO_OR_POSITIVE_INT + ")-" + FINGERPRINT; +const RELATIVE_INTEGER = "(0|-?[1-9]\\d{0,18})"; +const FLOAT = "\\d+.\\d+"; +const POSITIVE_INT = "[1-9][0-9]{0,18}"; +const TIMESTAMP = "[1-9][0-9]{0,18}"; +const BOOLEAN = "[01]"; +const WS2PID = "[0-9a-f]{8}"; +const SOFTWARE = "[a-z0-9._-]{2,15}"; +const SOFT_VERSION = "[0-9a-z._-]{2,15}"; +const POW_PREFIX = "([1-9]|[1-9][0-9]|[1-8][0-9][0-9])"; // 1-899 +const SPECIAL_BLOCK = + "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855"; +const META_TS = "META:TS:" + BLOCK_UID; +const COMMENT = "[ a-zA-Z0-9-_:/;*\\[\\]()?!^\\+=@&~#{}|\\\\<>%.]{0,255}"; const CLTV_INTEGER = "([0-9]{1,10})"; -const CSV_INTEGER = "([0-9]{1,8})"; -const XUNLOCK = "[a-zA-Z0-9]{1,64}"; -const UNLOCK = "(SIG\\(" + INTEGER + "\\)|XHX\\(" + XUNLOCK + "\\))" -const CONDITIONS = "(&&|\\|\\|| |[()]|(SIG\\(" + PUBKEY + "\\)|(XHX\\([A-F0-9]{64}\\)|CLTV\\(" + CLTV_INTEGER + "\\)|CSV\\(" + CSV_INTEGER + "\\))))*" +const CSV_INTEGER = "([0-9]{1,8})"; +const XUNLOCK = "[a-zA-Z0-9]{1,64}"; +const UNLOCK = "(SIG\\(" + INTEGER + "\\)|XHX\\(" + XUNLOCK + "\\))"; +const CONDITIONS = + "(&&|\\|\\|| |[()]|(SIG\\(" + + PUBKEY + + "\\)|(XHX\\([A-F0-9]{64}\\)|CLTV\\(" + + CLTV_INTEGER + + "\\)|CSV\\(" + + CSV_INTEGER + + "\\))))*"; 
-const BMA_REGEXP = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/ -const BMAS_REGEXP = /^BMAS( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))( (\/.+))?$/ -const BMATOR_REGEXP = /^BMATOR( ([a-z0-9]{16})\.onion)( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/ -const WS2P_REGEXP = /^WS2P (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/ -const WS2P_V2_REGEXP = /^WS2P ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/ -const WS2PTOR_REGEXP = /^WS2PTOR (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/ -const WS2PTOR_V2_REGEXP = /^WS2PTOR ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/ -const WS_FULL_ADDRESS_ONION_REGEX = /^(?:wss?:\/\/)(?:www\.)?([0-9a-z]{16}\.onion)(:[0-9]+)?$/ +const BMA_REGEXP = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/; +const BMAS_REGEXP = /^BMAS( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))( (\/.+))?$/; +const BMATOR_REGEXP = /^BMATOR( ([a-z0-9]{16})\.onion)( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/; +const WS2P_REGEXP = /^WS2P (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/; +const WS2P_V2_REGEXP = /^WS2P ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/; +const WS2PTOR_REGEXP = /^WS2PTOR (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/; +const WS2PTOR_V2_REGEXP = /^WS2PTOR ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/; +const WS_FULL_ADDRESS_ONION_REGEX = /^(?:wss?:\/\/)(?:www\.)?([0-9a-z]{16}\.onion)(:[0-9]+)?$/; const IPV4_REGEXP = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/; const IPV6_REGEXP = /^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(([0-9A-Fa-f]{1,4}:){0,5}:((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(::([0-9A-Fa-f]{1,4}:){0,5}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/; -const HOST_ONION_REGEX = /^(?:www\.)?([0-9a-z]{16}\.onion)$/ +const HOST_ONION_REGEX = /^(?:www\.)?([0-9a-z]{16}\.onion)$/; -const MAXIMUM_LEN_OF_COMPACT_TX = 100 -const MAXIMUM_LEN_OF_OUTPUT = 2000 -const MAXIMUM_LEN_OF_UNLOCK = MAXIMUM_LEN_OF_OUTPUT +const MAXIMUM_LEN_OF_COMPACT_TX = 100; +const MAXIMUM_LEN_OF_OUTPUT = 2000; +const MAXIMUM_LEN_OF_UNLOCK = MAXIMUM_LEN_OF_OUTPUT; export enum DuniterDocument { ENTITY_NULL, @@ -67,25 +76,31 @@ export enum DuniterDocument { ENTITY_MEMBERSHIP, ENTITY_REVOCATION, ENTITY_TRANSACTION, - ENTITY_PEER + ENTITY_PEER, } -export const duniterDocument2str = (type:DuniterDocument) => { +export const duniterDocument2str = (type: DuniterDocument) => { switch (type) { - case DuniterDocument.ENTITY_BLOCK: return "block" - case 
DuniterDocument.ENTITY_IDENTITY: return "identity" - case DuniterDocument.ENTITY_CERTIFICATION: return "certification" - case DuniterDocument.ENTITY_REVOCATION: return "revocation" - case DuniterDocument.ENTITY_MEMBERSHIP: return "membership" - case DuniterDocument.ENTITY_TRANSACTION: return "transaction" - case DuniterDocument.ENTITY_PEER: return "peer" + case DuniterDocument.ENTITY_BLOCK: + return "block"; + case DuniterDocument.ENTITY_IDENTITY: + return "identity"; + case DuniterDocument.ENTITY_CERTIFICATION: + return "certification"; + case DuniterDocument.ENTITY_REVOCATION: + return "revocation"; + case DuniterDocument.ENTITY_MEMBERSHIP: + return "membership"; + case DuniterDocument.ENTITY_TRANSACTION: + return "transaction"; + case DuniterDocument.ENTITY_PEER: + return "peer"; default: - return "" + return ""; } -} +}; export const CommonConstants = { - FORMATS: { CURRENCY, PUBKEY, @@ -98,7 +113,7 @@ export const CommonConstants = { SOFT_VERSION, POW_PREFIX, ZERO_OR_POSITIVE_INT, - SIGNATURE + SIGNATURE, }, // Version of genesis block @@ -136,7 +151,8 @@ export const CommonConstants = { DOCUMENTS_BLOCK_VERSION_REGEXP: new RegExp("^" + BLOCK_VERSION + "$"), DOCUMENTS_TRANSACTION_VERSION_REGEXP: /^(10)$/, SPECIAL_BLOCK, - SPECIAL_HASH: 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855', + SPECIAL_HASH: + "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855", MAXIMUM_LEN_OF_COMPACT_TX, MAXIMUM_LEN_OF_OUTPUT, MAXIMUM_LEN_OF_UNLOCK, @@ -146,64 +162,171 @@ export const CommonConstants = { PROOF_OF_WORK: { UPPER_BOUND: [ - '9A-F', - '9A-E', - '9A-D', - '9A-C', - '9A-B', - '9A', - '9', - '8', - '7', - '6', - '5', - '4', - '3', - '2', - '1', - '1' // In case remainder 15 happens for some reason - ] + "9A-F", + "9A-E", + "9A-D", + "9A-C", + "9A-B", + "9A", + "9", + "8", + "7", + "6", + "5", + "4", + "3", + "2", + "1", + "1", // In case remainder 15 happens for some reason + ], }, DocumentError: "documentError", ERRORS: { // Technical errors - WRONG_DOCUMENT: { httpCode: 400, uerr: { ucode: 1005, message: "Document has unkown fields or wrong line ending format" }}, - DOCUMENT_BEING_TREATED: { httpCode: 400, uerr: { ucode: 1015, message: "Document already under treatment" }}, + WRONG_DOCUMENT: { + httpCode: 400, + uerr: { + ucode: 1005, + message: "Document has unkown fields or wrong line ending format", + }, + }, + DOCUMENT_BEING_TREATED: { + httpCode: 400, + uerr: { ucode: 1015, message: "Document already under treatment" }, + }, // Business errors - WRONG_UNLOCKER: { httpCode: 400, uerr: { ucode: 2013, message: "Wrong unlocker in transaction" }}, - LOCKTIME_PREVENT: { httpCode: 400, uerr: { ucode: 2014, message: "Locktime not elapsed yet" }}, - SOURCE_ALREADY_CONSUMED: { httpCode: 400, uerr: { ucode: 2015, message: "Source already consumed" }}, - WRONG_AMOUNTS: { httpCode: 400, uerr: { ucode: 2016, message: "Sum of inputs must equal sum of outputs" }}, - WRONG_OUTPUT_BASE: { httpCode: 400, uerr: { ucode: 2017, message: "Wrong unit base for outputs" }}, - CANNOT_ROOT_BLOCK_NO_MEMBERS: { httpCode: 400, uerr: { ucode: 2018, message: "Wrong new block: cannot make a root block without members" }}, - IDENTITY_WRONGLY_SIGNED: { httpCode: 400, uerr: { ucode: 2019, message: "Weird, the signature is wrong and in the database." }}, - TOO_OLD_IDENTITY: { httpCode: 400, uerr: { ucode: 2020, message: "Identity has expired and cannot be written in the blockchain anymore." 
}}, - NEWER_PEER_DOCUMENT_AVAILABLE: { httpCode: 409, uerr: { ucode: 2022, message: "A newer peer document is available" }}, - PEER_DOCUMENT_ALREADY_KNOWN: { httpCode: 400, uerr: { ucode: 2023, message: "Peer document already known" }}, - TX_INPUTS_OUTPUTS_NOT_EQUAL: { httpCode: 400, uerr: { ucode: 2024, message: "Transaction inputs sum must equal outputs sum" }}, - TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS: { httpCode: 400, uerr: { ucode: 2025, message: "Transaction output base amount does not equal previous base deltas" }}, - BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK: { httpCode: 400, uerr: { ucode: 2026, message: "Blockstamp does not match a block" }}, - A_TRANSACTION_HAS_A_MAX_SIZE: { httpCode: 400, uerr: { ucode: 2027, message: 'A transaction has a maximum size of ' + MAXIMUM_LEN_OF_COMPACT_TX + ' lines' }}, - TOO_OLD_MEMBERSHIP: { httpCode: 400, uerr: { ucode: 2029, message: "Too old membership." }}, - MAXIMUM_LEN_OF_OUTPUT: { httpCode: 400, uerr: { ucode: 2032, message: 'A transaction output has a maximum size of ' + MAXIMUM_LEN_OF_OUTPUT + ' characters' }}, - MAXIMUM_LEN_OF_UNLOCK: { httpCode: 400, uerr: { ucode: 2033, message: 'A transaction unlock has a maximum size of ' + MAXIMUM_LEN_OF_UNLOCK + ' characters' }}, - WRONG_CURRENCY: { httpCode: 400, uerr: { ucode: 2500, message: 'Wrong currency' }}, - WRONG_POW: { httpCode: 400, uerr: { ucode: 2501, message: 'Wrong proof-of-work' }}, - OUT_OF_FORK_WINDOW: { httpCode: 400, uerr: { ucode: 2501, message: 'Out of fork window' }}, + WRONG_UNLOCKER: { + httpCode: 400, + uerr: { ucode: 2013, message: "Wrong unlocker in transaction" }, + }, + LOCKTIME_PREVENT: { + httpCode: 400, + uerr: { ucode: 2014, message: "Locktime not elapsed yet" }, + }, + SOURCE_ALREADY_CONSUMED: { + httpCode: 400, + uerr: { ucode: 2015, message: "Source already consumed" }, + }, + WRONG_AMOUNTS: { + httpCode: 400, + uerr: { ucode: 2016, message: "Sum of inputs must equal sum of outputs" }, + }, + WRONG_OUTPUT_BASE: { + httpCode: 400, + uerr: { ucode: 2017, message: "Wrong unit base for outputs" }, + }, + CANNOT_ROOT_BLOCK_NO_MEMBERS: { + httpCode: 400, + uerr: { + ucode: 2018, + message: "Wrong new block: cannot make a root block without members", + }, + }, + IDENTITY_WRONGLY_SIGNED: { + httpCode: 400, + uerr: { + ucode: 2019, + message: "Weird, the signature is wrong and in the database.", + }, + }, + TOO_OLD_IDENTITY: { + httpCode: 400, + uerr: { + ucode: 2020, + message: + "Identity has expired and cannot be written in the blockchain anymore.", + }, + }, + NEWER_PEER_DOCUMENT_AVAILABLE: { + httpCode: 409, + uerr: { ucode: 2022, message: "A newer peer document is available" }, + }, + PEER_DOCUMENT_ALREADY_KNOWN: { + httpCode: 400, + uerr: { ucode: 2023, message: "Peer document already known" }, + }, + TX_INPUTS_OUTPUTS_NOT_EQUAL: { + httpCode: 400, + uerr: { + ucode: 2024, + message: "Transaction inputs sum must equal outputs sum", + }, + }, + TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS: { + httpCode: 400, + uerr: { + ucode: 2025, + message: + "Transaction output base amount does not equal previous base deltas", + }, + }, + BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK: { + httpCode: 400, + uerr: { ucode: 2026, message: "Blockstamp does not match a block" }, + }, + A_TRANSACTION_HAS_A_MAX_SIZE: { + httpCode: 400, + uerr: { + ucode: 2027, + message: + "A transaction has a maximum size of " + + MAXIMUM_LEN_OF_COMPACT_TX + + " lines", + }, + }, + TOO_OLD_MEMBERSHIP: { + httpCode: 400, + uerr: { ucode: 2029, message: "Too old membership." 
}, + }, + MAXIMUM_LEN_OF_OUTPUT: { + httpCode: 400, + uerr: { + ucode: 2032, + message: + "A transaction output has a maximum size of " + + MAXIMUM_LEN_OF_OUTPUT + + " characters", + }, + }, + MAXIMUM_LEN_OF_UNLOCK: { + httpCode: 400, + uerr: { + ucode: 2033, + message: + "A transaction unlock has a maximum size of " + + MAXIMUM_LEN_OF_UNLOCK + + " characters", + }, + }, + WRONG_CURRENCY: { + httpCode: 400, + uerr: { ucode: 2500, message: "Wrong currency" }, + }, + WRONG_POW: { + httpCode: 400, + uerr: { ucode: 2501, message: "Wrong proof-of-work" }, + }, + OUT_OF_FORK_WINDOW: { + httpCode: 400, + uerr: { ucode: 2501, message: "Out of fork window" }, + }, - WRONG_SIGNATURE_FOR_CERT: { httpCode: 400, uerr: { ucode: 3000, message: 'Wrong signature for certification' }}, + WRONG_SIGNATURE_FOR_CERT: { + httpCode: 400, + uerr: { ucode: 3000, message: "Wrong signature for certification" }, + }, }, // INDEXES - M_INDEX: 'MINDEX', - I_INDEX: 'IINDEX', - S_INDEX: 'SINDEX', - C_INDEX: 'CINDEX', - IDX_CREATE: 'CREATE', - IDX_UPDATE: 'UPDATE', + M_INDEX: "MINDEX", + I_INDEX: "IINDEX", + S_INDEX: "SINDEX", + C_INDEX: "CINDEX", + IDX_CREATE: "CREATE", + IDX_UPDATE: "UPDATE", // Protocol fixed values NB_DIGITS_UD: 4, @@ -212,96 +335,203 @@ export const CommonConstants = { POW_DIFFICULTY_RANGE_RATIO: 1.189, // deduced from Hexadecimal relation between 2 chars ~= 16^(1/16) ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT: 100, - DOCUMENTS: { - DOC_VERSION: find('Version: (10)'), - DOC_CURRENCY: find('Currency: (' + CURRENCY + ')'), - DOC_ISSUER: find('Issuer: (' + PUBKEY + ')'), - TIMESTAMP: find('Timestamp: (' + BLOCK_UID + ')') + DOC_VERSION: find("Version: (10)"), + DOC_CURRENCY: find("Currency: (" + CURRENCY + ")"), + DOC_ISSUER: find("Issuer: (" + PUBKEY + ")"), + TIMESTAMP: find("Timestamp: (" + BLOCK_UID + ")"), }, IDENTITY: { INLINE: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + USER_ID), - IDTY_TYPE: find('Type: (Identity)'), - IDTY_UID: find('UniqueID: (' + USER_ID + ')') + IDTY_TYPE: find("Type: (Identity)"), + IDTY_UID: find("UniqueID: (" + USER_ID + ")"), }, BLOCK: { - NONCE: find("Nonce: (" + ZERO_OR_POSITIVE_INT + ")"), - VERSION: find("Version: " + BLOCK_VERSION), - TYPE: find("Type: (Block)"), - CURRENCY: find("Currency: (" + CURRENCY + ")"), - BNUMBER: find("Number: (" + ZERO_OR_POSITIVE_INT + ")"), - POWMIN: find("PoWMin: (" + ZERO_OR_POSITIVE_INT + ")"), - TIME: find("Time: (" + TIMESTAMP + ")"), + NONCE: find("Nonce: (" + ZERO_OR_POSITIVE_INT + ")"), + VERSION: find("Version: " + BLOCK_VERSION), + TYPE: find("Type: (Block)"), + CURRENCY: find("Currency: (" + CURRENCY + ")"), + BNUMBER: find("Number: (" + ZERO_OR_POSITIVE_INT + ")"), + POWMIN: find("PoWMin: (" + ZERO_OR_POSITIVE_INT + ")"), + TIME: find("Time: (" + TIMESTAMP + ")"), MEDIAN_TIME: find("MedianTime: (" + TIMESTAMP + ")"), - UD: find("UniversalDividend: (" + DIVIDEND + ")"), - UNIT_BASE: find("UnitBase: (" + INTEGER + ")"), - PREV_HASH: find("PreviousHash: (" + FINGERPRINT + ")"), + UD: find("UniversalDividend: (" + DIVIDEND + ")"), + UNIT_BASE: find("UnitBase: (" + INTEGER + ")"), + PREV_HASH: find("PreviousHash: (" + FINGERPRINT + ")"), PREV_ISSUER: find("PreviousIssuer: (" + PUBKEY + ")"), - MEMBERS_COUNT:find("MembersCount: (" + ZERO_OR_POSITIVE_INT + ")"), - BLOCK_ISSUER:find('Issuer: (' + PUBKEY + ')'), - BLOCK_ISSUERS_FRAME:find('IssuersFrame: (' + INTEGER + ')'), - BLOCK_ISSUERS_FRAME_VAR:find('IssuersFrameVar: (' + RELATIVE_INTEGER + ')'), - DIFFERENT_ISSUERS_COUNT:find('DifferentIssuersCount: (' + INTEGER + ')'), - 
PARAMETERS: find("Parameters: (" + FLOAT + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + FLOAT + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + FLOAT + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ")"), - JOINER: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + BLOCK_UID + ":" + USER_ID), - ACTIVE: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + BLOCK_UID + ":" + USER_ID), - LEAVER: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + BLOCK_UID + ":" + USER_ID), + MEMBERS_COUNT: find("MembersCount: (" + ZERO_OR_POSITIVE_INT + ")"), + BLOCK_ISSUER: find("Issuer: (" + PUBKEY + ")"), + BLOCK_ISSUERS_FRAME: find("IssuersFrame: (" + INTEGER + ")"), + BLOCK_ISSUERS_FRAME_VAR: find( + "IssuersFrameVar: (" + RELATIVE_INTEGER + ")" + ), + DIFFERENT_ISSUERS_COUNT: find("DifferentIssuersCount: (" + INTEGER + ")"), + PARAMETERS: find( + "Parameters: (" + + FLOAT + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + FLOAT + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + FLOAT + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ")" + ), + JOINER: exact( + PUBKEY + + ":" + + SIGNATURE + + ":" + + BLOCK_UID + + ":" + + BLOCK_UID + + ":" + + USER_ID + ), + ACTIVE: exact( + PUBKEY + + ":" + + SIGNATURE + + ":" + + BLOCK_UID + + ":" + + BLOCK_UID + + ":" + + USER_ID + ), + LEAVER: exact( + PUBKEY + + ":" + + SIGNATURE + + ":" + + BLOCK_UID + + ":" + + BLOCK_UID + + ":" + + USER_ID + ), REVOCATION: exact(PUBKEY + ":" + SIGNATURE), EXCLUDED: exact(PUBKEY), INNER_HASH: find("InnerHash: (" + FINGERPRINT + ")"), - SPECIAL_HASH: 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855', - SPECIAL_BLOCK + SPECIAL_HASH: + "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855", + SPECIAL_BLOCK, }, CERT: { SELF: { UID: exact("UID:" + USER_ID), - META: exact(META_TS) + META: exact(META_TS), }, REVOKE: exact("UID:REVOKE"), OTHER: { META: exact(META_TS), - INLINE: exact(PUBKEY + ":" + PUBKEY + ":" + INTEGER + ":" + SIGNATURE) - } + INLINE: exact(PUBKEY + ":" + PUBKEY + ":" + INTEGER + ":" + SIGNATURE), + }, }, CERTIFICATION: { - CERT_TYPE: find('Type: (Certification)'), - IDTY_ISSUER: find('IdtyIssuer: (' + PUBKEY + ')'), - IDTY_UID: find('IdtyUniqueID: (' + USER_ID + ')'), - IDTY_TIMESTAMP: find('IdtyTimestamp: (' + BLOCK_UID + ')'), - IDTY_SIG: find('IdtySignature: (' + SIGNATURE + ')'), - CERT_TIMESTAMP: find('CertTimestamp: (' + BLOCK_UID + ')') + CERT_TYPE: find("Type: (Certification)"), + IDTY_ISSUER: find("IdtyIssuer: (" + PUBKEY + ")"), + IDTY_UID: find("IdtyUniqueID: (" + USER_ID + ")"), + IDTY_TIMESTAMP: find("IdtyTimestamp: (" + BLOCK_UID + ")"), + IDTY_SIG: find("IdtySignature: (" + SIGNATURE + ")"), + CERT_TIMESTAMP: find("CertTimestamp: (" + BLOCK_UID + ")"), }, REVOCATION: { - REVOC_TYPE: find('Type: (Certification)'), - IDTY_ISSUER: find('IdtyIssuer: (' + PUBKEY + ')'), - IDTY_UID: find('IdtyUniqueID: (' + USER_ID + ')'), - IDTY_TIMESTAMP: find('IdtyTimestamp: (' + BLOCK_UID + ')'), - IDTY_SIG: find('IdtySignature: (' + SIGNATURE + ')') + REVOC_TYPE: find("Type: (Certification)"), + IDTY_ISSUER: find("IdtyIssuer: (" + PUBKEY + ")"), + IDTY_UID: find("IdtyUniqueID: (" + USER_ID + ")"), + IDTY_TIMESTAMP: 
find("IdtyTimestamp: (" + BLOCK_UID + ")"), + IDTY_SIG: find("IdtySignature: (" + SIGNATURE + ")"), }, MEMBERSHIP: { - BLOCK: find('Block: (' + BLOCK_UID + ')'), - VERSION: find('Version: (10)'), - CURRENCY: find('Currency: (' + CURRENCY + ')'), - ISSUER: find('Issuer: (' + PUBKEY + ')'), - MEMBERSHIP: find('Membership: (IN|OUT)'), - USERID: find('UserID: (' + USER_ID + ')'), - CERTTS: find('CertTS: (' + BLOCK_UID + ')') + BLOCK: find("Block: (" + BLOCK_UID + ")"), + VERSION: find("Version: (10)"), + CURRENCY: find("Currency: (" + CURRENCY + ")"), + ISSUER: find("Issuer: (" + PUBKEY + ")"), + MEMBERSHIP: find("Membership: (IN|OUT)"), + USERID: find("UserID: (" + USER_ID + ")"), + CERTTS: find("CertTS: (" + BLOCK_UID + ")"), }, TRANSACTION: { - HEADER: exact("TX:" + TX_VERSION + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + BOOLEAN + ":" + INTEGER), - SENDER: exact(PUBKEY), - SOURCE_V3: exact("(" + POSITIVE_INT + ":" + INTEGER + ":T:" + FINGERPRINT + ":" + INTEGER + "|" + POSITIVE_INT + ":" + INTEGER + ":D:" + PUBKEY + ":" + POSITIVE_INT + ")"), - UNLOCK: exact(INTEGER + ":" + UNLOCK + "( (" + UNLOCK + "))*"), - TARGET: exact(POSITIVE_INT + ":" + INTEGER + ":" + CONDITIONS), - BLOCKSTAMP:find('Blockstamp: (' + BLOCK_UID + ')'), + HEADER: exact( + "TX:" + + TX_VERSION + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + BOOLEAN + + ":" + + INTEGER + ), + SENDER: exact(PUBKEY), + SOURCE_V3: exact( + "(" + + POSITIVE_INT + + ":" + + INTEGER + + ":T:" + + FINGERPRINT + + ":" + + INTEGER + + "|" + + POSITIVE_INT + + ":" + + INTEGER + + ":D:" + + PUBKEY + + ":" + + POSITIVE_INT + + ")" + ), + UNLOCK: exact(INTEGER + ":" + UNLOCK + "( (" + UNLOCK + "))*"), + TARGET: exact(POSITIVE_INT + ":" + INTEGER + ":" + CONDITIONS), + BLOCKSTAMP: find("Blockstamp: (" + BLOCK_UID + ")"), COMMENT: find("Comment: (" + COMMENT + ")"), - LOCKTIME:find("Locktime: (" + INTEGER + ")"), + LOCKTIME: find("Locktime: (" + INTEGER + ")"), INLINE_COMMENT: exact(COMMENT), - OUTPUT_CONDITION: exact(CONDITIONS) + OUTPUT_CONDITION: exact(CONDITIONS), }, PEER: { BLOCK: find("Block: (" + INTEGER + "-" + FINGERPRINT + ")"), - SPECIAL_BLOCK + SPECIAL_BLOCK, }, BLOCK_MAX_TX_CHAINING_DEPTH: 5, @@ -309,7 +539,7 @@ export const CommonConstants = { SYNC_BLOCKS_CHUNK: 250, MILESTONES_PER_PAGE: 50, - CHUNK_PREFIX: 'chunk_', + CHUNK_PREFIX: "chunk_", BLOCKS_IN_MEMORY_MAX: 288 * 60, // 288 = 1 day MAX_AGE_OF_PEER_IN_BLOCKS: 200, // blocks @@ -320,12 +550,12 @@ export const CommonConstants = { WAIT_P2P_CANDIDATE_HEARTBEAT: 30 * 1000, // Wait X seconds for a node to answer about its state MAX_READING_SLOTS_FOR_FILE_SYNC: 20, // Number of file reading in parallel -} +}; -function exact (regexpContent:string) { +function exact(regexpContent: string) { return new RegExp("^" + regexpContent + "$"); } -function find (regexpContent:string) { +function find(regexpContent: string) { return new RegExp(regexpContent); } diff --git a/app/lib/common-libs/dos2unix.ts b/app/lib/common-libs/dos2unix.ts index e319feace52dead71eec20c430040a523a3e8d39..150a2a75fa800e59a855b105559f603eb3ba71d5 100644 --- a/app/lib/common-libs/dos2unix.ts +++ b/app/lib/common-libs/dos2unix.ts @@ -11,6 +11,6 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-export function dos2unix(str:string) { - return str.replace(/\r\n/g, '\n') +export function dos2unix(str: string) { + return str.replace(/\r\n/g, "\n"); } diff --git a/app/lib/common-libs/errors.ts b/app/lib/common-libs/errors.ts index e905ac699c947e369d8ae46911e241d8dcf7a35e..0deb3e7047d0cd3b4a257f612f88827d9d552285 100755 --- a/app/lib/common-libs/errors.ts +++ b/app/lib/common-libs/errors.ts @@ -31,5 +31,5 @@ export enum DataErrors { CANNOT_REVERT_NO_CURRENT_BLOCK, BLOCK_TO_REVERT_NOT_FOUND, MEMBER_NOT_FOUND, - MILESTONE_BLOCK_NOT_FOUND + MILESTONE_BLOCK_NOT_FOUND, } diff --git a/app/lib/common-libs/filter-async.ts b/app/lib/common-libs/filter-async.ts index d495d54012c0b75d8ba6394e42bcb0cb566113c9..182d78a4b727e7482eb355ab2c0b5f278e188e5c 100644 --- a/app/lib/common-libs/filter-async.ts +++ b/app/lib/common-libs/filter-async.ts @@ -1,9 +1,14 @@ -export async function filterAsync<T>(arr: T[], filter: (t: T) => Promise<boolean>) { - const filtered: T[] = [] - await Promise.all(arr.map(async t => { - if (await filter(t)) { - filtered.push(t) - } - })) - return filtered -} \ No newline at end of file +export async function filterAsync<T>( + arr: T[], + filter: (t: T) => Promise<boolean> +) { + const filtered: T[] = []; + await Promise.all( + arr.map(async (t) => { + if (await filter(t)) { + filtered.push(t); + } + }) + ); + return filtered; +} diff --git a/app/lib/common-libs/index.ts b/app/lib/common-libs/index.ts index 856bc80fb7eeb58b8e66ebd4ddbd66ebc4b9c7d3..896ab213d28b9b436a90172d6836e8703e20026d 100644 --- a/app/lib/common-libs/index.ts +++ b/app/lib/common-libs/index.ts @@ -11,19 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as rawer from './rawer' -import {Base58decode, Base58encode} from "./crypto/base58" -import {unlock as txunlock} from "./txunlock" -import {hashf} from "../common"; +import * as rawer from "./rawer"; +import { Base58decode, Base58encode } from "./crypto/base58"; +import { unlock as txunlock } from "./txunlock"; +import { hashf } from "../common"; const base58 = { decode: Base58decode, - encode: Base58encode -} + encode: Base58encode, +}; -export { - rawer, - base58, - txunlock, - hashf -} +export { rawer, base58, txunlock, hashf }; diff --git a/app/lib/common-libs/manual-promise.ts b/app/lib/common-libs/manual-promise.ts index ea7f4c436cd80bdc43c48da2aa3ec16f451f572e..24d6382aa5f259c7d778a1f7d94f3a5312209ef8 100644 --- a/app/lib/common-libs/manual-promise.ts +++ b/app/lib/common-libs/manual-promise.ts @@ -1,10 +1,10 @@ -import {Querable} from "./querable" +import { Querable } from "./querable"; -const querablePromise = require('querablep'); +const querablePromise = require("querablep"); export interface ManualPromise<T> extends Querable<T> { - resolve: (data: T) => void - reject: (error: Error) => void + resolve: (data: T) => void; + reject: (error: Error) => void; } /** @@ -12,14 +12,14 @@ export interface ManualPromise<T> extends Querable<T> { * @returns {ManualPromise<T>} */ export function newManualPromise<T>() { - let resolveCb: (data: T) => void = () => {} - let rejectCb: (error: Error) => void = () => {} + let resolveCb: (data: T) => void = () => {}; + let rejectCb: (error: Error) => void = () => {}; const p = new Promise((res, rej) => { - resolveCb = res - rejectCb = rej - }) - const q: ManualPromise<T> = querablePromise(p) - q.resolve = resolveCb - q.reject = rejectCb - return q + resolveCb = res; + rejectCb = rej; + }); + const q: ManualPromise<T> = 
querablePromise(p); + q.resolve = resolveCb; + q.reject = rejectCb; + return q; } diff --git a/app/lib/common-libs/pint.ts b/app/lib/common-libs/pint.ts index f95518f80e47013bfd67bf6c9393ef2947c4cf3d..69b5e574090a71a4824421d3f98db16a054cfec3 100644 --- a/app/lib/common-libs/pint.ts +++ b/app/lib/common-libs/pint.ts @@ -1,6 +1,6 @@ export function pint(value: string | number): number { - if (typeof value === 'string') { - return parseInt(value, 10) + if (typeof value === "string") { + return parseInt(value, 10); } - return value + return value; } diff --git a/app/lib/common-libs/programOptions.ts b/app/lib/common-libs/programOptions.ts index a845061ed951fbdedad44e81d56b9f50a5b91208..501cc4c1a1c91289b44740d60910388322ace304 100644 --- a/app/lib/common-libs/programOptions.ts +++ b/app/lib/common-libs/programOptions.ts @@ -11,25 +11,25 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -const opts = require('optimist').argv +const opts = require("optimist").argv; export interface ProgramOptions { - mdb?: string - home?: string - notrim?: boolean - notrimc?: boolean // Specificaly disable for c_index - nosbx?: boolean - nopeers?: boolean - nop2p?: boolean - syncTrace?: string - isSync: boolean - noSources: boolean - slow?: boolean - loglevel?: string - sqlTraces?: boolean - memory?: boolean - storeTxs?: boolean - storeWw?: boolean + mdb?: string; + home?: string; + notrim?: boolean; + notrimc?: boolean; // Specificaly disable for c_index + nosbx?: boolean; + nopeers?: boolean; + nop2p?: boolean; + syncTrace?: string; + isSync: boolean; + noSources: boolean; + slow?: boolean; + loglevel?: string; + sqlTraces?: boolean; + memory?: boolean; + storeTxs?: boolean; + storeWw?: boolean; } export const cliprogram: ProgramOptions = { @@ -40,7 +40,7 @@ export const cliprogram: ProgramOptions = { nopeers: opts.nopeers, nop2p: opts.nop2p, noSources: !!opts.nosources, - syncTrace: opts['sync-trace'], - isSync: opts._[0] === 'sync', + syncTrace: opts["sync-trace"], + isSync: opts._[0] === "sync", slow: opts.slow, -} +}; diff --git a/app/lib/common-libs/querable.ts b/app/lib/common-libs/querable.ts index b5cba5da23170c599d46d7dadd138376b0d9f20f..c4f4398ba1f61d6567d11a22c7fda60e78ec6255 100644 --- a/app/lib/common-libs/querable.ts +++ b/app/lib/common-libs/querable.ts @@ -1,14 +1,14 @@ -const querablePromise = require('querablep'); +const querablePromise = require("querablep"); export interface Querable<T> extends Promise<T> { - isFulfilled(): boolean - isResolved(): boolean - isRejected(): boolean - startedOn: number + isFulfilled(): boolean; + isResolved(): boolean; + isRejected(): boolean; + startedOn: number; } export function querablep<T>(p: Promise<T>): Querable<T> { - const qp = querablePromise(p) - qp.startedOn = Date.now() - return qp + const qp = querablePromise(p); + qp.startedOn = Date.now(); + return qp; } diff --git a/app/lib/common-libs/randomPick.ts b/app/lib/common-libs/randomPick.ts index ccd912754e15840a089ef5ca35d641d4e1be8bf1..8368d9ef284e7ae2f1745a57520803cf1bc53995 100644 --- a/app/lib/common-libs/randomPick.ts +++ b/app/lib/common-libs/randomPick.ts @@ -11,14 +11,16 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
- -export const randomPick = <T>(elements:T[], max:number) => { - const chosen:T[] = [] - const nbElements = elements.length +export const randomPick = <T>(elements: T[], max: number) => { + const chosen: T[] = []; + const nbElements = elements.length; for (let i = 0; i < Math.min(nbElements, max); i++) { - const randIndex = Math.max(Math.floor(Math.random() * 10) - (10 - nbElements) - i, 0) - chosen.push(elements[randIndex]) - elements.splice(randIndex, 1) + const randIndex = Math.max( + Math.floor(Math.random() * 10) - (10 - nbElements) - i, + 0 + ); + chosen.push(elements[randIndex]); + elements.splice(randIndex, 1); } - return chosen -} \ No newline at end of file + return chosen; +}; diff --git a/app/lib/common-libs/rawer.ts b/app/lib/common-libs/rawer.ts index 9ed172071cad9d086bc01db5d13e3df19440d02b..018addc59fa948c23e1f09d617d0160880795d77 100644 --- a/app/lib/common-libs/rawer.ts +++ b/app/lib/common-libs/rawer.ts @@ -11,101 +11,104 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {dos2unix} from "./dos2unix" -import {PeerDTO} from "../dto/PeerDTO" -import {IdentityDTO} from "../dto/IdentityDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {TransactionDTO} from "../dto/TransactionDTO" -import {BlockDTO} from "../dto/BlockDTO" +import { dos2unix } from "./dos2unix"; +import { PeerDTO } from "../dto/PeerDTO"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { BlockDTO } from "../dto/BlockDTO"; const DOCUMENTS_VERSION = 10; -const SIGNED = false -const UNSIGNED = true +const SIGNED = false; +const UNSIGNED = true; -export const getOfficialIdentity = (json:any, withSig = true) => { - const dto = IdentityDTO.fromJSONObject(json) +export const getOfficialIdentity = (json: any, withSig = true) => { + const dto = IdentityDTO.fromJSONObject(json); if (withSig !== false) { - return dto.getRawSigned() + return dto.getRawSigned(); } else { - return dto.rawWithoutSig() + return dto.rawWithoutSig(); } -} - -export const getOfficialCertification = (json:{ - version?:number - currency:string - issuer:string - idty_issuer:string - idty_uid:string - idty_buid:string - idty_sig:string - buid:string - sig?:string +}; + +export const getOfficialCertification = (json: { + version?: number; + currency: string; + issuer: string; + idty_issuer: string; + idty_uid: string; + idty_buid: string; + idty_sig: string; + buid: string; + sig?: string; }) => { - let raw = getNormalHeader('Certification', json); - raw += "IdtyIssuer: " + json.idty_issuer + '\n'; - raw += "IdtyUniqueID: " + json.idty_uid + '\n'; - raw += "IdtyTimestamp: " + json.idty_buid + '\n'; - raw += "IdtySignature: " + json.idty_sig + '\n'; - raw += "CertTimestamp: " + json.buid + '\n'; + let raw = getNormalHeader("Certification", json); + raw += "IdtyIssuer: " + json.idty_issuer + "\n"; + raw += "IdtyUniqueID: " + json.idty_uid + "\n"; + raw += "IdtyTimestamp: " + json.idty_buid + "\n"; + raw += "IdtySignature: " + json.idty_sig + "\n"; + raw += "CertTimestamp: " + json.buid + "\n"; if (json.sig) { - raw += json.sig + '\n'; + raw += json.sig + "\n"; } return dos2unix(raw); -} +}; -export const getOfficialRevocation = (json:any) => { - let raw = getNormalHeader('Revocation', json); - raw += "IdtyUniqueID: " + json.uid + '\n'; - raw += "IdtyTimestamp: " + json.buid + '\n'; - raw += "IdtySignature: " + json.sig + '\n'; +export 
const getOfficialRevocation = (json: any) => { + let raw = getNormalHeader("Revocation", json); + raw += "IdtyUniqueID: " + json.uid + "\n"; + raw += "IdtyTimestamp: " + json.buid + "\n"; + raw += "IdtySignature: " + json.sig + "\n"; if (json.revocation) { - raw += json.revocation + '\n'; + raw += json.revocation + "\n"; } return dos2unix(raw); -} +}; -export const getPeerWithoutSignature = (json:any) => { - return PeerDTO.fromJSONObject(json).getRawUnsigned() -} +export const getPeerWithoutSignature = (json: any) => { + return PeerDTO.fromJSONObject(json).getRawUnsigned(); +}; -export const getPeer = (json:any) => { - return PeerDTO.fromJSONObject(json).getRawSigned() -} +export const getPeer = (json: any) => { + return PeerDTO.fromJSONObject(json).getRawSigned(); +}; -export const getMembershipWithoutSignature = (json:any) => { - return MembershipDTO.fromJSONObject(json).getRaw() -} +export const getMembershipWithoutSignature = (json: any) => { + return MembershipDTO.fromJSONObject(json).getRaw(); +}; -export const getMembership = (json:any) => { +export const getMembership = (json: any) => { return dos2unix(signed(getMembershipWithoutSignature(json), json)); -} +}; -export const getBlockInnerPart = (json:any) => { - return BlockDTO.fromJSONObject(json).getRawInnerPart() -} +export const getBlockInnerPart = (json: any) => { + return BlockDTO.fromJSONObject(json).getRawInnerPart(); +}; -export const getBlockWithInnerHashAndNonce = (json:any) => { - return BlockDTO.fromJSONObject(json).getRawUnSigned() -} +export const getBlockWithInnerHashAndNonce = (json: any) => { + return BlockDTO.fromJSONObject(json).getRawUnSigned(); +}; -export const getBlockInnerHashAndNonceWithSignature = (json:any) => { - return BlockDTO.fromJSONObject(json).getSignedPartSigned() -} +export const getBlockInnerHashAndNonceWithSignature = (json: any) => { + return BlockDTO.fromJSONObject(json).getSignedPartSigned(); +}; -export const getBlock = (json:any) => { +export const getBlock = (json: any) => { return dos2unix(signed(getBlockWithInnerHashAndNonce(json), json)); -} - -export const getTransaction = (json:any) => { - return TransactionDTO.toRAW(json) -} - -function getNormalHeader(doctype:string, json:{ - version?:number - currency:string - issuer:string -}) { +}; + +export const getTransaction = (json: any) => { + return TransactionDTO.toRAW(json); +}; + +function getNormalHeader( + doctype: string, + json: { + version?: number; + currency: string; + issuer: string; + } +) { let raw = ""; raw += "Version: " + (json.version || DOCUMENTS_VERSION) + "\n"; raw += "Type: " + doctype + "\n"; @@ -114,7 +117,7 @@ function getNormalHeader(doctype:string, json:{ return raw; } -function signed(raw:string, json:any) { - raw += json.signature + '\n'; +function signed(raw: string, json: any) { + raw += json.signature + "\n"; return raw; } diff --git a/app/lib/common-libs/reduce.ts b/app/lib/common-libs/reduce.ts index 2d674e4ed585c738463f83e1b0c9b181b434287a..6c52a1355bf58b52f389af4007ddc24ffb5aa756 100644 --- a/app/lib/common-libs/reduce.ts +++ b/app/lib/common-libs/reduce.ts @@ -1,16 +1,19 @@ export function reduceConcat<T>(cumulated: T[], arr: T[]) { - return cumulated.concat(arr) + return cumulated.concat(arr); } -export type GroupResult<T> = { [k:string]: T[] } +export type GroupResult<T> = { [k: string]: T[] }; -export function reduceGroupBy<T, K extends keyof T>(arr: T[], k: K): GroupResult<T> { +export function reduceGroupBy<T, K extends keyof T>( + arr: T[], + k: K +): GroupResult<T> { return arr.reduce((cumulated: 
GroupResult<T>, t: T) => { - const key: string = String(t[k]) + const key: string = String(t[k]); if (!cumulated[key]) { - cumulated[key] = [] + cumulated[key] = []; } - cumulated[key].push(t) - return cumulated - }, {} as GroupResult<T>) + cumulated[key].push(t); + return cumulated; + }, {} as GroupResult<T>); } diff --git a/app/lib/common-libs/timeout-promise.ts b/app/lib/common-libs/timeout-promise.ts index 57c6e3daad65f7e3296b47bff61abe660fcb9368..eab7d91f93ab0e2efe82905358e71b44d1b10694 100644 --- a/app/lib/common-libs/timeout-promise.ts +++ b/app/lib/common-libs/timeout-promise.ts @@ -13,12 +13,15 @@ export function newRejectTimeoutPromise(timeout: number) { return new Promise((res, rej) => { - setTimeout(rej, timeout) - }) + setTimeout(rej, timeout); + }); } -export function newResolveTimeoutPromise<T>(timeout: number, value: T): Promise<T> { - return new Promise(res => { - setTimeout(() => res(value), timeout) - }) +export function newResolveTimeoutPromise<T>( + timeout: number, + value: T +): Promise<T> { + return new Promise((res) => { + setTimeout(() => res(value), timeout); + }); } diff --git a/app/lib/common-libs/txunlock.ts b/app/lib/common-libs/txunlock.ts index bde87663a909cbd715f953032a88ece16632b3b5..8450e0263363533edfb996ab17481ea309399daa 100644 --- a/app/lib/common-libs/txunlock.ts +++ b/app/lib/common-libs/txunlock.ts @@ -11,122 +11,134 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {hashf} from "../common" -import {evalParams} from "../rules/global_rules" -import {TxSignatureResult} from "../dto/TransactionDTO" +import { hashf } from "../common"; +import { evalParams } from "../rules/global_rules"; +import { TxSignatureResult } from "../dto/TransactionDTO"; -let Parser = require("jison").Parser +let Parser = require("jison").Parser; let grammar = { - "lex": { - "rules": [ - ["\\s+", "/* skip whitespace */"], - ["\\&\\&", "return 'AND';"], - ["\\|\\|", "return 'OR';"], - ["\\(", "return '(';"], - ["\\)", "return ')';"], - ["[0-9A-Za-z]{40,64}", "return 'PARAMETER';"], - ["[0-9]{1,10}", "return 'PARAMETER';"], - ["SIG", "return 'SIG';"], - ["XHX", "return 'XHX';"], - ["CLTV", "return 'CLTV';"], - ["CSV", "return 'CSV';"], - ["$", "return 'EOF';"] - ] + lex: { + rules: [ + ["\\s+", "/* skip whitespace */"], + ["\\&\\&", "return 'AND';"], + ["\\|\\|", "return 'OR';"], + ["\\(", "return '(';"], + ["\\)", "return ')';"], + ["[0-9A-Za-z]{40,64}", "return 'PARAMETER';"], + ["[0-9]{1,10}", "return 'PARAMETER';"], + ["SIG", "return 'SIG';"], + ["XHX", "return 'XHX';"], + ["CLTV", "return 'CLTV';"], + ["CSV", "return 'CSV';"], + ["$", "return 'EOF';"], + ], }, - "operators": [ - ["left", "AND", "OR"] - ], + operators: [["left", "AND", "OR"]], - "bnf": { - "expressions" :[ - [ "e EOF", "return $1;" ] - ], + bnf: { + expressions: [["e EOF", "return $1;"]], - "e" :[ - [ "e AND e", "$$ = $1 && $3;" ], - [ "e OR e", "$$ = $1 || $3;" ], - [ "SIG ( e )","$$ = yy.sig($3);"], - [ "XHX ( e )","$$ = yy.xHx($3);"], - [ "CLTV ( e )","$$ = yy.cltv($3);"], - [ "CSV ( e )","$$ = yy.csv($3);"], - [ "PARAMETER", "$$ = $1;" ], - [ "( e )", "$$ = $2;" ] - ] - } -} + e: [ + ["e AND e", "$$ = $1 && $3;"], + ["e OR e", "$$ = $1 || $3;"], + ["SIG ( e )", "$$ = yy.sig($3);"], + ["XHX ( e )", "$$ = yy.xHx($3);"], + ["CLTV ( e )", "$$ = yy.cltv($3);"], + ["CSV ( e )", "$$ = yy.csv($3);"], + ["PARAMETER", "$$ = $1;"], + ["( e )", "$$ = $2;"], + ], + }, +}; export interface UnlockMetadata { - currentTime?:number - 
elapsedTime?:number + currentTime?: number; + elapsedTime?: number; } -export function unlock(conditionsStr:string, unlockParams:string[], sigResult:TxSignatureResult, metadata?:UnlockMetadata): boolean|null { - - const params = evalParams(unlockParams, conditionsStr, sigResult) - let parser = new Parser(grammar) - let nbFunctions = 0 +export function unlock( + conditionsStr: string, + unlockParams: string[], + sigResult: TxSignatureResult, + metadata?: UnlockMetadata +): boolean | null { + const params = evalParams(unlockParams, conditionsStr, sigResult); + let parser = new Parser(grammar); + let nbFunctions = 0; parser.yy = { i: 0, - sig: function (pubkey:string) { + sig: function (pubkey: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - let success = false - let i = 0 + let success = false; + let i = 0; while (!success && i < params.length) { - const p = params[i] - success = p.successful && p.funcName === 'SIG' && p.parameter === pubkey - i++ + const p = params[i]; + success = + p.successful && p.funcName === "SIG" && p.parameter === pubkey; + i++; } - return success + return success; }, - xHx: function(hash:string) { + xHx: function (hash: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - let success = false - let i = 0 + let success = false; + let i = 0; while (!success && i < params.length) { - const p = params[i] - success = p.successful && p.funcName === 'XHX' && hashf(p.parameter) === hash - i++ + const p = params[i]; + success = + p.successful && p.funcName === "XHX" && hashf(p.parameter) === hash; + i++; } - return success + return success; }, - cltv: function(deadline:string) { + cltv: function (deadline: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - return metadata && metadata.currentTime && metadata.currentTime >= parseInt(deadline) + return ( + metadata && + metadata.currentTime && + metadata.currentTime >= parseInt(deadline) + ); }, - csv: function(amountToWait:string) { + csv: function (amountToWait: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - return metadata && metadata.elapsedTime && metadata.elapsedTime >= parseInt(amountToWait) - } - } + return ( + metadata && + metadata.elapsedTime && + metadata.elapsedTime >= parseInt(amountToWait) + ); + }, + }; try { - const areAllValidParameters = params.reduce((success, p) => success && !!(p.successful), true) + const areAllValidParameters = params.reduce( + (success, p) => success && !!p.successful, + true + ); if (!areAllValidParameters) { - throw "All parameters must be successful" + throw "All parameters must be successful"; } - const unlocked = parser.parse(conditionsStr) + const unlocked = parser.parse(conditionsStr); if (unlockParams.length > nbFunctions) { - throw "There must be at most as much params as function calls" + throw "There must be at most as much params as function calls"; } - return unlocked - } catch(e) { - return null + return unlocked; + } catch (e) { + return null; } } -export function checkGrammar(conditionsStr:string): boolean|null { - +export function checkGrammar(conditionsStr: string): boolean | null { let parser = new Parser(grammar); parser.yy = { @@ -134,12 +146,12 @@ export function checkGrammar(conditionsStr:string): boolean|null { sig: () => true, xHx: () => true, cltv: () => true, - csv: () => true - } + csv: () => true, + }; try { - return parser.parse(conditionsStr) - } catch(e) { - return null + return parser.parse(conditionsStr); + } catch (e) 
{ + return null; } } diff --git a/app/lib/common-libs/underscore.ts b/app/lib/common-libs/underscore.ts index 2592220474e242b23f4e9af5b59609f132c6e8eb..3f567e3d81296ec7161300d45a01d39a0fd080af 100644 --- a/app/lib/common-libs/underscore.ts +++ b/app/lib/common-libs/underscore.ts @@ -1,87 +1,102 @@ -import {Map} from "./crypto/map" +import { Map } from "./crypto/map"; -const _underscore_ = require("underscore") +const _underscore_ = require("underscore"); export interface UnderscoreClass<T> { - filter(filterFunc: (t: T) => boolean): UnderscoreClass<T> - where(props: { [k in keyof T]?: T[k] }): UnderscoreClass<T> - sortBy(sortFunc:(element:T) => number): UnderscoreClass<T> - pluck<K extends keyof T>(k:K): UnderscoreClass<T> - uniq<K extends keyof T>(isSorted?:boolean, iteratee?:(t:T) => K): UnderscoreClass<T> - value(): T[] + filter(filterFunc: (t: T) => boolean): UnderscoreClass<T>; + where(props: { [k in keyof T]?: T[k] }): UnderscoreClass<T>; + sortBy(sortFunc: (element: T) => number): UnderscoreClass<T>; + pluck<K extends keyof T>(k: K): UnderscoreClass<T>; + uniq<K extends keyof T>( + isSorted?: boolean, + iteratee?: (t: T) => K + ): UnderscoreClass<T>; + value(): T[]; } export const Underscore = { - - filter: <T>(elements:T[], filterFunc: (t:T) => boolean): T[] => { - return _underscore_.filter(elements, filterFunc) + filter: <T>(elements: T[], filterFunc: (t: T) => boolean): T[] => { + return _underscore_.filter(elements, filterFunc); }, - where: <T>(elements:T[], props: { [k in keyof T]?: T[k] }): T[] => { - return _underscore_.where(elements, props) + where: <T>(elements: T[], props: { [k in keyof T]?: T[k] }): T[] => { + return _underscore_.where(elements, props); }, - findWhere: <T>(elements:T[], props: { [k in keyof T]?: T[k] }): T|null => { - return _underscore_.findWhere(elements, props) + findWhere: <T>(elements: T[], props: { [k in keyof T]?: T[k] }): T | null => { + return _underscore_.findWhere(elements, props); }, - keys: <T>(map:T): (keyof T)[] => { - return _underscore_.keys(map) + keys: <T>(map: T): (keyof T)[] => { + return _underscore_.keys(map); }, - values: <T>(map:{ [k:string]: T }): T[] => { - return _underscore_.values(map) + values: <T>(map: { [k: string]: T }): T[] => { + return _underscore_.values(map); }, - pluck: <T, K extends keyof T>(elements:T[], k:K): T[K][] => { - return _underscore_.pluck(elements, k) + pluck: <T, K extends keyof T>(elements: T[], k: K): T[K][] => { + return _underscore_.pluck(elements, k); }, - pick: <T, K extends keyof T>(elements:T, ...k:K[]): T[K][] => { - return _underscore_.pick(elements, ...k) + pick: <T, K extends keyof T>(elements: T, ...k: K[]): T[K][] => { + return _underscore_.pick(elements, ...k); }, - omit: <T, K extends keyof T>(element:T, ...k:K[]): T[K][] => { - return _underscore_.omit(element, ...k) + omit: <T, K extends keyof T>(element: T, ...k: K[]): T[K][] => { + return _underscore_.omit(element, ...k); }, - uniq: <T, K>(elements:T[], isSorted:boolean = false, iteratee?:(t:T) => K): T[] => { - return _underscore_.uniq(elements, isSorted, iteratee) + uniq: <T, K>( + elements: T[], + isSorted: boolean = false, + iteratee?: (t: T) => K + ): T[] => { + return _underscore_.uniq(elements, isSorted, iteratee); }, - clone: <T>(t:T): T => { - return _underscore_.clone(t) + clone: <T>(t: T): T => { + return _underscore_.clone(t); }, - mapObject: <T, K extends keyof T, L extends keyof (T[K])>(t:T, cb:(k:K) => (T[K])[L]): Map<T[K][L]> => { - return _underscore_.mapObject(t, cb) + mapObject: <T, K extends keyof T, L extends 
keyof T[K]>( + t: T, + cb: (k: K) => T[K][L] + ): Map<T[K][L]> => { + return _underscore_.mapObject(t, cb); }, - mapObjectByProp: <T, K extends keyof T, L extends keyof (T[K])>(t:T, prop:L): Map<T[K][L]> => { - return _underscore_.mapObject(t, (o:T[K]) => o[prop]) + mapObjectByProp: <T, K extends keyof T, L extends keyof T[K]>( + t: T, + prop: L + ): Map<T[K][L]> => { + return _underscore_.mapObject(t, (o: T[K]) => o[prop]); }, - sortBy: <T, K extends keyof T>(elements:T[], sortFunc:((element:T) => number|string)|K): T[] => { - return _underscore_.sortBy(elements, sortFunc) + sortBy: <T, K extends keyof T>( + elements: T[], + sortFunc: ((element: T) => number | string) | K + ): T[] => { + return _underscore_.sortBy(elements, sortFunc); }, - difference: <T>(array1:T[], array2:T[]): T[] => { - return _underscore_.difference(array1, array2) + difference: <T>(array1: T[], array2: T[]): T[] => { + return _underscore_.difference(array1, array2); }, - shuffle: <T>(elements:T[]): T[] => { - return _underscore_.shuffle(elements) + shuffle: <T>(elements: T[]): T[] => { + return _underscore_.shuffle(elements); }, - extend: <T, U>(t1:T, t2:U): T|U => { - return _underscore_.extend(t1, t2) + extend: <T, U>(t1: T, t2: U): T | U => { + return _underscore_.extend(t1, t2); }, - range: (count:number, end?:number): number[] => { - return _underscore_.range(count, end) + range: (count: number, end?: number): number[] => { + return _underscore_.range(count, end); }, - chain: <T>(element:T[]): UnderscoreClass<T> => { - return _underscore_.chain(element) + chain: <T>(element: T[]): UnderscoreClass<T> => { + return _underscore_.chain(element); }, -} +}; diff --git a/app/lib/common-libs/websocket.ts b/app/lib/common-libs/websocket.ts index a9589d2aba1efa73ac575be99124f706f49f17f3..bc28b44b8843af5824fa287abfc334ce3b677d2f 100644 --- a/app/lib/common-libs/websocket.ts +++ b/app/lib/common-libs/websocket.ts @@ -11,28 +11,28 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as WS from 'ws' +import * as WS from "ws"; export class WebSocket extends WS { constructor(url: string, options?: { agent: any }) { super(url, { agent: options && options.agent, - }) + }); } } export class WebSocketServer extends WS.Server { constructor(options: { - server?: any, - host?: string, - port?: number, - path?: string + server?: any; + host?: string; + port?: number; + path?: string; }) { super({ server: options.server, path: options.path, host: options.host, port: options.port, - }) + }); } } diff --git a/app/lib/common/Tristamp.ts b/app/lib/common/Tristamp.ts index 6c3232a66b55197c7c03ce3f49b11a17bbd92015..9b3d7f807909198b66dac976da185c99d33c5e77 100644 --- a/app/lib/common/Tristamp.ts +++ b/app/lib/common/Tristamp.ts @@ -12,7 +12,7 @@ // GNU Affero General Public License for more details. export interface Tristamp { - number: number - hash: string - medianTime: number + number: number; + hash: string; + medianTime: number; } diff --git a/app/lib/common/package.ts b/app/lib/common/package.ts index 2fdfc677f7e3872593e1c86046a66b66279bd6b3..19fe8d339bf974ef2245d4b3c18ef6f3b4621b88 100644 --- a/app/lib/common/package.ts +++ b/app/lib/common/package.ts @@ -11,25 +11,23 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
- export class Package { - - private json:{ version:string } + private json: { version: string }; private constructor() { - this.json = require('../../../package.json') + this.json = require("../../../package.json"); } get version() { - return this.json.version + return this.json.version; } - private static instance:Package + private static instance: Package; static getInstance() { if (!Package.instance) { - Package.instance = new Package() + Package.instance = new Package(); } - return Package.instance + return Package.instance; } -} \ No newline at end of file +} diff --git a/app/lib/computation/BlockchainContext.ts b/app/lib/computation/BlockchainContext.ts index 899b4b266336e9763a6ec2bc0f87654c3b71af71..3348fa911860bc0c8b96a0e906ea410f893e1062 100644 --- a/app/lib/computation/BlockchainContext.ts +++ b/app/lib/computation/BlockchainContext.ts @@ -11,33 +11,32 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../dto/BlockDTO" -import {DuniterBlockchain} from "../blockchain/DuniterBlockchain" -import {DBHead} from "../db/DBHead" -import {FileDAL} from "../dal/fileDAL" -import {DBBlock} from "../db/DBBlock" -import {Underscore} from "../common-libs/underscore" -import {DataErrors} from "../common-libs/errors" +import { BlockDTO } from "../dto/BlockDTO"; +import { DuniterBlockchain } from "../blockchain/DuniterBlockchain"; +import { DBHead } from "../db/DBHead"; +import { FileDAL } from "../dal/fileDAL"; +import { DBBlock } from "../db/DBBlock"; +import { Underscore } from "../common-libs/underscore"; +import { DataErrors } from "../common-libs/errors"; -const indexer = require('../indexer').Indexer -const constants = require('../constants'); +const indexer = require("../indexer").Indexer; +const constants = require("../constants"); export class BlockchainContext { - - private conf:any - private dal:FileDAL - private logger:any + private conf: any; + private dal: FileDAL; + private logger: any; /** * The virtual next HEAD. Computed each time a new block is added, because a lot of HEAD variables are deterministic * and can be computed one, just after a block is added for later controls. */ - private vHEAD:any + private vHEAD: any; /** * The currently written HEAD, aka. HEAD_1 relatively to incoming HEAD. 
*/ - private vHEAD_1:any + private vHEAD_1: any; private HEADrefreshed: Promise<void> = Promise.resolve(); @@ -58,13 +57,18 @@ export class BlockchainContext { powMin: this.conf.powMin || 0, powZeros: 0, powRemainder: 0, - avgBlockSize: 0 + avgBlockSize: 0, }; } else { block = { version: this.vHEAD_1.version }; } - this.vHEAD = await indexer.completeGlobalScope(BlockDTO.fromJSONObject(block), this.conf, [], this.dal); - })() + this.vHEAD = await indexer.completeGlobalScope( + BlockDTO.fromJSONObject(block), + this.conf, + [], + this.dal + ); + })(); return this.HEADrefreshed; } @@ -92,7 +96,7 @@ export class BlockchainContext { if (!this.vHEAD) { await this.refreshHead(); } - return this.vHEAD_1 + return this.vHEAD_1; } /** @@ -101,71 +105,112 @@ export class BlockchainContext { */ async getIssuerPersonalizedDifficulty(issuer: string): Promise<any> { const local_vHEAD = await this.getvHeadCopy({ issuer }); - await indexer.preparePersonalizedPoW(local_vHEAD, this.vHEAD_1, (n:number, m:number, p:string = "") => this.dal.range(n,m,p), this.conf) + await indexer.preparePersonalizedPoW( + local_vHEAD, + this.vHEAD_1, + (n: number, m: number, p: string = "") => this.dal.range(n, m, p), + this.conf + ); return local_vHEAD.issuerDiff; } setConfDAL(newConf: any, newDAL: any): void { this.dal = newDAL; this.conf = newConf; - this.logger = require('../logger').NewLogger(this.dal.profile); + this.logger = require("../logger").NewLogger(this.dal.profile); } - async checkBlock(block: BlockDTO, withPoWAndSignature:boolean): Promise<any> { - return DuniterBlockchain.checkBlock(block, withPoWAndSignature, this.conf, this.dal) + async checkBlock( + block: BlockDTO, + withPoWAndSignature: boolean + ): Promise<any> { + return DuniterBlockchain.checkBlock( + block, + withPoWAndSignature, + this.conf, + this.dal + ); } - private async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null, trim: boolean): Promise<BlockDTO> { - const block = await DuniterBlockchain.pushTheBlock(obj, index, HEAD, this.conf, this.dal, this.logger, trim) - this.vHEAD_1 = this.vHEAD = null - return block + private async addBlock( + obj: BlockDTO, + index: any = null, + HEAD: DBHead | null = null, + trim: boolean + ): Promise<BlockDTO> { + const block = await DuniterBlockchain.pushTheBlock( + obj, + index, + HEAD, + this.conf, + this.dal, + this.logger, + trim + ); + this.vHEAD_1 = this.vHEAD = null; + return block; } - async addSideBlock(obj:BlockDTO): Promise<BlockDTO> { - const dbb = await DuniterBlockchain.pushSideBlock(obj, this.dal, this.logger) - return dbb.toBlockDTO() + async addSideBlock(obj: BlockDTO): Promise<BlockDTO> { + const dbb = await DuniterBlockchain.pushSideBlock( + obj, + this.dal, + this.logger + ); + return dbb.toBlockDTO(); } async revertCurrentBlock(): Promise<DBBlock> { const head_1 = await this.dal.bindexDAL.head(1); - this.logger.debug('Reverting block #%s...', head_1.number); - const block = await this.dal.getAbsoluteValidBlockInForkWindow(head_1.number, head_1.hash) + this.logger.debug("Reverting block #%s...", head_1.number); + const block = await this.dal.getAbsoluteValidBlockInForkWindow( + head_1.number, + head_1.hash + ); if (!block) { - throw DataErrors[DataErrors.BLOCK_TO_REVERT_NOT_FOUND] + throw DataErrors[DataErrors.BLOCK_TO_REVERT_NOT_FOUND]; } - await DuniterBlockchain.revertBlock(head_1.number, head_1.hash, this.dal, block) + await DuniterBlockchain.revertBlock( + head_1.number, + head_1.hash, + this.dal, + block + ); // Invalidates the head, since it has changed. 
await this.refreshHead(); - return block + return block; } async revertCurrentHead() { const head_1 = await this.dal.bindexDAL.head(1); - this.logger.debug('Reverting HEAD~1... (b#%s)', head_1.number); - await DuniterBlockchain.revertBlock(head_1.number, head_1.hash, this.dal) + this.logger.debug("Reverting HEAD~1... (b#%s)", head_1.number); + await DuniterBlockchain.revertBlock(head_1.number, head_1.hash, this.dal); // Invalidates the head, since it has changed. await this.refreshHead(); } async applyNextAvailableFork(): Promise<any> { const current = await this.current(); - this.logger.debug('Find next potential block #%s...', current.number + 1); + this.logger.debug("Find next potential block #%s...", current.number + 1); const forks = await this.dal.getForkBlocksFollowing(current); if (!forks.length) { throw constants.ERRORS.NO_POTENTIAL_FORK_AS_NEXT; } const block = forks[0]; - await this.checkAndAddBlock(BlockDTO.fromJSONObject(block)) - this.logger.debug('Applied block #%s', block.number); + await this.checkAndAddBlock(BlockDTO.fromJSONObject(block)); + this.logger.debug("Applied block #%s", block.number); } - async checkAndAddBlock(block:BlockDTO, trim = true) { - const { index, HEAD } = await this.checkBlock(block, constants.WITH_SIGNATURES_AND_POW); + async checkAndAddBlock(block: BlockDTO, trim = true) { + const { index, HEAD } = await this.checkBlock( + block, + constants.WITH_SIGNATURES_AND_POW + ); return await this.addBlock(block, index, HEAD, trim); } current(): Promise<any> { - return this.dal.getCurrentBlockOrNull() + return this.dal.getCurrentBlockOrNull(); } async checkHaveEnoughLinks(target: string, newLinks: any): Promise<any> { @@ -175,7 +220,15 @@ export class BlockchainContext { count += newLinks[target].length; } if (count < this.conf.sigQty) { - throw 'Key ' + target + ' does not have enough links (' + count + '/' + this.conf.sigQty + ')'; + throw ( + "Key " + + target + + " does not have enough links (" + + count + + "/" + + this.conf.sigQty + + ")" + ); } } } diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts index ef4a60730f3f39ec84f3120c9623e08aeef39298..0b6eb608d07097467a4752a87c87374fbecb0204 100644 --- a/app/lib/dal/fileDAL.ts +++ b/app/lib/dal/fileDAL.ts @@ -11,14 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import * as fs from 'fs' -import * as path from 'path' -import {SQLiteDriver} from "./drivers/SQLiteDriver" -import {ConfDAL} from "./fileDALs/ConfDAL" -import {ConfDTO} from "../dto/ConfDTO" -import {BlockDTO} from "../dto/BlockDTO" -import {DBHead} from "../db/DBHead" -import {DBIdentity, IdentityDAL} from "./sqliteDAL/IdentityDAL" +import * as fs from "fs"; +import * as path from "path"; +import { SQLiteDriver } from "./drivers/SQLiteDriver"; +import { ConfDAL } from "./fileDALs/ConfDAL"; +import { ConfDTO } from "../dto/ConfDTO"; +import { BlockDTO } from "../dto/BlockDTO"; +import { DBHead } from "../db/DBHead"; +import { DBIdentity, IdentityDAL } from "./sqliteDAL/IdentityDAL"; import { CindexEntry, FullCindexEntry, @@ -29,165 +29,173 @@ import { MindexEntry, SimpleTxInput, SimpleUdEntryForWallet, - SindexEntry -} from "../indexer" -import {TransactionDTO} from "../dto/TransactionDTO" -import {CertDAL, DBCert} from "./sqliteDAL/CertDAL" -import {DBBlock} from "../db/DBBlock" -import {DBMembership, MembershipDAL} from "./sqliteDAL/MembershipDAL" -import {MerkleDTO} from "../dto/MerkleDTO" -import {CommonConstants} from "../common-libs/constants" -import {PowDAL} from "./fileDALs/PowDAL"; -import {Initiable} from "./sqliteDAL/Initiable" -import {MetaDAL} from "./sqliteDAL/MetaDAL" -import {DataErrors} from "../common-libs/errors" -import {BasicRevocableIdentity, IdentityDTO} from "../dto/IdentityDTO" -import {FileSystem} from "../system/directory" -import {Wot} from "dubp-wot-rs" -import {IIndexDAO} from "./indexDAL/abstract/IIndexDAO" -import {BIndexDAO} from "./indexDAL/abstract/BIndexDAO" -import {MIndexDAO} from "./indexDAL/abstract/MIndexDAO" -import {SIndexDAO} from "./indexDAL/abstract/SIndexDAO" -import {CIndexDAO} from "./indexDAL/abstract/CIndexDAO" -import {IdentityForRequirements} from "../../service/BlockchainService" -import {NewLogger} from "../logger" -import {BlockchainDAO} from "./indexDAL/abstract/BlockchainDAO" -import {TxsDAO} from "./indexDAL/abstract/TxsDAO" -import {WalletDAO} from "./indexDAL/abstract/WalletDAO" -import {PeerDAO} from "./indexDAL/abstract/PeerDAO" -import {DBTx} from "../db/DBTx" -import {DBWallet} from "../db/DBWallet" -import {Tristamp} from "../common/Tristamp" -import {CFSCore} from "./fileDALs/CFSCore" -import {Underscore} from "../common-libs/underscore" -import {DBPeer} from "../db/DBPeer" -import {MonitorFlushedIndex} from "../debug/MonitorFlushedIndex" -import {cliprogram} from "../common-libs/programOptions" -import {DividendDAO, UDSource} from "./indexDAL/abstract/DividendDAO" -import {HttpSource, HttpUD} from "../../modules/bma/lib/dtos" -import {GenericDAO} from "./indexDAL/abstract/GenericDAO" -import {MonitorExecutionTime} from "../debug/MonitorExecutionTime" -import {LevelDBDividend} from "./indexDAL/leveldb/LevelDBDividend" -import {LevelDBBindex} from "./indexDAL/leveldb/LevelDBBindex" - -import {LevelUp} from 'levelup'; -import {LevelDBBlockchain} from "./indexDAL/leveldb/LevelDBBlockchain" -import {LevelDBSindex} from "./indexDAL/leveldb/LevelDBSindex" -import {SqliteTransactions} from "./indexDAL/sqlite/SqliteTransactions" -import {SqlitePeers} from "./indexDAL/sqlite/SqlitePeers" -import {LevelDBWallet} from "./indexDAL/leveldb/LevelDBWallet" -import {LevelDBCindex} from "./indexDAL/leveldb/LevelDBCindex" -import {LevelDBIindex} from "./indexDAL/leveldb/LevelDBIindex" -import {LevelDBMindex} from "./indexDAL/leveldb/LevelDBMindex" -import {ConfDAO} from "./indexDAL/abstract/ConfDAO" -import {ServerDAO} from "./server-dao" 
- -const readline = require('readline') -const indexer = require('../indexer').Indexer -const logger = require('../logger').NewLogger('filedal'); -const constants = require('../constants'); + SindexEntry, +} from "../indexer"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { CertDAL, DBCert } from "./sqliteDAL/CertDAL"; +import { DBBlock } from "../db/DBBlock"; +import { DBMembership, MembershipDAL } from "./sqliteDAL/MembershipDAL"; +import { MerkleDTO } from "../dto/MerkleDTO"; +import { CommonConstants } from "../common-libs/constants"; +import { PowDAL } from "./fileDALs/PowDAL"; +import { Initiable } from "./sqliteDAL/Initiable"; +import { MetaDAL } from "./sqliteDAL/MetaDAL"; +import { DataErrors } from "../common-libs/errors"; +import { BasicRevocableIdentity, IdentityDTO } from "../dto/IdentityDTO"; +import { FileSystem } from "../system/directory"; +import { Wot } from "dubp-wot-rs"; +import { IIndexDAO } from "./indexDAL/abstract/IIndexDAO"; +import { BIndexDAO } from "./indexDAL/abstract/BIndexDAO"; +import { MIndexDAO } from "./indexDAL/abstract/MIndexDAO"; +import { SIndexDAO } from "./indexDAL/abstract/SIndexDAO"; +import { CIndexDAO } from "./indexDAL/abstract/CIndexDAO"; +import { IdentityForRequirements } from "../../service/BlockchainService"; +import { NewLogger } from "../logger"; +import { BlockchainDAO } from "./indexDAL/abstract/BlockchainDAO"; +import { TxsDAO } from "./indexDAL/abstract/TxsDAO"; +import { WalletDAO } from "./indexDAL/abstract/WalletDAO"; +import { PeerDAO } from "./indexDAL/abstract/PeerDAO"; +import { DBTx } from "../db/DBTx"; +import { DBWallet } from "../db/DBWallet"; +import { Tristamp } from "../common/Tristamp"; +import { CFSCore } from "./fileDALs/CFSCore"; +import { Underscore } from "../common-libs/underscore"; +import { DBPeer } from "../db/DBPeer"; +import { MonitorFlushedIndex } from "../debug/MonitorFlushedIndex"; +import { cliprogram } from "../common-libs/programOptions"; +import { DividendDAO, UDSource } from "./indexDAL/abstract/DividendDAO"; +import { HttpSource, HttpUD } from "../../modules/bma/lib/dtos"; +import { GenericDAO } from "./indexDAL/abstract/GenericDAO"; +import { MonitorExecutionTime } from "../debug/MonitorExecutionTime"; +import { LevelDBDividend } from "./indexDAL/leveldb/LevelDBDividend"; +import { LevelDBBindex } from "./indexDAL/leveldb/LevelDBBindex"; + +import { LevelUp } from "levelup"; +import { LevelDBBlockchain } from "./indexDAL/leveldb/LevelDBBlockchain"; +import { LevelDBSindex } from "./indexDAL/leveldb/LevelDBSindex"; +import { SqliteTransactions } from "./indexDAL/sqlite/SqliteTransactions"; +import { SqlitePeers } from "./indexDAL/sqlite/SqlitePeers"; +import { LevelDBWallet } from "./indexDAL/leveldb/LevelDBWallet"; +import { LevelDBCindex } from "./indexDAL/leveldb/LevelDBCindex"; +import { LevelDBIindex } from "./indexDAL/leveldb/LevelDBIindex"; +import { LevelDBMindex } from "./indexDAL/leveldb/LevelDBMindex"; +import { ConfDAO } from "./indexDAL/abstract/ConfDAO"; +import { ServerDAO } from "./server-dao"; + +const readline = require("readline"); +const indexer = require("../indexer").Indexer; +const logger = require("../logger").NewLogger("filedal"); +const constants = require("../constants"); export interface FileDALParams { - home:string - fs:FileSystem - dbf:() => SQLiteDriver - wotbf:() => Wot + home: string; + fs: FileSystem; + dbf: () => SQLiteDriver; + wotbf: () => Wot; } export interface IndexBatch { - mindex: MindexEntry[] - iindex: IindexEntry[] - sindex: 
SindexEntry[] - cindex: CindexEntry[] + mindex: MindexEntry[]; + iindex: IindexEntry[]; + sindex: SindexEntry[]; + cindex: CindexEntry[]; } export class FileDAL implements ServerDAO { - - rootPath:string - fs: FileSystem - sqliteDriver:SQLiteDriver - wotb:Wot - profile:string + rootPath: string; + fs: FileSystem; + sqliteDriver: SQLiteDriver; + wotb: Wot; + profile: string; // Simple file accessors - powDAL:PowDAL - coreFS:CFSCore - confDAL:ConfDAO + powDAL: PowDAL; + coreFS: CFSCore; + confDAL: ConfDAO; // SQLite DALs - metaDAL:MetaDAL - idtyDAL:IdentityDAL - certDAL:CertDAL - msDAL:MembershipDAL + metaDAL: MetaDAL; + idtyDAL: IdentityDAL; + certDAL: CertDAL; + msDAL: MembershipDAL; // New DAO entities - blockDAL:BlockchainDAO - txsDAL:TxsDAO - peerDAL:PeerDAO - walletDAL:WalletDAO - bindexDAL:BIndexDAO - mindexDAL:MIndexDAO - iindexDAL:IIndexDAO - sindexDAL:SIndexDAO - cindexDAL:CIndexDAO - dividendDAL:DividendDAO - newDals:{ [k:string]: Initiable } - private dals:(PeerDAO|WalletDAO|GenericDAO<any>)[] - - loadConfHook: (conf:ConfDTO) => Promise<void> - saveConfHook: (conf:ConfDTO) => Promise<ConfDTO> - - constructor(private params:FileDALParams, - public getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>, - public getLevelDB: (dbName: string)=> Promise<LevelUp>, - ) { - this.rootPath = params.home - this.sqliteDriver = params.dbf() - this.profile = 'DAL' - this.fs = params.fs + blockDAL: BlockchainDAO; + txsDAL: TxsDAO; + peerDAL: PeerDAO; + walletDAL: WalletDAO; + bindexDAL: BIndexDAO; + mindexDAL: MIndexDAO; + iindexDAL: IIndexDAO; + sindexDAL: SIndexDAO; + cindexDAL: CIndexDAO; + dividendDAL: DividendDAO; + newDals: { [k: string]: Initiable }; + private dals: (PeerDAO | WalletDAO | GenericDAO<any>)[]; + + loadConfHook: (conf: ConfDTO) => Promise<void>; + saveConfHook: (conf: ConfDTO) => Promise<ConfDTO>; + + constructor( + private params: FileDALParams, + public getSqliteDB: (dbName: string) => Promise<SQLiteDriver>, + public getLevelDB: (dbName: string) => Promise<LevelUp> + ) { + this.rootPath = params.home; + this.sqliteDriver = params.dbf(); + this.profile = "DAL"; + this.fs = params.fs; // DALs - this.coreFS = new CFSCore(this.rootPath, params.fs) - this.powDAL = new PowDAL(this.rootPath, params.fs) - this.confDAL = new ConfDAL(this.rootPath, params.fs) - this.metaDAL = new (require('./sqliteDAL/MetaDAL').MetaDAL)(this.sqliteDriver); - this.idtyDAL = new (require('./sqliteDAL/IdentityDAL').IdentityDAL)(this.sqliteDriver); - this.certDAL = new (require('./sqliteDAL/CertDAL').CertDAL)(this.sqliteDriver); - this.msDAL = new (require('./sqliteDAL/MembershipDAL').MembershipDAL)(this.sqliteDriver); - - this.blockDAL = new LevelDBBlockchain(getLevelDB) - this.txsDAL = new SqliteTransactions(getSqliteDB) - this.peerDAL = new SqlitePeers(getSqliteDB) - this.walletDAL = new LevelDBWallet(getLevelDB) - this.bindexDAL = new LevelDBBindex(getLevelDB) - this.mindexDAL = new LevelDBMindex(getLevelDB) - this.iindexDAL = new LevelDBIindex(getLevelDB) - this.sindexDAL = new LevelDBSindex(getLevelDB) - this.cindexDAL = new LevelDBCindex(getLevelDB) - this.dividendDAL = new LevelDBDividend(getLevelDB) + this.coreFS = new CFSCore(this.rootPath, params.fs); + this.powDAL = new PowDAL(this.rootPath, params.fs); + this.confDAL = new ConfDAL(this.rootPath, params.fs); + this.metaDAL = new (require("./sqliteDAL/MetaDAL").MetaDAL)( + this.sqliteDriver + ); + this.idtyDAL = new (require("./sqliteDAL/IdentityDAL").IdentityDAL)( + this.sqliteDriver + ); + this.certDAL = new 
(require("./sqliteDAL/CertDAL").CertDAL)( + this.sqliteDriver + ); + this.msDAL = new (require("./sqliteDAL/MembershipDAL").MembershipDAL)( + this.sqliteDriver + ); + + this.blockDAL = new LevelDBBlockchain(getLevelDB); + this.txsDAL = new SqliteTransactions(getSqliteDB); + this.peerDAL = new SqlitePeers(getSqliteDB); + this.walletDAL = new LevelDBWallet(getLevelDB); + this.bindexDAL = new LevelDBBindex(getLevelDB); + this.mindexDAL = new LevelDBMindex(getLevelDB); + this.iindexDAL = new LevelDBIindex(getLevelDB); + this.sindexDAL = new LevelDBSindex(getLevelDB); + this.cindexDAL = new LevelDBCindex(getLevelDB); + this.dividendDAL = new LevelDBDividend(getLevelDB); this.newDals = { - 'powDAL': this.powDAL, - 'metaDAL': this.metaDAL, - 'blockDAL': this.blockDAL, - 'certDAL': this.certDAL, - 'msDAL': this.msDAL, - 'idtyDAL': this.idtyDAL, - 'txsDAL': this.txsDAL, - 'peerDAL': this.peerDAL, - 'confDAL': this.confDAL, - 'walletDAL': this.walletDAL, - 'bindexDAL': this.bindexDAL, - 'mindexDAL': this.mindexDAL, - 'iindexDAL': this.iindexDAL, - 'sindexDAL': this.sindexDAL, - 'cindexDAL': this.cindexDAL, - 'dividendDAL': this.dividendDAL, - } + powDAL: this.powDAL, + metaDAL: this.metaDAL, + blockDAL: this.blockDAL, + certDAL: this.certDAL, + msDAL: this.msDAL, + idtyDAL: this.idtyDAL, + txsDAL: this.txsDAL, + peerDAL: this.peerDAL, + confDAL: this.confDAL, + walletDAL: this.walletDAL, + bindexDAL: this.bindexDAL, + mindexDAL: this.mindexDAL, + iindexDAL: this.iindexDAL, + sindexDAL: this.sindexDAL, + cindexDAL: this.cindexDAL, + dividendDAL: this.dividendDAL, + }; } - async init(conf:ConfDTO) { - this.wotb = this.params.wotbf() + async init(conf: ConfDTO) { + this.wotb = this.params.wotbf(); this.dals = [ this.blockDAL, this.txsDAL, @@ -199,9 +207,9 @@ export class FileDAL implements ServerDAO { this.sindexDAL, this.cindexDAL, this.dividendDAL, - ] + ]; for (const indexDAL of this.dals) { - indexDAL.triggerInit() + indexDAL.triggerInit(); } const dalNames = Underscore.keys(this.newDals); for (const dalName of dalNames) { @@ -212,126 +220,156 @@ export class FileDAL implements ServerDAO { await this.metaDAL.upgradeDatabase(conf); // Update the maximum certifications count a member can issue into the C++ addon const currencyParams = await this.getParameters(); - if (currencyParams && currencyParams.sigStock !== undefined && currencyParams.sigStock !== null) { + if ( + currencyParams && + currencyParams.sigStock !== undefined && + currencyParams.sigStock !== null + ) { this.wotb.setMaxCert(currencyParams.sigStock); } } getDBVersion() { - return this.metaDAL.getVersion() + return this.metaDAL.getVersion(); } - writeFileOfBlock(block:DBBlock) { - return this.blockDAL.saveBlock(block) + writeFileOfBlock(block: DBBlock) { + return this.blockDAL.saveBlock(block); } - writeSideFileOfBlock(block:DBBlock) { - return this.blockDAL.saveSideBlock(block) + writeSideFileOfBlock(block: DBBlock) { + return this.blockDAL.saveSideBlock(block); } listAllPeers() { - return this.peerDAL.listAll() + return this.peerDAL.listAll(); } - async getPeer(pubkey:string) { + async getPeer(pubkey: string) { try { - return await this.peerDAL.getPeer(pubkey) + return await this.peerDAL.getPeer(pubkey); } catch (err) { - throw Error('Unknown peer ' + pubkey); + throw Error("Unknown peer " + pubkey); } } async getWS2Peers() { - return this.peerDAL.getPeersWithEndpointsLike('WS2P') + return this.peerDAL.getPeersWithEndpointsLike("WS2P"); } - getAbsoluteBlockInForkWindowByBlockstamp(blockstamp:string) { + 
getAbsoluteBlockInForkWindowByBlockstamp(blockstamp: string) { if (!blockstamp) throw "Blockstamp is required to find the block"; - const sp = blockstamp.split('-'); + const sp = blockstamp.split("-"); const number = parseInt(sp[0]); const hash = sp[1]; - return this.getAbsoluteBlockInForkWindow(number, hash) + return this.getAbsoluteBlockInForkWindow(number, hash); } - getAbsoluteValidBlockInForkWindowByBlockstamp(blockstamp:string) { + getAbsoluteValidBlockInForkWindowByBlockstamp(blockstamp: string) { if (!blockstamp) throw "Blockstamp is required to find the block"; - const sp = blockstamp.split('-'); + const sp = blockstamp.split("-"); const number = parseInt(sp[0]); const hash = sp[1]; - return this.getAbsoluteValidBlockInForkWindow(number, hash) + return this.getAbsoluteValidBlockInForkWindow(number, hash); } - async getBlockWeHaveItForSure(number:number): Promise<DBBlock> { - return (await this.blockDAL.getBlock(number)) as DBBlock + async getBlockWeHaveItForSure(number: number): Promise<DBBlock> { + return (await this.blockDAL.getBlock(number)) as DBBlock; } // Duniter-UI dependency - async getBlock(number: number): Promise<DBBlock|null> { - return this.getFullBlockOf(number) + async getBlock(number: number): Promise<DBBlock | null> { + return this.getFullBlockOf(number); } - async getFullBlockOf(number: number): Promise<DBBlock|null> { - return this.blockDAL.getBlock(number) + async getFullBlockOf(number: number): Promise<DBBlock | null> { + return this.blockDAL.getBlock(number); } - async getBlockstampOf(number: number): Promise<string|null> { - const block = await this.getTristampOf(number) + async getBlockstampOf(number: number): Promise<string | null> { + const block = await this.getTristampOf(number); if (block) { - return [block.number, block.hash].join('-') + return [block.number, block.hash].join("-"); } - return null + return null; } - async getTristampOf(number: number): Promise<Tristamp|null> { - return this.blockDAL.getBlock(number) + async getTristampOf(number: number): Promise<Tristamp | null> { + return this.blockDAL.getBlock(number); } - async existsAbsoluteBlockInForkWindow(number:number, hash:string): Promise<boolean> { - return !!(await this.getAbsoluteBlockByNumberAndHash(number, hash, true)) + async existsAbsoluteBlockInForkWindow( + number: number, + hash: string + ): Promise<boolean> { + return !!(await this.getAbsoluteBlockByNumberAndHash(number, hash, true)); } - async getAbsoluteBlockInForkWindow(number:number, hash:string): Promise<DBBlock|null> { - return this.getAbsoluteBlockByNumberAndHash(number, hash) + async getAbsoluteBlockInForkWindow( + number: number, + hash: string + ): Promise<DBBlock | null> { + return this.getAbsoluteBlockByNumberAndHash(number, hash); } - async getAbsoluteValidBlockInForkWindow(number:number, hash:string): Promise<DBBlock|null> { - const block = await this.getAbsoluteBlockByNumberAndHash(number, hash) + async getAbsoluteValidBlockInForkWindow( + number: number, + hash: string + ): Promise<DBBlock | null> { + const block = await this.getAbsoluteBlockByNumberAndHash(number, hash); if (block && !block.fork) { - return block + return block; } - return null + return null; } - async getAbsoluteBlockByNumberAndHash(number:number, hash:string, forceNumberAndHashFinding = false): Promise<DBBlock|null> { + async getAbsoluteBlockByNumberAndHash( + number: number, + hash: string, + forceNumberAndHashFinding = false + ): Promise<DBBlock | null> { if (number > 0 || forceNumberAndHashFinding) { - return await 
this.blockDAL.getAbsoluteBlock(number, hash) + return await this.blockDAL.getAbsoluteBlock(number, hash); } else { // Block#0 is special - return await this.blockDAL.getBlock(number) + return await this.blockDAL.getBlock(number); } } - async getAbsoluteBlockByBlockstamp(blockstamp: string): Promise<DBBlock|null> { - const sp = blockstamp.split('-') - return this.getAbsoluteBlockByNumberAndHash(parseInt(sp[0]), sp[1]) + async getAbsoluteBlockByBlockstamp( + blockstamp: string + ): Promise<DBBlock | null> { + const sp = blockstamp.split("-"); + return this.getAbsoluteBlockByNumberAndHash(parseInt(sp[0]), sp[1]); } - async existsNonChainableLink(from:string, vHEAD_1:DBHead, sigStock:number) { + async existsNonChainableLink( + from: string, + vHEAD_1: DBHead, + sigStock: number + ) { // Cert period rule const medianTime = vHEAD_1 ? vHEAD_1.medianTime : 0; - const linksFrom:FullCindexEntry[] = await this.cindexDAL.reducablesFrom(from) - const unchainables = Underscore.filter(linksFrom, (link:CindexEntry) => link.chainable_on > medianTime); + const linksFrom: FullCindexEntry[] = await this.cindexDAL.reducablesFrom( + from + ); + const unchainables = Underscore.filter( + linksFrom, + (link: CindexEntry) => link.chainable_on > medianTime + ); if (unchainables.length > 0) return true; // Max stock rule - let activeLinks = Underscore.filter(linksFrom, (link:CindexEntry) => !link.expired_on); + let activeLinks = Underscore.filter( + linksFrom, + (link: CindexEntry) => !link.expired_on + ); return activeLinks.length >= sigStock; } - async getCurrentBlockOrNull() { - let current:DBBlock|null = null; + let current: DBBlock | null = null; try { - current = await this.getBlockCurrent() + current = await this.getBlockCurrent(); } catch (e) { if (e != constants.ERROR.BLOCK.NO_CURRENT_BLOCK) { throw e; @@ -340,22 +378,22 @@ export class FileDAL implements ServerDAO { return current; } - getPromoted(number:number) { - return this.getFullBlockOf(number) + getPromoted(number: number) { + return this.getFullBlockOf(number); } // Block getPotentialRootBlocks() { - return this.blockDAL.getPotentialRoots() + return this.blockDAL.getPotentialRoots(); } - lastBlockOfIssuer(issuer:string) { + lastBlockOfIssuer(issuer: string) { return this.blockDAL.lastBlockOfIssuer(issuer); } - - getCountOfPoW(issuer:string) { - return this.blockDAL.getCountOfBlocksIssuedBy(issuer) + + getCountOfPoW(issuer: string) { + return this.blockDAL.getCountOfBlocksIssuedBy(issuer); } /** @@ -363,97 +401,140 @@ export class FileDAL implements ServerDAO { * @param start Lower number bound (included). * @param end Higher number bound (included). 
*/ - async getBlocksBetween (start:number, end:number): Promise<DBBlock[]> { - start = Math.max(0, start) - end= Math.max(0, end) - return this.blockDAL.getBlocks(Math.max(0, start), end) + async getBlocksBetween(start: number, end: number): Promise<DBBlock[]> { + start = Math.max(0, start); + end = Math.max(0, end); + return this.blockDAL.getBlocks(Math.max(0, start), end); } - getForkBlocksFollowing(current:DBBlock) { - return this.blockDAL.getNextForkBlocks(current.number, current.hash) + getForkBlocksFollowing(current: DBBlock) { + return this.blockDAL.getNextForkBlocks(current.number, current.hash); } - getPotentialForkBlocks(numberStart:number, medianTimeStart:number, maxNumber:number) { - return this.blockDAL.getPotentialForkBlocks(numberStart, medianTimeStart, maxNumber) + getPotentialForkBlocks( + numberStart: number, + medianTimeStart: number, + maxNumber: number + ) { + return this.blockDAL.getPotentialForkBlocks( + numberStart, + medianTimeStart, + maxNumber + ); } async getBlockCurrent() { const current = await this.blockDAL.getCurrent(); - if (!current) - throw 'No current block'; + if (!current) throw "No current block"; return current; } - getValidLinksTo(to:string) { - return this.cindexDAL.getValidLinksTo(to) + getValidLinksTo(to: string) { + return this.cindexDAL.getValidLinksTo(to); } - async getAvailableSourcesByPubkey(pubkey:string): Promise<HttpSource[]> { - const txAvailable = await this.sindexDAL.getAvailableForPubkey(pubkey) - const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey) - return sources.map(d => { - return { - type: 'D', - noffset: d.pos, - identifier: pubkey, - amount: d.amount, - base: d.base, - conditions: 'SIG(' + pubkey + ')' - } - }).concat(txAvailable.map(s => { - return { - type: 'T', - noffset: s.pos, - identifier: s.identifier, - amount: s.amount, - base: s.base, - conditions: s.conditions - } - })) - } - - async findByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number, isDividend: boolean): Promise<SimpleTxInput[]> { + async getAvailableSourcesByPubkey(pubkey: string): Promise<HttpSource[]> { + const txAvailable = await this.sindexDAL.getAvailableForPubkey(pubkey); + const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey); + return sources + .map((d) => { + return { + type: "D", + noffset: d.pos, + identifier: pubkey, + amount: d.amount, + base: d.base, + conditions: "SIG(" + pubkey + ")", + }; + }) + .concat( + txAvailable.map((s) => { + return { + type: "T", + noffset: s.pos, + identifier: s.identifier, + amount: s.amount, + base: s.base, + conditions: s.conditions, + }; + }) + ); + } + + async findByIdentifierPosAmountBase( + identifier: string, + pos: number, + amount: number, + base: number, + isDividend: boolean + ): Promise<SimpleTxInput[]> { if (isDividend) { - return this.dividendDAL.findUdSourceByIdentifierPosAmountBase(identifier, pos, amount, base) + return this.dividendDAL.findUdSourceByIdentifierPosAmountBase( + identifier, + pos, + amount, + base + ); } else { - return this.sindexDAL.findTxSourceByIdentifierPosAmountBase(identifier, pos, amount, base) + return this.sindexDAL.findTxSourceByIdentifierPosAmountBase( + identifier, + pos, + amount, + base + ); } } - async getGlobalIdentityByHashForExistence(hash:string): Promise<boolean> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForExistence(hash: string): Promise<boolean> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await 
this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return false + return false; } } - return true + return true; } - async getGlobalIdentityByHashForHashingAndSig(hash:string): Promise<{ pubkey:string, uid:string, buid:string, sig:string }|null> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForHashingAndSig( + hash: string + ): Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } return { pubkey: idty.pub, uid: idty.uid, buid: idty.created_on, - sig: idty.sig - } + sig: idty.sig, + }; } - return pending - } - - async getGlobalIdentityByHashForLookup(hash:string): Promise<{ pubkey:string, uid:string, buid:string, sig:string, member:boolean, wasMember:boolean }|null> { - const pending = await this.idtyDAL.getByHash(hash) + return pending; + } + + async getGlobalIdentityByHashForLookup( + hash: string + ): Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + member: boolean; + wasMember: boolean; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } return { pubkey: idty.pub, @@ -461,20 +542,32 @@ export class FileDAL implements ServerDAO { buid: idty.created_on, sig: idty.sig, member: idty.member, - wasMember: idty.wasMember - } + wasMember: idty.wasMember, + }; } - return pending - } - - async getGlobalIdentityByHashForJoining(hash:string): Promise<{ pubkey:string, uid:string, buid:string, sig:string, member:boolean, wasMember:boolean, revoked:boolean }|null> { - const pending = await this.idtyDAL.getByHash(hash) + return pending; + } + + async getGlobalIdentityByHashForJoining( + hash: string + ): Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + member: boolean; + wasMember: boolean; + revoked: boolean; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } - const membership = await this.mindexDAL.getReducedMSForImplicitRevocation(idty.pub) as FullMindexEntry + const membership = (await this.mindexDAL.getReducedMSForImplicitRevocation( + idty.pub + )) as FullMindexEntry; return { pubkey: idty.pub, uid: idty.uid, @@ -482,38 +575,54 @@ export class FileDAL implements ServerDAO { sig: idty.sig, member: idty.member, wasMember: idty.wasMember, - revoked: !!(membership.revoked_on) - } + revoked: !!membership.revoked_on, + }; } - return pending + return pending; } - async getGlobalIdentityByHashForIsMember(hash:string): Promise<{ pub:string, member:boolean }|null> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForIsMember( + hash: string + ): Promise<{ pub: string; member: boolean } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } return { pub: idty.pub, - member: idty.member - } + 
member: idty.member, + }; } return { pub: pending.pubkey, - member: pending.member - } + member: pending.member, + }; } - async getGlobalIdentityByHashForRevocation(hash:string): Promise<{ pub:string, uid:string, created_on:string, sig:string, member:boolean, wasMember:boolean, revoked:boolean, revocation_sig:string|null, expires_on:number }|null> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForRevocation( + hash: string + ): Promise<{ + pub: string; + uid: string; + created_on: string; + sig: string; + member: boolean; + wasMember: boolean; + revoked: boolean; + revocation_sig: string | null; + expires_on: number; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } - const membership = await this.mindexDAL.getReducedMSForImplicitRevocation(idty.pub) as FullMindexEntry + const membership = (await this.mindexDAL.getReducedMSForImplicitRevocation( + idty.pub + )) as FullMindexEntry; return { pub: idty.pub, uid: idty.uid, @@ -522,9 +631,9 @@ export class FileDAL implements ServerDAO { wasMember: idty.wasMember, expires_on: membership.expires_on, created_on: idty.created_on, - revoked: !!(membership.revoked_on), - revocation_sig: membership.revocation - } + revoked: !!membership.revoked_on, + revocation_sig: membership.revocation, + }; } return { pub: pending.pubkey, @@ -535,150 +644,198 @@ export class FileDAL implements ServerDAO { member: pending.member, wasMember: pending.wasMember, revoked: pending.revoked, - revocation_sig: pending.revocation_sig - } + revocation_sig: pending.revocation_sig, + }; } getMembers() { - return this.iindexDAL.getMembers() + return this.iindexDAL.getMembers(); } - async getWrittenIdtyByPubkeyForHash(pubkey:string): Promise<{ hash:string }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForHash( + pubkey: string + ): Promise<{ hash: string }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForHashing(pubkey:string): Promise<{ uid:string, created_on:string, pub:string }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForHashing( + pubkey: string + ): Promise<{ uid: string; created_on: string; pub: string }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForWotbID(pubkey:string): Promise<{ wotb_id:number }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForWotbID( + pubkey: string + ): Promise<{ wotb_id: number }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForUidAndPubkey(pubkey:string): Promise<{ pub:string, uid:string }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForUidAndPubkey( + pubkey: string + ): Promise<{ pub: string; uid: string }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForIsMember(pubkey:string): Promise<{ member:boolean }|null> { - return this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForIsMember( + pubkey: string + ): Promise<{ member: boolean } | null> { + return this.iindexDAL.getFromPubkey(pubkey); } - async getWrittenIdtyByPubkeyForUidAndIsMemberAndWasMember(pubkey:string): Promise<{ uid:string, member:boolean, wasMember:boolean }|null> { - return 
this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForUidAndIsMemberAndWasMember( + pubkey: string + ): Promise<{ uid: string; member: boolean; wasMember: boolean } | null> { + return this.iindexDAL.getFromPubkey(pubkey); } - async getWrittenIdtyByPubkeyOrUidForIsMemberAndPubkey(search:string): Promise<{ pub:string, member:boolean }|null> { - return this.iindexDAL.getFromPubkeyOrUid(search) + async getWrittenIdtyByPubkeyOrUidForIsMemberAndPubkey( + search: string + ): Promise<{ pub: string; member: boolean } | null> { + return this.iindexDAL.getFromPubkeyOrUid(search); } - async getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(search:string): Promise<{ uid:string, created_on:string, pub:string, member:boolean }|null> { - return await this.iindexDAL.getFromPubkeyOrUid(search) + async getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember( + search: string + ): Promise<{ + uid: string; + created_on: string; + pub: string; + member: boolean; + } | null> { + return await this.iindexDAL.getFromPubkeyOrUid(search); } - async getWrittenIdtyByPubkeyForRevocationCheck(pubkey:string): Promise<{ pub:string, uid:string, created_on:string, sig:string, revoked_on:string|null }|null> { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForRevocationCheck( + pubkey: string + ): Promise<{ + pub: string; + uid: string; + created_on: string; + sig: string; + revoked_on: string | null; + } | null> { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - return null + return null; } - const membership = await this.mindexDAL.getReducedMSForImplicitRevocation(pubkey) as FullMindexEntry + const membership = (await this.mindexDAL.getReducedMSForImplicitRevocation( + pubkey + )) as FullMindexEntry; return { pub: idty.pub, uid: idty.uid, sig: idty.sig, created_on: idty.created_on, - revoked_on: membership.revoked_on - } + revoked_on: membership.revoked_on, + }; } - async getWrittenIdtyByPubkeyForCertificationCheck(pubkey:string): Promise<{ pub:string, uid:string, created_on:string, sig:string }|null> { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForCertificationCheck( + pubkey: string + ): Promise<{ + pub: string; + uid: string; + created_on: string; + sig: string; + } | null> { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - return null + return null; } return { pub: idty.pub, uid: idty.uid, sig: idty.sig, created_on: idty.created_on, - } + }; } - async getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn(pubkey:string): Promise<{ uid:string, member:boolean, created_on:string }|null> { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn( + pubkey: string + ): Promise<{ uid: string; member: boolean; created_on: string } | null> { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - return null + return null; } return { uid: idty.uid, member: idty.member, created_on: idty.created_on, - } + }; } - private async getWrittenForSureIdtyByPubkey(pubkey:string) { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + private async getWrittenForSureIdtyByPubkey(pubkey: string) { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]) + throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]); } - return idty + return idty; } - private async getWrittenForSureIdtyByUid(pubkey:string) { - const idty = (await 
this.iindexDAL.getFullFromUID(pubkey)) + private async getWrittenForSureIdtyByUid(pubkey: string) { + const idty = await this.iindexDAL.getFullFromUID(pubkey); if (!idty) { - throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]) + throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]); } - return idty + return idty; } // Duniter-UI dependency - async getWrittenIdtyByPubkey(pub:string): Promise<FullIindexEntry | null> { - return await this.iindexDAL.getFromPubkey(pub) + async getWrittenIdtyByPubkey(pub: string): Promise<FullIindexEntry | null> { + return await this.iindexDAL.getFromPubkey(pub); } - async getWrittenIdtyByPubkeyForExistence(uid:string) { - return !!(await this.iindexDAL.getFromPubkey(uid)) + async getWrittenIdtyByPubkeyForExistence(uid: string) { + return !!(await this.iindexDAL.getFromPubkey(uid)); } - async getWrittenIdtyByUIDForExistence(uid:string) { - return !!(await this.iindexDAL.getFromUID(uid)) + async getWrittenIdtyByUIDForExistence(uid: string) { + return !!(await this.iindexDAL.getFromUID(uid)); } - async getWrittenIdtyByUidForHashing(uid:string): Promise<{ uid:string, created_on:string, pub:string }> { - return this.getWrittenForSureIdtyByUid(uid) + async getWrittenIdtyByUidForHashing( + uid: string + ): Promise<{ uid: string; created_on: string; pub: string }> { + return this.getWrittenForSureIdtyByUid(uid); } - async getWrittenIdtyByUIDForWotbId(uid:string): Promise<{ wotb_id:number }> { - return this.getWrittenForSureIdtyByUid(uid) + async getWrittenIdtyByUIDForWotbId( + uid: string + ): Promise<{ wotb_id: number }> { + return this.getWrittenForSureIdtyByUid(uid); } - async findPeersWhoseHashIsIn(hashes:string[]) { + async findPeersWhoseHashIsIn(hashes: string[]) { const peers = await this.peerDAL.listAll(); - return Underscore.chain(peers).filter((p:DBPeer) => hashes.indexOf(p.hash) !== -1).value() + return Underscore.chain(peers) + .filter((p: DBPeer) => hashes.indexOf(p.hash) !== -1) + .value(); } - getTxByHash(hash:string) { - return this.txsDAL.getTX(hash) + getTxByHash(hash: string) { + return this.txsDAL.getTX(hash); } - removeTxByHash(hash:string) { - return this.txsDAL.removeTX(hash) + removeTxByHash(hash: string) { + return this.txsDAL.removeTX(hash); } getTransactionsPending(versionMin = 0) { - return this.txsDAL.getAllPending(versionMin) + return this.txsDAL.getAllPending(versionMin); } - async getNonWritten(pubkey:string) { + async getNonWritten(pubkey: string) { const pending = await this.idtyDAL.getPendingIdentities(); - return Underscore.chain(pending).where({pubkey: pubkey}).value() + return Underscore.chain(pending).where({ pubkey: pubkey }).value(); } async getRevocatingMembers() { const revoking = await this.idtyDAL.getToRevoke(); const toRevoke = []; for (const pending of revoking) { - const idty = await this.getWrittenIdtyByPubkeyForRevocationCheck(pending.pubkey) + const idty = await this.getWrittenIdtyByPubkeyForRevocationCheck( + pending.pubkey + ); if (idty && !idty.revoked_on) { toRevoke.push(pending); } @@ -687,67 +844,79 @@ export class FileDAL implements ServerDAO { } getToBeKickedPubkeys() { - return this.iindexDAL.getToBeKickedPubkeys() + return this.iindexDAL.getToBeKickedPubkeys(); } getRevokedPubkeys() { - return this.mindexDAL.getRevokedPubkeys() + return this.mindexDAL.getRevokedPubkeys(); } - async searchJustIdentities(search:string): Promise<DBIdentity[]> { + async searchJustIdentities(search: string): Promise<DBIdentity[]> { const pendings = await this.idtyDAL.searchThoseMatching(search); const writtens = await 
this.iindexDAL.searchThoseMatching(search); - const nonPendings = Underscore.filter(writtens, (w:IindexEntry) => { + const nonPendings = Underscore.filter(writtens, (w: IindexEntry) => { return Underscore.where(pendings, { pubkey: w.pub }).length == 0; }); - const found = pendings.concat(nonPendings.map((i:any) => { - // Use the correct field - i.pubkey = i.pub - return i - })); - return await Promise.all<DBIdentity>(found.map(async (f) => { - const ms = await this.mindexDAL.getReducedMSForImplicitRevocation(f.pubkey); - if (ms) { - f.revoked_on = null; - if (ms.revoked_on) { - const blockOfRevocation = (await this.getAbsoluteBlockByBlockstamp(ms.revoked_on)) as DBBlock - f.revoked_on = blockOfRevocation.medianTime + const found = pendings.concat( + nonPendings.map((i: any) => { + // Use the correct field + i.pubkey = i.pub; + return i; + }) + ); + return await Promise.all<DBIdentity>( + found.map(async (f) => { + const ms = await this.mindexDAL.getReducedMSForImplicitRevocation( + f.pubkey + ); + if (ms) { + f.revoked_on = null; + if (ms.revoked_on) { + const blockOfRevocation = (await this.getAbsoluteBlockByBlockstamp( + ms.revoked_on + )) as DBBlock; + f.revoked_on = blockOfRevocation.medianTime; + } + f.revoked = !!f.revoked_on; + f.revocation_sig = f.revocation_sig || ms.revocation || null; } - f.revoked = !!f.revoked_on; - f.revocation_sig = f.revocation_sig || ms.revocation || null; - } - return f; - })) + return f; + }) + ); } - async certsToTarget(pub:string, hash:string) { + async certsToTarget(pub: string, hash: string) { const certs = await this.certDAL.getToTarget(hash); const links = await this.cindexDAL.getValidLinksTo(pub); let matching = certs; - await Promise.all(links.map(async (entry:any) => { - matching.push(await this.cindexEntry2DBCert(entry)) - })) - matching = Underscore.sortBy(matching, (c:DBCert) => -c.block); + await Promise.all( + links.map(async (entry: any) => { + matching.push(await this.cindexEntry2DBCert(entry)); + }) + ); + matching = Underscore.sortBy(matching, (c: DBCert) => -c.block); matching.reverse(); return matching; } - async certsFrom(pubkey:string) { + async certsFrom(pubkey: string) { const certs = await this.certDAL.getFromPubkeyCerts(pubkey); const links = await this.cindexDAL.getValidLinksFrom(pubkey); let matching = certs; - await Promise.all(links.map(async (entry:CindexEntry) => { - matching.push(await this.cindexEntry2DBCert(entry)) - })) - matching = Underscore.sortBy(matching, (c:DBCert) => -c.block); + await Promise.all( + links.map(async (entry: CindexEntry) => { + matching.push(await this.cindexEntry2DBCert(entry)); + }) + ); + matching = Underscore.sortBy(matching, (c: DBCert) => -c.block); matching.reverse(); return matching; } - async cindexEntry2DBCert(entry:CindexEntry): Promise<DBCert> { - const idty = await this.getWrittenIdtyByPubkeyForHash(entry.receiver) - const wbt = entry.written_on.split('-') - const block = (await this.getBlock(entry.created_on)) as DBBlock + async cindexEntry2DBCert(entry: CindexEntry): Promise<DBCert> { + const idty = await this.getWrittenIdtyByPubkeyForHash(entry.receiver); + const wbt = entry.written_on.split("-"); + const block = (await this.getBlock(entry.created_on)) as DBBlock; return { issuers: [entry.issuer], linked: true, @@ -763,10 +932,10 @@ export class FileDAL implements ServerDAO { block: block.number, expired: !!entry.expired_on, expires_on: entry.expires_on, - } + }; } - async isSentry(pubkey:string, conf:ConfDTO) { + async isSentry(pubkey: string, conf: ConfDTO) { const current = 
await this.getCurrentBlockOrNull(); if (current) { const dSen = Math.ceil(Math.pow(current.membersCount, 1 / conf.stepMax)); @@ -779,17 +948,24 @@ export class FileDAL implements ServerDAO { async certsFindNew() { const certs = await this.certDAL.getNotLinked(); - return Underscore.chain(certs).where({linked: false}).sortBy((c:DBCert) => -c.block).value() + return Underscore.chain(certs) + .where({ linked: false }) + .sortBy((c: DBCert) => -c.block) + .value(); } - async certsNotLinkedToTarget(hash:string) { + async certsNotLinkedToTarget(hash: string) { const certs = await this.certDAL.getNotLinkedToTarget(hash); - return Underscore.chain(certs).sortBy((c:any) => -c.block).value(); + return Underscore.chain(certs) + .sortBy((c: any) => -c.block) + .value(); } - async getMostRecentMembershipNumberForIssuer(issuer:string) { + async getMostRecentMembershipNumberForIssuer(issuer: string) { const mss = await this.msDAL.getMembershipsOfIssuer(issuer); - const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation(issuer); + const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation( + issuer + ); let max = reduced ? parseInt(reduced.created_on) : -1; for (const ms of mss) { max = Math.max(ms.number, max); @@ -797,64 +973,100 @@ export class FileDAL implements ServerDAO { return max; } - async lastJoinOfIdentity(target:string) { + async lastJoinOfIdentity(target: string) { let pending = await this.msDAL.getPendingINOfTarget(target); - return Underscore.sortBy(pending, (ms:any) => -ms.number)[0]; + return Underscore.sortBy(pending, (ms: any) => -ms.number)[0]; } async findNewcomers(blockMedianTime = 0): Promise<DBMembership[]> { - const pending = await this.msDAL.getPendingIN() - const mss: DBMembership[] = await Promise.all<DBMembership>(pending.map(async (p:any) => { - const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation(p.issuer) - if (!reduced || !reduced.chainable_on || blockMedianTime >= reduced.chainable_on || blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107) { - return p - } - return null - })) - return Underscore.chain(Underscore.filter(mss, ms => !!ms) as DBMembership[]) - .sortBy((ms:DBMembership) => -ms.blockNumber) - .value() + const pending = await this.msDAL.getPendingIN(); + const mss: DBMembership[] = await Promise.all<DBMembership>( + pending.map(async (p: any) => { + const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation( + p.issuer + ); + if ( + !reduced || + !reduced.chainable_on || + blockMedianTime >= reduced.chainable_on || + blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107 + ) { + return p; + } + return null; + }) + ); + return Underscore.chain( + Underscore.filter(mss, (ms) => !!ms) as DBMembership[] + ) + .sortBy((ms: DBMembership) => -ms.blockNumber) + .value(); } async findLeavers(blockMedianTime = 0): Promise<DBMembership[]> { const pending = await this.msDAL.getPendingOUT(); - const mss = await Promise.all<DBMembership|null>(pending.map(async p => { - const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation(p.issuer) - if (!reduced || !reduced.chainable_on || blockMedianTime >= reduced.chainable_on || blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107) { - return p - } - return null - })) - return Underscore.chain(Underscore.filter(mss, ms => !!ms) as DBMembership[]) - .sortBy(ms => -ms.blockNumber) + const mss = await Promise.all<DBMembership | null>( + pending.map(async (p) => { + const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation( + p.issuer + ); + if ( + !reduced || 
+ !reduced.chainable_on || + blockMedianTime >= reduced.chainable_on || + blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107 + ) { + return p; + } + return null; + }) + ); + return Underscore.chain( + Underscore.filter(mss, (ms) => !!ms) as DBMembership[] + ) + .sortBy((ms) => -ms.blockNumber) .value(); } - existsNonReplayableLink(from:string, to:string, medianTime: number, version: number) { - return this.cindexDAL.existsNonReplayableLink(from, to, medianTime, version) - } - - async getSource(identifier:string, pos:number, isDividend: boolean): Promise<SimpleTxInput | null> { + existsNonReplayableLink( + from: string, + to: string, + medianTime: number, + version: number + ) { + return this.cindexDAL.existsNonReplayableLink( + from, + to, + medianTime, + version + ); + } + + async getSource( + identifier: string, + pos: number, + isDividend: boolean + ): Promise<SimpleTxInput | null> { if (isDividend) { - return this.dividendDAL.getUDSource(identifier, pos) + return this.dividendDAL.getUDSource(identifier, pos); } else { - return this.sindexDAL.getTxSource(identifier, pos) + return this.sindexDAL.getTxSource(identifier, pos); } } - async isMember(pubkey:string):Promise<boolean> { + async isMember(pubkey: string): Promise<boolean> { try { const idty = await this.iindexDAL.getFromPubkey(pubkey); if (idty && idty.member) { - return true + return true; } - return false + return false; } catch (err) { return false; } } - async isMemberAndNonLeaver(pubkey:string) { + async isMemberAndNonLeaver(pubkey: string) { try { const idty = await this.iindexDAL.getFromPubkey(pubkey); if (idty && idty.member) { @@ -866,44 +1078,49 @@ export class FileDAL implements ServerDAO { } } - async isLeaving(pubkey:string) { + async isLeaving(pubkey: string) { const ms = await this.mindexDAL.getReducedMSForImplicitRevocation(pubkey); return (ms && ms.leaving) || false; } - async existsCert(cert: DBCert, current: DBBlock|null) { + async existsCert(cert: DBCert, current: DBBlock | null) { const existing = await this.certDAL.existsGivenCert(cert); if (existing) return existing; if (!current) { - return false + return false; } - const existsLink = await this.cindexDAL.existsNonReplayableLink(cert.from, cert.to, current.medianTime, current.version) + const existsLink = await this.cindexDAL.existsNonReplayableLink( + cert.from, + cert.to, + current.medianTime, + current.version + ); return !!existsLink; } - deleteCert(cert:any) { - return this.certDAL.deleteCert(cert) + deleteCert(cert: any) { + return this.certDAL.deleteCert(cert); } - deleteMS(ms:any) { - return this.msDAL.deleteMS(ms) + deleteMS(ms: any) { + return this.msDAL.deleteMS(ms); } - async setRevoked(pubkey:string) { - return await this.idtyDAL.setRevoked(pubkey) + async setRevoked(pubkey: string) { + return await this.idtyDAL.setRevoked(pubkey); } - setRevocating = (idty:BasicRevocableIdentity, revocation_sig:string) => { - const dbIdentity = IdentityDTO.fromBasicIdentity(idty) - dbIdentity.member = idty.member - dbIdentity.wasMember = idty.wasMember - dbIdentity.expires_on = idty.expires_on - dbIdentity.revocation_sig = revocation_sig - dbIdentity.revoked = false - return this.idtyDAL.saveIdentity(dbIdentity) - } + setRevocating = (idty: BasicRevocableIdentity, revocation_sig: string) => { + const dbIdentity = IdentityDTO.fromBasicIdentity(idty); + dbIdentity.member = idty.member; + dbIdentity.wasMember = idty.wasMember; + dbIdentity.expires_on = idty.expires_on; + dbIdentity.revocation_sig = revocation_sig; + dbIdentity.revoked = false; + return 
this.idtyDAL.saveIdentity(dbIdentity); + }; - async getPeerOrNull(pubkey:string) { + async getPeerOrNull(pubkey: string) { let peer = null; try { peer = await this.getPeer(pubkey); @@ -915,29 +1132,34 @@ export class FileDAL implements ServerDAO { return peer; } - async removePeerByPubkey(pubkey:string) { - return this.peerDAL.removePeerByPubkey(pubkey) + async removePeerByPubkey(pubkey: string) { + return this.peerDAL.removePeerByPubkey(pubkey); } - async findAllPeersBut(pubkeys:string[]) { + async findAllPeersBut(pubkeys: string[]) { const peers = await this.listAllPeers(); - return peers.filter((peer:DBPeer) => pubkeys.indexOf(peer.pubkey) == -1 - && ['UP'].indexOf(peer.status) !== -1); + return peers.filter( + (peer: DBPeer) => + pubkeys.indexOf(peer.pubkey) == -1 && ["UP"].indexOf(peer.status) !== -1 + ); } async listAllPeersWithStatusNewUP() { const peers = await this.peerDAL.listAll(); return Underscore.chain(peers) - .filter((p:DBPeer) => ['UP'] - .indexOf(p.status) !== -1).value(); + .filter((p: DBPeer) => ["UP"].indexOf(p.status) !== -1) + .value(); } - async listAllPeersWithStatusNewUPWithtout(pub:string) { + async listAllPeersWithStatusNewUPWithtout(pub: string) { const peers = await this.peerDAL.listAll(); - return Underscore.chain(peers).filter((p:DBPeer) => p.status == 'UP').filter((p:DBPeer) => p.pubkey !== pub).value(); + return Underscore.chain(peers) + .filter((p: DBPeer) => p.status == "UP") + .filter((p: DBPeer) => p.pubkey !== pub) + .value(); } - async findPeers(pubkey:string): Promise<DBPeer[]> { + async findPeers(pubkey: string): Promise<DBPeer[]> { try { const peer = await this.getPeer(pubkey); return [peer]; @@ -946,15 +1168,15 @@ export class FileDAL implements ServerDAO { } } - async getRandomlyUPsWithout(pubkeys:string[]): Promise<DBPeer[]> { + async getRandomlyUPsWithout(pubkeys: string[]): Promise<DBPeer[]> { const peers = await this.listAllPeersWithStatusNewUP(); - return peers.filter(peer => pubkeys.indexOf(peer.pubkey) == -1) + return peers.filter((peer) => pubkeys.indexOf(peer.pubkey) == -1); } - async setPeerUP(pubkey:string) { + async setPeerUP(pubkey: string) { try { - const p = await this.getPeer(pubkey) - p.status = 'UP'; + const p = await this.getPeer(pubkey); + p.status = "UP"; p.first_down = null; p.last_try = null; return this.peerDAL.savePeer(p); @@ -963,19 +1185,19 @@ export class FileDAL implements ServerDAO { } } - async setPeerDown(pubkey:string) { + async setPeerDown(pubkey: string) { try { // We do not set mirror peers as down (ex. 
of mirror: 'M1_HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk') if (!pubkey.match(/_/)) { - const p = await this.getPeer(pubkey) + const p = await this.getPeer(pubkey); if (p) { - const now = (new Date()).getTime(); - p.status = 'DOWN'; + const now = new Date().getTime(); + p.status = "DOWN"; if (!p.first_down) { p.first_down = now; } p.last_try = now; - await this.peerDAL.savePeer(p) + await this.peerDAL.savePeer(p); } } } catch (err) { @@ -983,41 +1205,64 @@ export class FileDAL implements ServerDAO { } } - async saveBlock(dbb:DBBlock) { + async saveBlock(dbb: DBBlock) { dbb.wrong = false; await Promise.all([ this.saveBlockInFile(dbb), - this.saveTxsInFiles(dbb.transactions, dbb.number, dbb.medianTime) - ]) + this.saveTxsInFiles(dbb.transactions, dbb.number, dbb.medianTime), + ]); } - async generateIndexes(block:BlockDTO, conf:ConfDTO, index:IndexEntry[], aHEAD:DBHead|null) { + async generateIndexes( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[], + aHEAD: DBHead | null + ) { // We need to recompute the indexes for block#0 - let HEAD:DBHead + let HEAD: DBHead; if (!index || !aHEAD || aHEAD.number == 0) { - index = indexer.localIndex(block, conf) - HEAD = await indexer.completeGlobalScope(block, conf, index, this) + index = indexer.localIndex(block, conf); + HEAD = await indexer.completeGlobalScope(block, conf, index, this); } else { - HEAD = aHEAD + HEAD = aHEAD; } let mindex = indexer.mindex(index); let iindex = indexer.iindex(index); let sindex = indexer.sindex(index); let cindex = indexer.cindex(index); - const dividends = await indexer.ruleIndexGenDividend(HEAD, iindex, this) // Requires that newcomers are already in DividendDAO - sindex = sindex.concat(await indexer.ruleIndexGarbageSmallAccounts(HEAD, sindex, dividends, this)); - cindex = cindex.concat(await indexer.ruleIndexGenCertificationExpiry(HEAD, this)); - mindex = mindex.concat(await indexer.ruleIndexGenMembershipExpiry(HEAD, this)); - iindex = iindex.concat(await indexer.ruleIndexGenExclusionByMembership(HEAD, mindex, this)); - iindex = iindex.concat(await indexer.ruleIndexGenExclusionByCertificatons(HEAD, cindex, iindex, conf, this)); - mindex = mindex.concat(await indexer.ruleIndexGenImplicitRevocation(HEAD, this)); + const dividends = await indexer.ruleIndexGenDividend(HEAD, iindex, this); // Requires that newcomers are already in DividendDAO + sindex = sindex.concat( + await indexer.ruleIndexGarbageSmallAccounts(HEAD, sindex, dividends, this) + ); + cindex = cindex.concat( + await indexer.ruleIndexGenCertificationExpiry(HEAD, this) + ); + mindex = mindex.concat( + await indexer.ruleIndexGenMembershipExpiry(HEAD, this) + ); + iindex = iindex.concat( + await indexer.ruleIndexGenExclusionByMembership(HEAD, mindex, this) + ); + iindex = iindex.concat( + await indexer.ruleIndexGenExclusionByCertificatons( + HEAD, + cindex, + iindex, + conf, + this + ) + ); + mindex = mindex.concat( + await indexer.ruleIndexGenImplicitRevocation(HEAD, this) + ); await indexer.ruleIndexCorrectMembershipExpiryDate(HEAD, mindex, this); await indexer.ruleIndexCorrectCertificationExpiryDate(HEAD, cindex, this); return { HEAD, mindex, iindex, sindex, cindex, dividends }; } - async updateWotbLinks(cindex:CindexEntry[], instance?: Wot) { - const wotb = instance || this.wotb + async updateWotbLinks(cindex: CindexEntry[], instance?: Wot) { + const wotb = instance || this.wotb; for (const entry of cindex) { const from = await this.getWrittenIdtyByPubkeyForWotbID(entry.issuer); const to = await 
this.getWrittenIdtyByPubkeyForWotbID(entry.receiver); @@ -1026,109 +1271,124 @@ export class FileDAL implements ServerDAO { wotb.addLink(from.wotb_id, to.wotb_id); } else { // Update = removal - NewLogger().trace('removeLink %s -> %s', from.wotb_id, to.wotb_id) + NewLogger().trace("removeLink %s -> %s", from.wotb_id, to.wotb_id); wotb.removeLink(from.wotb_id, to.wotb_id); } } } @MonitorExecutionTime() - async trimIndexes(maxNumber:number) { + async trimIndexes(maxNumber: number) { if (!cliprogram.notrim) { - await this.bindexDAL.trimBlocks(maxNumber) - await this.iindexDAL.trimRecords(maxNumber) - await this.mindexDAL.trimRecords(maxNumber) + await this.bindexDAL.trimBlocks(maxNumber); + await this.iindexDAL.trimRecords(maxNumber); + await this.mindexDAL.trimRecords(maxNumber); if (!cliprogram.notrimc) { - await this.cindexDAL.trimExpiredCerts(maxNumber) + await this.cindexDAL.trimExpiredCerts(maxNumber); } } - await this.sindexDAL.trimConsumedSource(maxNumber) - await this.dividendDAL.trimConsumedUDs(maxNumber) + await this.sindexDAL.trimConsumedSource(maxNumber); + await this.dividendDAL.trimConsumedUDs(maxNumber); } - async trimSandboxes(block:{ medianTime: number }) { + async trimSandboxes(block: { medianTime: number }) { await this.certDAL.trimExpiredCerts(block.medianTime); await this.msDAL.trimExpiredMemberships(block.medianTime); await this.idtyDAL.trimExpiredIdentities(block.medianTime); - await this.txsDAL.trimExpiredNonWrittenTxs(block.medianTime - CommonConstants.TX_WINDOW) + await this.txsDAL.trimExpiredNonWrittenTxs( + block.medianTime - CommonConstants.TX_WINDOW + ); return true; } - savePendingMembership(ms:DBMembership) { - return this.msDAL.savePendingMembership(ms) + savePendingMembership(ms: DBMembership) { + return this.msDAL.savePendingMembership(ms); } - async saveBlockInFile(block:DBBlock) { - await this.writeFileOfBlock(block) + async saveBlockInFile(block: DBBlock) { + await this.writeFileOfBlock(block); } - saveSideBlockInFile(block:DBBlock) { - return this.writeSideFileOfBlock(block) + saveSideBlockInFile(block: DBBlock) { + return this.writeSideFileOfBlock(block); } - async saveTxsInFiles(txs:TransactionDTO[], block_number:number, medianTime:number) { - return Promise.all(txs.map(async (tx) => { - const sp = tx.blockstamp.split('-'); - const basedBlock = (await this.getAbsoluteBlockByNumberAndHash(parseInt(sp[0]), sp[1])) as DBBlock - tx.blockstampTime = basedBlock.medianTime; - const txEntity = TransactionDTO.fromJSONObject(tx) - txEntity.computeAllHashes(); - return this.txsDAL.addLinked(TransactionDTO.fromJSONObject(txEntity), block_number, medianTime); - })) + async saveTxsInFiles( + txs: TransactionDTO[], + block_number: number, + medianTime: number + ) { + return Promise.all( + txs.map(async (tx) => { + const sp = tx.blockstamp.split("-"); + const basedBlock = (await this.getAbsoluteBlockByNumberAndHash( + parseInt(sp[0]), + sp[1] + )) as DBBlock; + tx.blockstampTime = basedBlock.medianTime; + const txEntity = TransactionDTO.fromJSONObject(tx); + txEntity.computeAllHashes(); + return this.txsDAL.addLinked( + TransactionDTO.fromJSONObject(txEntity), + block_number, + medianTime + ); + }) + ); } async merkleForPeers() { let peers = await this.listAllPeersWithStatusNewUP(); - const leaves = peers.map((peer:DBPeer) => peer.hash); + const leaves = peers.map((peer: DBPeer) => peer.hash); const merkle = new MerkleDTO(); merkle.initialize(leaves); return merkle; } - savePendingIdentity(idty:DBIdentity) { - return this.idtyDAL.saveIdentity(idty) + 
savePendingIdentity(idty: DBIdentity) { + return this.idtyDAL.saveIdentity(idty); } - revokeIdentity(pubkey:string) { - return this.idtyDAL.revokeIdentity(pubkey) + revokeIdentity(pubkey: string) { + return this.idtyDAL.revokeIdentity(pubkey); } - async removeUnWrittenWithPubkey(pubkey:string) { - return await this.idtyDAL.removeUnWrittenWithPubkey(pubkey) + async removeUnWrittenWithPubkey(pubkey: string) { + return await this.idtyDAL.removeUnWrittenWithPubkey(pubkey); } - async removeUnWrittenWithUID(pubkey:string) { + async removeUnWrittenWithUID(pubkey: string) { return await this.idtyDAL.removeUnWrittenWithUID(pubkey); } - registerNewCertification(cert:DBCert) { - return this.certDAL.saveNewCertification(cert) + registerNewCertification(cert: DBCert) { + return this.certDAL.saveNewCertification(cert); } - saveTransaction(tx:DBTx) { - return this.txsDAL.addPending(tx) + saveTransaction(tx: DBTx) { + return this.txsDAL.addPending(tx); } - async getTransactionsHistory(pubkey:string) { - const history:{ - sent: DBTx[] - received: DBTx[] - sending: DBTx[] - receiving: DBTx[] - pending: DBTx[] + async getTransactionsHistory(pubkey: string) { + const history: { + sent: DBTx[]; + received: DBTx[]; + sending: DBTx[]; + receiving: DBTx[]; + pending: DBTx[]; } = { sent: [], received: [], sending: [], receiving: [], - pending: [] + pending: [], }; const res = await Promise.all([ this.txsDAL.getLinkedWithIssuer(pubkey), this.txsDAL.getLinkedWithRecipient(pubkey), this.txsDAL.getPendingWithIssuer(pubkey), - this.txsDAL.getPendingWithRecipient(pubkey) - ]) + this.txsDAL.getPendingWithRecipient(pubkey), + ]); history.sent = res[0] || []; history.received = res[1] || []; history.sending = res[2] || []; @@ -1136,32 +1396,34 @@ export class FileDAL implements ServerDAO { return history; } - async getUDHistory(pubkey:string): Promise<{ history: HttpUD[] }> { - const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey) + async getUDHistory(pubkey: string): Promise<{ history: HttpUD[] }> { + const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey); return { - history: (await Promise.all<HttpUD>(sources.map(async (src) => { - const block = await this.getBlockWeHaveItForSure(src.pos) - return { - block_number: src.pos, - time: block.medianTime, - consumed: src.consumed, - amount: src.amount, - base: src.base - } - }))) - } + history: await Promise.all<HttpUD>( + sources.map(async (src) => { + const block = await this.getBlockWeHaveItForSure(src.pos); + return { + block_number: src.pos, + time: block.medianTime, + consumed: src.consumed, + amount: src.amount, + base: src.base, + }; + }) + ), + }; } - savePeer(peer:DBPeer) { - return this.peerDAL.savePeer(peer) + savePeer(peer: DBPeer) { + return this.peerDAL.savePeer(peer); } - async getUniqueIssuersBetween(start:number, end:number) { - const current = (await this.blockDAL.getCurrent()) as DBBlock + async getUniqueIssuersBetween(start: number, end: number) { + const current = (await this.blockDAL.getCurrent()) as DBBlock; const firstBlock = Math.max(0, start); const lastBlock = Math.max(0, Math.min(current.number, end)); const blocks = await this.blockDAL.getBlocks(firstBlock, lastBlock); - return Underscore.uniq(blocks.map(b => b.issuer)) + return Underscore.uniq(blocks.map((b) => b.issuer)); } /** @@ -1170,11 +1432,11 @@ export class FileDAL implements ServerDAO { * @param end The ending entry (max. BINDEX length) * @param property If provided, transforms the range of entries into an array of the asked property. 
*/ - async range(start:number, end:number, property:string) { + async range(start: number, end: number, property: string) { const range = await this.bindexDAL.range(start, end); if (property) { // Filter on a particular property - return range.map((b:any) => b[property]); + return range.map((b: any) => b[property]); } else { return range; } @@ -1184,8 +1446,8 @@ export class FileDAL implements ServerDAO { * Get the last `n`th entry from the BINDEX. * @param n The entry number (min. 1). */ - head(n:number) { - return this.bindexDAL.head(n) + head(n: number) { + return this.bindexDAL.head(n); } /*********************** @@ -1193,28 +1455,29 @@ export class FileDAL implements ServerDAO { **********************/ getParameters() { - return this.confDAL.getParameters() + return this.confDAL.getParameters(); } - async loadConf(overrideConf:ConfDTO, defaultConf = false) { + async loadConf(overrideConf: ConfDTO, defaultConf = false) { let conf = ConfDTO.complete(overrideConf || {}); if (!defaultConf) { - const savedConf = await this.confDAL.loadConf() - conf = Underscore.extend(savedConf, overrideConf || {}) - if (overrideConf.proxiesConf !== undefined) {} else { + const savedConf = await this.confDAL.loadConf(); + conf = Underscore.extend(savedConf, overrideConf || {}); + if (overrideConf.proxiesConf !== undefined) { + } else { } } if (this.loadConfHook) { - await this.loadConfHook(conf) + await this.loadConfHook(conf); } return conf; } - async saveConf(confToSave:ConfDTO) { + async saveConf(confToSave: ConfDTO) { // Save the conf in file let theConf = confToSave; if (this.saveConfHook) { - theConf = await this.saveConfHook(theConf) + theConf = await this.saveConfHook(theConf); } return this.confDAL.saveConf(theConf); } @@ -1223,16 +1486,16 @@ export class FileDAL implements ServerDAO { * WALLETS **********************/ - async getWallet(conditions:string) { - let wallet = await this.walletDAL.getWallet(conditions) + async getWallet(conditions: string) { + let wallet = await this.walletDAL.getWallet(conditions); if (!wallet) { - wallet = { conditions, balance: 0 } + wallet = { conditions, balance: 0 }; } - return wallet + return wallet; } - saveWallet(wallet:DBWallet) { - return this.walletDAL.saveWallet(wallet) + saveWallet(wallet: DBWallet) { + return this.walletDAL.saveWallet(wallet); } /*********************** @@ -1242,74 +1505,81 @@ export class FileDAL implements ServerDAO { getStat(name: StatName) { switch (name) { case "newcomers": - return this.blockDAL.findWithIdentities() + return this.blockDAL.findWithIdentities(); case "certs": - return this.blockDAL.findWithCertifications() + return this.blockDAL.findWithCertifications(); case "joiners": - return this.blockDAL.findWithJoiners() + return this.blockDAL.findWithJoiners(); case "actives": - return this.blockDAL.findWithActives() + return this.blockDAL.findWithActives(); case "leavers": - return this.blockDAL.findWithLeavers() + return this.blockDAL.findWithLeavers(); case "excluded": - return this.blockDAL.findWithExcluded() + return this.blockDAL.findWithExcluded(); case "revoked": - return this.blockDAL.findWithRevoked() + return this.blockDAL.findWithRevoked(); case "ud": - return this.blockDAL.findWithUD() + return this.blockDAL.findWithUD(); case "tx": - return this.blockDAL.findWithTXs() + return this.blockDAL.findWithTXs(); default: - throw DataErrors[DataErrors.WRONG_STAT_NAME] + throw DataErrors[DataErrors.WRONG_STAT_NAME]; } } async cleanCaches() { - await Underscore.values(this.newDals).map((dal:Initiable) => dal.cleanCache && 
dal.cleanCache()) + await Underscore.values(this.newDals).map( + (dal: Initiable) => dal.cleanCache && dal.cleanCache() + ); } async close() { - await Promise.all(Underscore.values(this.newDals).map(async (dal:Initiable) => { - dal.cleanCache() - await dal.close() - })) + await Promise.all( + Underscore.values(this.newDals).map(async (dal: Initiable) => { + dal.cleanCache(); + await dal.close(); + }) + ); await this.sqliteDriver.closeConnection(); } async resetPeers() { await this.peerDAL.removeAll(); - return await this.close() + return await this.close(); } - getLogContent(linesQuantity:number) { + getLogContent(linesQuantity: number) { return new Promise((resolve, reject) => { try { - let lines:string[] = [], i = 0; - const logPath = path.join(this.rootPath, 'duniter.log'); + let lines: string[] = [], + i = 0; + const logPath = path.join(this.rootPath, "duniter.log"); const readStream = fs.createReadStream(logPath); - readStream.on('error', (err:any) => reject(err)); + readStream.on("error", (err: any) => reject(err)); const lineReader = readline.createInterface({ - input: readStream + input: readStream, }); - lineReader.on('line', (line:string) => { + lineReader.on("line", (line: string) => { line = "\n" + line; lines.push(line); i++; if (i >= linesQuantity) lines.shift(); }); - lineReader.on('close', () => resolve(lines)); - lineReader.on('error', (err:any) => reject(err)); + lineReader.on("close", () => resolve(lines)); + lineReader.on("error", (err: any) => reject(err)); } catch (e) { reject(e); } - }) + }); } async findReceiversAbove(minsig: number) { - const receiversAbove:string[] = await this.cindexDAL.getReceiversAbove(minsig) - const members:IdentityForRequirements[] = [] + const receiversAbove: string[] = await this.cindexDAL.getReceiversAbove( + minsig + ); + const members: IdentityForRequirements[] = []; for (const r of receiversAbove) { - const i = await this.iindexDAL.getFullFromPubkey(r) + const i = await this.iindexDAL.getFullFromPubkey(r); members.push({ hash: i.hash || "", member: i.member || false, @@ -1320,39 +1590,58 @@ export class FileDAL implements ServerDAO { sig: i.sig || "", revocation_sig: "", revoked: false, - revoked_on: 0 - }) + revoked_on: 0, + }); } - return members + return members; } @MonitorFlushedIndex() async flushIndexes(indexes: IndexBatch) { if (indexes.mindex.length) { - await this.mindexDAL.insertBatch(indexes.mindex) + await this.mindexDAL.insertBatch(indexes.mindex); } if (indexes.iindex.length) { - await this.iindexDAL.insertBatch(indexes.iindex) + await this.iindexDAL.insertBatch(indexes.iindex); } - const sindex_txs = indexes.sindex.filter(s => s.srcType === 'T') + const sindex_txs = indexes.sindex.filter((s) => s.srcType === "T"); if (sindex_txs.length) { - await this.sindexDAL.insertBatch(sindex_txs) // We don't store dividends in SINDEX + await this.sindexDAL.insertBatch(sindex_txs); // We don't store dividends in SINDEX } - const sindex_uds = indexes.sindex.filter(s => s.srcType === 'D') + const sindex_uds = indexes.sindex.filter((s) => s.srcType === "D"); if (sindex_uds.length) { - await this.dividendDAL.consume(sindex_uds) + await this.dividendDAL.consume(sindex_uds); } if (indexes.cindex.length) { - await this.cindexDAL.insertBatch(indexes.cindex) + await this.cindexDAL.insertBatch(indexes.cindex); } } - async updateDividend(blockNumber: number, dividend: number|null, unitbase: number, local_iindex: IindexEntry[]): Promise<SimpleUdEntryForWallet[]> { + async updateDividend( + blockNumber: number, + dividend: number | null, + 
unitbase: number, + local_iindex: IindexEntry[] + ): Promise<SimpleUdEntryForWallet[]> { if (dividend) { - return this.dividendDAL.produceDividend(blockNumber, dividend, unitbase, local_iindex) + return this.dividendDAL.produceDividend( + blockNumber, + dividend, + unitbase, + local_iindex + ); } - return [] + return []; } } -export type StatName = 'newcomers'|'certs'|'joiners'|'actives'|'leavers'|'revoked'|'excluded'|'ud'|'tx' +export type StatName = + | "newcomers" + | "certs" + | "joiners" + | "actives" + | "leavers" + | "revoked" + | "excluded" + | "ud" + | "tx"; diff --git a/app/lib/dal/server-dao.ts b/app/lib/dal/server-dao.ts index 456c9604edca8ef562f47d08c8c9a64dd605f63a..2d6a2453b4a34d24b11ab6197a0f280a7c835edb 100644 --- a/app/lib/dal/server-dao.ts +++ b/app/lib/dal/server-dao.ts @@ -1,7 +1,6 @@ -import {DBBlock} from "../db/DBBlock" +import { DBBlock } from "../db/DBBlock"; export interface ServerDAO { - // TODO: check that a module is actually using this method - lastBlockOfIssuer(issuer:string): Promise<DBBlock | null> -} \ No newline at end of file + lastBlockOfIssuer(issuer: string): Promise<DBBlock | null>; +} diff --git a/app/lib/db/DBBlock.ts b/app/lib/db/DBBlock.ts index 05e54ba8fca03f753d2153772d1b9f01666a45b9..7f8eb593bbf2d442eb432fcbf57cbd58ea35ad48 100644 --- a/app/lib/db/DBBlock.ts +++ b/app/lib/db/DBBlock.ts @@ -11,89 +11,89 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../dto/BlockDTO" -import {TransactionDTO} from "../dto/TransactionDTO" +import { BlockDTO } from "../dto/BlockDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; export class DBBlock { + version: number; + number: number; + currency: string; + hash: string; + inner_hash: string; + signature: string; + previousHash: string; + issuer: string; + previousIssuer: string; + time: number; + powMin: number; + unitbase: number; + membersCount: number; + issuersCount: number; + issuersFrame: number; + issuersFrameVar: number; + identities: string[]; + joiners: string[]; + actives: string[]; + leavers: string[]; + revoked: string[]; + excluded: string[]; + certifications: string[]; + transactions: TransactionDTO[]; + medianTime: number; + nonce: number; + fork: boolean; + parameters: string; + monetaryMass: number; + dividend: number | null; + UDTime: number; + writtenOn: number; + written_on: string; + wrong = false; - version: number - number: number - currency: string - hash: string - inner_hash: string - signature: string - previousHash: string - issuer: string - previousIssuer: string - time: number - powMin: number - unitbase: number - membersCount: number - issuersCount: number - issuersFrame: number - issuersFrameVar: number - identities: string[] - joiners: string[] - actives: string[] - leavers: string[] - revoked: string[] - excluded: string[] - certifications: string[] - transactions: TransactionDTO[] - medianTime: number - nonce: number - fork: boolean - parameters: string - monetaryMass: number - dividend: number | null - UDTime: number - writtenOn: number - written_on: string - wrong = false - - constructor( - ) { - } + constructor() {} toBlockDTO() { - return BlockDTO.fromJSONObject(this) + return BlockDTO.fromJSONObject(this); } - static fromBlockDTO(b:BlockDTO) { - const dbb = new DBBlock() - dbb.version = b.version - dbb.number = b.number - dbb.currency = b.currency - dbb.hash = b.hash - dbb.previousHash = b.previousHash - dbb.issuer = b.issuer - dbb.previousIssuer = 
b.previousIssuer - dbb.dividend = (b.dividend === null || b.dividend === undefined ? b.dividend : parseInt(String(b.dividend))) - dbb.time = b.time - dbb.powMin = b.powMin - dbb.unitbase = b.unitbase - dbb.membersCount = b.membersCount - dbb.issuersCount = b.issuersCount - dbb.issuersFrame = b.issuersFrame - dbb.issuersFrameVar = b.issuersFrameVar - dbb.identities = b.identities - dbb.joiners = b.joiners - dbb.actives = b.actives - dbb.leavers = b.leavers - dbb.revoked = b.revoked - dbb.excluded = b.excluded - dbb.certifications = b.certifications - dbb.transactions = b.transactions - dbb.medianTime = b.medianTime - dbb.fork = b.fork - dbb.parameters = b.parameters - dbb.inner_hash = b.inner_hash - dbb.signature = b.signature - dbb.nonce = b.nonce - dbb.UDTime = b.UDTime - dbb.monetaryMass = b.monetaryMass - dbb.writtenOn = b.number - dbb.written_on = [b.number, b.hash].join('-') - return dbb + static fromBlockDTO(b: BlockDTO) { + const dbb = new DBBlock(); + dbb.version = b.version; + dbb.number = b.number; + dbb.currency = b.currency; + dbb.hash = b.hash; + dbb.previousHash = b.previousHash; + dbb.issuer = b.issuer; + dbb.previousIssuer = b.previousIssuer; + dbb.dividend = + b.dividend === null || b.dividend === undefined + ? b.dividend + : parseInt(String(b.dividend)); + dbb.time = b.time; + dbb.powMin = b.powMin; + dbb.unitbase = b.unitbase; + dbb.membersCount = b.membersCount; + dbb.issuersCount = b.issuersCount; + dbb.issuersFrame = b.issuersFrame; + dbb.issuersFrameVar = b.issuersFrameVar; + dbb.identities = b.identities; + dbb.joiners = b.joiners; + dbb.actives = b.actives; + dbb.leavers = b.leavers; + dbb.revoked = b.revoked; + dbb.excluded = b.excluded; + dbb.certifications = b.certifications; + dbb.transactions = b.transactions; + dbb.medianTime = b.medianTime; + dbb.fork = b.fork; + dbb.parameters = b.parameters; + dbb.inner_hash = b.inner_hash; + dbb.signature = b.signature; + dbb.nonce = b.nonce; + dbb.UDTime = b.UDTime; + dbb.monetaryMass = b.monetaryMass; + dbb.writtenOn = b.number; + dbb.written_on = [b.number, b.hash].join("-"); + return dbb; } -} \ No newline at end of file +} diff --git a/app/lib/db/DBHead.ts b/app/lib/db/DBHead.ts index 8c97197e4a4a2ffadf81210cfad40c52ddedfbbe..9cc2a55195b1ba42d285160e1a98e85caa8e81b6 100644 --- a/app/lib/db/DBHead.ts +++ b/app/lib/db/DBHead.ts @@ -12,42 +12,40 @@ // GNU Affero General Public License for more details. 
export class DBHead { - // TODO: some properties are not registered in the DB, we should create another class - version: number - currency: string | null - bsize: number - avgBlockSize: number - udTime: number - udReevalTime: number - massReeval: number - mass: number - hash: string - previousHash: string | null - previousIssuer: string | null - issuer: string - time: number - medianTime: number - number: number - powMin: number - diffNumber: number - issuersCount: number - issuersFrame: number - issuersFrameVar: number - dtDiffEval: number - issuerDiff: number - powZeros: number - powRemainder: number - speed: number - unitBase: number - membersCount: number - dividend: number - new_dividend: number | null - issuerIsMember: boolean - written_on: string - writtenOn: number + version: number; + currency: string | null; + bsize: number; + avgBlockSize: number; + udTime: number; + udReevalTime: number; + massReeval: number; + mass: number; + hash: string; + previousHash: string | null; + previousIssuer: string | null; + issuer: string; + time: number; + medianTime: number; + number: number; + powMin: number; + diffNumber: number; + issuersCount: number; + issuersFrame: number; + issuersFrameVar: number; + dtDiffEval: number; + issuerDiff: number; + powZeros: number; + powRemainder: number; + speed: number; + unitBase: number; + membersCount: number; + dividend: number; + new_dividend: number | null; + issuerIsMember: boolean; + written_on: string; + writtenOn: number; - constructor( - ) {} -} \ No newline at end of file + constructor() {} +} diff --git a/app/lib/db/DBPeer.ts b/app/lib/db/DBPeer.ts index e4bbada5d9ac61ad12dfcc1bf1a23fcd71e26673..f115a713fdc95a1b2428ed119679545b19651067 100644 --- a/app/lib/db/DBPeer.ts +++ b/app/lib/db/DBPeer.ts @@ -1,23 +1,22 @@ -import {PeerDTO} from "../dto/PeerDTO" +import { PeerDTO } from "../dto/PeerDTO"; export class DBPeer { + version: number; + currency: string; + status: string; + statusTS: number; + hash: string; + first_down: number | null; + last_try: number | null; + lastContact: number = Math.floor(Date.now() / 1000); + pubkey: string; + block: string; + signature: string; + endpoints: string[]; + raw: string; + nonWoT: boolean = true; // Security measure: a peer is presumed nonWoT. - version: number - currency: string - status: string - statusTS: number - hash: string - first_down: number | null - last_try: number | null - lastContact: number = Math.floor(Date.now() / 1000) - pubkey: string - block: string - signature: string - endpoints: string[] - raw: string - nonWoT: boolean = true // Security measure: a peer is presumed nonWoT. 
- - static json(peer:DBPeer): JSONDBPeer { + static json(peer: DBPeer): JSONDBPeer { return { version: peer.version, currency: peer.currency, @@ -28,22 +27,22 @@ export class DBPeer { block: peer.block, signature: peer.signature, endpoints: peer.endpoints, - } + }; } - static fromPeerDTO(peer:PeerDTO): DBPeer { - return peer.toDBPeer() + static fromPeerDTO(peer: PeerDTO): DBPeer { + return peer.toDBPeer(); } } export class JSONDBPeer { - version: number - currency: string - status: string - first_down: number | null - last_try: number | null - pubkey: string - block: string - signature: string - endpoints: string[] + version: number; + currency: string; + status: string; + first_down: number | null; + last_try: number | null; + pubkey: string; + block: string; + signature: string; + endpoints: string[]; } diff --git a/app/lib/db/DBTx.ts b/app/lib/db/DBTx.ts index 1d47b7bb07530f1733ea58adc1b39f2fa2808b59..1f0fd0e64b4e2c173b8d69a2aa8c0082c81fb1cb 100644 --- a/app/lib/db/DBTx.ts +++ b/app/lib/db/DBTx.ts @@ -1,60 +1,60 @@ -import {TransactionDTO} from "../dto/TransactionDTO" +import { TransactionDTO } from "../dto/TransactionDTO"; export class DBTx { - hash: string - block_number: number | null - locktime: number - version: number - currency: string - comment: string - blockstamp: string - blockstampTime: number | null - time: number | null - inputs: string[] - unlocks: string[] - outputs: string[] - issuers: string[] - signatures: string[] - recipients: string[] - written: boolean - removed: boolean - received: number - output_base: number - output_amount: number - written_on: string - writtenOn: number + hash: string; + block_number: number | null; + locktime: number; + version: number; + currency: string; + comment: string; + blockstamp: string; + blockstampTime: number | null; + time: number | null; + inputs: string[]; + unlocks: string[]; + outputs: string[]; + issuers: string[]; + signatures: string[]; + recipients: string[]; + written: boolean; + removed: boolean; + received: number; + output_base: number; + output_amount: number; + written_on: string; + writtenOn: number; - static fromTransactionDTO(tx:TransactionDTO) { - const dbTx = new DBTx() - dbTx.hash = tx.hash - dbTx.locktime = tx.locktime - dbTx.version = tx.version - dbTx.currency = tx.currency - dbTx.blockstamp = tx.blockstamp - dbTx.blockstampTime = tx.blockstampTime - dbTx.comment = tx.comment || "" - dbTx.inputs = tx.inputs - dbTx.unlocks = tx.unlocks - dbTx.outputs = tx.outputs - dbTx.issuers = tx.issuers - dbTx.signatures = tx.signatures - dbTx.recipients = tx.outputsAsRecipients() - dbTx.written = false - dbTx.removed = false - dbTx.output_base = tx.output_base - dbTx.output_amount = tx.output_amount - return dbTx + static fromTransactionDTO(tx: TransactionDTO) { + const dbTx = new DBTx(); + dbTx.hash = tx.hash; + dbTx.locktime = tx.locktime; + dbTx.version = tx.version; + dbTx.currency = tx.currency; + dbTx.blockstamp = tx.blockstamp; + dbTx.blockstampTime = tx.blockstampTime; + dbTx.comment = tx.comment || ""; + dbTx.inputs = tx.inputs; + dbTx.unlocks = tx.unlocks; + dbTx.outputs = tx.outputs; + dbTx.issuers = tx.issuers; + dbTx.signatures = tx.signatures; + dbTx.recipients = tx.outputsAsRecipients(); + dbTx.written = false; + dbTx.removed = false; + dbTx.output_base = tx.output_base; + dbTx.output_amount = tx.output_amount; + return dbTx; } - static setRecipients(txs:DBTx[]) { + static setRecipients(txs: DBTx[]) { // Each transaction must have a good "recipients" field for future searchs - txs.forEach((tx) => 
tx.recipients = DBTx.outputs2recipients(tx)) + txs.forEach((tx) => (tx.recipients = DBTx.outputs2recipients(tx))); } - static outputs2recipients(tx:DBTx) { - return tx.outputs.map(function(out) { - const recipent = out.match('SIG\\((.*)\\)') - return (recipent && recipent[1]) || 'UNKNOWN' - }) + static outputs2recipients(tx: DBTx) { + return tx.outputs.map(function (out) { + const recipent = out.match("SIG\\((.*)\\)"); + return (recipent && recipent[1]) || "UNKNOWN"; + }); } } diff --git a/app/lib/db/DBWallet.ts b/app/lib/db/DBWallet.ts index d59c617d685ce73a1a4d0d7dc0ba7bf0394b8f23..b244513f5bba813bfb1d99d5df4ae3c6e347034a 100644 --- a/app/lib/db/DBWallet.ts +++ b/app/lib/db/DBWallet.ts @@ -1,4 +1,4 @@ export interface DBWallet { - conditions: string - balance: number + conditions: string; + balance: number; } diff --git a/app/lib/db/OldIindexEntry.ts b/app/lib/db/OldIindexEntry.ts index 308969f906b983034a717c6503d54ac9ca1eee61..9ae7e0d844ad243a8bcbefdc33fc1d84a060375d 100644 --- a/app/lib/db/OldIindexEntry.ts +++ b/app/lib/db/OldIindexEntry.ts @@ -1,7 +1,7 @@ -import {IindexEntry} from "../indexer" +import { IindexEntry } from "../indexer"; export interface OldIindexEntry extends IindexEntry { - pubkey: string - buid: string | null - revocation_sig:string | null + pubkey: string; + buid: string | null; + revocation_sig: string | null; } diff --git a/app/lib/debug/MonitorExecutionTime.ts b/app/lib/debug/MonitorExecutionTime.ts index 8c3b740aa3be77b4bf59aca5135b7202942c25c3..06946c3d7806f8fae648af0b4498ce80bfb28821 100644 --- a/app/lib/debug/MonitorExecutionTime.ts +++ b/app/lib/debug/MonitorExecutionTime.ts @@ -11,82 +11,100 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {getDurationInMicroSeconds, getMicrosecondsTime} from "../../ProcessCpuProfiler" -import {OtherConstants} from "../other_constants" -import {Underscore} from "../common-libs/underscore" +import { + getDurationInMicroSeconds, + getMicrosecondsTime, +} from "../../ProcessCpuProfiler"; +import { OtherConstants } from "../other_constants"; +import { Underscore } from "../common-libs/underscore"; const monitorings: { [k: string]: { times: { - time: number - }[] - } -} = {} + time: number; + }[]; + }; +} = {}; -process.on('exit', () => { - let traces: { name: string, times: number, avg: number, total: number }[] = [] - Object - .keys(monitorings) - .forEach(k => { - const m = monitorings[k] - const total = m.times.reduce((s, t) => s + t.time / 1000, 0) - const avg = m.times.length ? total / m.times.length : 0 - traces.push({ - name: k, - times: m.times.length, - avg, - total - }) - }) - traces = Underscore.sortBy(traces, t => t.total) - traces - .forEach(t => { - console.log('%s %s times %sms (average) %sms (total time)', - (t.name + ':').padEnd(50, ' '), - String(t.times).padStart(10, ' '), - t.avg.toFixed(3).padStart(10, ' '), - t.total.toFixed(0).padStart(10, ' ') - ) - }) -}) +process.on("exit", () => { + let traces: { + name: string; + times: number; + avg: number; + total: number; + }[] = []; + Object.keys(monitorings).forEach((k) => { + const m = monitorings[k]; + const total = m.times.reduce((s, t) => s + t.time / 1000, 0); + const avg = m.times.length ? 
total / m.times.length : 0; + traces.push({ + name: k, + times: m.times.length, + avg, + total, + }); + }); + traces = Underscore.sortBy(traces, (t) => t.total); + traces.forEach((t) => { + console.log( + "%s %s times %sms (average) %sms (total time)", + (t.name + ":").padEnd(50, " "), + String(t.times).padStart(10, " "), + t.avg.toFixed(3).padStart(10, " "), + t.total.toFixed(0).padStart(10, " ") + ); + }); +}); export const MonitorExecutionTime = function (idProperty?: string) { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { if (OtherConstants.ENABLE_MONITORING) { - const original = descriptor.value + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { descriptor.value = async function (...args: any[]) { - const start = getMicrosecondsTime() - const entities: any[] = await original.apply(this, args) - const duration = getDurationInMicroSeconds(start) - const k = target.constructor.name + '.' + propertyKey + (idProperty ? `[${(this as any)[idProperty]}]` : '') + const start = getMicrosecondsTime(); + const entities: any[] = await original.apply(this, args); + const duration = getDurationInMicroSeconds(start); + const k = + target.constructor.name + + "." + + propertyKey + + (idProperty ? `[${(this as any)[idProperty]}]` : ""); if (!monitorings[k]) { monitorings[k] = { - times: [] - } + times: [], + }; } monitorings[k].times.push({ - time: duration - }) - return entities - } + time: duration, + }); + return entities; + }; } else { descriptor.value = function (...args: any[]) { - const start = getMicrosecondsTime() - const entities: any[] = original.apply(this, args) - const duration = getDurationInMicroSeconds(start) - const k = target.constructor.name + '.' + propertyKey + (idProperty ? `[${(this as any)[idProperty]}]` : '') + const start = getMicrosecondsTime(); + const entities: any[] = original.apply(this, args); + const duration = getDurationInMicroSeconds(start); + const k = + target.constructor.name + + "." + + propertyKey + + (idProperty ? `[${(this as any)[idProperty]}]` : ""); if (!monitorings[k]) { monitorings[k] = { - times: [] - } + times: [], + }; } monitorings[k].times.push({ - time: duration - }) - return entities - } + time: duration, + }); + return entities; + }; } } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/MonitorFlushedIndex.ts b/app/lib/debug/MonitorFlushedIndex.ts index f2d083ccef8a0488a4f4cce955fe77cd6cf92072..35a361db0a7a5c7e242c4baa0b96496c93eb6634 100644 --- a/app/lib/debug/MonitorFlushedIndex.ts +++ b/app/lib/debug/MonitorFlushedIndex.ts @@ -11,42 +11,46 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {cliprogram} from "../common-libs/programOptions" -import {IndexBatch} from "../dal/fileDAL" +import { cliprogram } from "../common-libs/programOptions"; +import { IndexBatch } from "../dal/fileDAL"; export const MonitorFlushedIndex = function () { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { - const original = descriptor.value + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { - descriptor.value = async function (...args:any[]) { - const pub = cliprogram.syncTrace + descriptor.value = async function (...args: any[]) { + const pub = cliprogram.syncTrace; if (pub) { - const batch: IndexBatch = args[0] - batch.iindex.forEach(e => { + const batch: IndexBatch = args[0]; + batch.iindex.forEach((e) => { if (e.pub === pub) { - console.log(JSON.stringify(e)) + console.log(JSON.stringify(e)); } - }) - batch.mindex.forEach(e => { + }); + batch.mindex.forEach((e) => { if (e.pub === pub) { - console.log(JSON.stringify(e)) + console.log(JSON.stringify(e)); } - }) - batch.cindex.forEach(e => { + }); + batch.cindex.forEach((e) => { if (e.issuer === pub || e.receiver === pub) { - console.log(JSON.stringify(e)) + console.log(JSON.stringify(e)); } - }) - batch.sindex.forEach(e => { - if (e.conditions.indexOf(pub || '') !== -1) { - console.log(JSON.stringify(e)) + }); + batch.sindex.forEach((e) => { + if (e.conditions.indexOf(pub || "") !== -1) { + console.log(JSON.stringify(e)); } - }) + }); } - return await original.apply(this, args) - } + return await original.apply(this, args); + }; } else { - throw Error("Monitoring a synchronous function is not allowed.") + throw Error("Monitoring a synchronous function is not allowed."); } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/MonitorLokiExecutionTime.ts b/app/lib/debug/MonitorLokiExecutionTime.ts index 0491951af3e646bba96c9cfcc8ef5f55f0cb2297..3a30f586f98b7315d7cfd5bf9496e2d5eeb1b4e1 100644 --- a/app/lib/debug/MonitorLokiExecutionTime.ts +++ b/app/lib/debug/MonitorLokiExecutionTime.ts @@ -11,31 +11,46 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {NewLogger} from "../logger" -import {getMicrosecondsTime} from "../../ProcessCpuProfiler" -import {OtherConstants} from "../other_constants" +import { NewLogger } from "../logger"; +import { getMicrosecondsTime } from "../../ProcessCpuProfiler"; +import { OtherConstants } from "../other_constants"; -const theLogger = NewLogger() +const theLogger = NewLogger(); export const MonitorLokiExecutionTime = function (dumpFirstParam = false) { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { if (OtherConstants.ENABLE_LOKI_MONITORING) { - const original = descriptor.value + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { - descriptor.value = async function (...args:any[]) { - const that :any = this - const now = getMicrosecondsTime() - const result = await original.apply(this, args) + descriptor.value = async function (...args: any[]) { + const that: any = this; + const now = getMicrosecondsTime(); + const result = await original.apply(this, args); if (dumpFirstParam) { - theLogger.trace('[loki][%s][%s] => %sµs', that.collectionName, propertyKey, (getMicrosecondsTime() - now), args && args[0]) + theLogger.trace( + "[loki][%s][%s] => %sµs", + that.collectionName, + propertyKey, + getMicrosecondsTime() - now, + args && args[0] + ); } else { - theLogger.trace('[loki][%s][%s] => %sµs', that.collectionName, propertyKey, (getMicrosecondsTime() - now)) + theLogger.trace( + "[loki][%s][%s] => %sµs", + that.collectionName, + propertyKey, + getMicrosecondsTime() - now + ); } - return result - } + return result; + }; } else { - throw Error("Monitoring a Loki synchronous function is not allowed.") + throw Error("Monitoring a Loki synchronous function is not allowed."); } } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/MonitorSQLExecutionTime.ts b/app/lib/debug/MonitorSQLExecutionTime.ts index 65724c527347f3df256933cbf1c1dcf582edd44d..72a4fcf64252baddf344131b2883de651cfabf24 100644 --- a/app/lib/debug/MonitorSQLExecutionTime.ts +++ b/app/lib/debug/MonitorSQLExecutionTime.ts @@ -11,29 +11,41 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {getDurationInMicroSeconds, getMicrosecondsTime} from "../../ProcessCpuProfiler" -import {NewLogger} from "../logger" -import {OtherConstants} from "../other_constants" +import { + getDurationInMicroSeconds, + getMicrosecondsTime, +} from "../../ProcessCpuProfiler"; +import { NewLogger } from "../logger"; +import { OtherConstants } from "../other_constants"; -const theLogger = NewLogger() +const theLogger = NewLogger(); export const MonitorSQLExecutionTime = function () { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { if (OtherConstants.ENABLE_SQL_MONITORING) { - const original = descriptor.value + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { descriptor.value = async function (...args: any[]) { - const start = getMicrosecondsTime() - const sql: string = args[0] - const params: any[] = args[1] - const entities: any[] = await original.apply(this, args) - const duration = getDurationInMicroSeconds(start) - theLogger.trace('[sqlite][query] %s %s %sµs', sql, JSON.stringify(params || []), duration) - return entities - } + const start = getMicrosecondsTime(); + const sql: string = args[0]; + const params: any[] = args[1]; + const entities: any[] = await original.apply(this, args); + const duration = getDurationInMicroSeconds(start); + theLogger.trace( + "[sqlite][query] %s %s %sµs", + sql, + JSON.stringify(params || []), + duration + ); + return entities; + }; } else { - throw Error("Monitoring an SQL synchronous function is not allowed.") + throw Error("Monitoring an SQL synchronous function is not allowed."); } } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/dump.ts b/app/lib/debug/dump.ts index ac8410429572b77d773d8273649a89e27a7782bb..620edbdd445090374e00ddab6e5fb087d3c1cb5e 100644 --- a/app/lib/debug/dump.ts +++ b/app/lib/debug/dump.ts @@ -1,66 +1,174 @@ -import {CindexEntry} from "../indexer" +import { CindexEntry } from "../indexer"; -const Table = require('cli-table') +const Table = require("cli-table"); export function dumpBindex(rows: CindexEntry[]) { - return dump(rows, ['version','bsize','hash','issuer','time','number','membersCount','issuersCount','issuersFrame','issuersFrameVar','issuerDiff','avgBlockSize','medianTime','dividend','mass','unitBase','powMin','udTime','udReevalTime','diffNumber','speed','massReeval']) + return dump(rows, [ + "version", + "bsize", + "hash", + "issuer", + "time", + "number", + "membersCount", + "issuersCount", + "issuersFrame", + "issuersFrameVar", + "issuerDiff", + "avgBlockSize", + "medianTime", + "dividend", + "mass", + "unitBase", + "powMin", + "udTime", + "udReevalTime", + "diffNumber", + "speed", + "massReeval", + ]); } export function dumpIindex(rows: CindexEntry[]) { - return dump(rows, ['op','uid','pub','hash','sig','created_on','written_on','member','wasMember','kick','wotb_id']) + return dump(rows, [ + "op", + "uid", + "pub", + "hash", + "sig", + "created_on", + "written_on", + "member", + "wasMember", + "kick", + "wotb_id", + ]); } export function dumpCindex(rows: CindexEntry[]) { - return dump(rows, ['op','issuer','receiver','created_on','written_on','sig','expires_on','expired_on','chainable_on','from_wid','to_wid','replayable_on']) + return dump(rows, [ + "op", + "issuer", + "receiver", + "created_on", + "written_on", + "sig", + "expires_on", + "expired_on", + "chainable_on", + "from_wid", + "to_wid", + "replayable_on", 
+ ]); } -export function dumpCindexPretty(rows: CindexEntry[], getUid: (pub: string) => Promise<string>) { - return dumpPretty(rows, ['row','op','issuer','created_on','written_on','expires_on','expired_on','chainable_on','replayable_on'], async (f, v) => { - if (f === 'issuer') { - return await getUid(v) - } - if (f === 'written_on') { - return String(v).substr(0, 15) +export function dumpCindexPretty( + rows: CindexEntry[], + getUid: (pub: string) => Promise<string> +) { + return dumpPretty( + rows, + [ + "row", + "op", + "issuer", + "created_on", + "written_on", + "expires_on", + "expired_on", + "chainable_on", + "replayable_on", + ], + async (f, v) => { + if (f === "issuer") { + return await getUid(v); + } + if (f === "written_on") { + return String(v).substr(0, 15); + } + return v; } - return v - }) + ); } export function dumpMindex(rows: CindexEntry[]) { - return dump(rows, ['op','pub','created_on','written_on','expires_on','expired_on','revokes_on','revoked_on','leaving','revocation','chainable_on']) + return dump(rows, [ + "op", + "pub", + "created_on", + "written_on", + "expires_on", + "expired_on", + "revokes_on", + "revoked_on", + "leaving", + "revocation", + "chainable_on", + ]); } export function dumpSindex(rows: CindexEntry[]) { - return dump(rows, ['op','tx','identifier','pos','created_on','amount','base','locktime','consumed','conditions', 'writtenOn']) + return dump(rows, [ + "op", + "tx", + "identifier", + "pos", + "created_on", + "amount", + "base", + "locktime", + "consumed", + "conditions", + "writtenOn", + ]); } -async function dumpPretty(rows: any[], columns: string[], transform: (field: string, value: any) => Promise<string> = (f, v) => Promise.resolve(v)) { - return dump(rows, columns, transform, {'mid': '', 'left-mid': '', 'mid-mid': '', 'right-mid': ''}) +async function dumpPretty( + rows: any[], + columns: string[], + transform: (field: string, value: any) => Promise<string> = (f, v) => + Promise.resolve(v) +) { + return dump(rows, columns, transform, { + mid: "", + "left-mid": "", + "mid-mid": "", + "right-mid": "", + }); } -async function dump(rows: any[], columns: string[], transform: (field: string, value: any) => Promise<string> = (f, v) => Promise.resolve(v), chars?: any) { +async function dump( + rows: any[], + columns: string[], + transform: (field: string, value: any) => Promise<string> = (f, v) => + Promise.resolve(v), + chars?: any +) { // Table columns - const t = chars ? new Table({ head: columns, chars }) : new Table({ head: columns }); + const t = chars + ? new Table({ head: columns, chars }) + : new Table({ head: columns }); let i = 0; for (const row of rows) { - t.push(await Promise.all(columns.map(async (c) => { - if (c === 'row') { - return i - } - else if (row[c] === null) { - return "NULL" - } - else if (row[c] === undefined) { - return 'NULL' - } - else if (typeof row[c] === 'boolean') { - const v = await transform(c, row[c] ? 1 : 0) - return v - } - const v = await transform(c, row[c]) - return v - }))); - i++ + t.push( + await Promise.all( + columns.map(async (c) => { + if (c === "row") { + return i; + } else if (row[c] === null) { + return "NULL"; + } else if (row[c] === undefined) { + return "NULL"; + } else if (typeof row[c] === "boolean") { + const v = await transform(c, row[c] ? 
1 : 0); + return v; + } + const v = await transform(c, row[c]); + return v; + }) + ) + ); + i++; } try { - const dumped = t.toString() - console.log(dumped) + const dumped = t.toString(); + console.log(dumped); } catch (e) { - console.error(e) + console.error(e); } } diff --git a/app/lib/dto/BlockDTO.ts b/app/lib/dto/BlockDTO.ts index 1dc973b9f2f65d17d1a9e2d00c81409a491b417e..91f4a438198c978cb6f172011e337086adb11be2 100644 --- a/app/lib/dto/BlockDTO.ts +++ b/app/lib/dto/BlockDTO.ts @@ -11,54 +11,52 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {TransactionDTO} from "./TransactionDTO" -import {CurrencyConfDTO} from "./ConfDTO" -import {hashf} from "../common" -import {Cloneable} from "./Cloneable" -import {MonitorExecutionTime} from "../debug/MonitorExecutionTime" +import { TransactionDTO } from "./TransactionDTO"; +import { CurrencyConfDTO } from "./ConfDTO"; +import { hashf } from "../common"; +import { Cloneable } from "./Cloneable"; +import { MonitorExecutionTime } from "../debug/MonitorExecutionTime"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export class BlockDTO implements Cloneable { - clone(): any { - return BlockDTO.fromJSONObject(this) + return BlockDTO.fromJSONObject(this); } - version: number - number: number - currency: string - hash: string - inner_hash: string - previousHash: string - issuer: string - previousIssuer: string - dividend: number|null - time: number - powMin: number - unitbase: number - membersCount: number - issuersCount: number - issuersFrame: number - issuersFrameVar: number - identities: string[] = [] - joiners: string[] = [] - actives: string[] = [] - leavers: string[] = [] - revoked: string[] = [] - excluded: string[] = [] - certifications: string[] = [] - transactions: TransactionDTO[] = [] - medianTime: number - nonce: number - fork: boolean - parameters: string - signature: string - monetaryMass: number - UDTime: number + version: number; + number: number; + currency: string; + hash: string; + inner_hash: string; + previousHash: string; + issuer: string; + previousIssuer: string; + dividend: number | null; + time: number; + powMin: number; + unitbase: number; + membersCount: number; + issuersCount: number; + issuersFrame: number; + issuersFrameVar: number; + identities: string[] = []; + joiners: string[] = []; + actives: string[] = []; + leavers: string[] = []; + revoked: string[] = []; + excluded: string[] = []; + certifications: string[] = []; + transactions: TransactionDTO[] = []; + medianTime: number; + nonce: number; + fork: boolean; + parameters: string; + signature: string; + monetaryMass: number; + UDTime: number; - constructor() { - } + constructor() {} json() { return { @@ -103,27 +101,29 @@ export class BlockDTO implements Cloneable { outputs: tx.outputs, unlocks: tx.unlocks, signatures: tx.signatures, - comment: tx.comment - } - }) - } + comment: tx.comment, + }; + }), + }; } get len() { - return this.identities.length + + return ( + this.identities.length + this.joiners.length + this.actives.length + this.leavers.length + this.revoked.length + this.certifications.length + this.transactions.reduce((sum, tx) => sum + tx.getLen(), 0) + ); } - getInlineIdentity(pubkey:string): string | null { + getInlineIdentity(pubkey: string): string | null { let i = 0; let found = null; while (!found && i < this.identities.length) { - if (this.identities[i].match(new RegExp('^' + pubkey))) + if (this.identities[i].match(new RegExp("^" + 
pubkey))) found = this.identities[i]; i++; } @@ -131,20 +131,21 @@ export class BlockDTO implements Cloneable { } getRawUnSigned() { - return this.getRawInnerPart() + this.getSignedPart() + return this.getRawInnerPart() + this.getSignedPart(); } getRawSigned() { - return this.getRawUnSigned() + this.signature + "\n" + return this.getRawUnSigned() + this.signature + "\n"; } getSignedPart() { - return "InnerHash: " + this.inner_hash + "\n" + - "Nonce: " + this.nonce + "\n" + return ( + "InnerHash: " + this.inner_hash + "\n" + "Nonce: " + this.nonce + "\n" + ); } getSignedPartSigned() { - return this.getSignedPart() + this.signature + "\n" + return this.getSignedPart() + this.signature + "\n"; } getRawInnerPart() { @@ -156,107 +157,106 @@ export class BlockDTO implements Cloneable { raw += "PoWMin: " + this.powMin + "\n"; raw += "Time: " + this.time + "\n"; raw += "MedianTime: " + this.medianTime + "\n"; - if (this.dividend) - raw += "UniversalDividend: " + this.dividend + "\n"; + if (this.dividend) raw += "UniversalDividend: " + this.dividend + "\n"; raw += "UnitBase: " + this.unitbase + "\n"; raw += "Issuer: " + this.issuer + "\n"; raw += "IssuersFrame: " + this.issuersFrame + "\n"; raw += "IssuersFrameVar: " + this.issuersFrameVar + "\n"; raw += "DifferentIssuersCount: " + this.issuersCount + "\n"; - if(this.previousHash) - raw += "PreviousHash: " + this.previousHash + "\n"; - if(this.previousIssuer) + if (this.previousHash) raw += "PreviousHash: " + this.previousHash + "\n"; + if (this.previousIssuer) raw += "PreviousIssuer: " + this.previousIssuer + "\n"; - if(this.parameters) - raw += "Parameters: " + this.parameters + "\n"; + if (this.parameters) raw += "Parameters: " + this.parameters + "\n"; raw += "MembersCount: " + this.membersCount + "\n"; raw += "Identities:\n"; - for (const idty of (this.identities || [])){ + for (const idty of this.identities || []) { raw += idty + "\n"; } raw += "Joiners:\n"; - for (const joiner of (this.joiners || [])){ + for (const joiner of this.joiners || []) { raw += joiner + "\n"; } raw += "Actives:\n"; - for (const active of (this.actives || [])){ + for (const active of this.actives || []) { raw += active + "\n"; } raw += "Leavers:\n"; - for (const leaver of (this.leavers || [])){ + for (const leaver of this.leavers || []) { raw += leaver + "\n"; } raw += "Revoked:\n"; - for (const revoked of (this.revoked || [])){ + for (const revoked of this.revoked || []) { raw += revoked + "\n"; } raw += "Excluded:\n"; - for (const excluded of (this.excluded || [])){ + for (const excluded of this.excluded || []) { raw += excluded + "\n"; } raw += "Certifications:\n"; - for (const cert of (this.certifications || [])){ + for (const cert of this.certifications || []) { raw += cert + "\n"; } raw += "Transactions:\n"; - for (const tx of (this.transactions || [])){ + for (const tx of this.transactions || []) { raw += tx.getCompactVersion(); } - return raw + return raw; } getHash() { - return hashf(this.getSignedPartSigned()) + return hashf(this.getSignedPartSigned()); } get blockstamp() { - return BlockDTO.blockstamp({ number: this.number, hash: this.getHash() }) + return BlockDTO.blockstamp({ number: this.number, hash: this.getHash() }); } @MonitorExecutionTime() - static fromJSONObject(obj:any) { - const dto = new BlockDTO() - dto.version = parseInt(obj.version) || DEFAULT_DOCUMENT_VERSION - dto.number = parseInt(obj.number) - dto.currency = obj.currency || "" - dto.hash = obj.hash || "" - dto.inner_hash = obj.inner_hash - dto.previousHash = obj.previousHash - 
dto.issuer = obj.issuer || "" - dto.previousIssuer = obj.previousIssuer - dto.dividend = obj.dividend || null - dto.time = parseInt(obj.time) - dto.powMin = parseInt(obj.powMin) - dto.monetaryMass = parseInt(obj.monetaryMass) + static fromJSONObject(obj: any) { + const dto = new BlockDTO(); + dto.version = parseInt(obj.version) || DEFAULT_DOCUMENT_VERSION; + dto.number = parseInt(obj.number); + dto.currency = obj.currency || ""; + dto.hash = obj.hash || ""; + dto.inner_hash = obj.inner_hash; + dto.previousHash = obj.previousHash; + dto.issuer = obj.issuer || ""; + dto.previousIssuer = obj.previousIssuer; + dto.dividend = obj.dividend || null; + dto.time = parseInt(obj.time); + dto.powMin = parseInt(obj.powMin); + dto.monetaryMass = parseInt(obj.monetaryMass); if (isNaN(dto.monetaryMass) && obj.mass !== undefined) { - dto.monetaryMass = parseInt(obj.mass) + dto.monetaryMass = parseInt(obj.mass); } if (isNaN(dto.monetaryMass)) { - dto.monetaryMass = 0 + dto.monetaryMass = 0; } - dto.unitbase = parseInt(obj.unitbase) - dto.membersCount = parseInt(obj.membersCount) - dto.issuersCount = parseInt(obj.issuersCount) - dto.issuersFrame = parseInt(obj.issuersFrame) - dto.issuersFrameVar = parseInt(obj.issuersFrameVar) - dto.identities = obj.identities || [] - dto.joiners = obj.joiners || [] - dto.actives = obj.actives || [] - dto.leavers = obj.leavers || [] - dto.revoked = obj.revoked || [] - dto.excluded = obj.excluded || [] - dto.certifications = obj.certifications || [] - dto.transactions = (obj.transactions || []).map((tx:any) => TransactionDTO.fromJSONObject(tx)) - dto.medianTime = parseInt(obj.medianTime) - dto.fork = !!obj.fork - dto.parameters = obj.parameters || "" - dto.signature = obj.signature || "" - dto.nonce = parseInt(obj.nonce) - return dto + dto.unitbase = parseInt(obj.unitbase); + dto.membersCount = parseInt(obj.membersCount); + dto.issuersCount = parseInt(obj.issuersCount); + dto.issuersFrame = parseInt(obj.issuersFrame); + dto.issuersFrameVar = parseInt(obj.issuersFrameVar); + dto.identities = obj.identities || []; + dto.joiners = obj.joiners || []; + dto.actives = obj.actives || []; + dto.leavers = obj.leavers || []; + dto.revoked = obj.revoked || []; + dto.excluded = obj.excluded || []; + dto.certifications = obj.certifications || []; + dto.transactions = (obj.transactions || []).map((tx: any) => + TransactionDTO.fromJSONObject(tx) + ); + dto.medianTime = parseInt(obj.medianTime); + dto.fork = !!obj.fork; + dto.parameters = obj.parameters || ""; + dto.signature = obj.signature || ""; + dto.nonce = parseInt(obj.nonce); + return dto; } - static getConf(block:BlockDTO): CurrencyConfDTO { - const sp = block.parameters.split(':'); + static getConf(block: BlockDTO): CurrencyConfDTO { + const sp = block.parameters.split(":"); return { currency: block.currency, c: parseFloat(sp[0]), @@ -282,18 +282,18 @@ export class BlockDTO implements Cloneable { // New parameters, defaults to msWindow msPeriod: parseInt(sp[9]), sigReplay: parseInt(sp[9]), - } + }; } - static getLen(block:any) { - return BlockDTO.fromJSONObject(block).len + static getLen(block: any) { + return BlockDTO.fromJSONObject(block).len; } - static getHash(block:any) { - return BlockDTO.fromJSONObject(block).getHash() + static getHash(block: any) { + return BlockDTO.fromJSONObject(block).getHash(); } - static blockstamp(b: { number: number, hash: string }) { - return [b.number, b.hash].join('-') + static blockstamp(b: { number: number; hash: string }) { + return [b.number, b.hash].join("-"); } } diff --git 
a/app/lib/dto/CertificationDTO.ts b/app/lib/dto/CertificationDTO.ts index 66b76c92d9df315a8267dfece93d8cf622e3f7c5..047206d56a4b07cbe4d7a38a79285e86326083ae 100644 --- a/app/lib/dto/CertificationDTO.ts +++ b/app/lib/dto/CertificationDTO.ts @@ -11,15 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {IdentityDTO} from "./IdentityDTO" -import {Buid} from "../common-libs/buid" -import {Cloneable} from "./Cloneable"; -import {hashf} from "../common"; +import { IdentityDTO } from "./IdentityDTO"; +import { Buid } from "../common-libs/buid"; +import { Cloneable } from "./Cloneable"; +import { hashf } from "../common"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export class ShortCertificationDTO { - constructor( public pubkey: string, public block_number: number, @@ -28,22 +27,22 @@ export class ShortCertificationDTO { ) {} get issuer() { - return this.pubkey + return this.pubkey; } get from() { - return this.pubkey + return this.pubkey; } get to() { - return this.idty_issuer + return this.idty_issuer; } } -export class CertificationDTO extends ShortCertificationDTO implements Cloneable { - +export class CertificationDTO extends ShortCertificationDTO + implements Cloneable { clone(): any { - return CertificationDTO.fromJSONObject(this) + return CertificationDTO.fromJSONObject(this); } constructor( @@ -52,20 +51,20 @@ export class CertificationDTO extends ShortCertificationDTO implements Cloneable public pubkey: string, public buid: string, public sig: string, - public idty_issuer:string, - public idty_uid:string, - public idty_buid:string, - public idty_sig:string + public idty_issuer: string, + public idty_uid: string, + public idty_buid: string, + public idty_sig: string ) { - super(pubkey, parseInt(buid.split(':')[0]), sig, idty_issuer) + super(pubkey, parseInt(buid.split(":")[0]), sig, idty_issuer); } getTargetHash() { return IdentityDTO.getTargetHash({ uid: this.idty_uid, created_on: this.idty_buid, - pub: this.idty_issuer - }) + pub: this.idty_issuer, + }); } getRawUnSigned() { @@ -74,42 +73,42 @@ export class CertificationDTO extends ShortCertificationDTO implements Cloneable raw += "Type: Certification\n"; raw += "Currency: " + this.currency + "\n"; raw += "Issuer: " + this.pubkey + "\n"; - raw += "IdtyIssuer: " + this.idty_issuer + '\n'; - raw += "IdtyUniqueID: " + this.idty_uid + '\n'; - raw += "IdtyTimestamp: " + this.idty_buid + '\n'; - raw += "IdtySignature: " + this.idty_sig + '\n'; - raw += "CertTimestamp: " + this.buid + '\n'; - return raw + raw += "IdtyIssuer: " + this.idty_issuer + "\n"; + raw += "IdtyUniqueID: " + this.idty_uid + "\n"; + raw += "IdtyTimestamp: " + this.idty_buid + "\n"; + raw += "IdtySignature: " + this.idty_sig + "\n"; + raw += "CertTimestamp: " + this.buid + "\n"; + return raw; } getRawSigned() { - return this.getRawUnSigned() + this.sig + '\n' + return this.getRawUnSigned() + this.sig + "\n"; } json() { return { - "issuer": this.pubkey, - "timestamp": this.buid, - "sig": this.sig, - "target": { - "issuer": this.idty_issuer, - "uid": this.idty_uid, - "timestamp": this.idty_buid, - "sig": this.idty_sig - } - } + issuer: this.pubkey, + timestamp: this.buid, + sig: this.sig, + target: { + issuer: this.idty_issuer, + uid: this.idty_uid, + timestamp: this.idty_buid, + sig: this.idty_sig, + }, + }; } inline() { - return [this.pubkey, this.to, this.block_number, this.sig].join(':') + return [this.pubkey, this.to, this.block_number, 
this.sig].join(":"); } - static fromInline(inline:string): ShortCertificationDTO { - const [pubkey, to, block_number, sig]: string[] = inline.split(':') - return new ShortCertificationDTO(pubkey, parseInt(block_number), sig, to) + static fromInline(inline: string): ShortCertificationDTO { + const [pubkey, to, block_number, sig]: string[] = inline.split(":"); + return new ShortCertificationDTO(pubkey, parseInt(block_number), sig, to); } - static fromJSONObject(obj:any) { + static fromJSONObject(obj: any) { return new CertificationDTO( obj.version || DEFAULT_DOCUMENT_VERSION, obj.currency, @@ -120,10 +119,10 @@ export class CertificationDTO extends ShortCertificationDTO implements Cloneable obj.idty_uid, obj.idty_buid, obj.idty_sig - ) + ); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } -} \ No newline at end of file +} diff --git a/app/lib/dto/Cloneable.ts b/app/lib/dto/Cloneable.ts index 358cc75ac8030c014e53270f5834da334536f5c4..8966718703a47b89c9c93e56d94ad431e277b41e 100644 --- a/app/lib/dto/Cloneable.ts +++ b/app/lib/dto/Cloneable.ts @@ -12,5 +12,5 @@ // GNU Affero General Public License for more details. export interface Cloneable { - clone(): any -} \ No newline at end of file + clone(): any; +} diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts index 59c229f19a724175c2e1077d5f7630351e7dda11..7d5d8247423998f526d6ef3059966f5d492309f0 100644 --- a/app/lib/dto/ConfDTO.ts +++ b/app/lib/dto/ConfDTO.ts @@ -11,108 +11,115 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../common-libs/constants" -import {ProxiesConf} from '../proxy'; -import {Underscore} from "../common-libs/underscore" +import { CommonConstants } from "../common-libs/constants"; +import { ProxiesConf } from "../proxy"; +import { Underscore } from "../common-libs/underscore"; -const constants = require('../constants'); +const constants = require("../constants"); export interface Keypair { - pub: string - sec: string + pub: string; + sec: string; } export interface StorageDTO { storage?: { - transactions?:boolean - wotwizard?:boolean - } + transactions?: boolean; + wotwizard?: boolean; + }; } export interface PowDTO { - powNoSecurity:boolean + powNoSecurity: boolean; } export interface BranchingDTO { - switchOnHeadAdvance:number - avgGenTime:number - forksize:number + switchOnHeadAdvance: number; + avgGenTime: number; + forksize: number; } export interface CurrencyConfDTO { - currency: string - c: number - dt: number - ud0: number - sigPeriod: number - sigReplay: number - sigStock: number - sigWindow: number - sigValidity: number - sigQty: number - idtyWindow: number - msWindow: number - msPeriod: number - xpercent: number - msValidity: number - stepMax: number - medianTimeBlocks: number - avgGenTime: number - dtDiffEval: number - percentRot: number - udTime0: number - udReevalTime0: number - dtReeval: number + currency: string; + c: number; + dt: number; + ud0: number; + sigPeriod: number; + sigReplay: number; + sigStock: number; + sigWindow: number; + sigValidity: number; + sigQty: number; + idtyWindow: number; + msWindow: number; + msPeriod: number; + xpercent: number; + msValidity: number; + stepMax: number; + medianTimeBlocks: number; + avgGenTime: number; + dtDiffEval: number; + percentRot: number; + udTime0: number; + udReevalTime0: number; + dtReeval: number; } export interface KeypairConfDTO { - pair: Keypair - oldPair: Keypair|null - salt: string - passwd: 
string + pair: Keypair; + oldPair: Keypair | null; + salt: string; + passwd: string; } export interface NetworkConfDTO { - proxiesConf: ProxiesConf|undefined - nobma: boolean - bmaWithCrawler: boolean - remoteport: number - remotehost: string|null - remoteipv4: string|null - remoteipv6: string|null - port: number - ipv4: string - ipv6: string - dos:any - upnp:boolean - httplogs:boolean - nonWoTPeersLimit: number + proxiesConf: ProxiesConf | undefined; + nobma: boolean; + bmaWithCrawler: boolean; + remoteport: number; + remotehost: string | null; + remoteipv4: string | null; + remoteipv6: string | null; + port: number; + ipv4: string; + ipv6: string; + dos: any; + upnp: boolean; + httplogs: boolean; + nonWoTPeersLimit: number; } export interface WS2PConfDTO { ws2p?: { - privateAccess?: boolean - publicAccess?: boolean - sync?: boolean - uuid?: string - upnp?: boolean - remotehost?: string|null - remoteport?: number|null - remotepath?: string - port?: number - host?: string - maxPublic?:number - maxPrivate?:number - preferedNodes?: string[] - preferedOnly: boolean - privilegedNodes?: string[] - privilegedOnly: boolean - syncLimit?: number - } + privateAccess?: boolean; + publicAccess?: boolean; + sync?: boolean; + uuid?: string; + upnp?: boolean; + remotehost?: string | null; + remoteport?: number | null; + remotepath?: string; + port?: number; + host?: string; + maxPublic?: number; + maxPrivate?: number; + preferedNodes?: string[]; + preferedOnly: boolean; + privilegedNodes?: string[]; + privilegedOnly: boolean; + syncLimit?: number; + }; } -export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, NetworkConfDTO, BranchingDTO, WS2PConfDTO, PowDTO { - +export class ConfDTO + implements + StorageDTO, + CurrencyConfDTO, + KeypairConfDTO, + NetworkConfDTO, + BranchingDTO, + WS2PConfDTO, + PowDTO { constructor( public loglevel: string, public currency: string, @@ -154,13 +161,13 @@ export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, Net public sigWindow: number, public switchOnHeadAdvance: number, public pair: Keypair, - public oldPair: Keypair|null, + public oldPair: Keypair | null, public salt: string, public passwd: string, public remoteport: number, - public remotehost: string|null, - public remoteipv4: string|null, - public remoteipv6: string|null, + public remotehost: string | null, + public remoteipv4: string | null, + public remoteipv6: string | null, public host: string, public port: number, public ipv4: string, @@ -172,70 +179,131 @@ export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, Net public nobma: boolean, public bmaWithCrawler: boolean, public nonWoTPeersLimit: number, - public proxiesConf: ProxiesConf|undefined, + public proxiesConf: ProxiesConf | undefined, public ws2p?: { - privateAccess?: boolean - publicAccess?: boolean - uuid?: string - upnp?: boolean - remotehost?: string|null - remoteport?: number|null - remotepath?: string - port?: number - host?: string - preferedNodes?: string[] - preferedOnly: boolean - privilegedNodes?: string[] - privilegedOnly: boolean - maxPublic?:number - maxPrivate?:number - syncLimit?:number + privateAccess?: boolean; + publicAccess?: boolean; + uuid?: string; + upnp?: boolean; + remotehost?: string | null; + remoteport?: number | null; + remotepath?: string; + port?: number; + host?: string; + preferedNodes?: string[]; + preferedOnly: boolean; + privilegedNodes?: string[]; + privilegedOnly: boolean; + maxPublic?: number; + maxPrivate?: number; + syncLimit?: number; }, public 
powNoSecurity = false, public storage = { transactions: false, wotwizard: false, - }, -) {} + } + ) {} static mock() { - return new ConfDTO("", "", [], [], 0, 3600 * 1000, constants.PROOF_OF_WORK.DEFAULT.CPU, 1, constants.PROOF_OF_WORK.DEFAULT.PREFIX, 0, 0, constants.CONTRACT.DEFAULT.C, constants.CONTRACT.DEFAULT.DT, constants.CONTRACT.DEFAULT.DT_REEVAL, 0, constants.CONTRACT.DEFAULT.UD0, 0, 0, constants.CONTRACT.DEFAULT.STEPMAX, constants.CONTRACT.DEFAULT.SIGPERIOD, constants.CONTRACT.DEFAULT.SIGREPLAY, 0, constants.CONTRACT.DEFAULT.SIGVALIDITY, constants.CONTRACT.DEFAULT.MSVALIDITY, constants.CONTRACT.DEFAULT.SIGQTY, constants.CONTRACT.DEFAULT.SIGSTOCK, constants.CONTRACT.DEFAULT.X_PERCENT, constants.CONTRACT.DEFAULT.PERCENTROT, constants.CONTRACT.DEFAULT.POWDELAY, constants.CONTRACT.DEFAULT.AVGGENTIME, constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, false, 3000, false, constants.BRANCHES.DEFAULT_WINDOW_SIZE, constants.CONTRACT.DEFAULT.IDTYWINDOW, constants.CONTRACT.DEFAULT.MSWINDOW, constants.CONTRACT.DEFAULT.SIGWINDOW, 0, { pub:'', sec:'' }, null, "", "", 0, "", "", "", "", 0, "", "", null, false, "", true, true, false, 100, new ProxiesConf(), undefined) + return new ConfDTO( + "", + "", + [], + [], + 0, + 3600 * 1000, + constants.PROOF_OF_WORK.DEFAULT.CPU, + 1, + constants.PROOF_OF_WORK.DEFAULT.PREFIX, + 0, + 0, + constants.CONTRACT.DEFAULT.C, + constants.CONTRACT.DEFAULT.DT, + constants.CONTRACT.DEFAULT.DT_REEVAL, + 0, + constants.CONTRACT.DEFAULT.UD0, + 0, + 0, + constants.CONTRACT.DEFAULT.STEPMAX, + constants.CONTRACT.DEFAULT.SIGPERIOD, + constants.CONTRACT.DEFAULT.SIGREPLAY, + 0, + constants.CONTRACT.DEFAULT.SIGVALIDITY, + constants.CONTRACT.DEFAULT.MSVALIDITY, + constants.CONTRACT.DEFAULT.SIGQTY, + constants.CONTRACT.DEFAULT.SIGSTOCK, + constants.CONTRACT.DEFAULT.X_PERCENT, + constants.CONTRACT.DEFAULT.PERCENTROT, + constants.CONTRACT.DEFAULT.POWDELAY, + constants.CONTRACT.DEFAULT.AVGGENTIME, + constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, + false, + 3000, + false, + constants.BRANCHES.DEFAULT_WINDOW_SIZE, + constants.CONTRACT.DEFAULT.IDTYWINDOW, + constants.CONTRACT.DEFAULT.MSWINDOW, + constants.CONTRACT.DEFAULT.SIGWINDOW, + 0, + { pub: "", sec: "" }, + null, + "", + "", + 0, + "", + "", + "", + "", + 0, + "", + "", + null, + false, + "", + true, + true, + false, + 100, + new ProxiesConf(), + undefined + ); } static defaultConf() { /*return new ConfDTO("", "", [], [], 0, 3600 * 1000, constants.PROOF_OF_WORK.DEFAULT.CPU, 1, constants.PROOF_OF_WORK.DEFAULT.PREFIX, 0, 0, constants.CONTRACT.DEFAULT.C, constants.CONTRACT.DEFAULT.DT, constants.CONTRACT.DEFAULT.DT_REEVAL, 0, constants.CONTRACT.DEFAULT.UD0, 0, 0, constants.CONTRACT.DEFAULT.STEPMAX, constants.CONTRACT.DEFAULT.SIGPERIOD, 0, constants.CONTRACT.DEFAULT.SIGVALIDITY, constants.CONTRACT.DEFAULT.MSVALIDITY, constants.CONTRACT.DEFAULT.SIGQTY, constants.CONTRACT.DEFAULT.SIGSTOCK, constants.CONTRACT.DEFAULT.X_PERCENT, constants.CONTRACT.DEFAULT.PERCENTROT, constants.CONTRACT.DEFAULT.POWDELAY, constants.CONTRACT.DEFAULT.AVGGENTIME, constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, false, 3000, false, constants.BRANCHES.DEFAULT_WINDOW_SIZE, constants.CONTRACT.DEFAULT.IDTYWINDOW, constants.CONTRACT.DEFAULT.MSWINDOW, constants.CONTRACT.DEFAULT.SIGWINDOW, 0, { pub:'', sec:'' }, null, "", "", 0, "", "", "", "", 0, "", "", null, false, "", true, true)*/ return { - "currency": null, - "endpoints": [], - "rmEndpoints": [], - "upInterval": 3600 * 1000, - "c": constants.CONTRACT.DEFAULT.C, - "dt": constants.CONTRACT.DEFAULT.DT, - "dtReeval": 
constants.CONTRACT.DEFAULT.DT_REEVAL, - "ud0": constants.CONTRACT.DEFAULT.UD0, - "stepMax": constants.CONTRACT.DEFAULT.STEPMAX, - "sigPeriod": constants.CONTRACT.DEFAULT.SIGPERIOD, - "sigReplay": constants.CONTRACT.DEFAULT.SIGREPLAY, - "sigValidity": constants.CONTRACT.DEFAULT.SIGVALIDITY, - "msValidity": constants.CONTRACT.DEFAULT.MSVALIDITY, - "sigQty": constants.CONTRACT.DEFAULT.SIGQTY, - "xpercent": constants.CONTRACT.DEFAULT.X_PERCENT, - "percentRot": constants.CONTRACT.DEFAULT.PERCENTROT, - "powDelay": constants.CONTRACT.DEFAULT.POWDELAY, - "avgGenTime": constants.CONTRACT.DEFAULT.AVGGENTIME, - "dtDiffEval": constants.CONTRACT.DEFAULT.DTDIFFEVAL, - "medianTimeBlocks": constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, - "httplogs": false, - "udid2": false, - "timeout": 3000, - "isolate": false, - "forksize": constants.BRANCHES.DEFAULT_WINDOW_SIZE, - "switchOnHeadAdvance": CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS, - "nonWoTPeersLimit": CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT, + currency: null, + endpoints: [], + rmEndpoints: [], + upInterval: 3600 * 1000, + c: constants.CONTRACT.DEFAULT.C, + dt: constants.CONTRACT.DEFAULT.DT, + dtReeval: constants.CONTRACT.DEFAULT.DT_REEVAL, + ud0: constants.CONTRACT.DEFAULT.UD0, + stepMax: constants.CONTRACT.DEFAULT.STEPMAX, + sigPeriod: constants.CONTRACT.DEFAULT.SIGPERIOD, + sigReplay: constants.CONTRACT.DEFAULT.SIGREPLAY, + sigValidity: constants.CONTRACT.DEFAULT.SIGVALIDITY, + msValidity: constants.CONTRACT.DEFAULT.MSVALIDITY, + sigQty: constants.CONTRACT.DEFAULT.SIGQTY, + xpercent: constants.CONTRACT.DEFAULT.X_PERCENT, + percentRot: constants.CONTRACT.DEFAULT.PERCENTROT, + powDelay: constants.CONTRACT.DEFAULT.POWDELAY, + avgGenTime: constants.CONTRACT.DEFAULT.AVGGENTIME, + dtDiffEval: constants.CONTRACT.DEFAULT.DTDIFFEVAL, + medianTimeBlocks: constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, + httplogs: false, + udid2: false, + timeout: 3000, + isolate: false, + forksize: constants.BRANCHES.DEFAULT_WINDOW_SIZE, + switchOnHeadAdvance: CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS, + nonWoTPeersLimit: CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT, }; } - static complete(conf:any) { - return Underscore.extend(ConfDTO.defaultConf(), conf) + static complete(conf: any) { + return Underscore.extend(ConfDTO.defaultConf(), conf); } -} \ No newline at end of file +} diff --git a/app/lib/dto/IdentityDTO.ts b/app/lib/dto/IdentityDTO.ts index 5671987b0c2801dd1f820cae2d821a408ec48048..7ce0f76dc2bbcb93701a71109885fe3f475dabe5 100644 --- a/app/lib/dto/IdentityDTO.ts +++ b/app/lib/dto/IdentityDTO.ts @@ -11,37 +11,36 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
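A minimal usage sketch of the ConfDTO.complete() helper touched above, assuming it is compiled from a sibling module inside the repository (relative import path as in app/lib/dto/); the configuration values are illustrative only. complete() overlays the supplied partial object onto defaultConf() via Underscore.extend, so omitted keys keep their defaults.

// Sketch only: illustrative values, repo-relative import assumed.
import { ConfDTO } from "../dto/ConfDTO";

// Provide just the keys we care about...
const conf = ConfDTO.complete({ currency: "g1-test", avgGenTime: 300 });

// ...every other key falls back to defaultConf().
console.log(conf.currency);   // "g1-test"
console.log(conf.avgGenTime); // 300
console.log(conf.timeout);    // 3000 (default)
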
-import {RevocationDTO} from "./RevocationDTO" -import {hashf} from "../common" -import {DBIdentity, NewDBIdentity} from "../dal/sqliteDAL/IdentityDAL" +import { RevocationDTO } from "./RevocationDTO"; +import { hashf } from "../common"; +import { DBIdentity, NewDBIdentity } from "../dal/sqliteDAL/IdentityDAL"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export interface HashableIdentity { - created_on: string - uid: string - pub: string + created_on: string; + uid: string; + pub: string; } export interface BasicIdentity { - buid: string - uid: string - pubkey: string - sig: string + buid: string; + uid: string; + pubkey: string; + sig: string; } export interface BasicRevocableIdentity { - buid: string - uid: string - pubkey: string - sig: string - member: boolean - wasMember: boolean - expires_on: number + buid: string; + uid: string; + pubkey: string; + sig: string; + member: boolean; + wasMember: boolean; + expires_on: number; } export class IdentityDTO { - constructor( public version: number, public currency: string, @@ -52,38 +51,38 @@ export class IdentityDTO { ) {} get hash() { - return this.getTargetHash() + return this.getTargetHash(); } private getTargetHash() { - return hashf(this.uid + this.buid + this.pubkey) + return hashf(this.uid + this.buid + this.pubkey); } inline() { - return [this.pubkey, this.sig, this.buid, this.uid].join(':') + return [this.pubkey, this.sig, this.buid, this.uid].join(":"); } rawWithoutSig() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Identity\n" - raw += "Currency: " + this.currency + "\n" - raw += "Issuer: " + this.pubkey + "\n" - raw += "UniqueID: " + this.uid + '\n' - raw += "Timestamp: " + this.buid + '\n' - return raw + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Identity\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Issuer: " + this.pubkey + "\n"; + raw += "UniqueID: " + this.uid + "\n"; + raw += "Timestamp: " + this.buid + "\n"; + return raw; } getRawUnSigned() { - return this.rawWithoutSig() + return this.rawWithoutSig(); } getRawSigned() { - return this.rawWithoutSig() + this.sig + "\n" + return this.rawWithoutSig() + this.sig + "\n"; } - static fromInline(inline:string, currency:string = ""): IdentityDTO { - const [pubkey, sig, buid, uid] = inline.split(':') + static fromInline(inline: string, currency: string = ""): IdentityDTO { + const [pubkey, sig, buid, uid] = inline.split(":"); return new IdentityDTO( DEFAULT_DOCUMENT_VERSION, currency, @@ -91,14 +90,14 @@ export class IdentityDTO { sig, buid, uid - ) + ); } - static getTargetHash(idty:HashableIdentity) { - return hashf(idty.uid + idty.created_on + idty.pub) + static getTargetHash(idty: HashableIdentity) { + return hashf(idty.uid + idty.created_on + idty.pub); } - static fromJSONObject(obj:any) { + static fromJSONObject(obj: any) { return new IdentityDTO( obj.version || DEFAULT_DOCUMENT_VERSION, obj.currency, @@ -106,10 +105,10 @@ export class IdentityDTO { obj.signature || obj.sig, obj.buid || obj.blockstamp, obj.uid - ) + ); } - static fromBasicIdentity(basic:BasicIdentity): DBIdentity { + static fromBasicIdentity(basic: BasicIdentity): DBIdentity { return new NewDBIdentity( basic.pubkey, basic.sig, @@ -118,12 +117,12 @@ export class IdentityDTO { IdentityDTO.getTargetHash({ pub: basic.pubkey, created_on: basic.buid, - uid: basic.uid + uid: basic.uid, }) - ) + ); } - static fromRevocation(revoc:RevocationDTO): DBIdentity { + static fromRevocation(revoc: RevocationDTO): 
DBIdentity { return new NewDBIdentity( revoc.pubkey, revoc.idty_sig, @@ -132,12 +131,12 @@ export class IdentityDTO { IdentityDTO.getTargetHash({ pub: revoc.pubkey, created_on: revoc.idty_buid, - uid: revoc.idty_uid + uid: revoc.idty_uid, }) - ) + ); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } -} \ No newline at end of file +} diff --git a/app/lib/dto/Jsonable.ts b/app/lib/dto/Jsonable.ts index fa028a88d5cfa103ae7cc2a6bfb3ff9b9a2737a6..1a5ba0a0dd2c810d9c2ae7c67842f3f96ec94e03 100644 --- a/app/lib/dto/Jsonable.ts +++ b/app/lib/dto/Jsonable.ts @@ -12,5 +12,5 @@ // GNU Affero General Public License for more details. export interface Jsonable { - json(): any -} \ No newline at end of file + json(): any; +} diff --git a/app/lib/dto/MembershipDTO.ts b/app/lib/dto/MembershipDTO.ts index 267106ab0d634bd17bf9162b771fcfcfda748db6..e2b1de529790cf68b572ee81ec522740be75c525 100644 --- a/app/lib/dto/MembershipDTO.ts +++ b/app/lib/dto/MembershipDTO.ts @@ -11,59 +11,58 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as moment from "moment" -import {IdentityDTO} from "./IdentityDTO" -import {Cloneable} from "./Cloneable"; -import {hashf} from "../common"; +import * as moment from "moment"; +import { IdentityDTO } from "./IdentityDTO"; +import { Cloneable } from "./Cloneable"; +import { hashf } from "../common"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export class MembershipDTO implements Cloneable { - clone(): any { - return MembershipDTO.fromJSONObject(this) + return MembershipDTO.fromJSONObject(this); } - sigDate?:number - date?:number + sigDate?: number; + date?: number; constructor( public version: number, public currency: string, public issuer: string, public type: string, - public blockstamp:string, - public userid:string, - public certts:string, - public signature:string + public blockstamp: string, + public userid: string, + public certts: string, + public signature: string ) {} get pubkey() { - return this.issuer + return this.issuer; } get pub() { - return this.issuer + return this.issuer; } get membership() { - return this.type + return this.type; } get fpr() { - return this.blockstamp.split('-')[1] + return this.blockstamp.split("-")[1]; } get number() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } get block_number() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } get block_hash() { - return this.blockstamp.split('-')[1] + return this.blockstamp.split("-")[1]; } inline() { @@ -72,33 +71,33 @@ export class MembershipDTO implements Cloneable { this.signature, this.blockstamp, this.certts, - this.userid - ].join(':') + this.userid, + ].join(":"); } getIdtyHash() { return IdentityDTO.getTargetHash({ created_on: this.certts, uid: this.userid, - pub: this.issuer - }) + pub: this.issuer, + }); } getRaw() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Membership\n" - raw += "Currency: " + this.currency + "\n" - raw += "Issuer: " + this.issuer + "\n" - raw += "Block: " + this.blockstamp + "\n" - raw += "Membership: " + this.type + "\n" - raw += "UserID: " + this.userid + "\n" - raw += "CertTS: " + this.certts + "\n" - return raw + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Membership\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Issuer: " + this.issuer + "\n"; + raw += "Block: " + this.blockstamp + "\n"; + raw += 
"Membership: " + this.type + "\n"; + raw += "UserID: " + this.userid + "\n"; + raw += "CertTS: " + this.certts + "\n"; + return raw; } getRawSigned() { - return this.getRaw() + this.signature + "\n" + return this.getRaw() + this.signature + "\n"; } json() { @@ -111,13 +110,17 @@ export class MembershipDTO implements Cloneable { membership: this.type, date: this.date && moment(this.date).unix(), sigDate: this.sigDate && moment(this.sigDate).unix(), - raw: this.getRaw() - } + raw: this.getRaw(), + }, }; } - static fromInline(inlineMS:string, type:string = "", currency:string = "") { - const [issuer, sig, blockstamp, certts, userid] = inlineMS.split(':'); + static fromInline( + inlineMS: string, + type: string = "", + currency: string = "" + ) { + const [issuer, sig, blockstamp, certts, userid] = inlineMS.split(":"); return new MembershipDTO( DEFAULT_DOCUMENT_VERSION, currency, @@ -127,23 +130,23 @@ export class MembershipDTO implements Cloneable { userid, certts, sig - ) + ); } - static fromJSONObject(obj:any) { + static fromJSONObject(obj: any) { return new MembershipDTO( obj.version || DEFAULT_DOCUMENT_VERSION, obj.currency, obj.issuer || obj.pubkey, - obj.type || obj.membership, - obj.blockstamp || obj.block, + obj.type || obj.membership, + obj.blockstamp || obj.block, obj.userid, obj.certts, obj.signature - ) + ); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } } diff --git a/app/lib/dto/MerkleDTO.ts b/app/lib/dto/MerkleDTO.ts index 1d6d136343fa40d2cf3f0b528959804be82b8bec..cdc62e8d5275580b38ffd4fd2a62e371bc5436ed 100644 --- a/app/lib/dto/MerkleDTO.ts +++ b/app/lib/dto/MerkleDTO.ts @@ -13,16 +13,15 @@ "use strict"; -const merkle = require('merkle'); +const merkle = require("merkle"); export class MerkleDTO { + private levels: any[]; + nodes: any[]; + depth: number; - private levels:any[] - nodes:any[] - depth:number - - initialize(leaves:string[]) { - const tree = merkle('sha256').sync(leaves); + initialize(leaves: string[]) { + const tree = merkle("sha256").sync(leaves); this.depth = tree.depth(); this.nodes = tree.nodes(); this.levels = []; @@ -32,12 +31,12 @@ export class MerkleDTO { return this; } - remove(leaf:string) { + remove(leaf: string) { // If leaf IS present - if(~this.levels[this.depth].indexOf(leaf)){ + if (~this.levels[this.depth].indexOf(leaf)) { const leaves = this.leaves(); const index = leaves.indexOf(leaf); - if(~index){ + if (~index) { // Replacement: remove previous hash leaves.splice(index, 1); } @@ -46,13 +45,13 @@ export class MerkleDTO { } } - removeMany(leaves:string[]) { - leaves.forEach((leaf:string) => { + removeMany(leaves: string[]) { + leaves.forEach((leaf: string) => { // If leaf IS present - if(~this.levels[this.depth].indexOf(leaf)){ + if (~this.levels[this.depth].indexOf(leaf)) { const theLeaves = this.leaves(); const index = theLeaves.indexOf(leaf); - if(~index){ + if (~index) { // Replacement: remove previous hash theLeaves.splice(index, 1); } @@ -60,16 +59,16 @@ export class MerkleDTO { }); leaves.sort(); this.initialize(leaves); - }; + } - push(leaf:string, previous:string) { + push(leaf: string, previous: string) { // If leaf is not present - if(this.levels[this.depth].indexOf(leaf) == -1){ + if (this.levels[this.depth].indexOf(leaf) == -1) { const leaves = this.leaves(); // Update or replacement ? 
- if(previous && leaf != previous){ + if (previous && leaf != previous) { const index = leaves.indexOf(previous); - if(~index){ + if (~index) { // Replacement: remove previous hash leaves.splice(index, 1); } @@ -80,10 +79,10 @@ export class MerkleDTO { } } - pushMany(leaves:string[]) { + pushMany(leaves: string[]) { leaves.forEach((leaf) => { // If leaf is not present - if(this.levels[this.depth].indexOf(leaf) == -1){ + if (this.levels[this.depth].indexOf(leaf) == -1) { this.leaves().push(leaf); } }); @@ -92,14 +91,14 @@ export class MerkleDTO { } root() { - return this.levels.length > 0 ? this.levels[0][0] : '' + return this.levels.length > 0 ? this.levels[0][0] : ""; } leaves() { - return this.levels[this.depth] + return this.levels[this.depth]; } count() { - return this.leaves().length + return this.leaves().length; } } diff --git a/app/lib/dto/PeerDTO.ts b/app/lib/dto/PeerDTO.ts index c241736d526a8bf8f3d09d3742c355320cd505f8..50c9b109ca56623ea955084adced5ef366bb6bb8 100644 --- a/app/lib/dto/PeerDTO.ts +++ b/app/lib/dto/PeerDTO.ts @@ -11,73 +11,74 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {hashf} from "../common" -import {CommonConstants} from "../common-libs/constants" -import {Cloneable} from "./Cloneable" -import {DBPeer} from "../db/DBPeer" +import { hashf } from "../common"; +import { CommonConstants } from "../common-libs/constants"; +import { Cloneable } from "./Cloneable"; +import { DBPeer } from "../db/DBPeer"; export interface WS2PEndpoint { - version:number - uuid:string - host:string - port:number - path:string + version: number; + uuid: string; + host: string; + port: number; + path: string; } export class PeerDTO implements Cloneable { - clone(): any { - return PeerDTO.fromJSONObject(this) + return PeerDTO.fromJSONObject(this); } - member = false + member = false; constructor( - public version:number, - public currency:string, - public pubkey:string, - public blockstamp:string, - public endpoints:string[], - public signature:string, - public status:string, - public statusTS:number, + public version: number, + public currency: string, + public pubkey: string, + public blockstamp: string, + public endpoints: string[], + public signature: string, + public status: string, + public statusTS: number, member = false ) { - this.member = member + this.member = member; } get block() { - return this.blockstamp + return this.blockstamp; } blockNumber() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } keyID() { - return this.pubkey && this.pubkey.length > 10 ? this.pubkey.substring(0, 10) : "Unknown" + return this.pubkey && this.pubkey.length > 10 + ? 
this.pubkey.substring(0, 10) + : "Unknown"; } getRawUnsigned() { - return this.getRaw() + return this.getRaw(); } getRaw() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Peer\n" - raw += "Currency: " + this.currency + "\n" - raw += "PublicKey: " + this.pubkey + "\n" - raw += "Block: " + this.blockstamp + "\n" - raw += "Endpoints:" + "\n" - for(const ep of this.endpoints) { - raw += ep + "\n" + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Peer\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "PublicKey: " + this.pubkey + "\n"; + raw += "Block: " + this.blockstamp + "\n"; + raw += "Endpoints:" + "\n"; + for (const ep of this.endpoints) { + raw += ep + "\n"; } - return raw + return raw; } getRawSigned() { - return this.getRaw() + this.signature + "\n" + return this.getRaw() + this.signature + "\n"; } json() { @@ -89,113 +90,152 @@ export class PeerDTO implements Cloneable { block: this.block, signature: this.signature, raw: this.getRawSigned(), - pubkey: this.pubkey - } + pubkey: this.pubkey, + }; } getBMA() { - let bma: { dns?: string, ipv4?: string, ipv6?: string, port?: number, path?: string } = {} - let notFound = true + let bma: { + dns?: string; + ipv4?: string; + ipv6?: string; + port?: number; + path?: string; + } = {}; + let notFound = true; this.endpoints.forEach((ep) => { const matchesBMA = notFound && ep.match(CommonConstants.BMA_REGEXP); const matchesBMAS = notFound && ep.match(CommonConstants.BMAS_REGEXP); if (matchesBMA) { - notFound = false + notFound = false; bma = { - "dns": matchesBMA[2] || '', - "ipv4": matchesBMA[4] || '', - "ipv6": matchesBMA[6] || '', - "port": parseInt(matchesBMA[8]) || 9101 + dns: matchesBMA[2] || "", + ipv4: matchesBMA[4] || "", + ipv6: matchesBMA[6] || "", + port: parseInt(matchesBMA[8]) || 9101, }; - } - else if (matchesBMAS) { - notFound = false + } else if (matchesBMAS) { + notFound = false; bma = { - "dns": matchesBMAS[2] || '', - "ipv4": matchesBMAS[4] || '', - "ipv6": matchesBMAS[6] || '', - "port": parseInt(matchesBMAS[8]) || 9101, - "path": matchesBMAS[10] || '' + dns: matchesBMAS[2] || "", + ipv4: matchesBMAS[4] || "", + ipv6: matchesBMAS[6] || "", + port: parseInt(matchesBMAS[8]) || 9101, + path: matchesBMAS[10] || "", }; } }); - return bma + return bma; } - getOnceWS2PEndpoint(canReachTorEp:boolean, canReachClearEp:boolean, uuidExcluded:string[] = []) { - let api:WS2PEndpoint|null = null - let bestWS2PVersionAvailable:number = 0 - let bestWS2PTORVersionAvailable:number = 0 + getOnceWS2PEndpoint( + canReachTorEp: boolean, + canReachClearEp: boolean, + uuidExcluded: string[] = [] + ) { + let api: WS2PEndpoint | null = null; + let bestWS2PVersionAvailable: number = 0; + let bestWS2PTORVersionAvailable: number = 0; for (const ep of this.endpoints) { if (canReachTorEp) { - let matches:RegExpMatchArray | null = ep.match(CommonConstants.WS2PTOR_V2_REGEXP) - if (matches && parseInt(matches[1]) > bestWS2PTORVersionAvailable && (uuidExcluded.indexOf(matches[2]) === -1)) { - bestWS2PTORVersionAvailable = parseInt(matches[1]) + let matches: RegExpMatchArray | null = ep.match( + CommonConstants.WS2PTOR_V2_REGEXP + ); + if ( + matches && + parseInt(matches[1]) > bestWS2PTORVersionAvailable && + uuidExcluded.indexOf(matches[2]) === -1 + ) { + bestWS2PTORVersionAvailable = parseInt(matches[1]); api = { version: parseInt(matches[1]), uuid: matches[2], - host: matches[3] || '', + host: matches[3] || "", port: parseInt(matches[4]) || 0, - path: matches[5] - } + path: matches[5], + }; } else 
{ - matches = ep.match(CommonConstants.WS2PTOR_REGEXP) - if (matches && bestWS2PTORVersionAvailable == 0 && (uuidExcluded.indexOf(matches[1]) === -1)) { - bestWS2PTORVersionAvailable = 1 + matches = ep.match(CommonConstants.WS2PTOR_REGEXP); + if ( + matches && + bestWS2PTORVersionAvailable == 0 && + uuidExcluded.indexOf(matches[1]) === -1 + ) { + bestWS2PTORVersionAvailable = 1; api = { version: 1, uuid: matches[1], - host: matches[2] || '', + host: matches[2] || "", port: parseInt(matches[3]) || 0, - path: matches[4] - } + path: matches[4], + }; } } } // If can reach clear endpoint and not found tor endpoint if (canReachClearEp && bestWS2PTORVersionAvailable == 0) { - let matches:any = ep.match(CommonConstants.WS2P_V2_REGEXP) - if (matches && parseInt(matches[1]) > bestWS2PVersionAvailable && (uuidExcluded.indexOf(matches[2]) === -1)) { - bestWS2PVersionAvailable = parseInt(matches[1]) + let matches: any = ep.match(CommonConstants.WS2P_V2_REGEXP); + if ( + matches && + parseInt(matches[1]) > bestWS2PVersionAvailable && + uuidExcluded.indexOf(matches[2]) === -1 + ) { + bestWS2PVersionAvailable = parseInt(matches[1]); api = { version: parseInt(matches[1]), uuid: matches[2], - host: matches[3] || '', + host: matches[3] || "", port: parseInt(matches[4]) || 0, - path: matches[5] - } + path: matches[5], + }; } else { - matches = ep.match(CommonConstants.WS2P_REGEXP) - if (matches && bestWS2PVersionAvailable == 0 && (uuidExcluded.indexOf(matches[1]) === -1)) { - bestWS2PVersionAvailable = 1 + matches = ep.match(CommonConstants.WS2P_REGEXP); + if ( + matches && + bestWS2PVersionAvailable == 0 && + uuidExcluded.indexOf(matches[1]) === -1 + ) { + bestWS2PVersionAvailable = 1; api = { version: 1, uuid: matches[1], - host: matches[2] || '', + host: matches[2] || "", port: parseInt(matches[3]) || 0, - path: matches[4] - } + path: matches[4], + }; } } } } - return api || null + return api || null; } - getAllWS2PEndpoints(canReachTorEp:boolean, canReachClearEp:boolean, myUUID:string) { - let apis:WS2PEndpoint[] = [] - let uuidExcluded:string[] = [myUUID] - let api = this.getOnceWS2PEndpoint(canReachTorEp, canReachClearEp, uuidExcluded) + getAllWS2PEndpoints( + canReachTorEp: boolean, + canReachClearEp: boolean, + myUUID: string + ) { + let apis: WS2PEndpoint[] = []; + let uuidExcluded: string[] = [myUUID]; + let api = this.getOnceWS2PEndpoint( + canReachTorEp, + canReachClearEp, + uuidExcluded + ); while (api !== null) { - uuidExcluded.push(api.uuid) - apis.push(api) - api = this.getOnceWS2PEndpoint(canReachTorEp, canReachClearEp, uuidExcluded) + uuidExcluded.push(api.uuid); + apis.push(api); + api = this.getOnceWS2PEndpoint( + canReachTorEp, + canReachClearEp, + uuidExcluded + ); } - return apis + return apis; } getFirstNonTorWS2P() { - return this.getOnceWS2PEndpoint(false, true) + return this.getOnceWS2PEndpoint(false, true); } getDns() { @@ -225,102 +265,116 @@ export class PeerDTO implements Cloneable { getHostPreferDNS() { const bma = this.getBMA(); - return (bma.dns ? bma.dns : - (bma.ipv4 ? bma.ipv4 : - (bma.ipv6 ? bma.ipv6 : ''))) + return bma.dns ? bma.dns : bma.ipv4 ? bma.ipv4 : bma.ipv6 ? 
bma.ipv6 : ""; } getURL() { const bma = this.getBMA(); let base = this.getHostPreferDNS(); - if(bma.port) - base += ':' + bma.port; + if (bma.port) base += ":" + bma.port; return base; } - hasValid4(bma:any) { - return !!(bma.ipv4 && !bma.ipv4.match(/^127.0/) && !bma.ipv4.match(/^192.168/)) + hasValid4(bma: any) { + return !!( + bma.ipv4 && + !bma.ipv4.match(/^127.0/) && + !bma.ipv4.match(/^192.168/) + ); } getNamedURL() { - return this.getURL() + return this.getURL(); } isReachable() { - return !!(this.getURL()) + return !!this.getURL(); } - containsEndpoint(ep:string) { - return this.endpoints.reduce((found:boolean, endpoint:string) => found || endpoint == ep, false) + containsEndpoint(ep: string) { + return this.endpoints.reduce( + (found: boolean, endpoint: string) => found || endpoint == ep, + false + ); } - containsAllEndpoints(endpoints:string[]) { + containsAllEndpoints(endpoints: string[]) { for (const ep of endpoints) { if (!this.containsEndpoint(ep)) { - return false + return false; } } - return true + return true; } endpointSum() { - return this.endpoints.join('_') + return this.endpoints.join("_"); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } toDBPeer(): DBPeer { - const p = new DBPeer() - p.version = this.version - p.currency = this.currency - p.status = this.status || "DOWN" - p.statusTS = this.statusTS || 0 - p.hash = this.getHash() - p.first_down = 0 - p.last_try = 0 - p.pubkey = this.pubkey - p.block = this.block - p.signature = this.signature - p.endpoints = this.endpoints - p.raw = this.getRawSigned() - return p - } - - static blockNumber(blockstamp:string) { - return parseInt(blockstamp) - } - - static fromDBPeer(p:DBPeer) { - return new PeerDTO(p.version, p.currency, p.pubkey, p.block, p.endpoints, p.signature, p.status, p.statusTS, false) - } - - static fromJSONObject(obj:any) { + const p = new DBPeer(); + p.version = this.version; + p.currency = this.currency; + p.status = this.status || "DOWN"; + p.statusTS = this.statusTS || 0; + p.hash = this.getHash(); + p.first_down = 0; + p.last_try = 0; + p.pubkey = this.pubkey; + p.block = this.block; + p.signature = this.signature; + p.endpoints = this.endpoints; + p.raw = this.getRawSigned(); + return p; + } + + static blockNumber(blockstamp: string) { + return parseInt(blockstamp); + } + + static fromDBPeer(p: DBPeer) { + return new PeerDTO( + p.version, + p.currency, + p.pubkey, + p.block, + p.endpoints, + p.signature, + p.status, + p.statusTS, + false + ); + } + + static fromJSONObject(obj: any) { return new PeerDTO( parseInt(obj.version), obj.currency || "", obj.pubkey || obj.pub || obj.issuer || "", obj.blockstamp || obj.block, - obj.endpoints || [], + obj.endpoints || [], obj.signature || obj.sig, obj.status || "DOWN", obj.statusTS || 0, obj.member - ) + ); } - static endpoint2host(endpoint:string) { - return PeerDTO.fromJSONObject({ endpoints: [endpoint] }).getURL() + static endpoint2host(endpoint: string) { + return PeerDTO.fromJSONObject({ endpoints: [endpoint] }).getURL(); } - static indexOfFirst(endpoints:string[], intoEndpoints:string[]) { + static indexOfFirst(endpoints: string[], intoEndpoints: string[]) { for (let i = 0; i < intoEndpoints.length; i++) { - const index = endpoints.indexOf(intoEndpoints[i]) + const index = endpoints.indexOf(intoEndpoints[i]); if (index !== -1) { - return index + return index; } } - return 0 + return 0; } -} \ No newline at end of file +} diff --git a/app/lib/dto/RevocationDTO.ts b/app/lib/dto/RevocationDTO.ts index 
1185188dafe53668ee0b3aa4d05112795a63f0a7..284294ce2d941eba4dd789d78d11e9cde7f514b0 100644 --- a/app/lib/dto/RevocationDTO.ts +++ b/app/lib/dto/RevocationDTO.ts @@ -11,20 +11,19 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {Cloneable} from "./Cloneable"; -import {hashf} from "../common"; +import { Cloneable } from "./Cloneable"; +import { hashf } from "../common"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export interface ShortRevocation { - pubkey: string - revocation: string + pubkey: string; + revocation: string; } export class RevocationDTO implements ShortRevocation, Cloneable { - clone(): any { - return RevocationDTO.fromJSONObject(this) + return RevocationDTO.fromJSONObject(this); } constructor( @@ -38,38 +37,38 @@ export class RevocationDTO implements ShortRevocation, Cloneable { ) {} rawWithoutSig() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Revocation\n" - raw += "Currency: " + this.currency + "\n" - raw += "Issuer: " + this.pubkey + "\n" - raw += "IdtyUniqueID: " + this.idty_uid+ '\n' - raw += "IdtyTimestamp: " + this.idty_buid + '\n' - raw += "IdtySignature: " + this.idty_sig + '\n' - return raw + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Revocation\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Issuer: " + this.pubkey + "\n"; + raw += "IdtyUniqueID: " + this.idty_uid + "\n"; + raw += "IdtyTimestamp: " + this.idty_buid + "\n"; + raw += "IdtySignature: " + this.idty_sig + "\n"; + return raw; } getRaw() { - return this.rawWithoutSig() + this.revocation + "\n" + return this.rawWithoutSig() + this.revocation + "\n"; } getRawUnsigned() { - return this.rawWithoutSig() + return this.rawWithoutSig(); } // TODO: to remove when BMA has been merged in duniter/duniter repo json() { return { - result: true - } + result: true, + }; } - static fromInline(inline:string): ShortRevocation { - const [pubkey, revocation] = inline.split(':') - return { pubkey, revocation } + static fromInline(inline: string): ShortRevocation { + const [pubkey, revocation] = inline.split(":"); + return { pubkey, revocation }; } - static fromJSONObject(json:any) { + static fromJSONObject(json: any) { return new RevocationDTO( json.version || DEFAULT_DOCUMENT_VERSION, json.currency, @@ -78,10 +77,10 @@ export class RevocationDTO implements ShortRevocation, Cloneable { json.idty_buid || json.buid, json.idty_sig || json.sig, json.revocation || json.revocation - ) + ); } getHash() { - return hashf(this.getRaw()) + return hashf(this.getRaw()); } -} \ No newline at end of file +} diff --git a/app/lib/dto/TransactionDTO.ts b/app/lib/dto/TransactionDTO.ts index 64fcd3ed96a1324db9ad7dff5c014e5d80f58332..9ce08ae252343048d0edc9d7e3f1147fa351910b 100644 --- a/app/lib/dto/TransactionDTO.ts +++ b/app/lib/dto/TransactionDTO.ts @@ -11,19 +11,19 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
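As a reading aid for the RevocationDTO changes above, a short sketch of the inline revocation encoding it parses; the public key and signature below are placeholders, not real credentials.

// Sketch only: the inline format is simply "PUBKEY:REVOCATION_SIG".
import { RevocationDTO } from "../dto/RevocationDTO";

const inline = "SomePubkeyPlaceholder:SomeBase64SignaturePlaceholder==";
const short = RevocationDTO.fromInline(inline);

console.log(short.pubkey);     // "SomePubkeyPlaceholder"
console.log(short.revocation); // "SomeBase64SignaturePlaceholder=="
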
-import {hashf} from "../common" -import {Cloneable} from "./Cloneable" -import {verify, verifyBuggy} from "../common-libs/crypto/keyring" +import { hashf } from "../common"; +import { Cloneable } from "./Cloneable"; +import { verify, verifyBuggy } from "../common-libs/crypto/keyring"; export interface BaseDTO { - base: number + base: number; } export class InputDTO implements BaseDTO { constructor( public amount: number, public base: number, - public type: 'T'|'D', + public type: "T" | "D", public identifier: string, public pos: number, public raw: string @@ -40,35 +40,33 @@ export class OutputDTO implements BaseDTO { } export interface TxSignatureResult { - sigs:{ - k:string - ok:boolean - }[] + sigs: { + k: string; + ok: boolean; + }[]; } export class TxSignatureResultImpl implements TxSignatureResult { - // The signature results - public sigs:{ - k:string - ok:boolean - }[] - - constructor(issuers:string[]) { - this.sigs = issuers.map(k => { - return { k, ok: false } - }) + public sigs: { + k: string; + ok: boolean; + }[]; + + constructor(issuers: string[]) { + this.sigs = issuers.map((k) => { + return { k, ok: false }; + }); } get allMatching() { - return this.sigs.reduce((ok, s) => ok && s.ok, true) + return this.sigs.reduce((ok, s) => ok && s.ok, true); } } export class TransactionDTO implements Cloneable { - clone(): any { - return TransactionDTO.fromJSONObject(this) + return TransactionDTO.fromJSONObject(this); } constructor( @@ -87,132 +85,149 @@ export class TransactionDTO implements Cloneable { ) { // Compute the hash if not given if (!hash) { - this.hash = this.getHash() + this.hash = this.getHash(); } } get signature() { - return this.signatures[0] + return this.signatures[0]; } get output_amount() { - return this.outputs.reduce((maxBase, output) => Math.max(maxBase, parseInt(output.split(':')[0])), 0) + return this.outputs.reduce( + (maxBase, output) => Math.max(maxBase, parseInt(output.split(":")[0])), + 0 + ); } get output_base() { - return this.outputs.reduce((sum, output) => sum + parseInt(output.split(':')[1]), 0) + return this.outputs.reduce( + (sum, output) => sum + parseInt(output.split(":")[1]), + 0 + ); } get blockNumber() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } get block_hash() { - return this.blockstamp.split('-')[1] + return this.blockstamp.split("-")[1]; } getLen() { - return 2 // header + blockstamp - + this.issuers.length * 2 // issuers + signatures - + this.inputs.length * 2 // inputs + unlocks - + (this.comment ? 1 : 0) - + this.outputs.length + return ( + 2 + // header + blockstamp + this.issuers.length * 2 + // issuers + signatures + this.inputs.length * 2 + // inputs + unlocks + (this.comment ? 
1 : 0) + + this.outputs.length + ); } getHash() { - const raw = TransactionDTO.toRAW(this) - return hashf(raw) + const raw = TransactionDTO.toRAW(this); + return hashf(raw); } getRawTxNoSig() { - return TransactionDTO.toRAW(this, true) + return TransactionDTO.toRAW(this, true); } inputsAsObjects(): InputDTO[] { - return this.inputs.map(input => { - const [amount, base, type, identifier, pos] = input.split(':') + return this.inputs.map((input) => { + const [amount, base, type, identifier, pos] = input.split(":"); return new InputDTO( parseInt(amount), parseInt(base), - type as 'T'|'D', + type as "T" | "D", identifier, parseInt(pos), input - ) - }) + ); + }); } outputsAsObjects(): OutputDTO[] { - return this.outputs.map(output => { - const [amount, base, conditions] = output.split(':') + return this.outputs.map((output) => { + const [amount, base, conditions] = output.split(":"); return new OutputDTO( parseInt(amount), parseInt(base), conditions, output - ) - }) + ); + }); } outputsAsRecipients(): string[] { return this.outputs.map((out) => { - const recipent = out.match('SIG\\((.*)\\)'); - return (recipent && recipent[1]) || 'UNKNOWN'; - }) + const recipent = out.match("SIG\\((.*)\\)"); + return (recipent && recipent[1]) || "UNKNOWN"; + }); } getRaw() { - let raw = "" - raw += "Version: " + (this.version) + "\n" - raw += "Type: Transaction\n" - raw += "Currency: " + this.currency + "\n" - raw += "Blockstamp: " + this.blockstamp + "\n" - raw += "Locktime: " + this.locktime + "\n" + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Transaction\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Blockstamp: " + this.blockstamp + "\n"; + raw += "Locktime: " + this.locktime + "\n"; raw += "Issuers:\n"; (this.issuers || []).forEach((issuer) => { - raw += issuer + '\n' - }) + raw += issuer + "\n"; + }); raw += "Inputs:\n"; this.inputs.forEach((input) => { - raw += input + '\n' - }) + raw += input + "\n"; + }); raw += "Unlocks:\n"; this.unlocks.forEach((unlock) => { - raw += unlock + '\n' - }) + raw += unlock + "\n"; + }); raw += "Outputs:\n"; this.outputs.forEach((output) => { - raw += output + '\n' - }) + raw += output + "\n"; + }); raw += "Comment: " + (this.comment || "") + "\n"; this.signatures.forEach((signature) => { - raw += signature + '\n' - }) - return raw + raw += signature + "\n"; + }); + return raw; } getCompactVersion() { let issuers = this.issuers; - let raw = ["TX", this.version, issuers.length, this.inputs.length, this.unlocks.length, this.outputs.length, this.comment ? 1 : 0, this.locktime || 0].join(':') + '\n'; + let raw = + [ + "TX", + this.version, + issuers.length, + this.inputs.length, + this.unlocks.length, + this.outputs.length, + this.comment ? 
1 : 0, + this.locktime || 0, + ].join(":") + "\n"; raw += this.blockstamp + "\n"; (issuers || []).forEach((issuer) => { - raw += issuer + '\n'; + raw += issuer + "\n"; }); (this.inputs || []).forEach((input) => { - raw += input + '\n'; + raw += input + "\n"; }); (this.unlocks || []).forEach((input) => { - raw += input + '\n'; + raw += input + "\n"; }); (this.outputs || []).forEach((output) => { - raw += output + '\n'; + raw += output + "\n"; }); - if (this.comment) - raw += this.comment + '\n'; + if (this.comment) raw += this.comment + "\n"; (this.signatures || []).forEach((signature) => { - raw += signature + '\n' - }) - return raw + raw += signature + "\n"; + }); + return raw; } computeAllHashes() { @@ -221,46 +236,46 @@ export class TransactionDTO implements Cloneable { json() { return { - 'version': this.version, - 'currency': this.currency, - 'issuers': this.issuers, - 'inputs': this.inputs, - 'unlocks': this.unlocks, - 'outputs': this.outputs, - 'comment': this.comment, - 'locktime': this.locktime, - 'blockstamp': this.blockstamp, - 'blockstampTime': this.blockstampTime, - 'signatures': this.signatures, - 'raw': this.getRaw(), - 'hash': this.hash - } + version: this.version, + currency: this.currency, + issuers: this.issuers, + inputs: this.inputs, + unlocks: this.unlocks, + outputs: this.outputs, + comment: this.comment, + locktime: this.locktime, + blockstamp: this.blockstamp, + blockstampTime: this.blockstampTime, + signatures: this.signatures, + raw: this.getRaw(), + hash: this.hash, + }; } getTransactionSigResult(dubp_version: number) { - const sigResult = new TxSignatureResultImpl(this.issuers.slice()) - let i = 0 - const raw = this.getRawTxNoSig() - let matching = true + const sigResult = new TxSignatureResultImpl(this.issuers.slice()); + let i = 0; + const raw = this.getRawTxNoSig(); + let matching = true; while (matching && i < this.signatures.length) { - const sig = this.signatures[i] - const pub = this.issuers[i] + const sig = this.signatures[i]; + const pub = this.issuers[i]; if (dubp_version >= 12) { - sigResult.sigs[i].ok = verify(raw, sig, pub) + sigResult.sigs[i].ok = verify(raw, sig, pub); } else { - sigResult.sigs[i].ok = verifyBuggy(raw, sig, pub) + sigResult.sigs[i].ok = verifyBuggy(raw, sig, pub); } - matching = sigResult.sigs[i].ok - i++ + matching = sigResult.sigs[i].ok; + i++; } - return sigResult + return sigResult; } checkSignatures(dubp_version: number) { - return this.getTransactionSigResult(dubp_version).allMatching + return this.getTransactionSigResult(dubp_version).allMatching; } - static fromJSONObject(obj:any, currency:string = "") { + static fromJSONObject(obj: any, currency: string = "") { return new TransactionDTO( obj.version || 10, currency || obj.currency || "", @@ -274,80 +289,80 @@ export class TransactionDTO implements Cloneable { obj.unlocks || [], obj.signatures || [], obj.comment || "" - ) + ); } - static toRAW(json:TransactionDTO, noSig = false) { - let raw = "" - raw += "Version: " + (json.version) + "\n" - raw += "Type: Transaction\n" - raw += "Currency: " + json.currency + "\n" - raw += "Blockstamp: " + json.blockstamp + "\n" - raw += "Locktime: " + json.locktime + "\n" + static toRAW(json: TransactionDTO, noSig = false) { + let raw = ""; + raw += "Version: " + json.version + "\n"; + raw += "Type: Transaction\n"; + raw += "Currency: " + json.currency + "\n"; + raw += "Blockstamp: " + json.blockstamp + "\n"; + raw += "Locktime: " + json.locktime + "\n"; raw += "Issuers:\n"; (json.issuers || []).forEach((issuer) => { - raw += issuer + 
'\n' - }) + raw += issuer + "\n"; + }); raw += "Inputs:\n"; (json.inputs || []).forEach((input) => { - raw += input + '\n' - }) + raw += input + "\n"; + }); raw += "Unlocks:\n"; (json.unlocks || []).forEach((unlock) => { - raw += unlock + '\n' - }) + raw += unlock + "\n"; + }); raw += "Outputs:\n"; - (json.outputs || []).forEach((output) => { - raw += output + '\n' - }) + (json.outputs || []).forEach((output) => { + raw += output + "\n"; + }); raw += "Comment: " + (json.comment || "") + "\n"; if (!noSig) { (json.signatures || []).forEach((signature) => { - raw += signature + '\n' - }) + raw += signature + "\n"; + }); } - return raw + return raw; } - static outputObj2Str(o:OutputDTO) { - return [o.amount, o.base, o.conditions].join(':') + static outputObj2Str(o: OutputDTO) { + return [o.amount, o.base, o.conditions].join(":"); } - static inputObj2Str(i:InputDTO) { - return [i.amount, i.base, i.type, i.identifier, i.pos].join(':') + static inputObj2Str(i: InputDTO) { + return [i.amount, i.base, i.type, i.identifier, i.pos].join(":"); } - static outputStr2Obj(outputStr:string) { - const sp = outputStr.split(':'); + static outputStr2Obj(outputStr: string) { + const sp = outputStr.split(":"); return { amount: parseInt(sp[0]), base: parseInt(sp[1]), conditions: sp[2], - raw: outputStr + raw: outputStr, }; } - static inputStr2Obj(inputStr:string) { - const sp = inputStr.split(':') + static inputStr2Obj(inputStr: string) { + const sp = inputStr.split(":"); return { - amount: sp[0], - base: sp[1], - type: sp[2], + amount: sp[0], + base: sp[1], + type: sp[2], identifier: sp[3], - pos: parseInt(sp[4]), - raw: inputStr - } + pos: parseInt(sp[4]), + raw: inputStr, + }; } - static unlock2params(unlock:string) { - const match = unlock.match(/^\d+:(.*)$/) + static unlock2params(unlock: string) { + const match = unlock.match(/^\d+:(.*)$/); if (match) { - return match[1].split(' ') + return match[1].split(" "); } - return [] + return []; } static mock() { - return new TransactionDTO(1, "", 0, "", "", 0, [], [], [], [], [], "") + return new TransactionDTO(1, "", 0, "", "", 0, [], [], [], [], [], ""); } } diff --git a/app/lib/helpers/merkle.ts b/app/lib/helpers/merkle.ts index db89f0b735164b8e5e4347b3a80c305ff1256937..81c2e9784c51636bd69e65fd058f907c5ce68c4e 100644 --- a/app/lib/helpers/merkle.ts +++ b/app/lib/helpers/merkle.ts @@ -11,14 +11,18 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -export const processForURL = async (req:any, merkle:any, valueCoroutine:any) => { +export const processForURL = async ( + req: any, + merkle: any, + valueCoroutine: any +) => { // Result - const json:any = { - "depth": merkle.depth, - "nodesCount": merkle.nodes, - "leavesCount": merkle.levels[merkle.depth].length, - "root": merkle.levels[0][0] || "", - "leaves": [] + const json: any = { + depth: merkle.depth, + nodesCount: merkle.nodes, + leavesCount: merkle.levels[merkle.depth].length, + root: merkle.levels[0][0] || "", + leaves: [], }; if (req.query.leaves) { // Leaves @@ -26,18 +30,18 @@ export const processForURL = async (req:any, merkle:any, valueCoroutine:any) => return json; } else if (req.query.leaf) { // Extract of a leaf - json.leaves = [] + json.leaves = []; const hashes = [req.query.leaf]; // This code is in a loop for historic reasons. Should be set to non-loop style. 
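Looking back at the TransactionDTO reformat above, a small sketch of the colon-separated source encodings it parses; the identifier hash and recipient key below are placeholders.

// Sketch only: "amount:base:type:identifier:pos" inputs and
// "amount:base:conditions" outputs, as split by the helpers above.
import { TransactionDTO } from "../dto/TransactionDTO";

const input = TransactionDTO.inputStr2Obj(
  "1000:0:T:PLACEHOLDER_TX_HASH:1"
);
// -> { amount: "1000", base: "0", type: "T", identifier: "PLACEHOLDER_TX_HASH", pos: 1, raw: "..." }

const output = TransactionDTO.outputStr2Obj("1000:0:SIG(RecipientPubkeyPlaceholder)");
// -> { amount: 1000, base: 0, conditions: "SIG(RecipientPubkeyPlaceholder)", raw: "..." }
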
const values = await valueCoroutine(hashes); hashes.forEach((hash) => { json.leaf = { - "hash": hash, - "value": values[hash] || "" + hash: hash, + value: values[hash] || "", }; }); return json; } else { return json; } -} +}; diff --git a/app/lib/rules/global_rules.ts b/app/lib/rules/global_rules.ts index 21779706ac242ef610004fd1b321625be956fe19..f2ccd623d457fa49a861c1b01602e056a05045e6 100644 --- a/app/lib/rules/global_rules.ts +++ b/app/lib/rules/global_rules.ts @@ -11,112 +11,144 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../dto/ConfDTO" -import {FileDAL} from "../dal/fileDAL" -import {DBBlock} from "../db/DBBlock" -import {TransactionDTO, TxSignatureResult} from "../dto/TransactionDTO" -import {BlockDTO} from "../dto/BlockDTO" -import {verifyBuggy} from "../common-libs/crypto/keyring" -import {rawer, txunlock} from "../common-libs/index" -import {CommonConstants} from "../common-libs/constants" -import {IdentityDTO} from "../dto/IdentityDTO" -import {hashf} from "../common" -import {Indexer, SimpleTxInput} from "../indexer" -import {DBTx} from "../db/DBTx" -import {Tristamp} from "../common/Tristamp" -import {DataErrors} from "../common-libs/errors" +import { ConfDTO } from "../dto/ConfDTO"; +import { FileDAL } from "../dal/fileDAL"; +import { DBBlock } from "../db/DBBlock"; +import { TransactionDTO, TxSignatureResult } from "../dto/TransactionDTO"; +import { BlockDTO } from "../dto/BlockDTO"; +import { verifyBuggy } from "../common-libs/crypto/keyring"; +import { rawer, txunlock } from "../common-libs/index"; +import { CommonConstants } from "../common-libs/constants"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { hashf } from "../common"; +import { Indexer, SimpleTxInput } from "../indexer"; +import { DBTx } from "../db/DBTx"; +import { Tristamp } from "../common/Tristamp"; +import { DataErrors } from "../common-libs/errors"; -const constants = CommonConstants +const constants = CommonConstants; // Empty logger by default let logger = { - debug: (...args:any[]) => {}, - warn: (...args:any[]) => {} -} + debug: (...args: any[]) => {}, + warn: (...args: any[]) => {}, +}; // TODO: all the global rules should be replaced by index rule someday export interface ParamEval { - successful:boolean - funcName:string - parameter:string + successful: boolean; + funcName: string; + parameter: string; } -export function evalParams(params:string[], conditions = '', sigResult:TxSignatureResult): ParamEval[] { - const res:ParamEval[] = [] - const issuers = sigResult.sigs.map(s => s.k) +export function evalParams( + params: string[], + conditions = "", + sigResult: TxSignatureResult +): ParamEval[] { + const res: ParamEval[] = []; + const issuers = sigResult.sigs.map((s) => s.k); for (const func of params) { if (func.match(/^SIG/)) { - const param = (func.match(/^SIG\((.*)\)$/) as string[])[1] - const index = parseInt(param) - const sigEntry = !isNaN(index) && index < issuers.length && sigResult.sigs[index] - const signatory:{ k:string, ok:boolean } = sigEntry || { k: '', ok: false } + const param = (func.match(/^SIG\((.*)\)$/) as string[])[1]; + const index = parseInt(param); + const sigEntry = + !isNaN(index) && index < issuers.length && sigResult.sigs[index]; + const signatory: { k: string; ok: boolean } = sigEntry || { + k: "", + ok: false, + }; res.push({ - funcName: 'SIG', + funcName: "SIG", parameter: signatory.k, - successful: signatory.ok - }) - } - else if (func.match(/^XHX/)) 
{ - const password = (func.match(/^XHX\((.*)\)$/) as string[])[1] - const hash = hashf(password) + successful: signatory.ok, + }); + } else if (func.match(/^XHX/)) { + const password = (func.match(/^XHX\((.*)\)$/) as string[])[1]; + const hash = hashf(password); res.push({ - funcName: 'XHX', + funcName: "XHX", parameter: password, - successful: conditions.indexOf('XHX(' + hash + ')') !== -1 - }) + successful: conditions.indexOf("XHX(" + hash + ")") !== -1, + }); } } - return res + return res; } export const GLOBAL_RULES_FUNCTIONS = { - - checkIdentitiesAreWritable: async (block:{ identities:string[], version: number }, conf:ConfDTO, dal:FileDAL) => { + checkIdentitiesAreWritable: async ( + block: { identities: string[]; version: number }, + conf: ConfDTO, + dal: FileDAL + ) => { let current = await dal.getCurrentBlockOrNull(); for (const obj of block.identities) { let idty = IdentityDTO.fromInline(obj); - let found = await dal.getWrittenIdtyByUIDForExistence(idty.uid) + let found = await dal.getWrittenIdtyByUIDForExistence(idty.uid); if (found) { - throw Error('Identity already used'); + throw Error("Identity already used"); } // Because the window rule does not apply on initial certifications if (current && idty.buid != constants.SPECIAL_BLOCK) { // From DUP 0.5: we fully check the blockstamp - const basedBlock = await dal.getAbsoluteValidBlockInForkWindowByBlockstamp(idty.buid) || { medianTime: 0 } + const basedBlock = (await dal.getAbsoluteValidBlockInForkWindowByBlockstamp( + idty.buid + )) || { medianTime: 0 }; // Check if writable - let duration = current.medianTime - basedBlock.medianTime + let duration = current.medianTime - basedBlock.medianTime; if (duration > conf.idtyWindow) { - throw Error('Identity is too old and cannot be written'); + throw Error("Identity is too old and cannot be written"); } } } return true; }, - checkSourcesAvailability: async (block:{ version: number, transactions:TransactionDTO[], medianTime: number }, conf:ConfDTO, dal:FileDAL, findSourceTx:(txHash:string) => Promise<DBTx|null>) => { - const txs = block.transactions + checkSourcesAvailability: async ( + block: { + version: number; + transactions: TransactionDTO[]; + medianTime: number; + }, + conf: ConfDTO, + dal: FileDAL, + findSourceTx: (txHash: string) => Promise<DBTx | null> + ) => { + const txs = block.transactions; const current = await dal.getCurrentBlockOrNull(); for (const tx of txs) { - const inputs = tx.inputsAsObjects() - const outputs = tx.outputsAsObjects() - let unlocks:any = {}; + const inputs = tx.inputsAsObjects(); + const outputs = tx.outputsAsObjects(); + let unlocks: any = {}; let sumOfInputs = 0; - let maxOutputBase = current && current.unitbase || 0; + let maxOutputBase = (current && current.unitbase) || 0; for (const theUnlock of tx.unlocks) { - let sp = theUnlock.split(':'); + let sp = theUnlock.split(":"); let index = parseInt(sp[0]); unlocks[index] = sp[1]; } for (let k = 0, len2 = inputs.length; k < len2; k++) { let src = inputs[k]; - let dbSrc: SimpleTxInput|null = await dal.getSource(src.identifier, src.pos, src.type === 'D'); - logger.debug('Source %s:%s:%s:%s = %s', src.amount, src.base, src.identifier, src.pos, dbSrc && dbSrc.consumed); + let dbSrc: SimpleTxInput | null = await dal.getSource( + src.identifier, + src.pos, + src.type === "D" + ); + logger.debug( + "Source %s:%s:%s:%s = %s", + src.amount, + src.base, + src.identifier, + src.pos, + dbSrc && dbSrc.consumed + ); if (!dbSrc) { // For chained transactions which are checked on sandbox submission, we accept them 
if there is already // a previous transaction of the chain already recorded in the pool dbSrc = await (async () => { - let hypotheticSrc:any = null; + let hypotheticSrc: any = null; let targetTX = await findSourceTx(src.identifier); if (targetTX) { let outputStr = targetTX.outputs[src.pos]; @@ -127,21 +159,24 @@ export const GLOBAL_RULES_FUNCTIONS = { } } return hypotheticSrc; - })() + })(); } if (!dbSrc || dbSrc.consumed) { - logger.warn('Source ' + [src.type, src.identifier, src.pos].join(':') + ' is not available'); + logger.warn( + "Source " + + [src.type, src.identifier, src.pos].join(":") + + " is not available" + ); throw constants.ERRORS.SOURCE_ALREADY_CONSUMED; } sumOfInputs += dbSrc.amount * Math.pow(10, dbSrc.base); if (block.medianTime - dbSrc.written_time < tx.locktime) { throw constants.ERRORS.LOCKTIME_PREVENT; } - let unlockValues = unlocks[k] - let unlocksForCondition:string[] = (unlockValues || '').split(' ') - let unlocksMetadata:any = {}; + let unlockValues = unlocks[k]; + let unlocksForCondition: string[] = (unlockValues || "").split(" "); + let unlocksMetadata: any = {}; if (dbSrc.conditions) { - if (dbSrc.conditions.match(/CLTV/)) { unlocksMetadata.currentTime = block.medianTime; } @@ -150,97 +185,174 @@ export const GLOBAL_RULES_FUNCTIONS = { unlocksMetadata.elapsedTime = block.medianTime - dbSrc.written_time; } - const sigs = tx.getTransactionSigResult(block.version) + const sigs = tx.getTransactionSigResult(block.version); try { - if (!txunlock(dbSrc.conditions, unlocksForCondition, sigs, unlocksMetadata)) { - throw Error('Locked'); + if ( + !txunlock( + dbSrc.conditions, + unlocksForCondition, + sigs, + unlocksMetadata + ) + ) { + throw Error("Locked"); } } catch (e) { - logger.warn('Source ' + [src.amount, src.base, src.type, src.identifier, src.pos].join(':') + ' unlock fail'); + logger.warn( + "Source " + + [src.amount, src.base, src.type, src.identifier, src.pos].join( + ":" + ) + + " unlock fail" + ); throw constants.ERRORS.WRONG_UNLOCKER; } } else { - throw Error("Source with no conditions") + throw Error("Source with no conditions"); } } - let sumOfOutputs = outputs.reduce(function(p, output) { + let sumOfOutputs = outputs.reduce(function (p, output) { if (output.base > maxOutputBase) { throw constants.ERRORS.WRONG_OUTPUT_BASE; } return p + output.amount * Math.pow(10, output.base); }, 0); if (sumOfInputs !== sumOfOutputs) { - logger.warn('Inputs/Outputs != 1 (%s/%s)', sumOfInputs, sumOfOutputs); + logger.warn("Inputs/Outputs != 1 (%s/%s)", sumOfInputs, sumOfOutputs); throw constants.ERRORS.WRONG_AMOUNTS; } } return true; - } -} + }, +}; export const GLOBAL_RULES_HELPERS = { - // Functions used in an external context too - checkMembershipBlock: (ms:any, current:DBBlock|null, conf:ConfDTO, dal:FileDAL) => checkMSTarget(ms, current ? { number: current.number + 1} : { number: 0 }, conf, dal), + checkMembershipBlock: ( + ms: any, + current: DBBlock | null, + conf: ConfDTO, + dal: FileDAL + ) => + checkMSTarget( + ms, + current ? { number: current.number + 1 } : { number: 0 }, + conf, + dal + ), - checkCertificationIsValidInSandbox: (cert:any, current:BlockDTO, findIdtyFunc:any, conf:ConfDTO, dal:FileDAL) => { - return checkCertificationShouldBeValid(current ? current : { number: 0, currency: '' }, cert, findIdtyFunc, conf, dal) + checkCertificationIsValidInSandbox: ( + cert: any, + current: BlockDTO, + findIdtyFunc: any, + conf: ConfDTO, + dal: FileDAL + ) => { + return checkCertificationShouldBeValid( + current ? 
current : { number: 0, currency: "" }, + cert, + findIdtyFunc, + conf, + dal + ); }, - checkCertificationIsValidForBlock: (cert:any, block:{ number:number, currency:string }, findIdtyFunc:(b:{ number:number, currency:string }, pubkey:string, dal:FileDAL) => Promise<{ - pubkey:string - uid:string - buid:string - sig:string}|null>, conf:ConfDTO, dal:FileDAL) => { - return checkCertificationShouldBeValid(block, cert, findIdtyFunc, conf, dal) + checkCertificationIsValidForBlock: ( + cert: any, + block: { number: number; currency: string }, + findIdtyFunc: ( + b: { number: number; currency: string }, + pubkey: string, + dal: FileDAL + ) => Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null>, + conf: ConfDTO, + dal: FileDAL + ) => { + return checkCertificationShouldBeValid( + block, + cert, + findIdtyFunc, + conf, + dal + ); }, - isOver3Hops: async (member:any, newLinks:any, newcomers:string[], current:DBBlock|null, conf:ConfDTO, dal:FileDAL) => { + isOver3Hops: async ( + member: any, + newLinks: any, + newcomers: string[], + current: DBBlock | null, + conf: ConfDTO, + dal: FileDAL + ) => { if (!current) { return Promise.resolve(false); } try { - return Indexer.DUP_HELPERS.checkPeopleAreNotOudistanced([member], newLinks, newcomers, conf, dal); + return Indexer.DUP_HELPERS.checkPeopleAreNotOudistanced( + [member], + newLinks, + newcomers, + conf, + dal + ); } catch (e) { return true; } }, - checkExistsUserID: (uid:string, dal:FileDAL) => dal.getWrittenIdtyByUIDForExistence(uid), + checkExistsUserID: (uid: string, dal: FileDAL) => + dal.getWrittenIdtyByUIDForExistence(uid), - checkExistsPubkey: (pub:string, dal:FileDAL) => dal.getWrittenIdtyByPubkeyForExistence(pub), + checkExistsPubkey: (pub: string, dal: FileDAL) => + dal.getWrittenIdtyByPubkeyForExistence(pub), checkSingleTransaction: ( - tx:TransactionDTO, + tx: TransactionDTO, dubp_version: number, medianTime: number, - conf:ConfDTO, - dal:FileDAL, - findSourceTx:(txHash:string) => Promise<DBTx|null>) => GLOBAL_RULES_FUNCTIONS.checkSourcesAvailability( + conf: ConfDTO, + dal: FileDAL, + findSourceTx: (txHash: string) => Promise<DBTx | null> + ) => + GLOBAL_RULES_FUNCTIONS.checkSourcesAvailability( { version: dubp_version, transactions: [tx], - medianTime: medianTime + medianTime: medianTime, }, - conf, dal, findSourceTx + conf, + dal, + findSourceTx ), - checkTxBlockStamp: async (tx:TransactionDTO, dal:FileDAL) => { - const number = parseInt(tx.blockstamp.split('-')[0]) - const hash = tx.blockstamp.split('-')[1]; - const basedBlock = await dal.getAbsoluteValidBlockInForkWindow(number, hash) + checkTxBlockStamp: async (tx: TransactionDTO, dal: FileDAL) => { + const number = parseInt(tx.blockstamp.split("-")[0]); + const hash = tx.blockstamp.split("-")[1]; + const basedBlock = await dal.getAbsoluteValidBlockInForkWindow( + number, + hash + ); if (!basedBlock) { throw "Wrong blockstamp for transaction"; } // Valuates the blockstampTime field tx.blockstampTime = basedBlock.medianTime; const current = await dal.getCurrentBlockOrNull(); - if (current && current.medianTime > basedBlock.medianTime + constants.TX_WINDOW) { + if ( + current && + current.medianTime > basedBlock.medianTime + constants.TX_WINDOW + ) { throw DataErrors[DataErrors.TRANSACTION_WINDOW_IS_PASSED]; } - } -} + }, +}; /***************************** * @@ -248,70 +360,100 @@ export const GLOBAL_RULES_HELPERS = { * *****************************/ -async function checkMSTarget (ms:any, block:any, conf:ConfDTO, dal:FileDAL) { +async function 
checkMSTarget(ms: any, block: any, conf: ConfDTO, dal: FileDAL) { if (block.number == 0 && ms.number != 0) { - throw Error('Number must be 0 for root block\'s memberships'); - } - else if (block.number == 0 && ms.fpr != 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855') { - throw Error('Hash must be E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855 for root block\'s memberships'); - } - else if (block.number == 0) { + throw Error("Number must be 0 for root block's memberships"); + } else if ( + block.number == 0 && + ms.fpr != "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" + ) { + throw Error( + "Hash must be E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855 for root block's memberships" + ); + } else if (block.number == 0) { return null; // Valid for root block } else { - const basedBlock = await dal.getAbsoluteValidBlockInForkWindow(ms.number, ms.fpr) + const basedBlock = await dal.getAbsoluteValidBlockInForkWindow( + ms.number, + ms.fpr + ); if (!basedBlock) { - throw Error('Membership based on an unexisting block') + throw Error("Membership based on an unexisting block"); } let current = await dal.getCurrentBlockOrNull(); - if (current && current.medianTime > basedBlock.medianTime + conf.msValidity) { - throw Error('Membership has expired'); + if ( + current && + current.medianTime > basedBlock.medianTime + conf.msValidity + ) { + throw Error("Membership has expired"); } return basedBlock; } } -async function checkCertificationShouldBeValid (block:{ number:number, currency:string }, cert:any, findIdtyFunc:(b:{ number:number, currency:string }, pubkey:string, dal:FileDAL) => Promise<{ - pubkey:string - uid:string - buid:string - sig:string -}|null>, conf:ConfDTO, dal:FileDAL) { +async function checkCertificationShouldBeValid( + block: { number: number; currency: string }, + cert: any, + findIdtyFunc: ( + b: { number: number; currency: string }, + pubkey: string, + dal: FileDAL + ) => Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null>, + conf: ConfDTO, + dal: FileDAL +) { if (block.number == 0 && cert.block_number != 0) { - throw Error('Number must be 0 for root block\'s certifications'); + throw Error("Number must be 0 for root block's certifications"); } else { - let basedBlock:Tristamp|null = { + let basedBlock: Tristamp | null = { number: 0, hash: constants.SPECIAL_HASH, - medianTime: 0 - } + medianTime: 0, + }; if (block.number != 0) { - basedBlock = await dal.getTristampOf(cert.block_number) + basedBlock = await dal.getTristampOf(cert.block_number); if (!basedBlock) { - throw Error('Certification based on an unexisting block'); + throw Error("Certification based on an unexisting block"); } try { - const issuer = await dal.getWrittenIdtyByPubkeyForIsMember(cert.from) + const issuer = await dal.getWrittenIdtyByPubkeyForIsMember(cert.from); if (!issuer || !issuer.member) { - throw Error('Issuer is not a member') + throw Error("Issuer is not a member"); } } catch (e) { - throw Error('Certifier must be a member') + throw Error("Certifier must be a member"); } } - let idty = await findIdtyFunc(block, cert.to, dal) + let idty = await findIdtyFunc(block, cert.to, dal); let current = block.number == 0 ? 
null : await dal.getCurrentBlockOrNull(); if (!idty) { - throw Error('Identity does not exist for certified'); - } - else if (current && current.medianTime > basedBlock.medianTime + conf.sigValidity) { - throw Error('Certification has expired'); - } - else if (cert.from == idty.pubkey) - throw Error('Rejected certification: certifying its own self-certification has no meaning'); + throw Error("Identity does not exist for certified"); + } else if ( + current && + current.medianTime > basedBlock.medianTime + conf.sigValidity + ) { + throw Error("Certification has expired"); + } else if (cert.from == idty.pubkey) + throw Error( + "Rejected certification: certifying its own self-certification has no meaning" + ); else { - const buid = [cert.block_number, basedBlock.hash].join('-'); - if (cert.block_hash && buid != [cert.block_number, cert.block_hash].join('-')) - throw Error('Certification based on an unexisting block buid. from ' + cert.from.substring(0,8) + ' to ' + idty.pubkey.substring(0,8)); + const buid = [cert.block_number, basedBlock.hash].join("-"); + if ( + cert.block_hash && + buid != [cert.block_number, cert.block_hash].join("-") + ) + throw Error( + "Certification based on an unexisting block buid. from " + + cert.from.substring(0, 8) + + " to " + + idty.pubkey.substring(0, 8) + ); const raw = rawer.getOfficialCertification({ currency: conf.currency, idty_issuer: idty.pubkey, @@ -320,17 +462,17 @@ async function checkCertificationShouldBeValid (block:{ number:number, currency: idty_sig: idty.sig, issuer: cert.from, buid: buid, - sig: '' - }) + sig: "", + }); const verified = verifyBuggy(raw, cert.sig, cert.from); if (!verified) { - throw constants.ERRORS.WRONG_SIGNATURE_FOR_CERT + throw constants.ERRORS.WRONG_SIGNATURE_FOR_CERT; } return true; } } } -export function setLogger(newLogger:any) { - logger = newLogger +export function setLogger(newLogger: any) { + logger = newLogger; } diff --git a/app/lib/rules/helpers.ts b/app/lib/rules/helpers.ts index ee7d194d9279cb74b2f529c8477a06b460197306..d743f0c16e86c6b71d77ff074ffed65ece05daf5 100644 --- a/app/lib/rules/helpers.ts +++ b/app/lib/rules/helpers.ts @@ -11,12 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../dto/ConfDTO" -import {CommonConstants} from "../common-libs/constants" +import { ConfDTO } from "../dto/ConfDTO"; +import { CommonConstants } from "../common-libs/constants"; -const constants = CommonConstants +const constants = CommonConstants; -export function maxAcceleration (conf:ConfDTO) { - let maxGenTime = Math.ceil(conf.avgGenTime * constants.POW_DIFFICULTY_RANGE_RATIO); +export function maxAcceleration(conf: ConfDTO) { + let maxGenTime = Math.ceil( + conf.avgGenTime * constants.POW_DIFFICULTY_RANGE_RATIO + ); return Math.ceil(maxGenTime * conf.medianTimeBlocks); } diff --git a/app/lib/rules/index.ts b/app/lib/rules/index.ts index a3091ce798abf53a5536042065a6a0d09ea2f581..c43f7ccc92b358aa0a30691d16e99db3490252e8 100644 --- a/app/lib/rules/index.ts +++ b/app/lib/rules/index.ts @@ -12,14 +12,13 @@ // GNU Affero General Public License for more details. 
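The maxAcceleration helper bounds how far a non-root block's Time may exceed its MedianTime; checkBlockTimes in local_rules.ts rejects blocks outside that window. A rough worked example of the arithmetic, using illustrative values only (avgGenTime, medianTimeBlocks and the ratio below are assumptions, not the currency's actual parameters):

// Illustrative values; the real ratio is CommonConstants.POW_DIFFICULTY_RANGE_RATIO.
const avgGenTime = 300;       // assumed target block time, in seconds
const medianTimeBlocks = 24;  // assumed median-time window size
const ratio = 1.19;           // stand-in for POW_DIFFICULTY_RANGE_RATIO
const maxGenTime = Math.ceil(avgGenTime * ratio);                 // 357
const maxAcceleration = Math.ceil(maxGenTime * medianTimeBlocks); // 8568
// A non-root block with time > medianTime + 8568 s would be rejected by checkBlockTimes.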
"use strict"; -import {BlockDTO} from "../dto/BlockDTO" -import {ConfDTO} from "../dto/ConfDTO" -import {IndexEntry} from "../indexer" -import {LOCAL_RULES_FUNCTIONS} from "./local_rules" +import { BlockDTO } from "../dto/BlockDTO"; +import { ConfDTO } from "../dto/ConfDTO"; +import { IndexEntry } from "../indexer"; +import { LOCAL_RULES_FUNCTIONS } from "./local_rules"; export const ALIAS = { - - ALL_LOCAL: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + ALL_LOCAL: async (block: BlockDTO, conf: ConfDTO, index: IndexEntry[]) => { await LOCAL_RULES_FUNCTIONS.checkParameters(block); await LOCAL_RULES_FUNCTIONS.checkProofOfWork(block); await LOCAL_RULES_FUNCTIONS.checkInnerHash(block); @@ -29,27 +28,51 @@ export const ALIAS = { await LOCAL_RULES_FUNCTIONS.checkBlockSignature(block); await LOCAL_RULES_FUNCTIONS.checkBlockTimes(block, conf); await LOCAL_RULES_FUNCTIONS.checkIdentitiesSignature(block); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict( + block, + conf, + index + ); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkIdentitiesMatchJoin(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedAreExcluded(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipsSignature(block); await LOCAL_RULES_FUNCTIONS.checkPubkeyUnicity(block); - await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkCertificationUnicity(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkTxVersion(block); await LOCAL_RULES_FUNCTIONS.checkTxIssuers(block); await LOCAL_RULES_FUNCTIONS.checkTxSources(block); await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block); await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block); await LOCAL_RULES_FUNCTIONS.checkTxSignature(block); - await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth( + block, + conf, + index + ); }, - ALL_LOCAL_BUT_POW_AND_SIGNATURE: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + ALL_LOCAL_BUT_POW_AND_SIGNATURE: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { await LOCAL_RULES_FUNCTIONS.checkParameters(block); await LOCAL_RULES_FUNCTIONS.checkInnerHash(block); await LOCAL_RULES_FUNCTIONS.checkPreviousHash(block); @@ -57,39 +80,70 @@ export const ALIAS = { await LOCAL_RULES_FUNCTIONS.checkUnitBase(block); await LOCAL_RULES_FUNCTIONS.checkBlockTimes(block, conf); await LOCAL_RULES_FUNCTIONS.checkIdentitiesSignature(block); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict( + block, + conf, + index + ); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict( + block, + conf, + index + ); await 
LOCAL_RULES_FUNCTIONS.checkIdentitiesMatchJoin(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedAreExcluded(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipsSignature(block); await LOCAL_RULES_FUNCTIONS.checkPubkeyUnicity(block); - await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkCertificationUnicity(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkTxVersion(block); await LOCAL_RULES_FUNCTIONS.checkTxIssuers(block); await LOCAL_RULES_FUNCTIONS.checkTxSources(block); await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block); await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block); await LOCAL_RULES_FUNCTIONS.checkTxSignature(block); - await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index); - } -} + await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth( + block, + conf, + index + ); + }, +}; export const CHECK = { ASYNC: { ALL_LOCAL: checkLocal(ALIAS.ALL_LOCAL), - ALL_LOCAL_BUT_POW: checkLocal(ALIAS.ALL_LOCAL_BUT_POW_AND_SIGNATURE) - } + ALL_LOCAL_BUT_POW: checkLocal(ALIAS.ALL_LOCAL_BUT_POW_AND_SIGNATURE), + }, }; -function checkLocal(contract:(block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => Promise<void>) { - return async (b:BlockDTO, conf:ConfDTO, index:IndexEntry[], done:any = undefined) => { +function checkLocal( + contract: ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => Promise<void> +) { + return async ( + b: BlockDTO, + conf: ConfDTO, + index: IndexEntry[], + done: any = undefined + ) => { try { - const block = BlockDTO.fromJSONObject(b) - await contract(block, conf, index) + const block = BlockDTO.fromJSONObject(b); + await contract(block, conf, index); done && done(); } catch (err) { if (done) return done(err); diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts index ad297305314e69a7c8d09ad1f3f3d913e5d67eaf..670026f344e384c1b9af980521b5e71c9134b876 100644 --- a/app/lib/rules/local_rules.ts +++ b/app/lib/rules/local_rules.ts @@ -11,103 +11,117 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
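checkLocal wraps each rule bundle so it can be consumed either as a plain promise or through the optional legacy `done` callback. A minimal usage sketch, assuming the wrapper rethrows when no callback is given and that blockJson, conf and index are already available (imports from app/lib/rules/index omitted):

// Promise style: any failing local rule surfaces as a thrown error.
try {
  await CHECK.ASYNC.ALL_LOCAL(blockJson, conf, index);
} catch (err) {
  // the block violated one of the LOCAL_RULES_FUNCTIONS checks
}

// Callback style, through the optional `done` parameter of the wrapper.
CHECK.ASYNC.ALL_LOCAL_BUT_POW(blockJson, conf, index, (err: any) => {
  if (err) {
    // handle the rejected block
  }
});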
-import {BlockDTO} from "../dto/BlockDTO" -import {ConfDTO} from "../dto/ConfDTO" -import {CindexEntry, IndexEntry, Indexer, MindexEntry, SindexEntry} from "../indexer" -import {BaseDTO, TransactionDTO} from "../dto/TransactionDTO" -import {DBBlock} from "../db/DBBlock" -import {verify, verifyBuggy} from "../common-libs/crypto/keyring" -import {hashf} from "../common" -import {CommonConstants} from "../common-libs/constants" -import {IdentityDTO} from "../dto/IdentityDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {Underscore} from "../common-libs/underscore" -import {FileDAL} from "../dal/fileDAL" - -const constants = CommonConstants -const maxAcceleration = require('./helpers').maxAcceleration +import { BlockDTO } from "../dto/BlockDTO"; +import { ConfDTO } from "../dto/ConfDTO"; +import { + CindexEntry, + IndexEntry, + Indexer, + MindexEntry, + SindexEntry, +} from "../indexer"; +import { BaseDTO, TransactionDTO } from "../dto/TransactionDTO"; +import { DBBlock } from "../db/DBBlock"; +import { verify, verifyBuggy } from "../common-libs/crypto/keyring"; +import { hashf } from "../common"; +import { CommonConstants } from "../common-libs/constants"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { Underscore } from "../common-libs/underscore"; +import { FileDAL } from "../dal/fileDAL"; + +const constants = CommonConstants; +const maxAcceleration = require("./helpers").maxAcceleration; export const LOCAL_RULES_FUNCTIONS = { - - checkParameters: async (block:BlockDTO) => { + checkParameters: async (block: BlockDTO) => { if (block.number == 0 && !block.parameters) { - throw Error('Parameters must be provided for root block'); - } - else if (block.number > 0 && block.parameters) { - throw Error('Parameters must not be provided for non-root block'); + throw Error("Parameters must be provided for root block"); + } else if (block.number > 0 && block.parameters) { + throw Error("Parameters must not be provided for non-root block"); } return true; }, - isProofOfWorkCorrect: (block:BlockDTO) => { + isProofOfWorkCorrect: (block: BlockDTO) => { let remainder = block.powMin % 16; let nb_zeros = (block.powMin - remainder) / 16; - const powRegexp = new RegExp('^0{' + nb_zeros + '}'); - return !!block.hash.match(powRegexp) + const powRegexp = new RegExp("^0{" + nb_zeros + "}"); + return !!block.hash.match(powRegexp); }, - checkProofOfWork: async (block:BlockDTO) => { + checkProofOfWork: async (block: BlockDTO) => { if (!LOCAL_RULES_FUNCTIONS.isProofOfWorkCorrect(block)) { - throw Error('Not a proof-of-work'); + throw Error("Not a proof-of-work"); } return true; }, - checkInnerHash: async (block:BlockDTO) => { + checkInnerHash: async (block: BlockDTO) => { let inner_hash = hashf(block.getRawInnerPart()).toUpperCase(); if (block.inner_hash != inner_hash) { - throw Error('Wrong inner hash'); + throw Error("Wrong inner hash"); } return true; }, - checkPreviousHash: async (block:BlockDTO) => { + checkPreviousHash: async (block: BlockDTO) => { if (block.number == 0 && block.previousHash) { - throw Error('PreviousHash must not be provided for root block'); - } - else if (block.number > 0 && !block.previousHash) { - throw Error('PreviousHash must be provided for non-root block'); + throw Error("PreviousHash must not be provided for root block"); + } else if (block.number > 0 && !block.previousHash) { + throw Error("PreviousHash must be provided for non-root block"); } return true; }, - checkPreviousIssuer: async (block:BlockDTO) 
=> { + checkPreviousIssuer: async (block: BlockDTO) => { if (block.number == 0 && block.previousIssuer) - throw Error('PreviousIssuer must not be provided for root block'); + throw Error("PreviousIssuer must not be provided for root block"); else if (block.number > 0 && !block.previousIssuer) - throw Error('PreviousIssuer must be provided for non-root block'); + throw Error("PreviousIssuer must be provided for non-root block"); return true; }, - checkUnitBase: async (block:BlockDTO) => { + checkUnitBase: async (block: BlockDTO) => { if (block.number == 0 && block.unitbase != 0) { - throw Error('UnitBase must equal 0 for root block'); + throw Error("UnitBase must equal 0 for root block"); } return true; }, - checkBlockSignature: async (block:BlockDTO) => { + checkBlockSignature: async (block: BlockDTO) => { // Historically, Duniter used a buggy version of TweetNaCl (see #1390) - // Starting with the v12 blocks, Duniter uses a fixed version of TweetNaCl. - if (block.version >= 12 && !verify(block.getSignedPart(), block.signature, block.issuer)) { - throw Error('Block\'s signature must match'); - } else if (!verifyBuggy(block.getSignedPart(), block.signature, block.issuer)) { - throw Error('Block\'s signature must match'); + // Starting with the v12 blocks, Duniter uses a fixed version of TweetNaCl. + if ( + block.version >= 12 && + !verify(block.getSignedPart(), block.signature, block.issuer) + ) { + throw Error("Block's signature must match"); + } else if ( + !verifyBuggy(block.getSignedPart(), block.signature, block.issuer) + ) { + throw Error("Block's signature must match"); } return true; }, - checkBlockTimes: async (block:BlockDTO, conf:ConfDTO) => { - const time = block.time - const medianTime = block.medianTime - if (block.number > 0 && (time < medianTime || time > medianTime + maxAcceleration(conf))) - throw Error('A block must have its Time between MedianTime and MedianTime + ' + maxAcceleration(conf)); + checkBlockTimes: async (block: BlockDTO, conf: ConfDTO) => { + const time = block.time; + const medianTime = block.medianTime; + if ( + block.number > 0 && + (time < medianTime || time > medianTime + maxAcceleration(conf)) + ) + throw Error( + "A block must have its Time between MedianTime and MedianTime + " + + maxAcceleration(conf) + ); else if (block.number == 0 && time != medianTime) - throw Error('Root block must have Time equal MedianTime'); + throw Error("Root block must have Time equal MedianTime"); return true; }, - checkIdentitiesSignature: async (block:BlockDTO) => { + checkIdentitiesSignature: async (block: BlockDTO) => { let i = 0; let wrongSig = false; while (!wrongSig && i < block.identities.length) { @@ -115,198 +129,260 @@ export const LOCAL_RULES_FUNCTIONS = { idty.currency = block.currency; wrongSig = !verifyBuggy(idty.rawWithoutSig(), idty.sig, idty.pubkey); if (wrongSig) { - throw Error('Identity\'s signature must match'); + throw Error("Identity's signature must match"); } i++; } return true; }, - checkIdentitiesUserIDConflict: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkIdentitiesUserIDConflict: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const creates = Indexer.iindexCreate(index); - const uids = Underscore.chain(creates).pluck('uid').uniq().value(); + const uids = Underscore.chain(creates).pluck("uid").uniq().value(); if (creates.length !== uids.length) { - throw Error('Block must not contain twice same identity uid'); + throw Error("Block must not contain twice same identity uid"); } return true; }, - 
checkIdentitiesPubkeyConflict: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkIdentitiesPubkeyConflict: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const creates = Indexer.iindexCreate(index); - const pubkeys = Underscore.chain(creates).pluck('pub').uniq().value(); + const pubkeys = Underscore.chain(creates).pluck("pub").uniq().value(); if (creates.length !== pubkeys.length) { - throw Error('Block must not contain twice same identity pubkey'); + throw Error("Block must not contain twice same identity pubkey"); } return true; }, - checkIdentitiesMatchJoin: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkIdentitiesMatchJoin: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const icreates = Indexer.iindexCreate(index); const mcreates = Indexer.mindexCreate(index); for (const icreate of icreates) { const matching = Underscore.where(mcreates, { pub: icreate.pub }); if (matching.length == 0) { - throw Error('Each identity must match a newcomer line with same userid and certts'); + throw Error( + "Each identity must match a newcomer line with same userid and certts" + ); } } return true; }, - checkRevokedAreExcluded: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkRevokedAreExcluded: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const iindex = Indexer.iindex(index); const mindex = Indexer.mindex(index); const revocations = mindex - .filter((row:MindexEntry) => !!(row.op == constants.IDX_UPDATE && row.revoked_on !== null)) - .map(e => e.pub) + .filter( + (row: MindexEntry) => + !!(row.op == constants.IDX_UPDATE && row.revoked_on !== null) + ) + .map((e) => e.pub); for (const pub of revocations) { - const exclusions = Underscore.where(iindex, { op: constants.IDX_UPDATE, member: false, pub }) + const exclusions = Underscore.where(iindex, { + op: constants.IDX_UPDATE, + member: false, + pub, + }); if (exclusions.length == 0) { - throw Error('A revoked member must be excluded'); + throw Error("A revoked member must be excluded"); } } return true; }, - checkRevokedUnicity: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkRevokedUnicity: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { try { await LOCAL_RULES_FUNCTIONS.checkMembershipUnicity(block, conf, index); } catch (e) { - throw Error('A single revocation per member is allowed'); + throw Error("A single revocation per member is allowed"); } return true; }, - checkMembershipUnicity: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkMembershipUnicity: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const mindex = Indexer.mindex(index); - const pubkeys = Underscore.chain(mindex).pluck('pub').uniq().value(); + const pubkeys = Underscore.chain(mindex).pluck("pub").uniq().value(); if (pubkeys.length !== mindex.length) { - throw Error('Unicity constraint PUBLIC_KEY on MINDEX is not respected'); + throw Error("Unicity constraint PUBLIC_KEY on MINDEX is not respected"); } return true; }, - checkMembershipsSignature: async (block:BlockDTO) => { + checkMembershipsSignature: async (block: BlockDTO) => { let i = 0; - let wrongSig = false, ms; + let wrongSig = false, + ms; // Joiners while (!wrongSig && i < block.joiners.length) { - ms = MembershipDTO.fromInline(block.joiners[i], 'IN', block.currency); + ms = MembershipDTO.fromInline(block.joiners[i], "IN", block.currency); wrongSig = !checkSingleMembershipSignature(ms); 
i++; } // Actives i = 0; while (!wrongSig && i < block.actives.length) { - ms = MembershipDTO.fromInline(block.actives[i], 'IN', block.currency); + ms = MembershipDTO.fromInline(block.actives[i], "IN", block.currency); wrongSig = !checkSingleMembershipSignature(ms); i++; } // Leavers i = 0; while (!wrongSig && i < block.leavers.length) { - ms = MembershipDTO.fromInline(block.leavers[i], 'OUT', block.currency); + ms = MembershipDTO.fromInline(block.leavers[i], "OUT", block.currency); wrongSig = !checkSingleMembershipSignature(ms); i++; } if (wrongSig) { - throw Error('Membership\'s signature must match'); + throw Error("Membership's signature must match"); } return true; }, - checkPubkeyUnicity: async (block:BlockDTO) => { + checkPubkeyUnicity: async (block: BlockDTO) => { const pubkeys = []; let conflict = false; let pubk; // Joiners let i = 0; while (!conflict && i < block.joiners.length) { - pubk = block.joiners[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.joiners[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } // Actives i = 0; while (!conflict && i < block.actives.length) { - pubk = block.actives[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.actives[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } // Leavers i = 0; while (!conflict && i < block.leavers.length) { - pubk = block.leavers[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.leavers[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } // Excluded i = 0; while (!conflict && i < block.excluded.length) { - pubk = block.excluded[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.excluded[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } if (conflict) { - throw Error('Block cannot contain a same pubkey more than once in joiners, actives, leavers and excluded'); + throw Error( + "Block cannot contain a same pubkey more than once in joiners, actives, leavers and excluded" + ); } return true; }, - checkCertificationOneByIssuer: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkCertificationOneByIssuer: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { if (block.number > 0) { const cindex = Indexer.cindex(index); - const certFromA = Underscore.uniq(cindex.map((row:CindexEntry) => row.issuer)); + const certFromA = Underscore.uniq( + cindex.map((row: CindexEntry) => row.issuer) + ); if (certFromA.length !== cindex.length) { - throw Error('Block cannot contain two certifications from same issuer'); + throw Error("Block cannot contain two certifications from same issuer"); } } return true; }, - checkCertificationUnicity: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkCertificationUnicity: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const cindex = Indexer.cindex(index); - const certAtoB = Underscore.uniq(cindex.map((row:CindexEntry) => row.issuer + row.receiver)); + const certAtoB = Underscore.uniq( + cindex.map((row: CindexEntry) => row.issuer + row.receiver) + ); if (certAtoB.length !== cindex.length) { - throw Error('Block cannot contain identical certifications (A -> B)'); + throw Error("Block cannot contain identical certifications (A -> B)"); } return true; }, - checkCertificationIsntForLeaverOrExcluded: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + 
checkCertificationIsntForLeaverOrExcluded: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const cindex = Indexer.cindex(index); const iindex = Indexer.iindex(index); const mindex = Indexer.mindex(index); - const certified = cindex.map((row:CindexEntry) => row.receiver); + const certified = cindex.map((row: CindexEntry) => row.receiver); for (const pub of certified) { - const exclusions = Underscore.where(iindex, { op: constants.IDX_UPDATE, member: false, pub: pub }) - const leavers = Underscore.where(mindex, { op: constants.IDX_UPDATE, leaving: true, pub: pub }) + const exclusions = Underscore.where(iindex, { + op: constants.IDX_UPDATE, + member: false, + pub: pub, + }); + const leavers = Underscore.where(mindex, { + op: constants.IDX_UPDATE, + leaving: true, + pub: pub, + }); if (exclusions.length > 0 || leavers.length > 0) { - throw Error('Block cannot contain certifications concerning leavers or excluded members'); + throw Error( + "Block cannot contain certifications concerning leavers or excluded members" + ); } } return true; }, - checkTxVersion: async (block:BlockDTO) => { - const txs = block.transactions + checkTxVersion: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (tx.version != 10) { - throw Error('A transaction must have the version 10'); + throw Error("A transaction must have the version 10"); } } return true; }, - checkTxLen: async (block:BlockDTO) => { - const txs = block.transactions + checkTxLen: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { - const txLen = TransactionDTO.fromJSONObject(tx).getLen() + const txLen = TransactionDTO.fromJSONObject(tx).getLen(); if (txLen > constants.MAXIMUM_LEN_OF_COMPACT_TX) { throw constants.ERRORS.A_TRANSACTION_HAS_A_MAX_SIZE; } @@ -314,9 +390,12 @@ export const LOCAL_RULES_FUNCTIONS = { // Check rule against each output of each transaction for (const tx of txs) { for (const output of tx.outputs) { - const out = typeof output === 'string' ? output : TransactionDTO.outputObj2Str(output) + const out = + typeof output === "string" + ? 
output + : TransactionDTO.outputObj2Str(output); if (out.length > constants.MAXIMUM_LEN_OF_OUTPUT) { - throw constants.ERRORS.MAXIMUM_LEN_OF_OUTPUT + throw constants.ERRORS.MAXIMUM_LEN_OF_OUTPUT; } } } @@ -324,61 +403,70 @@ export const LOCAL_RULES_FUNCTIONS = { for (const tx of txs) { for (const unlock of tx.unlocks) { if (unlock.length > constants.MAXIMUM_LEN_OF_UNLOCK) { - throw constants.ERRORS.MAXIMUM_LEN_OF_UNLOCK + throw constants.ERRORS.MAXIMUM_LEN_OF_UNLOCK; } } } return true; }, - checkTxIssuers: async (block:BlockDTO) => { - const txs = block.transactions + checkTxIssuers: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (tx.issuers.length == 0) { - throw Error('A transaction must have at least 1 issuer'); + throw Error("A transaction must have at least 1 issuer"); } } return true; }, - checkTxSources: async (block:BlockDTO) => { - const dto = BlockDTO.fromJSONObject(block) + checkTxSources: async (block: BlockDTO) => { + const dto = BlockDTO.fromJSONObject(block); for (const tx of dto.transactions) { if (!tx.inputs || tx.inputs.length == 0) { - throw Error('A transaction must have at least 1 source'); + throw Error("A transaction must have at least 1 source"); } } const sindex = Indexer.localSIndex(dto); - const inputs = Underscore.filter(sindex, (row:SindexEntry) => row.op == constants.IDX_UPDATE).map((row:SindexEntry) => [row.op, row.identifier, row.pos].join('-')); + const inputs = Underscore.filter( + sindex, + (row: SindexEntry) => row.op == constants.IDX_UPDATE + ).map((row: SindexEntry) => [row.op, row.identifier, row.pos].join("-")); if (inputs.length !== Underscore.uniq(inputs).length) { - throw Error('It cannot exist 2 identical sources for transactions inside a given block'); - } - const outputs = Underscore.filter(sindex, (row:SindexEntry) => row.op == constants.IDX_CREATE).map((row:SindexEntry) => [row.op, row.identifier, row.pos].join('-')); + throw Error( + "It cannot exist 2 identical sources for transactions inside a given block" + ); + } + const outputs = Underscore.filter( + sindex, + (row: SindexEntry) => row.op == constants.IDX_CREATE + ).map((row: SindexEntry) => [row.op, row.identifier, row.pos].join("-")); if (outputs.length !== Underscore.uniq(outputs).length) { - throw Error('It cannot exist 2 identical sources for transactions inside a given block'); + throw Error( + "It cannot exist 2 identical sources for transactions inside a given block" + ); } return true; }, - checkTxAmounts: async (block:BlockDTO) => { + checkTxAmounts: async (block: BlockDTO) => { for (const tx of block.transactions) { LOCAL_RULES_HELPERS.checkTxAmountsValidity(tx); } }, - checkTxRecipients: async (block:BlockDTO) => { - const txs = block.transactions + checkTxRecipients: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (!tx.outputs || tx.outputs.length == 0) { - throw Error('A transaction must have at least 1 recipient'); - } - else { + throw Error("A transaction must have at least 1 recipient"); + } else { // Cannot have empty output condition for (const output of tx.outputsAsObjects()) { if (!output.conditions.match(/(SIG|XHX)/)) { - throw Error('Empty conditions are forbidden'); + throw Error("Empty conditions are forbidden"); } } } @@ -386,70 +474,113 @@ export const LOCAL_RULES_FUNCTIONS = { return true; }, - checkTxSignature: async (block:BlockDTO) => { - const txs = block.transactions + checkTxSignature: async 
(block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (!tx.checkSignatures(block.version)) { - throw Error('Signature from a transaction must match') + throw Error("Signature from a transaction must match"); } } return true; }, - checkMaxTransactionChainingDepth: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { - const sindex = Indexer.sindex(index) - const max = getMaxTransactionDepth(sindex) + checkMaxTransactionChainingDepth: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { + const sindex = Indexer.sindex(index); + const max = getMaxTransactionDepth(sindex); // - const allowedMax = block.medianTime > CommonConstants.BLOCK_TX_CHAINING_ACTIVATION_MT ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 0 + const allowedMax = + block.medianTime > CommonConstants.BLOCK_TX_CHAINING_ACTIVATION_MT + ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH + : 0; if (max > allowedMax) { - throw "The maximum transaction chaining length per block is " + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH + throw ( + "The maximum transaction chaining length per block is " + + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH + ); } - return true - } -} + return true; + }, +}; export interface SindexShortEntry { - op:string, - identifier:string, - pos:number, - tx:string|null + op: string; + identifier: string; + pos: number; + tx: string | null; } -function getMaxTransactionDepth(sindex:SindexShortEntry[]) { - const ids = Underscore.uniq(Underscore.pluck(sindex, 'tx')) as string[] // We are sure because at this moment no UD is in the sources - let maxTxChainingDepth = 0 +function getMaxTransactionDepth(sindex: SindexShortEntry[]) { + const ids = Underscore.uniq(Underscore.pluck(sindex, "tx")) as string[]; // We are sure because at this moment no UD is in the sources + let maxTxChainingDepth = 0; for (let id of ids) { - maxTxChainingDepth = Math.max(maxTxChainingDepth, getTransactionDepth(id, sindex, 0)) + maxTxChainingDepth = Math.max( + maxTxChainingDepth, + getTransactionDepth(id, sindex, 0) + ); } - return maxTxChainingDepth + return maxTxChainingDepth; } -function getTransactionDepth(txHash:string, sindex:SindexShortEntry[], localDepth = 0) { - const inputs = Underscore.filter(sindex, (s:SindexShortEntry) => s.op === 'UPDATE' && s.tx === txHash) - let depth = localDepth +function getTransactionDepth( + txHash: string, + sindex: SindexShortEntry[], + localDepth = 0 +) { + const inputs = Underscore.filter( + sindex, + (s: SindexShortEntry) => s.op === "UPDATE" && s.tx === txHash + ); + let depth = localDepth; for (let input of inputs) { - const consumedOutput = Underscore.findWhere(sindex, { op: 'CREATE', identifier: input.identifier, pos: input.pos }) + const consumedOutput = Underscore.findWhere(sindex, { + op: "CREATE", + identifier: input.identifier, + pos: input.pos, + }); if (consumedOutput) { if (localDepth < 5) { // Cast: we are sure because at this moment no UD is in the sources - const subTxDepth = getTransactionDepth(consumedOutput.tx as string, sindex, localDepth + 1) - depth = Math.max(depth, subTxDepth) + const subTxDepth = getTransactionDepth( + consumedOutput.tx as string, + sindex, + localDepth + 1 + ); + depth = Math.max(depth, subTxDepth); } else { - depth++ + depth++; } } } - return depth + return depth; } -function checkSingleMembershipSignature(ms:any) { +function checkSingleMembershipSignature(ms: any) { return verifyBuggy(ms.getRaw(), ms.signature, ms.issuer); } -function 
checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, medianTime: number, options?:{ dontCareAboutChaining?:boolean }){ - const block:any = { transactions, identities: [], joiners: [], actives: [], leavers: [], revoked: [], excluded: [], certifications: [], medianTime }; - const index = Indexer.localIndex(block, conf) +function checkBunchOfTransactions( + transactions: TransactionDTO[], + conf: ConfDTO, + medianTime: number, + options?: { dontCareAboutChaining?: boolean } +) { + const block: any = { + transactions, + identities: [], + joiners: [], + actives: [], + leavers: [], + revoked: [], + excluded: [], + certifications: [], + medianTime, + }; + const index = Indexer.localIndex(block, conf); return (async () => { let local_rule = LOCAL_RULES_FUNCTIONS; await local_rule.checkTxLen(block); @@ -461,12 +592,11 @@ function checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, m if (!options || !options.dontCareAboutChaining) { await local_rule.checkMaxTransactionChainingDepth(block, conf, index); } - })() + })(); } export const LOCAL_RULES_HELPERS = { - - maxAcceleration: (conf:ConfDTO) => maxAcceleration(conf), + maxAcceleration: (conf: ConfDTO) => maxAcceleration(conf), checkSingleMembershipSignature: checkSingleMembershipSignature, @@ -476,16 +606,19 @@ export const LOCAL_RULES_HELPERS = { getMaxTransactionDepth, - checkSingleTransactionLocally: (tx:any, conf:ConfDTO) => checkBunchOfTransactions([tx], conf, 0), + checkSingleTransactionLocally: (tx: any, conf: ConfDTO) => + checkBunchOfTransactions([tx], conf, 0), - checkTxAmountsValidity: (tx:TransactionDTO) => { - const inputs = tx.inputsAsObjects() - const outputs = tx.outputsAsObjects() + checkTxAmountsValidity: (tx: TransactionDTO) => { + const inputs = tx.inputsAsObjects(); + const outputs = tx.outputsAsObjects(); // Rule of money conservation - const commonBase:number = (inputs as BaseDTO[]).concat(outputs).reduce((min:number, input) => { - if (min === null) return input.base; - return Math.min(min, input.base) - }, 0) + const commonBase: number = (inputs as BaseDTO[]) + .concat(outputs) + .reduce((min: number, input) => { + if (min === null) return input.base; + return Math.min(min, input.base); + }, 0); const inputSumCommonBase = inputs.reduce((sum, input) => { return sum + input.amount * Math.pow(10, input.base - commonBase); }, 0); @@ -497,10 +630,10 @@ export const LOCAL_RULES_HELPERS = { } // Rule of unit base transformation const maxOutputBase = outputs.reduce((max, output) => { - return Math.max(max, output.base) - }, 0) + return Math.max(max, output.base); + }, 0); // Compute deltas - const deltas:any = {}; + const deltas: any = {}; for (let i = commonBase; i <= maxOutputBase; i++) { const inputBaseSum = inputs.reduce((sum, input) => { if (input.base == i) { @@ -528,29 +661,42 @@ export const LOCAL_RULES_HELPERS = { } }, - getMaxPossibleVersionNumber: async (current:DBBlock|null, dal: FileDAL) => { + getMaxPossibleVersionNumber: async ( + current: DBBlock | null, + dal: FileDAL + ) => { // Looking at current blockchain, find what is the next maximum version we can produce return !current + ? // 1. We use legacy version + constants.BLOCK_GENESIS_VERSION + : (async () => { + // 2. 
If we can, we go to the next version + const blocksInFrame = ( + await dal.getBlocksBetween( + current.number - current.issuersFrame + 1, + current.number + ) + ).sort((b1, b2) => b2.number - b1.number); + const uniqIssuersInFrame = Underscore.uniq( + blocksInFrame.map((b) => b.issuer) + ); + const lastNonceOfEachIssuer = uniqIssuersInFrame.map((issuer) => + String(blocksInFrame.filter((b) => b.issuer === issuer)[0].nonce) + ); + const nbNoncesWithNextVersionCode = lastNonceOfEachIssuer.filter( + (nonce) => nonce.substr(-11, 3) === "999" + ).length; + + // More than 70% of the computing network converted? Let's go to next version. + let propIssuersReadyToJump = + nbNoncesWithNextVersionCode / uniqIssuersInFrame.length; + if (propIssuersReadyToJump > 0.7) { + return constants.DUBP_NEXT_VERSION; + } - // 1. We use legacy version - ? constants.BLOCK_GENESIS_VERSION : (async () => { - - // 2. If we can, we go to the next version - const blocksInFrame = (await dal.getBlocksBetween(current.number - current.issuersFrame + 1, current.number)) - .sort((b1, b2) => b2.number - b1.number) - const uniqIssuersInFrame = Underscore.uniq(blocksInFrame.map(b => b.issuer)) - const lastNonceOfEachIssuer = uniqIssuersInFrame.map(issuer => String(blocksInFrame.filter(b => b.issuer === issuer)[0].nonce)) - const nbNoncesWithNextVersionCode = lastNonceOfEachIssuer.filter(nonce => nonce.substr(-11, 3) === '999').length - - // More than 70% of the computing network converted? Let's go to next version. - let propIssuersReadyToJump = nbNoncesWithNextVersionCode / uniqIssuersInFrame.length; - if (propIssuersReadyToJump > 0.7) { - return constants.DUBP_NEXT_VERSION - } - - // Otherwise, we stay on same version - return current.version - })() - } -} + // Otherwise, we stay on same version + return current.version; + })(); + }, +}; diff --git a/app/lib/streams/multicaster.ts b/app/lib/streams/multicaster.ts index cf2cdbf3fc9eb7b2c2e13cd108c14b64c2e03290..5afd88cd1e68267981d9c26c5bd6bcc46e1f48da 100644 --- a/app/lib/streams/multicaster.ts +++ b/app/lib/streams/multicaster.ts @@ -11,233 +11,284 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
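getTransactionDepth walks the block-local SINDEX recursively: a transaction that consumes an output created by another transaction of the same block sits one level deeper than that transaction, and checkMaxTransactionChainingDepth rejects the block when the deepest chain exceeds the allowed maximum. A small sketch of what the helpers compute, with made-up hashes (the data below is hypothetical, purely for illustration):

// Hypothetical SINDEX for one block: txB spends the output that txA creates.
const sindex: SindexShortEntry[] = [
  { op: "CREATE", identifier: "HASH_A", pos: 0, tx: "HASH_A" }, // output created by txA
  { op: "UPDATE", identifier: "HASH_A", pos: 0, tx: "HASH_B" }, // txB consumes that output
  { op: "CREATE", identifier: "HASH_B", pos: 0, tx: "HASH_B" }, // output created by txB
];
// txA has depth 0, txB has depth 1 (it chains on txA within the block), so:
// getMaxTransactionDepth(sindex) === 1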
-import {ConfDTO} from "../dto/ConfDTO" -import * as stream from "stream" -import {BlockDTO} from "../dto/BlockDTO" -import {RevocationDTO} from "../dto/RevocationDTO" -import {IdentityDTO} from "../dto/IdentityDTO" -import {CertificationDTO} from "../dto/CertificationDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {TransactionDTO} from "../dto/TransactionDTO" -import {PeerDTO} from "../dto/PeerDTO" -import {CommonConstants} from "../common-libs/constants" -import {DBPeer} from "../db/DBPeer" - -const request = require('request'); -const constants = require('../../lib/constants'); -const logger = require('../logger').NewLogger('multicaster'); +import { ConfDTO } from "../dto/ConfDTO"; +import * as stream from "stream"; +import { BlockDTO } from "../dto/BlockDTO"; +import { RevocationDTO } from "../dto/RevocationDTO"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { CertificationDTO } from "../dto/CertificationDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { PeerDTO } from "../dto/PeerDTO"; +import { CommonConstants } from "../common-libs/constants"; +import { DBPeer } from "../db/DBPeer"; + +const request = require("request"); +const constants = require("../../lib/constants"); +const logger = require("../logger").NewLogger("multicaster"); const WITH_ISOLATION = true; export class Multicaster extends stream.Transform { + constructor( + private conf: ConfDTO | null = null, + private timeout: number = 0 + ) { + super({ objectMode: true }); - constructor(private conf:ConfDTO|null = null, private timeout:number = 0) { - - super({ objectMode: true }) - - this.on('identity', (data:any, peers:DBPeer[]) => this.idtyForward(data, peers)) - this.on('cert', (data:any, peers:DBPeer[]) => this.certForward(data, peers)) - this.on('revocation', (data:any, peers:DBPeer[]) => this.revocationForward(data, peers)) - this.on('block', (data:any, peers:DBPeer[]) => this.blockForward(data, peers)) - this.on('transaction', (data:any, peers:DBPeer[]) => this.txForward(data, peers)) - this.on('peer', (data:any, peers:DBPeer[]) => this.peerForward(data, peers)) - this.on('membership', (data:any, peers:DBPeer[]) => this.msForward(data, peers)) + this.on("identity", (data: any, peers: DBPeer[]) => + this.idtyForward(data, peers) + ); + this.on("cert", (data: any, peers: DBPeer[]) => + this.certForward(data, peers) + ); + this.on("revocation", (data: any, peers: DBPeer[]) => + this.revocationForward(data, peers) + ); + this.on("block", (data: any, peers: DBPeer[]) => + this.blockForward(data, peers) + ); + this.on("transaction", (data: any, peers: DBPeer[]) => + this.txForward(data, peers) + ); + this.on("peer", (data: any, peers: DBPeer[]) => + this.peerForward(data, peers) + ); + this.on("membership", (data: any, peers: DBPeer[]) => + this.msForward(data, peers) + ); } - async blockForward(doc:any, peers:DBPeer[]) { + async blockForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (b:any) => BlockDTO.fromJSONObject(b), - type: 'Block', - uri: '/blockchain/block', - getObj: (block:any) => { + transform: (b: any) => BlockDTO.fromJSONObject(b), + type: "Block", + uri: "/blockchain/block", + getObj: (block: any) => { return { - "block": block.getRawSigned() + block: block.getRawSigned(), }; }, - getDocID: (block:any) => 'block#' + block.number - })(doc, peers) + getDocID: (block: any) => "block#" + block.number, + })(doc, peers); } - async idtyForward(doc:any, peers:DBPeer[]) { + async 
idtyForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => IdentityDTO.fromJSONObject(obj), - type: 'Identity', - uri: '/wot/add', - getObj: (idty:IdentityDTO) => { + transform: (obj: any) => IdentityDTO.fromJSONObject(obj), + type: "Identity", + uri: "/wot/add", + getObj: (idty: IdentityDTO) => { return { - "identity": idty.getRawSigned() + identity: idty.getRawSigned(), }; }, - getDocID: (idty:any) => 'with ' + (idty.certs || []).length + ' certs' - })(doc, peers) + getDocID: (idty: any) => "with " + (idty.certs || []).length + " certs", + })(doc, peers); } - async certForward(doc:any, peers:DBPeer[]) { + async certForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => CertificationDTO.fromJSONObject(obj), - type: 'Cert', - uri: '/wot/certify', - getObj: (cert:CertificationDTO) => { + transform: (obj: any) => CertificationDTO.fromJSONObject(obj), + type: "Cert", + uri: "/wot/certify", + getObj: (cert: CertificationDTO) => { return { - "cert": cert.getRawSigned() + cert: cert.getRawSigned(), }; }, - getDocID: (idty:any) => 'with ' + (idty.certs || []).length + ' certs' - })(doc, peers) + getDocID: (idty: any) => "with " + (idty.certs || []).length + " certs", + })(doc, peers); } - async revocationForward(doc:any, peers:DBPeer[]) { + async revocationForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (json:any) => RevocationDTO.fromJSONObject(json), - type: 'Revocation', - uri: '/wot/revoke', - getObj: (revocation:RevocationDTO) => { + transform: (json: any) => RevocationDTO.fromJSONObject(json), + type: "Revocation", + uri: "/wot/revoke", + getObj: (revocation: RevocationDTO) => { return { - "revocation": revocation.getRaw() + revocation: revocation.getRaw(), }; - } - })(doc, peers) + }, + })(doc, peers); } - async txForward(doc:any, peers:DBPeer[]) { + async txForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => TransactionDTO.fromJSONObject(obj), - type: 'Transaction', - uri: '/tx/process', - getObj: (transaction:TransactionDTO) => { + transform: (obj: any) => TransactionDTO.fromJSONObject(obj), + type: "Transaction", + uri: "/tx/process", + getObj: (transaction: TransactionDTO) => { return { - "transaction": transaction.getRaw(), - "signature": transaction.signature + transaction: transaction.getRaw(), + signature: transaction.signature, }; - } - })(doc, peers) + }, + })(doc, peers); } - async peerForward(doc:any, peers:DBPeer[]) { + async peerForward(doc: any, peers: DBPeer[]) { return this.forward({ - type: 'Peer', - uri: '/network/peering/peers', - transform: (obj:any) => PeerDTO.fromJSONObject(obj), - getObj: (peering:PeerDTO) => { + type: "Peer", + uri: "/network/peering/peers", + transform: (obj: any) => PeerDTO.fromJSONObject(obj), + getObj: (peering: PeerDTO) => { return { - peer: peering.getRawSigned() + peer: peering.getRawSigned(), }; }, - getDocID: (doc:PeerDTO) => doc.keyID() + '#' + doc.blockNumber(), + getDocID: (doc: PeerDTO) => doc.keyID() + "#" + doc.blockNumber(), withIsolation: WITH_ISOLATION, - onError: (resJSON:{ - peer: { - block:string, - endpoints:string[] + onError: ( + resJSON: { + peer: { + block: string; + endpoints: string[]; + }; + ucode?: number; + message?: string; }, - ucode?:number, - message?:string - }, peering:any, to:any) => { - if (resJSON.ucode !== undefined && resJSON.ucode !== CommonConstants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode) { - if (resJSON.ucode == CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode || 
resJSON.ucode == constants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode) { - return Promise.resolve() + peering: any, + to: any + ) => { + if ( + resJSON.ucode !== undefined && + resJSON.ucode !== + CommonConstants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode + ) { + if ( + resJSON.ucode == + CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode || + resJSON.ucode == + constants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode + ) { + return Promise.resolve(); } else { - throw Error(resJSON.message) + throw Error(resJSON.message); } } else { // Handle possibly outdated peering document - const sentPeer = PeerDTO.fromJSONObject(peering) - if (PeerDTO.blockNumber(resJSON.peer.block) > sentPeer.blockNumber()) { + const sentPeer = PeerDTO.fromJSONObject(peering); + if ( + PeerDTO.blockNumber(resJSON.peer.block) > sentPeer.blockNumber() + ) { this.push({ outdated: true, peer: resJSON.peer }); - logger.warn('Outdated peer document (%s) sent to %s', sentPeer.keyID() + '#' + sentPeer.blockNumber(), to); + logger.warn( + "Outdated peer document (%s) sent to %s", + sentPeer.keyID() + "#" + sentPeer.blockNumber(), + to + ); } - return Promise.resolve() + return Promise.resolve(); } - } - })(doc, peers) + }, + })(doc, peers); } - async msForward(doc:any, peers:DBPeer[]) { + async msForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => MembershipDTO.fromJSONObject(obj), - type: 'Membership', - uri: '/blockchain/membership', - getObj: (membership:MembershipDTO) => { + transform: (obj: any) => MembershipDTO.fromJSONObject(obj), + type: "Membership", + uri: "/blockchain/membership", + getObj: (membership: MembershipDTO) => { return { - "membership": membership.getRaw(), - "signature": membership.signature + membership: membership.getRaw(), + signature: membership.signature, }; - } - })(doc, peers) + }, + })(doc, peers); } - _write(obj:any, enc:any, done:any) { - this.emit(obj.type, obj.obj, obj.peers) - done() + _write(obj: any, enc: any, done: any) { + this.emit(obj.type, obj.obj, obj.peers); + done(); } - sendBlock(toPeer:any, block:any) { - return this.blockForward(block, [toPeer]) + sendBlock(toPeer: any, block: any) { + return this.blockForward(block, [toPeer]); } - sendPeering(toPeer:any, peer:any) { - return this.peerForward(peer, [toPeer]) + sendPeering(toPeer: any, peer: any) { + return this.peerForward(peer, [toPeer]); } - forward(params:any) { - return async (doc:any, peers:DBPeer[]) => { + forward(params: any) { + return async (doc: any, peers: DBPeer[]) => { try { - if(!params.withIsolation || !(this.conf && this.conf.isolate)) { + if (!params.withIsolation || !(this.conf && this.conf.isolate)) { let theDoc = params.transform ? 
params.transform(doc) : doc; if (params.getDocID) { - logger.info('POST %s %s to %s peers', params.type, params.getDocID(theDoc), peers.length) + logger.info( + "POST %s %s to %s peers", + params.type, + params.getDocID(theDoc), + peers.length + ); } else { - logger.info('POST %s to %s peers', params.type, peers.length); + logger.info("POST %s to %s peers", params.type, peers.length); } // Parallel treatment for superfast propagation - await Promise.all(peers.map(async (p) => { - let peer = PeerDTO.fromJSONObject(p) - const namedURL = peer.getNamedURL(); - try { - await this.post(peer, params.uri, params.getObj(theDoc)) - } catch (e) { - if (params.onError) { - try { - const json = JSON.parse(e.body); - await params.onError(json, doc, namedURL) - } catch (ex) { - logger.warn('Could not reach %s, reason: %s', namedURL, (ex && ex.message || ex)) + await Promise.all( + peers.map(async (p) => { + let peer = PeerDTO.fromJSONObject(p); + const namedURL = peer.getNamedURL(); + try { + await this.post(peer, params.uri, params.getObj(theDoc)); + } catch (e) { + if (params.onError) { + try { + const json = JSON.parse(e.body); + await params.onError(json, doc, namedURL); + } catch (ex) { + logger.warn( + "Could not reach %s, reason: %s", + namedURL, + (ex && ex.message) || ex + ); + } } } - } - })) + }) + ); } else { - logger.debug('[ISOLATE] Prevent --> new Peer to be sent to %s peer(s)', peers.length); + logger.debug( + "[ISOLATE] Prevent --> new Peer to be sent to %s peer(s)", + peers.length + ); } } catch (err) { logger.error(err); } - } + }; } - post(peer:any, uri:string, data:any) { + post(peer: any, uri: string, data: any) { if (!peer.isReachable()) { return Promise.resolve(); } return new Promise((resolve, reject) => { - const postReq = request.post({ - "uri": protocol(peer.getPort()) + '://' + peer.getURL() + uri, - "timeout": this.timeout || constants.NETWORK.DEFAULT_TIMEOUT - }, (err:any, res:any) => { - if (err) { - this.push({ unreachable: true, peer: { pubkey: peer.pubkey }}); - logger.warn(err.message || err); - } - if (res && res.statusCode != 200) { - return reject(res); + const postReq = request.post( + { + uri: protocol(peer.getPort()) + "://" + peer.getURL() + uri, + timeout: this.timeout || constants.NETWORK.DEFAULT_TIMEOUT, + }, + (err: any, res: any) => { + if (err) { + this.push({ unreachable: true, peer: { pubkey: peer.pubkey } }); + logger.warn(err.message || err); + } + if (res && res.statusCode != 200) { + return reject(res); + } + resolve(res); } - resolve(res); - }) + ); postReq.form(data); }); } } -function protocol(port:number) { - return port == 443 ? 'https' : 'http'; +function protocol(port: number) { + return port == 443 ? "https" : "http"; } diff --git a/app/lib/streams/router.ts b/app/lib/streams/router.ts index d5a88d46b883d934588cf2a138466ff11c488df6..c6065c81b350075d2d4c0de1a847248ecbba630f 100644 --- a/app/lib/streams/router.ts +++ b/app/lib/streams/router.ts @@ -11,116 +11,153 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
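Because _write simply re-emits whatever is pushed into it under the event name given by obj.type, upstream code feeds the Multicaster with plain { type, obj, peers } objects matching the handlers registered in the constructor. A minimal usage sketch — conf, blockJson, upPeers and onePeer are assumed to exist, and the timeout value is illustrative:

const caster = new Multicaster(conf, 5000); // 5 s request timeout (assumed value)
// Broadcast a freshly accepted block to a set of UP peers:
caster.write({ type: "block", obj: blockJson, peers: upPeers });
// Or forward a document to a single peer; the returned promise resolves
// once the POST attempts have completed (errors are logged, not rethrown):
caster.sendBlock(onePeer, blockJson);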
-import * as stream from "stream" -import {PeeringService} from "../../service/PeeringService" -import {FileDAL} from "../dal/fileDAL" -import {PeerDTO} from "../dto/PeerDTO" -import {DBPeer} from "../db/DBPeer" +import * as stream from "stream"; +import { PeeringService } from "../../service/PeeringService"; +import { FileDAL } from "../dal/fileDAL"; +import { PeerDTO } from "../dto/PeerDTO"; +import { DBPeer } from "../db/DBPeer"; -const constants = require('../constants'); +const constants = require("../constants"); export class RouterStream extends stream.Transform { + logger: any; + active = true; - logger:any - active = true + constructor(private peeringService: PeeringService, private dal: FileDAL) { + super({ objectMode: true }); - constructor(private peeringService:PeeringService, private dal:FileDAL) { - super({ objectMode: true }) - - this.logger = require('../logger').NewLogger('router') + this.logger = require("../logger").NewLogger("router"); } - - setConfDAL(theDAL:FileDAL) { - this.dal = theDAL + + setConfDAL(theDAL: FileDAL) { + this.dal = theDAL; } - setActive(shouldBeActive:boolean) { - this.active = shouldBeActive + setActive(shouldBeActive: boolean) { + this.active = shouldBeActive; } - async _write(obj:any, enc:any, done:any) { + async _write(obj: any, enc: any, done: any) { try { if (obj.joiners) { - await this.route('block', obj, () => this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)()); - } - else if (obj.revocation) { - await this.route('revocation', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.pubkey && obj.uid) { - await this.route('identity', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.idty_uid) { - await this.route('cert', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.userid) { - await this.route('membership', obj, () => this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)()); - } - else if (obj.inputs) { - await this.route('transaction', obj, () => this.getRandomInUPPeers(obj.issuers.indexOf(this.peeringService.pubkey) !== -1)()); - } - else if (obj.endpoints) { - await this.route('peer', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.from && obj.from == this.peeringService.pubkey) { + await this.route("block", obj, () => + this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)() + ); + } else if (obj.revocation) { + await this.route("revocation", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.pubkey && obj.uid) { + await this.route("identity", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.idty_uid) { + await this.route("cert", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.userid) { + await this.route("membership", obj, () => + this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)() + ); + } else if (obj.inputs) { + await this.route("transaction", obj, () => + this.getRandomInUPPeers( + obj.issuers.indexOf(this.peeringService.pubkey) !== -1 + )() + ); + } else if (obj.endpoints) { + await this.route("peer", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.from && obj.from == this.peeringService.pubkey) { // Route ONLY status emitted by this node - await 
this.route('status', obj, () => this.getTargeted(obj.to || obj.idty_issuer)()); - } - else if (obj.unreachable) { + await this.route("status", obj, () => + this.getTargeted(obj.to || obj.idty_issuer)() + ); + } else if (obj.unreachable) { await this.dal.setPeerDown(obj.peer.pubkey); - this.logger.info("Peer %s unreachable: now considered as DOWN.", obj.peer.pubkey); - } - else if (obj.outdated) { + this.logger.info( + "Peer %s unreachable: now considered as DOWN.", + obj.peer.pubkey + ); + } else if (obj.outdated) { await this.peeringService.handleNewerPeer(obj.peer); } } catch (e) { - if (e && e.uerr && e.uerr.ucode == constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode) { - this.logger.info('Newer peer document available on the network for local node'); + if ( + e && + e.uerr && + e.uerr.ucode == + constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode + ) { + this.logger.info( + "Newer peer document available on the network for local node" + ); } else { - this.logger.error("Routing error: %s", e && (e.stack || e.message || (e.uerr && e.uerr.message) || e)); + this.logger.error( + "Routing error: %s", + e && (e.stack || e.message || (e.uerr && e.uerr.message) || e) + ); } } done && done(); } - private async route(type:string, obj:any, getPeersFunc:any) { + private async route(type: string, obj: any, getPeersFunc: any) { if (!this.active) return; const peers = await getPeersFunc(); this.push({ - 'type': type, - 'obj': obj, - 'peers': (peers || []).map((p:any) => PeerDTO.fromJSONObject(p)) - }) + type: type, + obj: obj, + peers: (peers || []).map((p: any) => PeerDTO.fromJSONObject(p)), + }); } - private getRandomInUPPeers (isSelfDocument:boolean): () => Promise<any> { + private getRandomInUPPeers(isSelfDocument: boolean): () => Promise<any> { return this.getValidUpPeers([this.peeringService.pubkey], isSelfDocument); } - private getValidUpPeers (without:any, isSelfDocument:boolean) { + private getValidUpPeers(without: any, isSelfDocument: boolean) { return async () => { - let members:DBPeer[] = []; - let nonmembers:DBPeer[] = []; + let members: DBPeer[] = []; + let nonmembers: DBPeer[] = []; let peers = await this.dal.getRandomlyUPsWithout(without); // Peers with status UP for (const p of peers) { let isMember = await this.dal.isMember(p.pubkey); isMember ? members.push(p) : nonmembers.push(p); } - members = RouterStream.chooseXin(members, isSelfDocument ? constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS : constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO); - nonmembers = RouterStream.chooseXin(nonmembers, isSelfDocument ? constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS : constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO); - let mainRoutes:any = members.map((p:any) => (p.member = true) && p).concat(nonmembers); + members = RouterStream.chooseXin( + members, + isSelfDocument + ? constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS + : constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO + ); + nonmembers = RouterStream.chooseXin( + nonmembers, + isSelfDocument + ? 
constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS + : constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO + ); + let mainRoutes: any = members + .map((p: any) => (p.member = true) && p) + .concat(nonmembers); let mirrors = await this.peeringService.mirrorBMAEndpoints(); - const peersToRoute:DBPeer[] = mainRoutes.concat(mirrors.map((mep, index) => { return { - pubkey: 'M' + index + '_' + this.peeringService.pubkey, - endpoints: [mep] - }})); - return peersToRoute.map(p => PeerDTO.fromJSONObject(p)) - } + const peersToRoute: DBPeer[] = mainRoutes.concat( + mirrors.map((mep, index) => { + return { + pubkey: "M" + index + "_" + this.peeringService.pubkey, + endpoints: [mep], + }; + }) + ); + return peersToRoute.map((p) => PeerDTO.fromJSONObject(p)); + }; } /** - * Get the peer targeted by `to` argument, this node excluded (for not to loop on self). - */ - private getTargeted(to:string) { + * Get the peer targeted by `to` argument, this node excluded (for not to loop on self). + */ + private getTargeted(to: string) { return async () => { if (to == this.peeringService.pubkey) { return []; @@ -130,11 +167,14 @@ export class RouterStream extends stream.Transform { }; } - static chooseXin(peers:DBPeer[], max:number) { - const chosen:DBPeer[] = []; + static chooseXin(peers: DBPeer[], max: number) { + const chosen: DBPeer[] = []; const nbPeers = peers.length; for (let i = 0; i < Math.min(nbPeers, max); i++) { - const randIndex = Math.max(Math.floor(Math.random() * 10) - (10 - nbPeers) - i, 0); + const randIndex = Math.max( + Math.floor(Math.random() * 10) - (10 - nbPeers) - i, + 0 + ); chosen.push(peers[randIndex]); peers.splice(randIndex, 1); } diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts index 0571c860225826299f775245c8b6adbb259e672e..07184a1fbeaca5f1935fda8b348c20a3c5c5b3e7 100644 --- a/app/lib/system/directory.ts +++ b/app/lib/system/directory.ts @@ -11,164 +11,187 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as path from "path" -import * as fs from 'fs' -import {SQLiteDriver} from "../dal/drivers/SQLiteDriver" -import {CFSCore} from "../dal/fileDALs/CFSCore" -import {Wot} from "dubp-wot-rs" -import {FileDALParams} from "../dal/fileDAL" -import {cliprogram} from "../common-libs/programOptions" -import {LevelDBDriver} from "../dal/drivers/LevelDBDriver" -import {LevelUp} from 'levelup' -import {AbstractLevelDOWN} from 'abstract-leveldown' - -const opts = cliprogram -const qfs = require('q-io/fs'); +import * as path from "path"; +import * as fs from "fs"; +import { SQLiteDriver } from "../dal/drivers/SQLiteDriver"; +import { CFSCore } from "../dal/fileDALs/CFSCore"; +import { Wot } from "dubp-wot-rs"; +import { FileDALParams } from "../dal/fileDAL"; +import { cliprogram } from "../common-libs/programOptions"; +import { LevelDBDriver } from "../dal/drivers/LevelDBDriver"; +import { LevelUp } from "levelup"; +import { AbstractLevelDOWN } from "abstract-leveldown"; + +const opts = cliprogram; +const qfs = require("q-io/fs"); const DEFAULT_DOMAIN = "duniter_default"; -const DEFAULT_HOME = (process.platform == 'win32' ? process.env.USERPROFILE : process.env.HOME) + '/.config/duniter/'; +const DEFAULT_HOME = + (process.platform == "win32" ? 
process.env.USERPROFILE : process.env.HOME) + + "/.config/duniter/"; -const getLogsPath = (profile:string|undefined, directory:string|null = null) => path.join(getHomePath(profile, directory), 'duniter.log'); +const getLogsPath = ( + profile: string | undefined, + directory: string | null = null +) => path.join(getHomePath(profile, directory), "duniter.log"); -const getHomePath = (profile:string|null|undefined, directory:string|null = null) => path.normalize(getUserHome(directory) + '/') + getDomain(profile); +const getHomePath = ( + profile: string | null | undefined, + directory: string | null = null +) => path.normalize(getUserHome(directory) + "/") + getDomain(profile); -const getUserHome = (directory:string|null = null) => (directory || DEFAULT_HOME); +const getUserHome = (directory: string | null = null) => + directory || DEFAULT_HOME; -const getDomain = (profile:string|null = null) => (profile || DEFAULT_DOMAIN); +const getDomain = (profile: string | null = null) => profile || DEFAULT_DOMAIN; export interface FileSystem { - isMemoryOnly(): boolean - fsExists(file:string): Promise<boolean> - fsReadFile(file:string): Promise<string> - fsUnlink(file:string): Promise<boolean> - fsList(dir:string): Promise<string[]> - fsWrite(file:string, content:string): Promise<void> - fsMakeDirectory(dir:string): Promise<void> - fsRemoveTree(dir:string): Promise<void> - fsStreamTo(file: string, iterator: IterableIterator<string>): Promise<void> + isMemoryOnly(): boolean; + fsExists(file: string): Promise<boolean>; + fsReadFile(file: string): Promise<string>; + fsUnlink(file: string): Promise<boolean>; + fsList(dir: string): Promise<string[]>; + fsWrite(file: string, content: string): Promise<void>; + fsMakeDirectory(dir: string): Promise<void>; + fsRemoveTree(dir: string): Promise<void>; + fsStreamTo(file: string, iterator: IterableIterator<string>): Promise<void>; } class QioFileSystem implements FileSystem { - - constructor(private qio:any, private isMemory:boolean = false) {} + constructor(private qio: any, private isMemory: boolean = false) {} isMemoryOnly() { - return this.isMemory + return this.isMemory; } - async fsExists(file:string) { - return this.qio.exists(file) + async fsExists(file: string) { + return this.qio.exists(file); } - async fsReadFile(file:string) { - return this.qio.read(file) + async fsReadFile(file: string) { + return this.qio.read(file); } - async fsUnlink(file:string) { - return this.qio.remove(file) + async fsUnlink(file: string) { + return this.qio.remove(file); } async fsList(dir: string): Promise<string[]> { if (!(await this.qio.exists(dir))) { - return [] + return []; } - return this.qio.list(dir) + return this.qio.list(dir); } fsWrite(file: string, content: string): Promise<void> { - return this.qio.write(file, content) + return this.qio.write(file, content); } - async fsStreamTo(file: string, iterator: IterableIterator<string>): Promise<void> { + async fsStreamTo( + file: string, + iterator: IterableIterator<string> + ): Promise<void> { if (this.isMemory) { for (const line of iterator) { - await this.qio.append(file, line) + await this.qio.append(file, line); } } else { // Use NodeJS streams for faster writing - let wstream = fs.createWriteStream(file) + let wstream = fs.createWriteStream(file); await new Promise(async (res, rej) => { // When done, return - wstream.on('close', (err:any) => { - if (err) return rej(err) - res() - }) + wstream.on("close", (err: any) => { + if (err) return rej(err); + res(); + }); // Write each line for (const line of iterator) { - 
wstream.write(line + "\n") + wstream.write(line + "\n"); } // End the writing - wstream.end() - }) + wstream.end(); + }); } } fsMakeDirectory(dir: string): Promise<void> { - return this.qio.makeTree(dir) + return this.qio.makeTree(dir); } async fsRemoveTree(dir: string): Promise<void> { - return this.qio.removeTree(dir) + return this.qio.removeTree(dir); } } export const RealFS = (): FileSystem => { - return new QioFileSystem(qfs) -} + return new QioFileSystem(qfs); +}; -export const MemFS = (initialTree:{ [folder:string]: { [file:string]: string }} = {}): FileSystem => { - return new QioFileSystem(require('q-io/fs-mock')(initialTree), true) -} +export const MemFS = ( + initialTree: { [folder: string]: { [file: string]: string } } = {} +): FileSystem => { + return new QioFileSystem(require("q-io/fs-mock")(initialTree), true); +}; export const Directory = { - - DATA_FILES: ['mindex.db', 'c_mindex.db', 'iindex.db', 'cindex.db', 'sindex.db', 'wallet.db', 'dividend.db', 'txs.db', 'peers.db'], - WW_FILES: ['wotwizard-export_0.db','wotwizard-export.db'], - DATA_DIRS: ['data'], + DATA_FILES: [ + "mindex.db", + "c_mindex.db", + "iindex.db", + "cindex.db", + "sindex.db", + "wallet.db", + "dividend.db", + "txs.db", + "peers.db", + ], + WW_FILES: ["wotwizard-export_0.db", "wotwizard-export.db"], + DATA_DIRS: ["data"], INSTANCE_NAME: getDomain(opts.mdb), INSTANCE_HOME: getHomePath(opts.mdb, opts.home), - GET_FILE_PATH: (fileSubPath: string, home = '') => path.join(home || Directory.INSTANCE_HOME, fileSubPath), + GET_FILE_PATH: (fileSubPath: string, home = "") => + path.join(home || Directory.INSTANCE_HOME, fileSubPath), INSTANCE_HOMELOG_FILE: getLogsPath(opts.mdb, opts.home), - DUNITER_DB_NAME: 'duniter', - LOKI_DB_DIR: 'loki', - DATA_DIR: 'data', - OLD_WOTB_FILE: 'wotb.bin', - NEW_WOTB_FILE: 'wotb.bin.gz', + DUNITER_DB_NAME: "duniter", + LOKI_DB_DIR: "loki", + DATA_DIR: "data", + OLD_WOTB_FILE: "wotb.bin", + NEW_WOTB_FILE: "wotb.bin.gz", + getHome: (profile: string | null = null, directory: string | null = null) => + getHomePath(profile, directory), - getHome: (profile:string|null = null, directory:string|null = null) => getHomePath(profile, directory), - - getHomeDB: async (isMemory:boolean, dbName: string, home = '') => { + getHomeDB: async (isMemory: boolean, dbName: string, home = "") => { // Memory if (isMemory) { - return new SQLiteDriver(':memory:') + return new SQLiteDriver(":memory:"); } // Or file - const sqlitePath = Directory.GET_FILE_PATH(dbName, home) - return new SQLiteDriver(sqlitePath) + const sqlitePath = Directory.GET_FILE_PATH(dbName, home); + return new SQLiteDriver(sqlitePath); }, - getHomeLevelDB: async (isMemory:boolean, dbName: string, home = '') => { + getHomeLevelDB: async (isMemory: boolean, dbName: string, home = "") => { // Memory if (isMemory) { - return LevelDBDriver.newMemoryInstance() + return LevelDBDriver.newMemoryInstance(); } // Or file - const levelDbRootPath = path.join(home, 'data', 'leveldb') - await RealFS().fsMakeDirectory(levelDbRootPath) - const levelDBPath = path.join(levelDbRootPath, dbName) - return LevelDBDriver.newFileInstance(levelDBPath) + const levelDbRootPath = path.join(home, "data", "leveldb"); + await RealFS().fsMakeDirectory(levelDbRootPath); + const levelDBPath = path.join(levelDbRootPath, dbName); + return LevelDBDriver.newFileInstance(levelDBPath); }, - getHomeFS: async (isMemory:boolean, theHome:string, makeTree = true) => { - const home = theHome || Directory.getHome() + getHomeFS: async (isMemory: boolean, theHome: string, makeTree = 
true) => { + const home = theHome || Directory.getHome(); const params = { home: home, - fs: isMemory ? MemFS() : RealFS() - } + fs: isMemory ? MemFS() : RealFS(), + }; if (makeTree) { - await params.fs.fsMakeDirectory(home) + await params.fs.fsMakeDirectory(home); } return params; }, @@ -176,7 +199,7 @@ export const Directory = { getWotbFilePathSync: (home: string): string => { let datas_dir = path.join(home, Directory.DATA_DIR); let wotbFilePath = path.join(datas_dir, Directory.NEW_WOTB_FILE); - let existsFile = fs.existsSync(wotbFilePath) + let existsFile = fs.existsSync(wotbFilePath); if (!existsFile) { wotbFilePath = path.join(home, Directory.OLD_WOTB_FILE); } @@ -186,43 +209,43 @@ export const Directory = { getWotbFilePath: async (home: string): Promise<string> => { let datas_dir = path.join(home, Directory.DATA_DIR); let wotbFilePath = path.join(datas_dir, Directory.NEW_WOTB_FILE); - let existsFile = qfs.exists(wotbFilePath) + let existsFile = qfs.exists(wotbFilePath); if (!existsFile) { wotbFilePath = path.join(home, Directory.OLD_WOTB_FILE); } return wotbFilePath; }, - getHomeParams: async (isMemory:boolean, theHome:string): Promise<FileDALParams> => { - const params = await Directory.getHomeFS(isMemory, theHome) + getHomeParams: async ( + isMemory: boolean, + theHome: string + ): Promise<FileDALParams> => { + const params = await Directory.getHomeFS(isMemory, theHome); const home = params.home; let dbf: () => SQLiteDriver; let wotbf: () => Wot; if (isMemory) { - // Memory DB - dbf = () => new SQLiteDriver(':memory:'); - wotbf = () => new Wot(100) - + dbf = () => new SQLiteDriver(":memory:"); + wotbf = () => new Wot(100); } else { - // File DB - const sqlitePath = path.join(home, Directory.DUNITER_DB_NAME + '.db'); + const sqlitePath = path.join(home, Directory.DUNITER_DB_NAME + ".db"); dbf = () => new SQLiteDriver(sqlitePath); let wotbFilePath = await Directory.getWotbFilePath(home); - wotbf = () => new Wot(wotbFilePath) + wotbf = () => new Wot(wotbFilePath); } return { home: params.home, fs: params.fs, dbf, - wotbf - } + wotbf, + }; }, - createHomeIfNotExists: async (fileSystem:any, theHome:string) => { + createHomeIfNotExists: async (fileSystem: any, theHome: string) => { const fsHandler = new CFSCore(theHome, fileSystem); - return fsHandler.makeTree(''); - } -} + return fsHandler.makeTree(""); + }, +}; diff --git a/app/modules/bma/index.ts b/app/modules/bma/index.ts index 8d088641efd7e22cbb5bcfcdbed0903acefda81a..9bd2a3c05c8bd2c271094221db8b74ee0423de85 100644 --- a/app/modules/bma/index.ts +++ b/app/modules/bma/index.ts @@ -11,94 +11,121 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {NetworkConfDTO} from "../../lib/dto/ConfDTO" -import {Server} from "../../../server" -import * as stream from "stream" -import {BmaApi, Network, NetworkInterface} from "./lib/network" -import {Upnp, UpnpApi} from "./lib/upnp" -import {BMAConstants} from "./lib/constants" -import {BMALimitation} from "./lib/limiter" -import {PeerDTO} from "../../lib/dto/PeerDTO" -import {Underscore} from "../../lib/common-libs/underscore" -import {bma} from "./lib/bma" - -const Q = require('q'); -const rp = require('request-promise'); -const async = require('async'); -const dtos = require('./lib/dtos') -const inquirer = require('inquirer'); +import { NetworkConfDTO } from "../../lib/dto/ConfDTO"; +import { Server } from "../../../server"; +import * as stream from "stream"; +import { BmaApi, Network, NetworkInterface } from "./lib/network"; +import { Upnp, UpnpApi } from "./lib/upnp"; +import { BMAConstants } from "./lib/constants"; +import { BMALimitation } from "./lib/limiter"; +import { PeerDTO } from "../../lib/dto/PeerDTO"; +import { Underscore } from "../../lib/common-libs/underscore"; +import { bma } from "./lib/bma"; + +const Q = require("q"); +const rp = require("request-promise"); +const async = require("async"); +const dtos = require("./lib/dtos"); +const inquirer = require("inquirer"); let networkWizardDone = false; export const BmaDependency = { duniter: { - cliOptions: [ - { value: '--upnp', desc: 'Use UPnP to open remote port.' }, - { value: '--noupnp', desc: 'Do not use UPnP to open remote port.' }, - { value: '--bma', desc: 'Enables BMA API and its crawlers.' }, - { value: '--nobma', desc: 'Disables BMA API and its crawlers.' }, - { value: '--bma-with-crawler', desc: 'Enables BMA Crawler.' }, - { value: '--bma-without-crawler', desc: 'Disable BMA Crawler.' }, - { value: '-p, --port <port>', desc: 'Port to listen for requests', parser: (val:string) => parseInt(val) }, - { value: '--ipv4 <address>', desc: 'IPv4 interface to listen for requests' }, - { value: '--ipv6 <address>', desc: 'IPv6 interface to listen for requests' }, - { value: '--remoteh <host>', desc: 'Remote interface others may use to contact this node' }, - { value: '--remote4 <host>', desc: 'Remote interface for IPv4 access' }, - { value: '--remote6 <host>', desc: 'Remote interface for IPv6 access' }, - { value: '--remotep <port>', desc: 'Remote port others may use to contact this node' }, + { value: "--upnp", desc: "Use UPnP to open remote port." }, + { value: "--noupnp", desc: "Do not use UPnP to open remote port." }, + { value: "--bma", desc: "Enables BMA API and its crawlers." }, + { value: "--nobma", desc: "Disables BMA API and its crawlers." }, + { value: "--bma-with-crawler", desc: "Enables BMA Crawler." }, + { value: "--bma-without-crawler", desc: "Disable BMA Crawler." 
}, + { + value: "-p, --port <port>", + desc: "Port to listen for requests", + parser: (val: string) => parseInt(val), + }, + { + value: "--ipv4 <address>", + desc: "IPv4 interface to listen for requests", + }, + { + value: "--ipv6 <address>", + desc: "IPv6 interface to listen for requests", + }, + { + value: "--remoteh <host>", + desc: "Remote interface others may use to contact this node", + }, + { value: "--remote4 <host>", desc: "Remote interface for IPv4 access" }, + { value: "--remote6 <host>", desc: "Remote interface for IPv6 access" }, + { + value: "--remotep <port>", + desc: "Remote port others may use to contact this node", + }, ], wizard: { - - 'network': async (conf:NetworkConfDTO, program:any, logger:any) => { - await Q.nbind(networkConfiguration, null, conf, logger)() - conf.nobma = false + network: async (conf: NetworkConfDTO, program: any, logger: any) => { + await Q.nbind(networkConfiguration, null, conf, logger)(); + conf.nobma = false; networkWizardDone = true; }, - 'network-reconfigure': async (conf:NetworkConfDTO, program:any, logger:any) => { + "network-reconfigure": async ( + conf: NetworkConfDTO, + program: any, + logger: any + ) => { if (!networkWizardDone) { // This step can only be launched lonely - await Q.nbind(networkReconfiguration, null)(conf, program.autoconf, logger, program.noupnp); + await Q.nbind(networkReconfiguration, null)( + conf, + program.autoconf, + logger, + program.noupnp + ); } - } + }, }, config: { - - onLoading: async (conf:NetworkConfDTO, program:any, logger:any) => { - + onLoading: async (conf: NetworkConfDTO, program: any, logger: any) => { // If the usage of BMA hasn't been defined yet if (conf.nobma === undefined) { // Do we have an existing BMA conf? - if (conf.port !== undefined - || conf.ipv4 !== undefined - || conf.ipv6 !== undefined - || conf.remoteport !== undefined - || conf.remotehost !== undefined - || conf.remoteipv4 !== undefined - || conf.remoteipv6 !== undefined) { - conf.nobma = false + if ( + conf.port !== undefined || + conf.ipv4 !== undefined || + conf.ipv6 !== undefined || + conf.remoteport !== undefined || + conf.remotehost !== undefined || + conf.remoteipv4 !== undefined || + conf.remoteipv6 !== undefined + ) { + conf.nobma = false; } else { - conf.nobma = true + conf.nobma = true; } } // If bmaWithCrawler hasn't been defined yet - if (conf.bmaWithCrawler === undefined) { conf.bmaWithCrawler = false } + if (conf.bmaWithCrawler === undefined) { + conf.bmaWithCrawler = false; + } - if (program.port !== undefined) conf.port = parseInt(program.port) + if (program.port !== undefined) conf.port = parseInt(program.port); if (program.ipv4 !== undefined) conf.ipv4 = program.ipv4; if (program.ipv6 !== undefined) conf.ipv6 = program.ipv6; if (program.remoteh !== undefined) conf.remotehost = program.remoteh; if (program.remote4 !== undefined) conf.remoteipv4 = program.remote4; if (program.remote6 !== undefined) conf.remoteipv6 = program.remote6; - if (program.remotep !== undefined) conf.remoteport = parseInt(program.remotep) - if (program.bma !== undefined) conf.nobma = false - if (program.nobma !== undefined) conf.nobma = true - if (program.bmaWithCrawler !== undefined) conf.bmaWithCrawler = true - if (program.bmaWithoutCrawler !== undefined) conf.bmaWithCrawler = false + if (program.remotep !== undefined) + conf.remoteport = parseInt(program.remotep); + if (program.bma !== undefined) conf.nobma = false; + if (program.nobma !== undefined) conf.nobma = true; + if (program.bmaWithCrawler !== undefined) conf.bmaWithCrawler = true; 
+ if (program.bmaWithoutCrawler !== undefined) + conf.bmaWithCrawler = false; if (!conf.ipv4) delete conf.ipv4; if (!conf.ipv6) delete conf.ipv6; @@ -110,7 +137,10 @@ export const BmaDependency = { conf.remoteipv6 = conf.ipv6; } // Fix #807: default remoteipv4: same as local ipv4 if no removeipv4 is not defined AND no DNS nor IPv6 - if (conf.ipv4 && !(conf.remoteipv4 || conf.remotehost || conf.remoteipv6)) { + if ( + conf.ipv4 && + !(conf.remoteipv4 || conf.remotehost || conf.remoteipv6) + ) { conf.remoteipv4 = conf.ipv4; } if (!conf.remoteport && conf.port) { @@ -119,7 +149,12 @@ export const BmaDependency = { // Network autoconf if (program.autoconf) { - await Q.nbind(networkReconfiguration, null)(conf, true, logger, program.noupnp); + await Q.nbind(networkReconfiguration, null)( + conf, + true, + logger, + program.noupnp + ); } // Default value @@ -127,7 +162,7 @@ export const BmaDependency = { conf.upnp = true; // Defaults to true } if (!conf.dos) { - conf.dos = { whitelist: ['127.0.0.1'] }; + conf.dos = { whitelist: ["127.0.0.1"] }; conf.dos.maxcount = 50; conf.dos.burst = 20; conf.dos.limit = conf.dos.burst * 2; @@ -135,7 +170,7 @@ export const BmaDependency = { conf.dos.checkinterval = 1; conf.dos.trustProxy = true; conf.dos.includeUserAgent = true; - conf.dos.errormessage = 'Error'; + conf.dos.errormessage = "Error"; conf.dos.testmode = false; conf.dos.silent = false; conf.dos.silentStart = false; @@ -151,82 +186,98 @@ export const BmaDependency = { } }, - beforeSave: async (conf:NetworkConfDTO, program:any) => { + beforeSave: async (conf: NetworkConfDTO, program: any) => { if (!conf.ipv4) delete conf.ipv4; if (!conf.ipv6) delete conf.ipv6; if (!conf.remoteipv4) delete conf.remoteipv4; if (!conf.remoteipv6) delete conf.remoteipv6; conf.dos.whitelist = Underscore.uniq(conf.dos.whitelist); - } + }, }, service: { - input: (server:Server, conf:NetworkConfDTO, logger:any) => { + input: (server: Server, conf: NetworkConfDTO, logger: any) => { // Configuration errors if (!conf.nobma) { - if(!conf.ipv4 && !conf.ipv6){ - throw new Error("BMA: no interface to listen to. Provide ipv4/ipv6 interface or deactivate BMA"); + if (!conf.ipv4 && !conf.ipv6) { + throw new Error( + "BMA: no interface to listen to. 
Provide ipv4/ipv6 interface or deactivate BMA" + ); } - if(!conf.remoteipv4 && !conf.remoteipv6 && !conf.remotehost){ - throw new Error('BMA: no interface for remote contact.'); + if (!conf.remoteipv4 && !conf.remoteipv6 && !conf.remotehost) { + throw new Error("BMA: no interface for remote contact."); } if (!conf.remoteport) { - throw new Error('BMA: no port for remote contact.'); + throw new Error("BMA: no port for remote contact."); } } if (!conf.nobma) { - server.addEndpointsDefinitions(() => Promise.resolve(getEndpoint(conf))) - server.addWrongEndpointFilter((endpoints:string[]) => getWrongEndpoints(endpoints, server.conf.pair.pub)) + server.addEndpointsDefinitions(() => + Promise.resolve(getEndpoint(conf)) + ); + server.addWrongEndpointFilter((endpoints: string[]) => + getWrongEndpoints(endpoints, server.conf.pair.pub) + ); } - return new BMAPI(server, conf, logger) - } + return new BMAPI(server, conf, logger); + }, }, methods: { noLimit: () => BMALimitation.noLimit(), - bma: async (server: Server, interfaces: (NetworkInterface[] | null) = null, httpLogs = false, logger?: any) => bma(server, interfaces, httpLogs, logger), + bma: async ( + server: Server, + interfaces: NetworkInterface[] | null = null, + httpLogs = false, + logger?: any + ) => bma(server, interfaces, httpLogs, logger), dtos, - getMainEndpoint: (conf:NetworkConfDTO) => Promise.resolve(getEndpoint(conf)) - } - } -} - -async function getWrongEndpoints(endpoints:string[], selfPubkey:string) { - const wrongs:string[] = [] - await Promise.all(endpoints.map(async (theEndpoint:string) => { - let remote = PeerDTO.endpoint2host(theEndpoint) - try { - // We test only BMA APIs, because other may exist and we cannot judge against them - if (theEndpoint.startsWith('BASIC_MERKLED_API')) { - let answer = await rp('http://' + remote + '/network/peering', { json: true }); - if (!answer || answer.pubkey != selfPubkey) { - throw Error("Not same pubkey as local instance"); + getMainEndpoint: (conf: NetworkConfDTO) => + Promise.resolve(getEndpoint(conf)), + }, + }, +}; + +async function getWrongEndpoints(endpoints: string[], selfPubkey: string) { + const wrongs: string[] = []; + await Promise.all( + endpoints.map(async (theEndpoint: string) => { + let remote = PeerDTO.endpoint2host(theEndpoint); + try { + // We test only BMA APIs, because other may exist and we cannot judge against them + if (theEndpoint.startsWith("BASIC_MERKLED_API")) { + let answer = await rp("http://" + remote + "/network/peering", { + json: true, + }); + if (!answer || answer.pubkey != selfPubkey) { + throw Error("Not same pubkey as local instance"); + } } + } catch (e) { + wrongs.push(theEndpoint); } - } catch (e) { - wrongs.push(theEndpoint) - } - })) - return wrongs + }) + ); + return wrongs; } export class BMAPI extends stream.Transform { - // Public http interface - private bmapi:BmaApi - private upnpAPI:UpnpApi + private bmapi: BmaApi; + private upnpAPI: UpnpApi; constructor( - private server:Server, - private conf:NetworkConfDTO, - private logger:any) { - super({ objectMode: true }) + private server: Server, + private conf: NetworkConfDTO, + private logger: any + ) { + super({ objectMode: true }); } startService = async () => { if (this.conf.nobma) { // Disable BMA - return Promise.resolve() + return Promise.resolve(); } this.bmapi = await bma(this.server, null, this.conf.httplogs, this.logger); await this.bmapi.openConnections(); @@ -239,7 +290,12 @@ export class BMAPI extends stream.Transform { } if (this.server.conf.upnp) { try { - this.upnpAPI = await 
Upnp(this.server.conf.port, this.server.conf.remoteport, this.logger, this.server.conf); + this.upnpAPI = await Upnp( + this.server.conf.port, + this.server.conf.remoteport, + this.logger, + this.server.conf + ); this.upnpAPI.startRegular(); const gateway = await this.upnpAPI.findGateway(); if (gateway) { @@ -251,12 +307,12 @@ export class BMAPI extends stream.Transform { this.logger.warn(e); } } - } + }; stopService = async () => { if (this.conf.nobma) { // Disable BMA - return Promise.resolve() + return Promise.resolve(); } if (this.bmapi) { await this.bmapi.closeConnections(); @@ -264,117 +320,154 @@ export class BMAPI extends stream.Transform { if (this.upnpAPI) { this.upnpAPI.stopRegular(); } - } + }; } -function getEndpoint(theConf:NetworkConfDTO) { - let endpoint = 'BASIC_MERKLED_API'; +function getEndpoint(theConf: NetworkConfDTO) { + let endpoint = "BASIC_MERKLED_API"; if (theConf.remoteport && theConf.remoteport == 443) { - endpoint = 'BMAS'; + endpoint = "BMAS"; } if (theConf.remotehost) { if (theConf.remotehost.match(BMAConstants.HOST_ONION_REGEX)) { - endpoint = 'BMATOR'; + endpoint = "BMATOR"; } - endpoint += ' ' + theConf.remotehost; + endpoint += " " + theConf.remotehost; } if (theConf.remoteipv4) { - endpoint += ' ' + theConf.remoteipv4; + endpoint += " " + theConf.remoteipv4; } if (theConf.remoteipv6) { - endpoint += ' ' + theConf.remoteipv6; + endpoint += " " + theConf.remoteipv6; } if (theConf.remoteport) { - endpoint += ' ' + theConf.remoteport; + endpoint += " " + theConf.remoteport; } return endpoint; } -export function networkReconfiguration(conf:NetworkConfDTO, autoconf:boolean, logger:any, noupnp:boolean, done:any) { - async.waterfall([ - upnpResolve.bind(null, noupnp, logger), - function(upnpSuccess:boolean, upnpConf:NetworkConfDTO, next:any) { - - // Default values - conf.port = conf.port || BMAConstants.DEFAULT_PORT; - conf.remoteport = conf.remoteport || BMAConstants.DEFAULT_PORT; - - const localOperations = getLocalNetworkOperations(conf, autoconf); - const remoteOpertions = getRemoteNetworkOperations(conf, upnpConf.remoteipv4); - const dnsOperations = getHostnameOperations(conf, logger, autoconf); - const useUPnPOperations = getUseUPnPOperations(conf, logger, autoconf); - - if (upnpSuccess) { - Underscore.extend(conf, upnpConf) - const local = [conf.ipv4, conf.port].join(':'); - const remote = [conf.remoteipv4, conf.remoteport].join(':'); - if (autoconf) { - conf.ipv6 = conf.remoteipv6 = Network.getBestLocalIPv6(); - logger.info('IPv6: %s', conf.ipv6 || ""); - logger.info('Local IPv4: %s', local); - logger.info('Remote IPv4: %s', remote); - // Use proposed local + remote with UPnP binding - return async.waterfall(useUPnPOperations - .concat(dnsOperations), next); - } - choose("UPnP is available: duniter will be bound: \n from " + local + "\n to " + remote + "\nKeep this configuration?", true, - function () { - // Yes: not network changes +export function networkReconfiguration( + conf: NetworkConfDTO, + autoconf: boolean, + logger: any, + noupnp: boolean, + done: any +) { + async.waterfall( + [ + upnpResolve.bind(null, noupnp, logger), + function (upnpSuccess: boolean, upnpConf: NetworkConfDTO, next: any) { + // Default values + conf.port = conf.port || BMAConstants.DEFAULT_PORT; + conf.remoteport = conf.remoteport || BMAConstants.DEFAULT_PORT; + + const localOperations = getLocalNetworkOperations(conf, autoconf); + const remoteOpertions = getRemoteNetworkOperations( + conf, + upnpConf.remoteipv4 + ); + const dnsOperations = getHostnameOperations(conf, 
logger, autoconf); + const useUPnPOperations = getUseUPnPOperations(conf, logger, autoconf); + + if (upnpSuccess) { + Underscore.extend(conf, upnpConf); + const local = [conf.ipv4, conf.port].join(":"); + const remote = [conf.remoteipv4, conf.remoteport].join(":"); + if (autoconf) { conf.ipv6 = conf.remoteipv6 = Network.getBestLocalIPv6(); - async.waterfall(useUPnPOperations - .concat(dnsOperations), next); - }, - function () { - // No: want to change - async.waterfall( - localOperations - .concat(remoteOpertions) - .concat(useUPnPOperations) - .concat(dnsOperations), next); - }); - } else { - conf.upnp = false; - if (autoconf) { - // Yes: local configuration = remote configuration - return async.waterfall( - localOperations - .concat(getHostnameOperations(conf, logger, autoconf)) - .concat([function (confDone:any) { - conf.remoteipv4 = conf.ipv4; - conf.remoteipv6 = conf.ipv6; - conf.remoteport = conf.port; - logger.info('Local & Remote IPv4: %s', [conf.ipv4, conf.port].join(':')); - logger.info('Local & Remote IPv6: %s', [conf.ipv6, conf.port].join(':')); - confDone(); - }]), next); - } - choose("UPnP is *not* available: is this a public server (like a VPS)?", true, - function () { + logger.info("IPv6: %s", conf.ipv6 || ""); + logger.info("Local IPv4: %s", local); + logger.info("Remote IPv4: %s", remote); + // Use proposed local + remote with UPnP binding + return async.waterfall( + useUPnPOperations.concat(dnsOperations), + next + ); + } + choose( + "UPnP is available: duniter will be bound: \n from " + + local + + "\n to " + + remote + + "\nKeep this configuration?", + true, + function () { + // Yes: not network changes + conf.ipv6 = conf.remoteipv6 = Network.getBestLocalIPv6(); + async.waterfall(useUPnPOperations.concat(dnsOperations), next); + }, + function () { + // No: want to change + async.waterfall( + localOperations + .concat(remoteOpertions) + .concat(useUPnPOperations) + .concat(dnsOperations), + next + ); + } + ); + } else { + conf.upnp = false; + if (autoconf) { // Yes: local configuration = remote configuration - async.waterfall( - localOperations - .concat(getHostnameOperations(conf, logger)) - .concat([function(confDone:any) { - conf.remoteipv4 = conf.ipv4; - conf.remoteipv6 = conf.ipv6; - conf.remoteport = conf.port; - confDone(); - }]), next); - }, - function () { - // No: must give all details - async.waterfall( + return async.waterfall( localOperations - .concat(remoteOpertions) - .concat(dnsOperations), next); - }); - } - } - ], done); + .concat(getHostnameOperations(conf, logger, autoconf)) + .concat([ + function (confDone: any) { + conf.remoteipv4 = conf.ipv4; + conf.remoteipv6 = conf.ipv6; + conf.remoteport = conf.port; + logger.info( + "Local & Remote IPv4: %s", + [conf.ipv4, conf.port].join(":") + ); + logger.info( + "Local & Remote IPv6: %s", + [conf.ipv6, conf.port].join(":") + ); + confDone(); + }, + ]), + next + ); + } + choose( + "UPnP is *not* available: is this a public server (like a VPS)?", + true, + function () { + // Yes: local configuration = remote configuration + async.waterfall( + localOperations + .concat(getHostnameOperations(conf, logger)) + .concat([ + function (confDone: any) { + conf.remoteipv4 = conf.ipv4; + conf.remoteipv6 = conf.ipv6; + conf.remoteport = conf.port; + confDone(); + }, + ]), + next + ); + }, + function () { + // No: must give all details + async.waterfall( + localOperations.concat(remoteOpertions).concat(dnsOperations), + next + ); + } + ); + } + }, + ], + done + ); } - -async function upnpResolve(noupnp:boolean, 
logger:any, done:any) { +async function upnpResolve(noupnp: boolean, logger: any, done: any) { try { let conf = await Network.upnpConf(noupnp, logger); done(null, true, conf); @@ -383,35 +476,44 @@ async function upnpResolve(noupnp:boolean, logger:any, done:any) { } } -function networkConfiguration(conf:NetworkConfDTO, logger:any, done:any) { - async.waterfall([ - upnpResolve.bind(null, !conf.upnp, logger), - function(upnpSuccess:boolean, upnpConf:NetworkConfDTO, next:any) { - - let operations = getLocalNetworkOperations(conf) - .concat(getRemoteNetworkOperations(conf, upnpConf.remoteipv4)); - - if (upnpSuccess) { - operations = operations.concat(getUseUPnPOperations(conf, logger)); - } +function networkConfiguration(conf: NetworkConfDTO, logger: any, done: any) { + async.waterfall( + [ + upnpResolve.bind(null, !conf.upnp, logger), + function (upnpSuccess: boolean, upnpConf: NetworkConfDTO, next: any) { + let operations = getLocalNetworkOperations(conf).concat( + getRemoteNetworkOperations(conf, upnpConf.remoteipv4) + ); + + if (upnpSuccess) { + operations = operations.concat(getUseUPnPOperations(conf, logger)); + } - async.waterfall(operations.concat(getHostnameOperations(conf, logger, false)), next); - } - ], done); + async.waterfall( + operations.concat(getHostnameOperations(conf, logger, false)), + next + ); + }, + ], + done + ); } -function getLocalNetworkOperations(conf:NetworkConfDTO, autoconf:boolean = false) { +function getLocalNetworkOperations( + conf: NetworkConfDTO, + autoconf: boolean = false +) { return [ - function (next:any){ + function (next: any) { const osInterfaces = Network.listInterfaces(); const interfaces = [{ name: "None", value: null }]; - osInterfaces.forEach(function(netInterface:any){ + osInterfaces.forEach(function (netInterface: any) { const addresses = netInterface.addresses; - const filtered = Underscore.where(addresses, {family: 'IPv4'}); - filtered.forEach(function(addr:any){ + const filtered = Underscore.where(addresses, { family: "IPv4" }); + filtered.forEach(function (addr: any) { interfaces.push({ - name: [netInterface.name, addr.address].join(' '), - value: addr.address + name: [netInterface.name, addr.address].join(" "), + value: addr.address, }); }); }); @@ -419,40 +521,43 @@ function getLocalNetworkOperations(conf:NetworkConfDTO, autoconf:boolean = false conf.ipv4 = Network.getBestLocalIPv4(); return next(); } - inquirer.prompt([{ - type: "list", - name: "ipv4", - message: "IPv4 interface", - default: conf.ipv4, - choices: interfaces - }]).then((answers:any) => { - conf.ipv4 = answers.ipv4; - next(); - }); + inquirer + .prompt([ + { + type: "list", + name: "ipv4", + message: "IPv4 interface", + default: conf.ipv4, + choices: interfaces, + }, + ]) + .then((answers: any) => { + conf.ipv4 = answers.ipv4; + next(); + }); }, - function (next:any){ + function (next: any) { const osInterfaces = Network.listInterfaces(); - const interfaces:any = [{ name: "None", value: null }]; - osInterfaces.forEach(function(netInterface:any){ + const interfaces: any = [{ name: "None", value: null }]; + osInterfaces.forEach(function (netInterface: any) { const addresses = netInterface.addresses; - const filtered = Underscore.where(addresses, { family: 'IPv6' }); - filtered.forEach(function(addr:any){ - let address = addr.address - if (addr.scopeid) - address += "%" + netInterface.name + const filtered = Underscore.where(addresses, { family: "IPv6" }); + filtered.forEach(function (addr: any) { + let address = addr.address; + if (addr.scopeid) address += "%" + 
netInterface.name; let nameSuffix = ""; if (addr.scopeid == 0 && !addr.internal) { nameSuffix = " (Global)"; } interfaces.push({ - name: [netInterface.name, address, nameSuffix].join(' '), + name: [netInterface.name, address, nameSuffix].join(" "), internal: addr.internal, scopeid: addr.scopeid, - value: address + value: address, }); }); }); - interfaces.sort((addr1:any, addr2:any) => { + interfaces.sort((addr1: any, addr2: any) => { if (addr1.value === null) return -1; if (addr1.internal && !addr2.internal) return 1; if (addr1.scopeid && !addr2.scopeid) return 1; @@ -464,41 +569,50 @@ function getLocalNetworkOperations(conf:NetworkConfDTO, autoconf:boolean = false if (autoconf) { return next(); } - inquirer.prompt([{ - type: "list", - name: "ipv6", - message: "IPv6 interface", - default: conf.ipv6, - choices: interfaces - }]).then((answers:any) => { - conf.ipv6 = conf.remoteipv6 = answers.ipv6; - next(); - }); + inquirer + .prompt([ + { + type: "list", + name: "ipv6", + message: "IPv6 interface", + default: conf.ipv6, + choices: interfaces, + }, + ]) + .then((answers: any) => { + conf.ipv6 = conf.remoteipv6 = answers.ipv6; + next(); + }); }, - autoconf ? (done:any) => { - conf.port = Network.getRandomPort(conf); - done(); - } : async.apply(simpleInteger, "Port", "port", conf) + autoconf + ? (done: any) => { + conf.port = Network.getRandomPort(conf); + done(); + } + : async.apply(simpleInteger, "Port", "port", conf), ]; } -function getRemoteNetworkOperations(conf:NetworkConfDTO, remoteipv4:string|null) { +function getRemoteNetworkOperations( + conf: NetworkConfDTO, + remoteipv4: string | null +) { return [ - function (next:any){ + function (next: any) { if (!conf.ipv4) { conf.remoteipv4 = null; return next(null, {}); } - const choices:any = [{ name: "None", value: null }]; + const choices: any = [{ name: "None", value: null }]; // Local interfaces const osInterfaces = Network.listInterfaces(); - osInterfaces.forEach(function(netInterface:any){ + osInterfaces.forEach(function (netInterface: any) { const addresses = netInterface.addresses; - const filtered = Underscore.where(addresses, {family: 'IPv4'}); - filtered.forEach(function(addr:any){ + const filtered = Underscore.where(addresses, { family: "IPv4" }); + filtered.forEach(function (addr: any) { choices.push({ - name: [netInterface.name, addr.address].join(' '), - value: addr.address + name: [netInterface.name, addr.address].join(" "), + value: addr.address, }); }); }); @@ -509,38 +623,55 @@ function getRemoteNetworkOperations(conf:NetworkConfDTO, remoteipv4:string|null) choices.push({ name: remoteipv4, value: remoteipv4 }); } choices.push({ name: "Enter new one", value: "new" }); - inquirer.prompt([{ - type: "list", - name: "remoteipv4", - message: "Remote IPv4", - default: conf.remoteipv4 || conf.ipv4 || null, - choices: choices, - validate: function (input:any) { - return !!(input && input.toString().match(BMAConstants.IPV4_REGEXP)); - } - }]).then((answers:any) => { - if (answers.remoteipv4 == "new") { - inquirer.prompt([{ - type: "input", + inquirer + .prompt([ + { + type: "list", name: "remoteipv4", message: "Remote IPv4", - default: conf.remoteipv4 || conf.ipv4, - validate: function (input:any) { - return !!(input && input.toString().match(BMAConstants.IPV4_REGEXP)); - } - }]).then((answers:any) => next(null, answers)); - } else { - next(null, answers); - } - }); + default: conf.remoteipv4 || conf.ipv4 || null, + choices: choices, + validate: function (input: any) { + return !!( + input && 
input.toString().match(BMAConstants.IPV4_REGEXP) + ); + }, + }, + ]) + .then((answers: any) => { + if (answers.remoteipv4 == "new") { + inquirer + .prompt([ + { + type: "input", + name: "remoteipv4", + message: "Remote IPv4", + default: conf.remoteipv4 || conf.ipv4, + validate: function (input: any) { + return !!( + input && input.toString().match(BMAConstants.IPV4_REGEXP) + ); + }, + }, + ]) + .then((answers: any) => next(null, answers)); + } else { + next(null, answers); + } + }); }, - async function (answers:any, next:any){ + async function (answers: any, next: any) { conf.remoteipv4 = answers.remoteipv4; try { if (conf.remoteipv4 || conf.remotehost) { await new Promise((resolve, reject) => { - const getPort = async.apply(simpleInteger, "Remote port", "remoteport", conf); - getPort((err:any) => { + const getPort = async.apply( + simpleInteger, + "Remote port", + "remoteport", + conf + ); + getPort((err: any) => { if (err) return reject(err); resolve(); }); @@ -552,81 +683,130 @@ function getRemoteNetworkOperations(conf:NetworkConfDTO, remoteipv4:string|null) } catch (e) { next(e); } - } + }, ]; } -function getHostnameOperations(conf:NetworkConfDTO, logger:any, autoconf = false) { - return [function(next:any) { - if (!conf.ipv4) { - conf.remotehost = null; - return next(); - } - if (autoconf) { - logger.info('DNS: %s', conf.remotehost || 'No'); - return next(); - } - choose("Does this server has a DNS name?", !!conf.remotehost, - function() { - // Yes - simpleValue("DNS name:", "remotehost", "", conf, function(){ return true; }, next); - }, - function() { +function getHostnameOperations( + conf: NetworkConfDTO, + logger: any, + autoconf = false +) { + return [ + function (next: any) { + if (!conf.ipv4) { conf.remotehost = null; - next(); - }); - }]; + return next(); + } + if (autoconf) { + logger.info("DNS: %s", conf.remotehost || "No"); + return next(); + } + choose( + "Does this server has a DNS name?", + !!conf.remotehost, + function () { + // Yes + simpleValue( + "DNS name:", + "remotehost", + "", + conf, + function () { + return true; + }, + next + ); + }, + function () { + conf.remotehost = null; + next(); + } + ); + }, + ]; } -function getUseUPnPOperations(conf:NetworkConfDTO, logger:any, autoconf:boolean = false) { - return [function(next:any) { - if (!conf.ipv4) { - conf.upnp = false; - return next(); - } - if (autoconf) { - logger.info('UPnP: %s', 'Yes'); - conf.upnp = true; - return next(); - } - choose("UPnP is available: use automatic port mapping? (easier)", conf.upnp, - function() { - conf.upnp = true; - next(); - }, - function() { +function getUseUPnPOperations( + conf: NetworkConfDTO, + logger: any, + autoconf: boolean = false +) { + return [ + function (next: any) { + if (!conf.ipv4) { conf.upnp = false; - next(); - }); - }]; + return next(); + } + if (autoconf) { + logger.info("UPnP: %s", "Yes"); + conf.upnp = true; + return next(); + } + choose( + "UPnP is available: use automatic port mapping? (easier)", + conf.upnp, + function () { + conf.upnp = true; + next(); + }, + function () { + conf.upnp = false; + next(); + } + ); + }, + ]; } -function choose (question:string, defaultValue:any, ifOK:any, ifNotOK:any) { - inquirer.prompt([{ - type: "confirm", - name: "q", - message: question, - default: defaultValue - }]).then((answer:any) => { - answer.q ? 
ifOK() : ifNotOK(); - }); +function choose(question: string, defaultValue: any, ifOK: any, ifNotOK: any) { + inquirer + .prompt([ + { + type: "confirm", + name: "q", + message: question, + default: defaultValue, + }, + ]) + .then((answer: any) => { + answer.q ? ifOK() : ifNotOK(); + }); } -function simpleValue (question:string, property:any, defaultValue:any, conf:any, validation:any, done:any) { - inquirer.prompt([{ - type: "input", - name: property, - message: question, - default: conf[property], - validate: validation - }]).then((answers:any) => { - conf[property] = answers[property]; - done(); - }); +function simpleValue( + question: string, + property: any, + defaultValue: any, + conf: any, + validation: any, + done: any +) { + inquirer + .prompt([ + { + type: "input", + name: property, + message: question, + default: conf[property], + validate: validation, + }, + ]) + .then((answers: any) => { + conf[property] = answers[property]; + done(); + }); } -function simpleInteger (question:string, property:any, conf:any, done:any) { - simpleValue(question, property, conf[property], conf, function (input:any) { - return input && input.toString().match(/^[0-9]+$/) ? true : false; - }, done); +function simpleInteger(question: string, property: any, conf: any, done: any) { + simpleValue( + question, + property, + conf[property], + conf, + function (input: any) { + return input && input.toString().match(/^[0-9]+$/) ? true : false; + }, + done + ); } diff --git a/app/modules/crawler/index.ts b/app/modules/crawler/index.ts index ed7594e9a7024d6b9046aae1ecbd9c0f9fddb380..77a3f2d7b9af186fbc024b78a100210732e9fb1a 100644 --- a/app/modules/crawler/index.ts +++ b/app/modules/crawler/index.ts @@ -11,62 +11,81 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {ConfDTO} from "../../lib/dto/ConfDTO" -import {Server} from "../../../server" -import {Contacter} from "./lib/contacter" -import {Crawler} from "./lib/crawler" -import {Synchroniser} from "./lib/sync" -import {req2fwd} from "./lib/req2fwd" -import {rawer} from "../../lib/common-libs/index" -import {PeerDTO} from "../../lib/dto/PeerDTO" -import {Buid} from "../../lib/common-libs/buid" -import {BlockDTO} from "../../lib/dto/BlockDTO" -import {Directory} from "../../lib/system/directory" -import {FileDAL} from "../../lib/dal/fileDAL" -import {RemoteSynchronizer} from "./lib/sync/RemoteSynchronizer" -import {AbstractSynchronizer} from "./lib/sync/AbstractSynchronizer" -import {LocalPathSynchronizer} from "./lib/sync/LocalPathSynchronizer" -import {CommonConstants} from "../../lib/common-libs/constants" -import {DataErrors} from "../../lib/common-libs/errors" -import {NewLogger} from "../../lib/logger" -import {CrawlerConstants} from "./lib/constants" -import {ExitCodes} from "../../lib/common-libs/exit-codes" -import {connect} from "./lib/connect" -import {BMARemoteContacter} from "./lib/sync/BMARemoteContacter" -import {applyMempoolRequirements, forwardToServer, pullSandboxToLocalServer} from "./lib/sandbox" -import {DBBlock} from "../../lib/db/DBBlock" +import { ConfDTO } from "../../lib/dto/ConfDTO"; +import { Server } from "../../../server"; +import { Contacter } from "./lib/contacter"; +import { Crawler } from "./lib/crawler"; +import { Synchroniser } from "./lib/sync"; +import { req2fwd } from "./lib/req2fwd"; +import { rawer } from "../../lib/common-libs/index"; +import { PeerDTO } from "../../lib/dto/PeerDTO"; +import { Buid } from "../../lib/common-libs/buid"; +import { BlockDTO } from "../../lib/dto/BlockDTO"; +import { Directory } from "../../lib/system/directory"; +import { FileDAL } from "../../lib/dal/fileDAL"; +import { RemoteSynchronizer } from "./lib/sync/RemoteSynchronizer"; +import { AbstractSynchronizer } from "./lib/sync/AbstractSynchronizer"; +import { LocalPathSynchronizer } from "./lib/sync/LocalPathSynchronizer"; +import { CommonConstants } from "../../lib/common-libs/constants"; +import { DataErrors } from "../../lib/common-libs/errors"; +import { NewLogger } from "../../lib/logger"; +import { CrawlerConstants } from "./lib/constants"; +import { ExitCodes } from "../../lib/common-libs/exit-codes"; +import { connect } from "./lib/connect"; +import { BMARemoteContacter } from "./lib/sync/BMARemoteContacter"; +import { + applyMempoolRequirements, + forwardToServer, + pullSandboxToLocalServer, +} from "./lib/sandbox"; +import { DBBlock } from "../../lib/db/DBBlock"; -const HOST_PATTERN = /^[^:/]+(:[0-9]{1,5})?(\/.*)?$/ -const FILE_PATTERN = /^(\/.+)$/ +const HOST_PATTERN = /^[^:/]+(:[0-9]{1,5})?(\/.*)?$/; +const FILE_PATTERN = /^(\/.+)$/; export const CrawlerDependency = { duniter: { - service: { - process: (server:Server, conf:ConfDTO, logger:any) => new Crawler(server, conf, logger) + process: (server: Server, conf: ConfDTO, logger: any) => + new Crawler(server, conf, logger), }, methods: { + contacter: (host: string, port: number, opts?: any) => + new Contacter(host, port, opts), - contacter: (host:string, port:number, opts?:any) => new Contacter(host, port, opts), - - pullBlocks: async (server:Server, pubkey = "") => { + pullBlocks: async (server: Server, pubkey = "") => { const crawler = new Crawler(server, server.conf, server.logger); return crawler.pullBlocks(server, pubkey); }, - pullSandbox: async (server:Server) => { + pullSandbox: async (server: 
Server) => { + const crawler = new Crawler(server, server.conf, server.logger); - return crawler.sandboxPull(server) + return crawler.sandboxPull(server); }, - synchronize: (server:Server, onHost:string, onPort:number, upTo:number, chunkLength:number, allowLocalSync = false) => { - const strategy = new RemoteSynchronizer(onHost, onPort, server, chunkLength, undefined, undefined, allowLocalSync) - const remote = new Synchroniser(server, strategy) - const syncPromise = remote.sync(upTo, chunkLength) + synchronize: ( + server: Server, + onHost: string, + onPort: number, + upTo: number, + chunkLength: number, + allowLocalSync = false + ) => { + const strategy = new RemoteSynchronizer( + onHost, + onPort, + server, + chunkLength, + undefined, + undefined, + allowLocalSync + ); + const remote = new Synchroniser(server, strategy); + const syncPromise = remote.sync(upTo, chunkLength); return { flow: remote, - syncPromise + syncPromise, }; }, @@ -77,470 +96,697 @@ export const CrawlerDependency = { * @param {number} onPort * @returns {Promise<any>} */ - testForSync: (server:Server, onHost:string, onPort:number) => { - return RemoteSynchronizer.test(onHost, onPort, server.conf.pair) - } + testForSync: (server: Server, onHost: string, onPort: number) => { + return RemoteSynchronizer.test(onHost, onPort, server.conf.pair); + }, }, cliOptions: [ - { value: '--nointeractive', desc: 'Disable interactive sync UI.'}, - { value: '--nocautious', desc: 'Do not check blocks validity during sync.'}, - { value: '--cautious', desc: 'Check blocks validity during sync (overrides --nocautious option).'}, - { value: '--nopeers', desc: 'Do not retrieve peers during sync.'}, - { value: '--nop2p', desc: 'Disables P2P downloading of blocs during sync.'}, - { value: '--localsync', desc: 'Allow to synchronize on nodes with local network IP address for `sync` command' }, - { value: '--nosources', desc: 'Do not parse sources (UD, TX) during sync (debug purposes).'}, - { value: '--nosbx', desc: 'Do not retrieve sandboxes during sync.'}, - { value: '--onlypeers', desc: 'Will only try to sync peers.'}, - { value: '--slow', desc: 'Download slowly the blokchcain (for low connnections).'}, - { value: '--readfilesystem',desc: 'Also read the filesystem to speed up block downloading.'}, - { value: '--minsig <minsig>', desc: 'Minimum pending signatures count for `crawl-lookup`. Default is 5.'}, + { value: "--nointeractive", desc: "Disable interactive sync UI." }, + { + value: "--nocautious", + desc: "Do not check blocks validity during sync.", + }, + { + value: "--cautious", + desc: + "Check blocks validity during sync (overrides --nocautious option).", + }, + { value: "--nopeers", desc: "Do not retrieve peers during sync." }, + { + value: "--nop2p", + desc: "Disables P2P downloading of blocks during sync.", + }, + { + value: "--localsync", + desc: + "Allow to synchronize on nodes with local network IP address for `sync` command", + }, + { + value: "--nosources", + desc: "Do not parse sources (UD, TX) during sync (debug purposes).", + }, + { value: "--nosbx", desc: "Do not retrieve sandboxes during sync." }, + { value: "--onlypeers", desc: "Will only try to sync peers." }, + { + value: "--slow", + desc: "Download the blockchain slowly (for slow connections).", + }, + { + value: "--readfilesystem", + desc: "Also read the filesystem to speed up block downloading.", + }, + { + value: "--minsig <minsig>", + desc: + "Minimum pending signatures count for `crawl-lookup`. 
Default is 5.", + }, ], - cli: [{ - name: 'sync [source] [to]', - desc: 'Synchronize blockchain from a remote Duniter node. [source] is [host][:port]. [to] defaults to remote current block number.', - preventIfRunning: true, - onConfiguredExecute: async (server:Server) => { - await server.resetData(); - }, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any): Promise<any> => { - const source = params[0] - const to = params[1] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (either a host:port or a file path)' - } - let cautious; - if (program.nocautious) { - cautious = false; - } - if (program.cautious) { - cautious = true; - } - const upTo = parseInt(to); - const chunkLength = 0; - const interactive = !program.nointeractive; - const askedCautious = cautious; - const noShufflePeers = program.noshuffle; + cli: [ + { + name: "sync [source] [to]", + desc: + "Synchronize blockchain from a remote Duniter node. [source] is [host][:port]. [to] defaults to remote current block number.", + preventIfRunning: true, + onConfiguredExecute: async (server: Server) => { + await server.resetData(); + }, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ): Promise<any> => { + const source = params[0]; + const to = params[1]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. (either a host:port or a file path)"; + } + let cautious; + if (program.nocautious) { + cautious = false; + } + if (program.cautious) { + cautious = true; + } + const upTo = parseInt(to); + const chunkLength = 0; + const interactive = !program.nointeractive; + const askedCautious = cautious; + const noShufflePeers = program.noshuffle; - let otherDAL = undefined - if (program.readfilesystem) { - const dbName = program.mdb; - const dbHome = program.home; - const home = Directory.getHome(dbName, dbHome); - const params = await Directory.getHomeParams(false, home) - otherDAL = new FileDAL(params, async() => null as any, async() => null as any) - } + let otherDAL = undefined; + if (program.readfilesystem) { + const dbName = program.mdb; + const dbHome = program.home; + const home = Directory.getHome(dbName, dbHome); + const params = await Directory.getHomeParams(false, home); + otherDAL = new FileDAL( + params, + async () => null as any, + async () => null as any + ); + } - let strategy: AbstractSynchronizer - if (source.match(HOST_PATTERN)) { - const sp = source.split(':') - const onHost = sp[0] - const onPort = parseInt(sp[1] ? sp[1] : '443') // Defaults to 443 - strategy = new RemoteSynchronizer(onHost, onPort, server, CommonConstants.SYNC_BLOCKS_CHUNK, noShufflePeers === true, otherDAL, program.localsync !== undefined) - } else { - strategy = new LocalPathSynchronizer(source, server, CommonConstants.SYNC_BLOCKS_CHUNK) - } - if (program.onlypeers === true) { - return strategy.syncPeers(true) - } else { - const remote = new Synchroniser(server, strategy, interactive === true) + let strategy: AbstractSynchronizer; + if (source.match(HOST_PATTERN)) { + const sp = source.split(":"); + const onHost = sp[0]; + const onPort = parseInt(sp[1] ? 
sp[1] : "443"); // Defaults to 443 + strategy = new RemoteSynchronizer( + onHost, + onPort, + server, + CommonConstants.SYNC_BLOCKS_CHUNK, + noShufflePeers === true, + otherDAL, + program.localsync !== undefined + ); + } else { + strategy = new LocalPathSynchronizer( + source, + server, + CommonConstants.SYNC_BLOCKS_CHUNK + ); + } + if (program.onlypeers === true) { + return strategy.syncPeers(true); + } else { + const remote = new Synchroniser( + server, + strategy, + interactive === true + ); - // If the sync fail, stop the program - process.on('unhandledRejection', (reason: any) => { - if (reason.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) { - NewLogger().error('Synchronization interrupted: no node was found to continue downloading after %s tries.', CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND) - process.exit(ExitCodes.SYNC_FAIL) - } - }) + // If the sync fail, stop the program + process.on("unhandledRejection", (reason: any) => { + if ( + reason.message === + DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK] + ) { + NewLogger().error( + "Synchronization interrupted: no node was found to continue downloading after %s tries.", + CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND + ); + process.exit(ExitCodes.SYNC_FAIL); + } + }); - return remote.sync(upTo, chunkLength, askedCautious) - } - } - }, { - name: 'peer [host] [port]', - desc: 'Exchange peerings with another node', - preventIfRunning: true, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const host = params[0]; - const port = params[1]; - const logger = server.logger; - try { - const ERASE_IF_ALREADY_RECORDED = true; - logger.info('Fetching peering record at %s:%s...', host, port); - let peering = await Contacter.fetchPeer(host, port); - logger.info('Apply peering ...'); - await server.PeeringService.submitP(peering, ERASE_IF_ALREADY_RECORDED, !program.nocautious, true); - logger.info('Applied'); - let selfPeer = await server.dal.getPeer(server.PeeringService.pubkey); - if (!selfPeer) { - await server.PeeringService.generateSelfPeer(server.conf) - selfPeer = await server.dal.getPeer(server.PeeringService.pubkey); + return remote.sync(upTo, chunkLength, askedCautious); + } + }, + }, + { + name: "peer [host] [port]", + desc: "Exchange peerings with another node", + preventIfRunning: true, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const host = params[0]; + const port = params[1]; + const logger = server.logger; + try { + const ERASE_IF_ALREADY_RECORDED = true; + logger.info("Fetching peering record at %s:%s...", host, port); + let peering = await Contacter.fetchPeer(host, port); + logger.info("Apply peering ..."); + await server.PeeringService.submitP( + peering, + ERASE_IF_ALREADY_RECORDED, + !program.nocautious, + true + ); + logger.info("Applied"); + let selfPeer = await server.dal.getPeer( + server.PeeringService.pubkey + ); + if (!selfPeer) { + await server.PeeringService.generateSelfPeer(server.conf); + selfPeer = await server.dal.getPeer(server.PeeringService.pubkey); + } + logger.info("Send self peering ..."); + const p = PeerDTO.fromJSONObject(peering); + const contact = new Contacter( + p.getHostPreferDNS(), + p.getPort() as number, + {} + ); + await contact.postPeer(PeerDTO.fromJSONObject(selfPeer)); + logger.info("Sent."); + await server.disconnect(); + } catch (e) { + logger.error(e.code || e.message || e); + throw Error("Exiting"); + } + }, + }, + { + name: "import <fromHost> <fromPort> 
<search> <toHost> <toPort>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const fromHost = params[0]; + const fromPort = params[1]; + const search = params[2]; + const toHost = params[3]; + const toPort = params[4]; + const logger = server.logger; + try { + const peers = + fromHost && fromPort + ? [ + { + endpoints: [ + ["BASIC_MERKLED_API", fromHost, fromPort].join(" "), + ], + }, + ] + : await server.dal.peerDAL.withUPStatus(); + // Memberships + for (const p of peers) { + const peer = PeerDTO.fromJSONObject(p); + const fromHost = peer.getHostPreferDNS(); + const fromPort = peer.getPort(); + logger.info("Looking at %s:%s...", fromHost, fromPort); + try { + const node = new Contacter(fromHost, fromPort as number, { + timeout: 10000, + }); + const requirements = await node.getRequirements(search); + await req2fwd(requirements, toHost, toPort, logger); + } catch (e) { + logger.error(e); + } + } + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - logger.info('Send self peering ...'); - const p = PeerDTO.fromJSONObject(peering) - const contact = new Contacter(p.getHostPreferDNS(), p.getPort() as number, {}) - await contact.postPeer(PeerDTO.fromJSONObject(selfPeer)) - logger.info('Sent.'); - await server.disconnect(); - } catch(e) { - logger.error(e.code || e.message || e); - throw Error("Exiting"); - } - } - }, { - name: 'import <fromHost> <fromPort> <search> <toHost> <toPort>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const fromHost = params[0]; - const fromPort = params[1]; - const search = params[2]; - const toHost = params[3]; - const toPort = params[4]; - const logger = server.logger; - try { - const peers = fromHost && fromPort ? [{ endpoints: [['BASIC_MERKLED_API', fromHost, fromPort].join(' ')] }] : await server.dal.peerDAL.withUPStatus() - // Memberships - for (const p of peers) { - const peer = PeerDTO.fromJSONObject(p) + }, + }, + { + name: "sync-mempool <from>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const from: string = params[0]; + const { host, port } = extractHostPort(from); + try { + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); const fromHost = peer.getHostPreferDNS(); const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); + logger.info("Looking at %s:%s...", fromHost, fromPort); try { - const node = new Contacter(fromHost, fromPort as number, { timeout: 10000 }); - const requirements = await node.getRequirements(search); - await req2fwd(requirements, toHost, toPort, logger) + const fromHost = await connect(peer, 60 * 1000); + const api = new BMARemoteContacter(fromHost); + await pullSandboxToLocalServer( + server.conf.currency, + api, + server, + logger + ); } catch (e) { logger.error(e); } - } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'sync-mempool <from>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - const logger = NewLogger() - const from: string = params[0] - const { host, port } = extractHostPort(from) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - const fromHost = peer.getHostPreferDNS(); - const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); - try { - const fromHost = await connect(peer, 60*1000) - const api = new BMARemoteContacter(fromHost) - await pullSandboxToLocalServer(server.conf.currency, api, server, logger) + + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'sync-mempool-search <from> <search>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - const search: string = params[1] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - const logger = NewLogger() - const from: string = params[0] - const { host, port } = extractHostPort(from) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - const fromHost = peer.getHostPreferDNS(); - const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); + }, + }, + { + name: "sync-mempool-search <from> <search>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + const search: string = params[1]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const from: string = params[0]; + const { host, port } = extractHostPort(from); try { - const fromHost = await connect(peer) - const res = await fromHost.getRequirements(search) - await applyMempoolRequirements(server.conf.currency, res, server, logger) + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + const fromHost = peer.getHostPreferDNS(); + const fromPort = peer.getPort(); + logger.info("Looking at %s:%s...", fromHost, fromPort); + try { + const fromHost = await connect(peer); + const res = await fromHost.getRequirements(search); + await applyMempoolRequirements( + server.conf.currency, + res, + server, + logger + ); + } catch (e) { + logger.error(e); + } + + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'sync-mempool-fwd <from> <to> <search>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - const target: string = params[1] - const search: string = params[2] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - if (!target || !(target.match(HOST_PATTERN) || target.match(FILE_PATTERN))) { - throw 'Target of sync is required. (host[:port])' - } - const logger = NewLogger() - const { host, port } = extractHostPort(source) - const { host: toHost, port: toPort } = extractHostPort(target) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - logger.info('Looking at %s...', source) + }, + }, + { + name: "sync-mempool-fwd <from> <to> <search>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + const target: string = params[1]; + const search: string = params[2]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. (host[:port])"; + } + if ( + !target || + !(target.match(HOST_PATTERN) || target.match(FILE_PATTERN)) + ) { + throw "Target of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const { host, port } = extractHostPort(source); + const { host: toHost, port: toPort } = extractHostPort(target); try { - const fromHost = await connect(peer) - const res = await fromHost.getRequirements(search) - await forwardToServer(server.conf.currency, res, toHost, toPort, logger) + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + logger.info("Looking at %s...", source); + try { + const fromHost = await connect(peer); + const res = await fromHost.getRequirements(search); + await forwardToServer( + server.conf.currency, + res, + toHost, + toPort, + logger + ); + } catch (e) { + logger.error(e); + } + + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'pull <from> [<number>]', - desc: 'Pull blocks from <from> source up to block <number>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - const to = parseInt(params[1]) - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - const logger = NewLogger() - const { host, port } = extractHostPort(source) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - logger.info('Looking at %s...', source) + }, + }, + { + name: "pull <from> [<number>]", + desc: "Pull blocks from <from> source up to block <number>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + const to = parseInt(params[1]); + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const { host, port } = extractHostPort(source); try { - const fromHost = await connect(peer) - let current: DBBlock|null = await server.dal.getCurrentBlockOrNull() - // Loop until an error occurs - while (current && (isNaN(to) || current.number < to)) { - current = await fromHost.getBlock(current.number + 1) - await server.writeBlock(current, false) + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + logger.info("Looking at %s...", source); + try { + const fromHost = await connect(peer); + let current: DBBlock | null = await server.dal.getCurrentBlockOrNull(); + // Loop until an error occurs + while (current && (isNaN(to) || current.number < to)) { + current = await fromHost.getBlock(current.number + 1); + await server.writeBlock(current, false); + } + } catch (e) { + logger.error(e); } } catch (e) { logger.error(e); + throw Error("Exiting"); } - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'forward <number> <fromHost> <fromPort> <toHost> <toPort>', - desc: 'Forward existing block <number> from a host to another', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const number = params[0]; - const fromHost = params[1]; - const fromPort = params[2]; - const toHost = params[3]; - const toPort = params[4]; - const logger = server.logger; - try { - logger.info('Looking at %s:%s...', fromHost, fromPort) + }, + }, + { + name: "forward <number> <fromHost> <fromPort> <toHost> <toPort>", + desc: "Forward existing block <number> from a host to another", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const number = params[0]; + const fromHost = params[1]; + const fromPort = params[2]; + const toHost = params[3]; + const toPort = params[4]; + const logger = server.logger; try { - const source = new Contacter(fromHost, fromPort, { timeout: 10000 }) - const target = new Contacter(toHost, toPort, { timeout: 10000 }) - const block = await source.getBlock(number) - const raw = BlockDTO.fromJSONObject(block).getRawSigned() - await target.postBlock(raw) + logger.info("Looking at %s:%s...", fromHost, fromPort); + try { + const source = new Contacter(fromHost, fromPort, { + timeout: 10000, + }); + const target = new Contacter(toHost, toPort, { timeout: 10000 }); + const block = await source.getBlock(number); + const raw = BlockDTO.fromJSONObject(block).getRawSigned(); + await target.postBlock(raw); + } catch (e) { + logger.error(e); + } + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'import-lookup [search] [fromhost] [fromport] [tohost] [toport]', - desc: 'Exchange peerings with another node', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const search = params[0]; - const fromhost = params[1]; - const fromport = params[2]; - const tohost = params[3]; - const toport = params[4]; - const logger = server.logger; - try { - logger.info('Looking for "%s" at %s:%s...', search, fromhost, fromport); - const sourcePeer = new Contacter(fromhost, fromport); - const targetPeer = new Contacter(tohost, toport); - const lookup = await sourcePeer.getLookup(search); - for (const res of lookup.results) { - for (const uid of res.uids) { - const rawIdty = rawer.getOfficialIdentity({ - currency: 'g1', - 
issuer: res.pubkey, - uid: uid.uid, - buid: uid.meta.timestamp, - sig: uid.self - }); - logger.info('Success idty %s', uid.uid); - try { - await targetPeer.postIdentity(rawIdty); - } catch (e) { - logger.error(e); + }, + }, + { + name: "import-lookup [search] [fromhost] [fromport] [tohost] [toport]", + desc: "Exchange peerings with another node", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const search = params[0]; + const fromhost = params[1]; + const fromport = params[2]; + const tohost = params[3]; + const toport = params[4]; + const logger = server.logger; + try { + logger.info( + 'Looking for "%s" at %s:%s...', + search, + fromhost, + fromport + ); + const sourcePeer = new Contacter(fromhost, fromport); + const targetPeer = new Contacter(tohost, toport); + const lookup = await sourcePeer.getLookup(search); + for (const res of lookup.results) { + for (const uid of res.uids) { + const rawIdty = rawer.getOfficialIdentity({ + currency: "g1", + issuer: res.pubkey, + uid: uid.uid, + buid: uid.meta.timestamp, + sig: uid.self, + }); + logger.info("Success idty %s", uid.uid); + try { + await targetPeer.postIdentity(rawIdty); + } catch (e) { + logger.error(e); + } + for (const received of uid.others) { + const rawCert = rawer.getOfficialCertification({ + currency: "g1", + issuer: received.pubkey, + idty_issuer: res.pubkey, + idty_uid: uid.uid, + idty_buid: uid.meta.timestamp, + idty_sig: uid.self, + buid: Buid.format.buid( + received.meta.block_number, + received.meta.block_hash + ), + sig: received.signature, + }); + try { + logger.info( + "Success cert %s -> %s", + received.pubkey.slice(0, 8), + uid.uid + ); + await targetPeer.postCert(rawCert); + } catch (e) { + logger.error(e); + } + } } - for (const received of uid.others) { + } + const certBy = await sourcePeer.getCertifiedBy(search); + const mapBlocks: any = {}; + for (const signed of certBy.certifications) { + if (signed.written) { + logger.info( + "Already written cert %s -> %s", + certBy.pubkey.slice(0, 8), + signed.uid + ); + } else { + const lookupIdty = await sourcePeer.getLookup(signed.pubkey); + let idty = null; + for (const result of lookupIdty.results) { + for (const uid of result.uids) { + if ( + uid.uid === signed.uid && + result.pubkey === signed.pubkey && + uid.meta.timestamp === signed.sigDate + ) { + idty = uid; + } + } + } + let block = mapBlocks[signed.cert_time.block]; + if (!block) { + block = await sourcePeer.getBlock(signed.cert_time.block); + mapBlocks[block.number] = block; + } const rawCert = rawer.getOfficialCertification({ - currency: 'g1', - issuer: received.pubkey, - idty_issuer: res.pubkey, - idty_uid: uid.uid, - idty_buid: uid.meta.timestamp, - idty_sig: uid.self, - buid: Buid.format.buid(received.meta.block_number, received.meta.block_hash), - sig: received.signature + currency: "g1", + issuer: certBy.pubkey, + idty_issuer: signed.pubkey, + idty_uid: signed.uid, + idty_buid: idty.meta.timestamp, + idty_sig: idty.self, + buid: Buid.format.buid(block.number, block.hash), + sig: signed.signature, }); try { - logger.info('Success cert %s -> %s', received.pubkey.slice(0, 8), uid.uid); + logger.info( + "Success cert %s -> %s", + certBy.pubkey.slice(0, 8), + signed.uid + ); await targetPeer.postCert(rawCert); } catch (e) { logger.error(e); } } } + logger.info("Sent."); + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - const certBy = await sourcePeer.getCertifiedBy(search) - const mapBlocks:any = {} - for 
(const signed of certBy.certifications) { - if (signed.written) { - logger.info('Already written cert %s -> %s', certBy.pubkey.slice(0, 8), signed.uid) - } else { - const lookupIdty = await sourcePeer.getLookup(signed.pubkey); - let idty = null - for (const result of lookupIdty.results) { - for (const uid of result.uids) { - if (uid.uid === signed.uid && result.pubkey === signed.pubkey && uid.meta.timestamp === signed.sigDate) { - idty = uid - } - } - } - let block = mapBlocks[signed.cert_time.block] - if (!block) { - block = await sourcePeer.getBlock(signed.cert_time.block) - mapBlocks[block.number] = block - } - const rawCert = rawer.getOfficialCertification({ - currency: 'g1', - issuer: certBy.pubkey, - idty_issuer: signed.pubkey, - idty_uid: signed.uid, - idty_buid: idty.meta.timestamp, - idty_sig: idty.self, - buid: Buid.format.buid(block.number, block.hash), - sig: signed.signature - }); + }, + }, + { + name: "crawl-lookup <toHost> <toPort> [<fromHost> [<fromPort>]]", + desc: + "Make a full network scan and rebroadcast every WoT pending document (identity, certification, membership)", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const toHost = params[0]; + const toPort = params[1]; + const fromHost = params[2]; + const fromPort = params[3]; + const logger = server.logger; + try { + const peers = + fromHost && fromPort + ? [ + { + endpoints: [ + ["BASIC_MERKLED_API", fromHost, fromPort].join(" "), + ], + }, + ] + : await server.dal.peerDAL.withUPStatus(); + // Memberships + for (const p of peers) { + const peer = PeerDTO.fromJSONObject(p); + const fromHost = peer.getHostPreferDNS(); + const fromPort = peer.getPort(); + logger.info("Looking at %s:%s...", fromHost, fromPort); try { - logger.info('Success cert %s -> %s', certBy.pubkey.slice(0, 8), signed.uid); - await targetPeer.postCert(rawCert); + const node = new Contacter(fromHost, fromPort as number, { + timeout: 10000, + }); + const requirements = await node.getRequirementsPending( + program.minsig || 5 + ); + await req2fwd(requirements, toHost, toPort, logger); } catch (e) { logger.error(e); } } + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - logger.info('Sent.'); - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'crawl-lookup <toHost> <toPort> [<fromHost> [<fromPort>]]', - desc: 'Make a full network scan and rebroadcast every WoT pending document (identity, certification, membership)', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const toHost = params[0] - const toPort = params[1] - const fromHost = params[2] - const fromPort = params[3] - const logger = server.logger; - try { - const peers = fromHost && fromPort ? 
[{ endpoints: [['BASIC_MERKLED_API', fromHost, fromPort].join(' ')] }] : await server.dal.peerDAL.withUPStatus() - // Memberships - for (const p of peers) { - const peer = PeerDTO.fromJSONObject(p) - const fromHost = peer.getHostPreferDNS(); - const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); - try { - const node = new Contacter(fromHost, fromPort as number, { timeout: 10000 }); - const requirements = await node.getRequirementsPending(program.minsig || 5); - await req2fwd(requirements, toHost, toPort, logger) - } catch (e) { - logger.error(e); - } - } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'fwd-pending-ms', - desc: 'Forwards all the local pending memberships to target node', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const logger = server.logger; - try { - const pendingMSS = await server.dal.msDAL.getPendingIN() - const targetPeer = new Contacter('g1.cgeek.fr', 80, { timeout: 5000 }); - // Membership - let rawMS - for (const theMS of pendingMSS) { - console.log('New membership pending for %s', theMS.userid); - try { - rawMS = rawer.getMembership({ - currency: 'g1', - issuer: theMS.issuer, - block: theMS.block, - membership: theMS.membership, - userid: theMS.userid, - certts: theMS.certts, - signature: theMS.signature - }); - await targetPeer.postRenew(rawMS); - logger.info('Success ms idty %s', theMS.userid); - } catch (e) { - logger.warn(e); + }, + }, + { + name: "fwd-pending-ms", + desc: "Forwards all the local pending memberships to target node", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const logger = server.logger; + try { + const pendingMSS = await server.dal.msDAL.getPendingIN(); + const targetPeer = new Contacter("g1.cgeek.fr", 80, { + timeout: 5000, + }); + // Membership + let rawMS; + for (const theMS of pendingMSS) { + console.log("New membership pending for %s", theMS.userid); + try { + rawMS = rawer.getMembership({ + currency: "g1", + issuer: theMS.issuer, + block: theMS.block, + membership: theMS.membership, + userid: theMS.userid, + certts: theMS.certts, + signature: theMS.signature, + }); + await targetPeer.postRenew(rawMS); + logger.info("Success ms idty %s", theMS.userid); + } catch (e) { + logger.warn(e); + } } + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }] - } -} + }, + }, + ], + }, +}; function extractHostPort(source: string) { - const sp = source.split(':') - const onHost = sp[0] - const onPort = sp[1] ? sp[1] : '443' // Defaults to 443 + const sp = source.split(":"); + const onHost = sp[0]; + const onPort = sp[1] ? sp[1] : "443"; // Defaults to 443 return { host: onHost, port: onPort, - } -} \ No newline at end of file + }; +} diff --git a/app/modules/keypair/index.ts b/app/modules/keypair/index.ts index d91d508959dbb74961f0e451336e41b4dd6f62e2..053b2ba6d5ffb8ddea6313021c3e97fb6607bf17 100644 --- a/app/modules/keypair/index.ts +++ b/app/modules/keypair/index.ts @@ -11,86 +11,116 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {randomKey} from "../../lib/common-libs/crypto/keyring" -import {ConfDTO, KeypairConfDTO} from "../../lib/dto/ConfDTO" -import {Server} from "../../../server" -import {Scrypt} from "./lib/scrypt" +import { randomKey } from "../../lib/common-libs/crypto/keyring"; +import { ConfDTO, KeypairConfDTO } from "../../lib/dto/ConfDTO"; +import { Server } from "../../../server"; +import { Scrypt } from "./lib/scrypt"; -const inquirer = require('inquirer'); -const fs = require('fs'); -const yaml = require('js-yaml'); +const inquirer = require("inquirer"); +const fs = require("fs"); +const yaml = require("js-yaml"); export const KeypairDependency = { - duniter: { - methods: { - scrypt: Scrypt + scrypt: Scrypt, }, cliOptions: [ - { value: '--salt <salt>', desc: 'Salt to generate the keypair' }, - { value: '--passwd <password>', desc: 'Password to generate the keypair' }, - { value: '--keyN <N>', desc: 'Scrypt `N` parameter. Defaults to 4096.', parser: parseInt }, - { value: '--keyr <r>', desc: 'Scrypt `N` parameter. Defaults to 16.', parser: parseInt }, - { value: '--keyp <p>', desc: 'Scrypt `N` parameter. Defaults to 1.', parser: parseInt }, - { value: '--keyprompt', desc: 'Force to use the keypair given by user prompt.' }, - { value: '--keyfile <filepath>', desc: 'Force to use the keypair of the given YAML file. File must contain `pub:` and `sec:` fields.' } + { value: "--salt <salt>", desc: "Salt to generate the keypair" }, + { + value: "--passwd <password>", + desc: "Password to generate the keypair", + }, + { + value: "--keyN <N>", + desc: "Scrypt `N` parameter. Defaults to 4096.", + parser: parseInt, + }, + { + value: "--keyr <r>", + desc: "Scrypt `N` parameter. Defaults to 16.", + parser: parseInt, + }, + { + value: "--keyp <p>", + desc: "Scrypt `N` parameter. Defaults to 1.", + parser: parseInt, + }, + { + value: "--keyprompt", + desc: "Force to use the keypair given by user prompt.", + }, + { + value: "--keyfile <filepath>", + desc: + "Force to use the keypair of the given YAML file. 
File must contain `pub:` and `sec:` fields.", + }, ], wizard: { - - 'key': promptKey - + key: promptKey, }, onReset: { - config: (conf:ConfDTO, program:any, logger:any, confDAL:any) => confDAL.coreFS.remove('keyring.yml') + config: (conf: ConfDTO, program: any, logger: any, confDAL: any) => + confDAL.coreFS.remove("keyring.yml"), }, - cli: [{ - name: 'pub', - desc: 'Shows the node public key', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO) => { - console.log(conf.pair.pub) - } - }, { - name: 'sec', - desc: 'Shows the node secret key', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO) => { - console.log(conf.pair.sec) - } - }], + cli: [ + { + name: "pub", + desc: "Shows the node public key", + logs: false, + onConfiguredExecute: async (server: Server, conf: ConfDTO) => { + console.log(conf.pair.pub); + }, + }, + { + name: "sec", + desc: "Shows the node secret key", + logs: false, + onConfiguredExecute: async (server: Server, conf: ConfDTO) => { + console.log(conf.pair.sec); + }, + }, + ], config: { - /***** * Tries to load a specific parameter `conf.pair` */ - onLoading: async (conf:KeypairConfDTO, program:any, logger:any, confDAL:any) => { - - if ((program.keyN || program.keyr || program.keyp) && !(program.salt && program.passwd)) { - throw Error('Missing --salt and --passwd options along with --keyN|keyr|keyp option'); + onLoading: async ( + conf: KeypairConfDTO, + program: any, + logger: any, + confDAL: any + ) => { + if ( + (program.keyN || program.keyr || program.keyp) && + !(program.salt && program.passwd) + ) { + throw Error( + "Missing --salt and --passwd options along with --keyN|keyr|keyp option" + ); } // If we have salt and password, convert it to keypair if (program.salt || program.passwd) { - const salt = program.salt || ''; - const key = program.passwd || ''; + const salt = program.salt || ""; + const key = program.passwd || ""; conf.pair = await Scrypt(salt, key); } // If no keypair has been loaded, try the default .yml file if (!conf.pair || !conf.pair.pub || !conf.pair.sec) { - const ymlContent = await confDAL.coreFS.read('keyring.yml') + const ymlContent = await confDAL.coreFS.read("keyring.yml"); conf.pair = yaml.safeLoad(ymlContent); } // If no keypair has been loaded or derived from salt/key, generate a random one if (!conf.pair || !conf.pair.pub || !conf.pair.sec) { - conf.pair = randomKey().json() + conf.pair = randomKey().json(); } // With the --keyprompt option, temporarily use a keypair given from CLI prompt (it won't be stored) @@ -98,7 +128,7 @@ export const KeypairDependency = { // Backup of the current pair conf.oldPair = { pub: conf.pair.pub, - sec: conf.pair.sec + sec: conf.pair.sec, }; // Ask the for the session key await promptKey(conf, program); @@ -109,77 +139,87 @@ export const KeypairDependency = { // Backup of the current pair conf.oldPair = { pub: conf.pair.pub, - sec: conf.pair.sec + sec: conf.pair.sec, }; // Load file content - const doc = yaml.safeLoad(fs.readFileSync(program.keyfile, 'utf8')); + const doc = yaml.safeLoad(fs.readFileSync(program.keyfile, "utf8")); if (!doc || !doc.pub || !doc.sec) { - throw 'Could not load full keyring from file'; + throw "Could not load full keyring from file"; } conf.pair = { pub: doc.pub, - sec: doc.sec - } + sec: doc.sec, + }; } - }, - beforeSave: async (conf:KeypairConfDTO, program:any, logger:any, confDAL:any) => { - + beforeSave: async ( + conf: KeypairConfDTO, + program: any, + logger: any, + confDAL: any + ) => { if ((program.keyprompt || program.keyfile) 
&& conf.oldPair) { // Don't store the given key, but only the default/saved one conf.pair = { pub: conf.oldPair.pub, - sec: conf.oldPair.sec + sec: conf.oldPair.sec, }; } delete conf.oldPair; // We save the key in a separate file - const keyring = 'pub: "' + conf.pair.pub + '"\n' + - 'sec: "' + conf.pair.sec + '"' - await confDAL.coreFS.write('keyring.yml', keyring) + const keyring = + 'pub: "' + conf.pair.pub + '"\n' + 'sec: "' + conf.pair.sec + '"'; + await confDAL.coreFS.write("keyring.yml", keyring); // We never want to store salt, password or keypair in the conf.json file delete conf.salt; delete conf.passwd; delete conf.pair; - } - } - } + }, + }, + }, }; -async function promptKey (conf:KeypairConfDTO, program:any) { - +async function promptKey(conf: KeypairConfDTO, program: any) { const changeKeypair = !conf.pair || !conf.pair.pub || !conf.pair.sec; - const answersWantToChange = await inquirer.prompt([{ - type: "confirm", - name: "change", - message: "Modify your keypair?", - default: changeKeypair - }]); + const answersWantToChange = await inquirer.prompt([ + { + type: "confirm", + name: "change", + message: "Modify your keypair?", + default: changeKeypair, + }, + ]); if (answersWantToChange.change) { - const obfuscatedSalt = (program.salt || "").replace(/./g, '*'); - const answersSalt = await inquirer.prompt([{ - type: "password", - name: "salt", - message: "Key's salt", - default: obfuscatedSalt || undefined - }]); - const obfuscatedPasswd = (program.passwd || "").replace(/./g, '*'); - const answersPasswd = await inquirer.prompt([{ - type: "password", - name: "passwd", - message: "Key\'s password", - default: obfuscatedPasswd || undefined - }]); - - const keepOldSalt = obfuscatedSalt.length > 0 && obfuscatedSalt == answersSalt.salt; - const keepOldPasswd = obfuscatedPasswd.length > 0 && obfuscatedPasswd == answersPasswd.passwd; - const salt = keepOldSalt ? program.salt : answersSalt.salt; + const obfuscatedSalt = (program.salt || "").replace(/./g, "*"); + const answersSalt = await inquirer.prompt([ + { + type: "password", + name: "salt", + message: "Key's salt", + default: obfuscatedSalt || undefined, + }, + ]); + const obfuscatedPasswd = (program.passwd || "").replace(/./g, "*"); + const answersPasswd = await inquirer.prompt([ + { + type: "password", + name: "passwd", + message: "Key's password", + default: obfuscatedPasswd || undefined, + }, + ]); + + const keepOldSalt = + obfuscatedSalt.length > 0 && obfuscatedSalt == answersSalt.salt; + const keepOldPasswd = + obfuscatedPasswd.length > 0 && obfuscatedPasswd == answersPasswd.passwd; + const salt = keepOldSalt ? program.salt : answersSalt.salt; const passwd = keepOldPasswd ? program.passwd : answersPasswd.passwd; - conf.pair = await Scrypt(salt, passwd) + conf.pair = await Scrypt(salt, passwd); } } diff --git a/app/modules/prover/index.ts b/app/modules/prover/index.ts index 2aaa7a10f00fe237b900f9c93af3902b2bbcd00d..4ca5f1a495ef276c1ca9f8adf586d2b1a2c1d7fa 100644 --- a/app/modules/prover/index.ts +++ b/app/modules/prover/index.ts @@ -11,34 +11,38 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {ConfDTO} from "../../lib/dto/ConfDTO" -import {BlockGenerator, BlockGeneratorWhichProves} from "./lib/blockGenerator" -import {ProverConstants} from "./lib/constants" -import {BlockProver} from "./lib/blockProver" -import {Prover} from "./lib/prover" -import {Contacter} from "../crawler/lib/contacter" -import {parsers} from "../../lib/common-libs/parsers/index" -import {PeerDTO} from "../../lib/dto/PeerDTO" -import {Server} from "../../../server" -import {BlockDTO} from "../../lib/dto/BlockDTO" -import {DBIdentity} from "../../lib/dal/sqliteDAL/IdentityDAL" +import { ConfDTO } from "../../lib/dto/ConfDTO"; +import { + BlockGenerator, + BlockGeneratorWhichProves, +} from "./lib/blockGenerator"; +import { ProverConstants } from "./lib/constants"; +import { BlockProver } from "./lib/blockProver"; +import { Prover } from "./lib/prover"; +import { Contacter } from "../crawler/lib/contacter"; +import { parsers } from "../../lib/common-libs/parsers/index"; +import { PeerDTO } from "../../lib/dto/PeerDTO"; +import { Server } from "../../../server"; +import { BlockDTO } from "../../lib/dto/BlockDTO"; +import { DBIdentity } from "../../lib/dal/sqliteDAL/IdentityDAL"; -const async = require('async'); +const async = require("async"); export const ProverDependency = { - duniter: { - /*********** Permanent prover **************/ config: { - onLoading: async (conf:ConfDTO) => { + onLoading: async (conf: ConfDTO) => { if (conf.cpu === null || conf.cpu === undefined) { conf.cpu = ProverConstants.DEFAULT_CPU; } if (conf.nbCores === null || conf.nbCores === undefined) { - conf.nbCores = Math.min(ProverConstants.CORES_MAXIMUM_USE_IN_PARALLEL, require('os').cpus().length) + conf.nbCores = Math.min( + ProverConstants.CORES_MAXIMUM_USE_IN_PARALLEL, + require("os").cpus().length + ); } else if (conf.nbCores <= 0) { - conf.nbCores = 1 + conf.nbCores = 1; } if (conf.prefix === null || conf.prefix === undefined) { conf.prefix = ProverConstants.DEFAULT_PEER_ID; @@ -46,195 +50,317 @@ export const ProverDependency = { conf.powSecurityRetryDelay = ProverConstants.POW_SECURITY_RETRY_DELAY; conf.powMaxHandicap = ProverConstants.POW_MAXIMUM_ACCEPTABLE_HANDICAP; }, - beforeSave: async (conf:ConfDTO) => { + beforeSave: async (conf: ConfDTO) => { delete conf.powSecurityRetryDelay; delete conf.powMaxHandicap; - } + }, }, service: { - output: (server:Server) => { + output: (server: Server) => { const generator = new BlockGenerator(server); - server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator) - server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator) - server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator) - return new Prover(server) - } + server.generatorGetJoinData = generator.getSinglePreJoinData.bind( + generator + ); + server.generatorComputeNewCerts = generator.computeNewCerts.bind( + generator + ); + server.generatorNewCertsToLinks = generator.newCertsToLinks.bind( + generator + ); + return new Prover(server); + }, }, methods: { - hookServer: (server:Server) => { + hookServer: (server: Server) => { const generator = new BlockGenerator(server); - server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator) - server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator) - server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator) + server.generatorGetJoinData = generator.getSinglePreJoinData.bind( + generator + ); + server.generatorComputeNewCerts = generator.computeNewCerts.bind( + generator + ); + 
server.generatorNewCertsToLinks = generator.newCertsToLinks.bind( + generator + ); }, - prover: (server:Server) => new Prover(server), - blockGenerator: (server:Server, prover:any) => new BlockGeneratorWhichProves(server, prover), - generateTheNextBlock: async (server:Server, manualValues:any) => { + prover: (server: Server) => new Prover(server), + blockGenerator: (server: Server, prover: any) => + new BlockGeneratorWhichProves(server, prover), + generateTheNextBlock: async (server: Server, manualValues: any) => { const prover = new BlockProver(server); const generator = new BlockGeneratorWhichProves(server, prover); return generator.nextBlock(manualValues); }, - generateAndProveTheNext: async (server:Server, block:any, trial:any, manualValues:any) => { + generateAndProveTheNext: async ( + server: Server, + block: any, + trial: any, + manualValues: any + ) => { const prover = new BlockProver(server); const generator = new BlockGeneratorWhichProves(server, prover); let res = await generator.makeNextBlock(block, trial, manualValues); - return res - } + return res; + }, }, /*********** CLI gen-next + gen-root **************/ cliOptions: [ - {value: '--show', desc: 'With gen-* commands, displays the generated block.'}, - {value: '--check', desc: 'With gen-* commands: just check validity of generated block.'}, - {value: '--submit-local', desc: 'With gen-* commands: the generated block is submitted to this node only.'}, - {value: '--submit-host <host>', desc: 'With gen-* commands: the generated block is submitted to `submit-host` node.'}, - {value: '--submit-port <port>', desc: 'With gen-* commands: the generated block is submitted to `submit-host` node with port `submit-port`.'}, - {value: '--at <medianTime>', desc: 'With gen-next --show --check: allows to try in a future time.', parser: parseInt } + { + value: "--show", + desc: "With gen-* commands, displays the generated block.", + }, + { + value: "--check", + desc: "With gen-* commands: just check validity of generated block.", + }, + { + value: "--submit-local", + desc: + "With gen-* commands: the generated block is submitted to this node only.", + }, + { + value: "--submit-host <host>", + desc: + "With gen-* commands: the generated block is submitted to `submit-host` node.", + }, + { + value: "--submit-port <port>", + desc: + "With gen-* commands: the generated block is submitted to `submit-host` node with port `submit-port`.", + }, + { + value: "--at <medianTime>", + desc: "With gen-next --show --check: allows to try in a future time.", + parser: parseInt, + }, ], - cli: [{ - name: 'gen-next [difficulty]', - desc: 'Tries to generate the next block of the blockchain.', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const difficulty = params[0] - const generator = new BlockGeneratorWhichProves(server, null); - return generateAndSend(program, difficulty, server, () => () => generator.nextBlock()) - } - }, { - name: 'bc-resolve', - desc: 'Tries to resolve next blocks or forks.', - onDatabaseExecute: async () => {} - }, { - name: 'gen-root [difficulty]', - desc: 'Tries to generate the next block of the blockchain.', - preventIfRunning: true, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const difficulty = params[0] - const generator = new BlockGeneratorWhichProves(server, null); - let toDelete:DBIdentity[] = [], catched = true; - do { - try { - await generateAndSend(program, difficulty, server, () => () => generator.nextBlock()) - catched = false; - } catch 
(e) { - toDelete = await server.dal.idtyDAL.query('SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)'); - console.log('Deleting', toDelete.map((i:any) => i.pubkey)); - await server.dal.idtyDAL.exec('DELETE FROM idty WHERE pubkey IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')'); - await server.dal.idtyDAL.exec('DELETE FROM cert WHERE `to` IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')'); - await server.dal.idtyDAL.exec('DELETE FROM cert WHERE `from` IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')'); + cli: [ + { + name: "gen-next [difficulty]", + desc: "Tries to generate the next block of the blockchain.", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const difficulty = params[0]; + const generator = new BlockGeneratorWhichProves(server, null); + return generateAndSend(program, difficulty, server, () => () => + generator.nextBlock() + ); + }, + }, + { + name: "bc-resolve", + desc: "Tries to resolve next blocks or forks.", + onDatabaseExecute: async () => {}, + }, + { + name: "gen-root [difficulty]", + desc: "Tries to generate the next block of the blockchain.", + preventIfRunning: true, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const difficulty = params[0]; + const generator = new BlockGeneratorWhichProves(server, null); + let toDelete: DBIdentity[] = [], + catched = true; + do { + try { + await generateAndSend(program, difficulty, server, () => () => + generator.nextBlock() + ); + catched = false; + } catch (e) { + toDelete = await server.dal.idtyDAL.query( + "SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)" + ); + console.log( + "Deleting", + toDelete.map((i: any) => i.pubkey) + ); + await server.dal.idtyDAL.exec( + "DELETE FROM idty WHERE pubkey IN (" + + toDelete.map((i: any) => "'" + i.pubkey + "'").join(",") + + ")" + ); + await server.dal.idtyDAL.exec( + "DELETE FROM cert WHERE `to` IN (" + + toDelete.map((i: any) => "'" + i.pubkey + "'").join(",") + + ")" + ); + await server.dal.idtyDAL.exec( + "DELETE FROM cert WHERE `from` IN (" + + toDelete.map((i: any) => "'" + i.pubkey + "'").join(",") + + ")" + ); + } + } while (catched && toDelete.length); + console.log("Done"); + }, + }, + { + name: "gen-root-choose [difficulty]", + desc: "Tries to generate root block, with choice of root members.", + preventIfRunning: true, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const difficulty = params[0]; + if (!difficulty) { + throw "Difficulty is required."; } - } while (catched && toDelete.length); - console.log('Done'); - } - }, { - name: 'gen-root-choose [difficulty]', - desc: 'Tries to generate root block, with choice of root members.', - preventIfRunning: true, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const difficulty = params[0] - if (!difficulty) { - throw 'Difficulty is required.'; - } - const generator = new BlockGenerator(server); - return generateAndSend(program, difficulty, server, () => (): Promise<BlockDTO> => generator.manualRoot()) - } - }] - } -} + const generator = new BlockGenerator(server); + return generateAndSend(program, difficulty, server, () => (): Promise< + BlockDTO + > => generator.manualRoot()); + }, + }, + ], + }, +}; -function generateAndSend(program:any, difficulty:string, server:Server, 
getGenerationMethod:any) { +function generateAndSend( + program: any, + difficulty: string, + server: Server, + getGenerationMethod: any +) { const logger = server.logger; return new Promise((resolve, reject) => { if (!program.submitLocal) { if (!program.submitHost) { - throw 'Option --submit-host is required.' + throw "Option --submit-host is required."; } if (!program.submitPort) { - throw 'Option --submit-port is required.' + throw "Option --submit-port is required."; } if (isNaN(parseInt(program.submitPort))) { - throw 'Option --submit-port must be a number.' + throw "Option --submit-port must be a number."; } } - async.waterfall([ - function (next:any) { - const method = getGenerationMethod(server); - (async() => { - const simulationValues:any = {} - if (program.show && program.check) { - if (program.at && !isNaN(program.at)) { - simulationValues.medianTime = program.at + async.waterfall( + [ + function (next: any) { + const method = getGenerationMethod(server); + (async () => { + const simulationValues: any = {}; + if (program.show && program.check) { + if (program.at && !isNaN(program.at)) { + simulationValues.medianTime = program.at; + } } + const block = await method(null, simulationValues); + next(null, block); + })(); + }, + function (block: any, next: any) { + if (program.check) { + block.time = block.medianTime; + program.show && console.log(block.getRawSigned()); + (async () => { + try { + const parsed = parsers.parseBlock.syncWrite( + block.getRawSigned() + ); + await server.BlockchainService.checkBlock(parsed, false); + logger.info("Acceptable block"); + next(); + } catch (e) { + next(e); + } + })(); + } else { + logger.debug("Block to be sent: %s", block.getRawInnerPart()); + async.waterfall( + [ + function (subNext: any) { + proveAndSend( + program, + server, + block, + server.conf.pair.pub, + parseInt(difficulty), + subNext + ); + }, + ], + next + ); } - const block = await method(null, simulationValues); - next(null, block); - })() - }, - function (block:any, next:any) { - if (program.check) { - block.time = block.medianTime; - program.show && console.log(block.getRawSigned()); - (async() => { - try { - const parsed = parsers.parseBlock.syncWrite(block.getRawSigned()); - await server.BlockchainService.checkBlock(parsed, false); - logger.info('Acceptable block'); - next(); - } catch (e) { - next(e); - } - })() - } - else { - logger.debug('Block to be sent: %s', block.getRawInnerPart()); - async.waterfall([ - function (subNext:any) { - proveAndSend(program, server, block, server.conf.pair.pub, parseInt(difficulty), subNext); - } - ], next); - } + }, + ], + (err: any, data: any) => { + err && reject(err); + !err && resolve(data); } - ], (err:any, data:any) => { - err && reject(err); - !err && resolve(data); - }); + ); }); } -function proveAndSend(program:any, server:Server, block:any, issuer:any, difficulty:any, done:any) { +function proveAndSend( + program: any, + server: Server, + block: any, + issuer: any, + difficulty: any, + done: any +) { const logger = server.logger; - async.waterfall([ - function (next:any) { - block.issuer = issuer; - program.show && console.log(block.getRawSigned()); - (async () => { - try { - const host:string = program.submitHost - const port:string = program.submitPort - const trialLevel = isNaN(difficulty) ? 
await server.getBcContext().getIssuerPersonalizedDifficulty(server.PeeringService.selfPubkey) : difficulty - const prover = new BlockProver(server); - const proven = await prover.prove(block, trialLevel); - if (program.submitLocal) { - await server.writeBlock(proven) - next() - } else { - const peer = PeerDTO.fromJSONObject({ - endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] - }); - program.show && console.log(proven.getRawSigned()); - logger.info('Posted block ' + proven.getRawSigned()); - const p = PeerDTO.fromJSONObject(peer); - const contact = new Contacter(p.getHostPreferDNS(), p.getPort() as number); - await contact.postBlock(proven.getRawSigned()); - next() + async.waterfall( + [ + function (next: any) { + block.issuer = issuer; + program.show && console.log(block.getRawSigned()); + (async () => { + try { + const host: string = program.submitHost; + const port: string = program.submitPort; + const trialLevel = isNaN(difficulty) + ? await server + .getBcContext() + .getIssuerPersonalizedDifficulty( + server.PeeringService.selfPubkey + ) + : difficulty; + const prover = new BlockProver(server); + const proven = await prover.prove(block, trialLevel); + if (program.submitLocal) { + await server.writeBlock(proven); + next(); + } else { + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + program.show && console.log(proven.getRawSigned()); + logger.info("Posted block " + proven.getRawSigned()); + const p = PeerDTO.fromJSONObject(peer); + const contact = new Contacter( + p.getHostPreferDNS(), + p.getPort() as number + ); + await contact.postBlock(proven.getRawSigned()); + next(); + } + } catch (e) { + next(e); } - } catch(e) { - next(e); - } - })() - } - ], done); + })(); + }, + ], + done + ); } diff --git a/app/modules/ws2p/index.ts b/app/modules/ws2p/index.ts index 930962bd275c9f3c199fd51d02fe59f0092f087a..d46d3d3bbd0525ea8cbe52fa3cf6703213b0b16f 100644 --- a/app/modules/ws2p/index.ts +++ b/app/modules/ws2p/index.ts @@ -11,106 +11,143 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {WS2PConstants} from './lib/constants'; -import {ConfDTO, WS2PConfDTO} from "../../lib/dto/ConfDTO" -import {Server} from "../../../server" -import * as stream from 'stream'; -import {WS2PCluster} from "./lib/WS2PCluster" -import {CommonConstants} from "../../lib/common-libs/constants" -import {NewLogger} from "../../lib/logger" -import {UpnpProvider} from "../upnp-provider" +import { WS2PConstants } from "./lib/constants"; +import { ConfDTO, WS2PConfDTO } from "../../lib/dto/ConfDTO"; +import { Server } from "../../../server"; +import * as stream from "stream"; +import { WS2PCluster } from "./lib/WS2PCluster"; +import { CommonConstants } from "../../lib/common-libs/constants"; +import { NewLogger } from "../../lib/logger"; +import { UpnpProvider } from "../upnp-provider"; const constants = require("../../lib/constants"); -const logger = NewLogger() -const nuuid = require('node-uuid') +const logger = NewLogger(); +const nuuid = require("node-uuid"); export const WS2PDependency = { duniter: { - cliOptions: [ - { value: '--ws2p-upnp', desc: 'Use UPnP to open remote port.' }, - { value: '--ws2p-noupnp', desc: 'Do not use UPnP to open remote port.' }, - { value: '--ws2p-host <host>', desc: 'Host to listen to.' 
}, - { value: '--ws2p-port <port>', desc: 'Port to listen to.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-remote-host <address>', desc: 'Availabily host.' }, - { value: '--ws2p-remote-port <port>', desc: 'Availabily port.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-remote-path <path>', desc: 'Availabily web path.' }, - { value: '--ws2p-max-private <count>', desc: 'Maximum private connections count.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-max-public <count>', desc: 'Maximum public connections count.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-private', desc: 'Enable WS2P Private access.' }, - { value: '--ws2p-public', desc: 'Enable WS2P Public access.' }, - { value: '--ws2p-noprivate', desc: 'Disable WS2P Private access.' }, - { value: '--ws2p-nopublic', desc: 'Disable WS2P Public access.' }, - { value: '--ws2p-sync', desc: 'Enable WS2P SYNC access.' }, - { value: '--ws2p-nosync', desc: 'Disable WS2P SYNC access.' }, - { value: '--ws2p-prefered-add <pubkey>', desc: 'Add a prefered node to connect to through private access.' }, - { value: '--ws2p-prefered-rm <pubkey>', desc: 'Remove prefered node.' }, - { value: '--ws2p-prefered-only <pubkey>', desc: 'Only connect to prefered node.' }, - { value: '--ws2p-privileged-add <pubkey>', desc: 'Add a privileged node to for our public access.' }, - { value: '--ws2p-privileged-rm <pubkey>', desc: 'Remove a privileged.' }, - { value: '--ws2p-privileged-only <pubkey>', desc: 'Accept only connections from a privileged node.' }, + { value: "--ws2p-upnp", desc: "Use UPnP to open remote port." }, + { value: "--ws2p-noupnp", desc: "Do not use UPnP to open remote port." }, + { value: "--ws2p-host <host>", desc: "Host to listen to." }, + { + value: "--ws2p-port <port>", + desc: "Port to listen to.", + parser: (val: string) => parseInt(val), + }, + { value: "--ws2p-remote-host <address>", desc: "Availabily host." }, + { + value: "--ws2p-remote-port <port>", + desc: "Availabily port.", + parser: (val: string) => parseInt(val), + }, + { value: "--ws2p-remote-path <path>", desc: "Availabily web path." }, + { + value: "--ws2p-max-private <count>", + desc: "Maximum private connections count.", + parser: (val: string) => parseInt(val), + }, + { + value: "--ws2p-max-public <count>", + desc: "Maximum public connections count.", + parser: (val: string) => parseInt(val), + }, + { value: "--ws2p-private", desc: "Enable WS2P Private access." }, + { value: "--ws2p-public", desc: "Enable WS2P Public access." }, + { value: "--ws2p-noprivate", desc: "Disable WS2P Private access." }, + { value: "--ws2p-nopublic", desc: "Disable WS2P Public access." }, + { value: "--ws2p-sync", desc: "Enable WS2P SYNC access." }, + { value: "--ws2p-nosync", desc: "Disable WS2P SYNC access." }, + { + value: "--ws2p-prefered-add <pubkey>", + desc: "Add a prefered node to connect to through private access.", + }, + { value: "--ws2p-prefered-rm <pubkey>", desc: "Remove prefered node." }, + { + value: "--ws2p-prefered-only <pubkey>", + desc: "Only connect to prefered node.", + }, + { + value: "--ws2p-privileged-add <pubkey>", + desc: "Add a privileged node to for our public access.", + }, + { value: "--ws2p-privileged-rm <pubkey>", desc: "Remove a privileged." 
}, + { + value: "--ws2p-privileged-only <pubkey>", + desc: "Accept only connections from a privileged node.", + }, ], config: { - - onLoading: async (conf:WS2PConfDTO, program:any, logger:any) => { - + onLoading: async (conf: WS2PConfDTO, program: any, logger: any) => { conf.ws2p = conf.ws2p || { - uuid: nuuid.v4().slice(0,8), + uuid: nuuid.v4().slice(0, 8), privateAccess: true, publicAccess: true, preferedOnly: false, - privilegedOnly: false - } + privilegedOnly: false, + }; // For config with missing value - conf.ws2p.uuid = conf.ws2p.uuid || nuuid.v4().slice(0,8) - if (conf.ws2p.privateAccess === undefined) conf.ws2p.privateAccess = true - if (conf.ws2p.publicAccess === undefined) conf.ws2p.publicAccess = true + conf.ws2p.uuid = conf.ws2p.uuid || nuuid.v4().slice(0, 8); + if (conf.ws2p.privateAccess === undefined) + conf.ws2p.privateAccess = true; + if (conf.ws2p.publicAccess === undefined) conf.ws2p.publicAccess = true; - if (program.ws2pHost !== undefined) conf.ws2p.host = program.ws2pHost - if (program.ws2pPort !== undefined) conf.ws2p.port = parseInt(program.ws2pPort) - if (program.ws2pRemotePort !== undefined) conf.ws2p.remoteport = program.ws2pRemotePort - if (program.ws2pRemoteHost !== undefined) conf.ws2p.remotehost = program.ws2pRemoteHost - if (program.ws2pRemotePath !== undefined) conf.ws2p.remotepath = program.ws2pRemotePath - if (program.ws2pUpnp !== undefined) conf.ws2p.upnp = true - if (program.ws2pNoupnp !== undefined) conf.ws2p.upnp = false - if (program.ws2pMaxPrivate !== undefined) conf.ws2p.maxPrivate = program.ws2pMaxPrivate - if (program.ws2pMaxPublic !== undefined) conf.ws2p.maxPublic = program.ws2pMaxPublic - if (program.ws2pPrivate !== undefined) conf.ws2p.privateAccess = true - if (program.ws2pPublic !== undefined) conf.ws2p.publicAccess = true - if (program.ws2pNoprivate !== undefined) conf.ws2p.privateAccess = false - if (program.ws2pNopublic !== undefined) conf.ws2p.publicAccess = false - if (program.ws2pSync !== undefined) conf.ws2p.sync = true - if (program.ws2pNosync !== undefined) conf.ws2p.sync = false + if (program.ws2pHost !== undefined) conf.ws2p.host = program.ws2pHost; + if (program.ws2pPort !== undefined) + conf.ws2p.port = parseInt(program.ws2pPort); + if (program.ws2pRemotePort !== undefined) + conf.ws2p.remoteport = program.ws2pRemotePort; + if (program.ws2pRemoteHost !== undefined) + conf.ws2p.remotehost = program.ws2pRemoteHost; + if (program.ws2pRemotePath !== undefined) + conf.ws2p.remotepath = program.ws2pRemotePath; + if (program.ws2pUpnp !== undefined) conf.ws2p.upnp = true; + if (program.ws2pNoupnp !== undefined) conf.ws2p.upnp = false; + if (program.ws2pMaxPrivate !== undefined) + conf.ws2p.maxPrivate = program.ws2pMaxPrivate; + if (program.ws2pMaxPublic !== undefined) + conf.ws2p.maxPublic = program.ws2pMaxPublic; + if (program.ws2pPrivate !== undefined) conf.ws2p.privateAccess = true; + if (program.ws2pPublic !== undefined) conf.ws2p.publicAccess = true; + if (program.ws2pNoprivate !== undefined) + conf.ws2p.privateAccess = false; + if (program.ws2pNopublic !== undefined) conf.ws2p.publicAccess = false; + if (program.ws2pSync !== undefined) conf.ws2p.sync = true; + if (program.ws2pNosync !== undefined) conf.ws2p.sync = false; // Prefered nodes if (program.ws2pPreferedAdd !== undefined) { - conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || [] - conf.ws2p.preferedNodes.push(String(program.ws2pPreferedAdd)) + conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || []; + conf.ws2p.preferedNodes.push(String(program.ws2pPreferedAdd)); } if 
(program.ws2pPreferedRm !== undefined) { - conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || [] - const index = conf.ws2p.preferedNodes.indexOf(program.ws2pPreferedRm) + conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || []; + const index = conf.ws2p.preferedNodes.indexOf(program.ws2pPreferedRm); if (index !== -1) { - conf.ws2p.preferedNodes.splice(index, 1) + conf.ws2p.preferedNodes.splice(index, 1); } } - if (program.ws2pPreferedOnly !== undefined) conf.ws2p.preferedOnly = true + if (program.ws2pPreferedOnly !== undefined) + conf.ws2p.preferedOnly = true; // Privileged nodes if (program.ws2pPrivilegedAdd !== undefined) { - conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || [] - conf.ws2p.privilegedNodes.push(String(program.ws2pPrivilegedAdd)) + conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || []; + conf.ws2p.privilegedNodes.push(String(program.ws2pPrivilegedAdd)); } if (program.ws2pPrivilegedRm !== undefined) { - conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || [] - const index = conf.ws2p.privilegedNodes.indexOf(program.ws2pPrivilegedRm) + conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || []; + const index = conf.ws2p.privilegedNodes.indexOf( + program.ws2pPrivilegedRm + ); if (index !== -1) { - conf.ws2p.privilegedNodes.splice(index, 1) + conf.ws2p.privilegedNodes.splice(index, 1); } } - if (program.ws2pPrivilegedOnly !== undefined) conf.ws2p.privilegedOnly = true + if (program.ws2pPrivilegedOnly !== undefined) + conf.ws2p.privilegedOnly = true; // Default value if (conf.ws2p.upnp === undefined || conf.ws2p.upnp === null) { @@ -118,139 +155,144 @@ export const WS2PDependency = { } }, - beforeSave: async (conf:WS2PConfDTO) => { - if (conf.ws2p && !conf.ws2p.host) delete conf.ws2p.host - if (conf.ws2p && !conf.ws2p.port) delete conf.ws2p.port - if (conf.ws2p && !conf.ws2p.remoteport) delete conf.ws2p.remoteport - if (conf.ws2p && !conf.ws2p.remotehost) delete conf.ws2p.remotehost - } + beforeSave: async (conf: WS2PConfDTO) => { + if (conf.ws2p && !conf.ws2p.host) delete conf.ws2p.host; + if (conf.ws2p && !conf.ws2p.port) delete conf.ws2p.port; + if (conf.ws2p && !conf.ws2p.remoteport) delete conf.ws2p.remoteport; + if (conf.ws2p && !conf.ws2p.remotehost) delete conf.ws2p.remotehost; + }, }, methods: { bindWS2P: (server: Server) => { - const api = new WS2PAPI(server, server.conf) - server.ws2pCluster = api.getCluster() - server.addEndpointsDefinitions(async () => api.getEndpoint()) - server.addWrongEndpointFilter((endpoints:string[]) => getWrongEndpoints(endpoints, server.conf)) - return api - } + const api = new WS2PAPI(server, server.conf); + server.ws2pCluster = api.getCluster(); + server.addEndpointsDefinitions(async () => api.getEndpoint()); + server.addWrongEndpointFilter((endpoints: string[]) => + getWrongEndpoints(endpoints, server.conf) + ); + return api; + }, }, service: { - input: (server:Server) => { - return WS2PDependency.duniter.methods.bindWS2P(server) - } + input: (server: Server) => { + return WS2PDependency.duniter.methods.bindWS2P(server); + }, }, - cli: [{ - name: 'ws2p [list-prefered|list-privileged|list-nodes|show-conf]', - desc: 'WS2P operations for configuration and diagnosis tasks.', - logs: false, + cli: [ + { + name: "ws2p [list-prefered|list-privileged|list-nodes|show-conf]", + desc: "WS2P operations for configuration and diagnosis tasks.", + logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const subcmd = params[0]; - if (subcmd === 'list-nodes') { - // Needs the DAL 
plugged - await server.initDAL(); - } - switch (subcmd) { - case 'show-conf': - console.log(JSON.stringify(conf.ws2p, null, ' ')) - break; - case 'list-prefered': - for (const p of (conf.ws2p && conf.ws2p.preferedNodes || [])) { - console.log(p) - } - break; - case 'list-privileged': - for (const p of (conf.ws2p && conf.ws2p.privilegedNodes || [])) { - console.log(p) - } - break; - case 'list-nodes': - const peers = await server.dal.getWS2Peers() - for (const p of peers) { - for (const ep of p.endpoints) { - if (ep.match(/^WS2P/)) { - console.log(p.pubkey, ep) + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const subcmd = params[0]; + if (subcmd === "list-nodes") { + // Needs the DAL plugged + await server.initDAL(); + } + switch (subcmd) { + case "show-conf": + console.log(JSON.stringify(conf.ws2p, null, " ")); + break; + case "list-prefered": + for (const p of (conf.ws2p && conf.ws2p.preferedNodes) || []) { + console.log(p); + } + break; + case "list-privileged": + for (const p of (conf.ws2p && conf.ws2p.privilegedNodes) || []) { + console.log(p); + } + break; + case "list-nodes": + const peers = await server.dal.getWS2Peers(); + for (const p of peers) { + for (const ep of p.endpoints) { + if (ep.match(/^WS2P/)) { + console.log(p.pubkey, ep); + } } } - } - break; - default: - throw constants.ERRORS.CLI_CALLERR_WS2P; - } - } - }] - } -} + break; + default: + throw constants.ERRORS.CLI_CALLERR_WS2P; + } + }, + }, + ], + }, +}; -async function getWrongEndpoints(endpoints:string[], ws2pConf:WS2PConfDTO) { - return endpoints.filter(ep => { - const match = ep.match(CommonConstants.WS2P_REGEXP) - return ws2pConf.ws2p && match && match[1] === ws2pConf.ws2p.uuid - }) +async function getWrongEndpoints(endpoints: string[], ws2pConf: WS2PConfDTO) { + return endpoints.filter((ep) => { + const match = ep.match(CommonConstants.WS2P_REGEXP); + return ws2pConf.ws2p && match && match[1] === ws2pConf.ws2p.uuid; + }); } export class WS2PAPI extends stream.Transform { - // Public http interface - private cluster:WS2PCluster - private upnpAPI:UpnpProvider|null + private cluster: WS2PCluster; + private upnpAPI: UpnpProvider | null; - constructor( - private server:Server, - private conf:ConfDTO) { - super({ objectMode: true }) - this.cluster = WS2PCluster.plugOn(server) + constructor(private server: Server, private conf: ConfDTO) { + super({ objectMode: true }); + this.cluster = WS2PCluster.plugOn(server); } getCluster() { - return this.cluster + return this.cluster; } startService = async () => { - /*************** * PUBLIC ACCESS **************/ if (this.conf.ws2p && this.conf.ws2p.publicAccess) { - /*************** * MANUAL **************/ - if (this.conf.ws2p - && !this.conf.ws2p.upnp - && this.conf.ws2p.host - && this.conf.ws2p.port) { - await this.cluster.listen(this.conf.ws2p.host, this.conf.ws2p.port) - } - - /*************** - * UPnP - **************/ - else if (!this.conf.ws2p || this.conf.ws2p.upnp !== false) { + if ( + this.conf.ws2p && + !this.conf.ws2p.upnp && + this.conf.ws2p.host && + this.conf.ws2p.port + ) { + await this.cluster.listen(this.conf.ws2p.host, this.conf.ws2p.port); + } else if (!this.conf.ws2p || this.conf.ws2p.upnp !== false) { + /*************** + * UPnP + **************/ if (this.upnpAPI) { this.upnpAPI.stopRegular(); } try { - const uuid = (this.conf.ws2p && this.conf.ws2p.uuid) || "no-uuid-yet" - const suffix = this.conf.pair.pub.substr(0, 6) + ":" + uuid + const uuid = (this.conf.ws2p && this.conf.ws2p.uuid) || 
"no-uuid-yet"; + const suffix = this.conf.pair.pub.substr(0, 6) + ":" + uuid; this.upnpAPI = new UpnpProvider( WS2PConstants.WS2P_PORTS_START, WS2PConstants.WS2P_PORTS_END, - ':ws2p:' + suffix, + ":ws2p:" + suffix, WS2PConstants.WS2P_UPNP_INTERVAL, WS2PConstants.WS2P_UPNP_TTL, logger, - this.conf.ws2p.host) - const { host, port, available } = await this.upnpAPI.startRegular() + this.conf.ws2p.host + ); + const { host, port, available } = await this.upnpAPI.startRegular(); if (available) { // Defaults UPnP to true if not defined and available - this.conf.ws2p.upnp = true - await this.cluster.listen(host, port) - await this.server.PeeringService.generateSelfPeer(this.server.conf) + this.conf.ws2p.upnp = true; + await this.cluster.listen(host, port); + await this.server.PeeringService.generateSelfPeer(this.server.conf); } } catch (e) { logger.warn(e); @@ -263,49 +305,65 @@ export class WS2PAPI extends stream.Transform { **************/ if (!this.conf.ws2p || this.conf.ws2p.privateAccess) { - await this.cluster.startCrawling() + await this.cluster.startCrawling(); } - } + }; stopService = async () => { if (this.cluster) { - await this.cluster.stopCrawling() - await this.cluster.close() + await this.cluster.stopCrawling(); + await this.cluster.close(); } if (this.upnpAPI) { this.upnpAPI.stopRegular(); } - } + }; async getEndpoint() { // If WS2P defined and enabled - if (this.server.conf.ws2p !== undefined && (this.server.conf.ws2p.publicAccess || this.server.conf.ws2p.privateAccess)) - { - let endpointType = "WS2P" + if ( + this.server.conf.ws2p !== undefined && + (this.server.conf.ws2p.publicAccess || + this.server.conf.ws2p.privateAccess) + ) { + let endpointType = "WS2P"; if (this.server.conf.upnp && this.upnpAPI) { - const config = this.upnpAPI.getCurrentConfig() + const config = this.upnpAPI.getCurrentConfig(); if (config) { - if (config.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { endpointType += "TOR"; } - return [endpointType, this.server.conf.ws2p.uuid, config.remotehost, config.port].join(' ') + if (config.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { + endpointType += "TOR"; + } + return [ + endpointType, + this.server.conf.ws2p.uuid, + config.remotehost, + config.port, + ].join(" "); } else { - return '' + return ""; } - } - else if (this.server.conf.ws2p.uuid - && this.server.conf.ws2p.remotehost - && this.server.conf.ws2p.remoteport) { - if (this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { endpointType += "TOR"; } - let ep = [endpointType, - this.server.conf.ws2p.uuid, - this.server.conf.ws2p.remotehost, - this.server.conf.ws2p.remoteport - ].join(' ') - if (this.server.conf.ws2p.remotepath) { - ep += ` ${this.server.conf.ws2p.remotepath}` - } - return ep + } else if ( + this.server.conf.ws2p.uuid && + this.server.conf.ws2p.remotehost && + this.server.conf.ws2p.remoteport + ) { + if ( + this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX) + ) { + endpointType += "TOR"; + } + let ep = [ + endpointType, + this.server.conf.ws2p.uuid, + this.server.conf.ws2p.remotehost, + this.server.conf.ws2p.remoteport, + ].join(" "); + if (this.server.conf.ws2p.remotepath) { + ep += ` ${this.server.conf.ws2p.remotepath}`; + } + return ep; } } - return '' + return ""; } -} \ No newline at end of file +}