diff --git a/app/ProcessCpuProfiler.ts b/app/ProcessCpuProfiler.ts
index 42b9999269e7b6ca6c00098b213dfdcb9b1b6fc9..8ae64e6425de77b0110d3d9ca89a0f432dc26f0c 100644
--- a/app/ProcessCpuProfiler.ts
+++ b/app/ProcessCpuProfiler.ts
@@ -11,84 +11,88 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-const SAMPLING_PERIOD = 150 // milliseconds
-const MAX_SAMPLES_DISTANCE = 20 * 1000000 // seconds
+const SAMPLING_PERIOD = 150; // milliseconds
+const MAX_SAMPLES_DISTANCE = 20 * 1000000; // 20 seconds, expressed in microseconds
 
 export function getMicrosecondsTime() {
-  const [ seconds, nanoseconds ] = process.hrtime()
-  return seconds * 1000000 + nanoseconds / 1000
+  const [seconds, nanoseconds] = process.hrtime();
+  return seconds * 1000000 + nanoseconds / 1000;
 }
 
 export function getNanosecondsTime() {
-  const [ seconds, nanoseconds ] = process.hrtime()
-  return seconds * 1000000 + nanoseconds
+  const [seconds, nanoseconds] = process.hrtime();
+  return seconds * 1000000000 + nanoseconds; // 1 s = 1e9 ns
 }
 
-export function getDurationInMicroSeconds(before:number) {
-  return parseInt(String(getMicrosecondsTime() - before))
+export function getDurationInMicroSeconds(before: number) {
+  return parseInt(String(getMicrosecondsTime() - before));
 }
 
 interface CpuUsage {
-  user: number
-  system:number
+  user: number;
+  system: number;
 }
 
 interface CpuUsageAt {
-  usage:number
-  at:number // microseconds timestamp
-  elapsed:number // microseconds elapsed for this result
+  usage: number;
+  at: number; // microseconds timestamp
+  elapsed: number; // microseconds elapsed for this result
 }
 
 export class ProcessCpuProfiler {
-
-  private cumulatedUsage: CpuUsage
-  private startedAt:number // microseconds timestamp
-  private samples:CpuUsageAt[] = []
+  private cumulatedUsage: CpuUsage;
+  private startedAt: number; // microseconds timestamp
+  private samples: CpuUsageAt[] = [];
 
   constructor(samplingPeriod = SAMPLING_PERIOD) {
     // Initial state
-    const start = getMicrosecondsTime()
-    this.startedAt = start
-    this.cumulatedUsage = process.cpuUsage()
-    this.samples.push({ usage: 0, at: start, elapsed: 1 })
+    const start = getMicrosecondsTime();
+    this.startedAt = start;
+    this.cumulatedUsage = process.cpuUsage();
+    this.samples.push({ usage: 0, at: start, elapsed: 1 });
     // Periodic sample
     setInterval(() => {
-      const newSampleAt = getMicrosecondsTime()
-      const newUsage:CpuUsage = process.cpuUsage()
-      const elapsed = newSampleAt - this.lastSampleAt
-      const userDiff = newUsage.user - this.cumulatedUsage.user
-      const usagePercent = userDiff / elapsed // The percent of time consumed by the process since last sample
-      this.samples.push({ usage: usagePercent, at: newSampleAt, elapsed })
-      while(this.samplesDistance > MAX_SAMPLES_DISTANCE) {
-        this.samples.shift()
+      const newSampleAt = getMicrosecondsTime();
+      const newUsage: CpuUsage = process.cpuUsage();
+      const elapsed = newSampleAt - this.lastSampleAt;
+      const userDiff = newUsage.user - this.cumulatedUsage.user;
+      const usagePercent = userDiff / elapsed; // Ratio (0..1) of CPU time consumed by the process since the last sample
+      this.samples.push({ usage: usagePercent, at: newSampleAt, elapsed });
+      while (this.samplesDistance > MAX_SAMPLES_DISTANCE) {
+        this.samples.shift();
      }
-      this.cumulatedUsage = newUsage
+      this.cumulatedUsage = newUsage;
      // console.log('Time elapsed: %s microseconds, = %s %CPU', elapsed, (usagePercent*100).toFixed(2))
-    }, samplingPeriod)
+    }, samplingPeriod);
   }
 
   private get lastSampleAt() {
-    return this.samples[this.samples.length - 1].at
+ 
return this.samples[this.samples.length - 1].at; } private get samplesDistance() { - return this.samples[this.samples.length - 1].at - this.samples[0].at + return this.samples[this.samples.length - 1].at - this.samples[0].at; } - cpuUsageOverLastMilliseconds(elapsedMilliseconds:number) { - return this.cpuUsageOverLastX(elapsedMilliseconds * 1000) + cpuUsageOverLastMilliseconds(elapsedMilliseconds: number) { + return this.cpuUsageOverLastX(elapsedMilliseconds * 1000); } - private cpuUsageOverLastX(nbMicrosecondsElapsed:number) { - return this.getSamplesResult(getMicrosecondsTime() - nbMicrosecondsElapsed) + private cpuUsageOverLastX(nbMicrosecondsElapsed: number) { + return this.getSamplesResult(getMicrosecondsTime() - nbMicrosecondsElapsed); } - private getSamplesResult(minTimestamp:number) { - const matchingSamples = this.samples.filter(s => s.at >= minTimestamp - SAMPLING_PERIOD * 1000) - const cumulativeElapsed = matchingSamples.reduce((sum, s) => sum + s.elapsed, 0) + private getSamplesResult(minTimestamp: number) { + const matchingSamples = this.samples.filter( + (s) => s.at >= minTimestamp - SAMPLING_PERIOD * 1000 + ); + const cumulativeElapsed = matchingSamples.reduce( + (sum, s) => sum + s.elapsed, + 0 + ); return matchingSamples.reduce((cumulated, percent) => { - const weight = percent.elapsed / cumulativeElapsed - return cumulated + percent.usage * weight - }, 0) + const weight = percent.elapsed / cumulativeElapsed; + return cumulated + percent.usage * weight; + }, 0); } -} \ No newline at end of file +} diff --git a/app/cli.ts b/app/cli.ts index cf82db86832e1874407a607b01b7df2d180cd831..f0bb646c87ba360e32d03acb4d5493591e70cd9f 100644 --- a/app/cli.ts +++ b/app/cli.ts @@ -11,30 +11,30 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
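// The ProcessCpuProfiler above answers "what fraction of CPU did this process
// use over the last N milliseconds?" with an elapsed-weighted mean of its
// samples. A minimal usage sketch (hypothetical call site, not part of the patch):
//
//   const profiler = new ProcessCpuProfiler(); // one sample every 150 ms
//   setTimeout(() => {
//     // If the last second yielded samples { usage: 0.5, elapsed: 100 } and
//     // { usage: 0.1, elapsed: 300 }, the weighted mean is
//     // 0.5 * (100 / 400) + 0.1 * (300 / 400) = 0.2, i.e. 20% CPU.
//     const ratio = profiler.cpuUsageOverLastMilliseconds(1000);
//     console.log("%s% CPU", (ratio * 100).toFixed(2));
//   }, 1000);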
-const Command = require('commander').Command;
-const pjson = require('../package.json');
+const Command = require("commander").Command;
+const pjson = require("../package.json");
 
 export const ExecuteCommand = () => {
-
-  const options:any = [];
-  const commands:any = [];
+  const options: any = [];
+  const commands: any = [];
 
   return {
+    addOption: (optFormat: string, optDesc: string, optParser: any) =>
+      options.push({ optFormat, optDesc, optParser }),
-    addOption: (optFormat:string, optDesc:string, optParser:any) => options.push({ optFormat, optDesc, optParser }),
-
-    addCommand: (command:any, executionCallback:any) => commands.push({ command, executionCallback }),
+    addCommand: (command: any, executionCallback: any) =>
+      commands.push({ command, executionCallback }),
 
     // To execute the provided command
-    execute: async (programArgs:string[]) => {
-
+    execute: async (programArgs: string[]) => {
       const program = new Command();
 
       // Callback for command success
-      let onResolve:any;
+      let onResolve: any;
 
       // Callback for command rejection
-      let onReject:any = () => Promise.reject(Error("Uninitilized rejection throw"));
+      let onReject: any = () =>
+        Promise.reject(Error("Uninitialized rejection throw"));
 
       // Command execution promise
       const currentCommand = new Promise((resolve, reject) => {
@@ -44,53 +44,97 @@ export const ExecuteCommand = () => {
 
       program
         .version(pjson.version)
-        .usage('<command> [options]')
-
-        .option('--home <path>', 'Path to Duniter HOME (defaults to "$HOME/.config/duniter").')
-        .option('-d, --mdb <name>', 'Database name (defaults to "duniter_default").')
-
-        .option('--autoconf', 'With `config` and `init` commands, will guess the best network and key options witout asking for confirmation')
-        .option('--addep <endpoint>', 'With `config` command, add given endpoint to the list of endpoints of this node')
-        .option('--remep <endpoint>', 'With `config` command, remove given endpoint to the list of endpoints of this node')
-
-        .option('--cpu <percent>', 'Percent of CPU usage for proof-of-work computation', parsePercent)
-        .option('--nb-cores <number>', 'Number of cores uses for proof-of-work computation', parseInt)
-        .option('--prefix <nodeId>', 'Prefix node id for the first character of nonce', parseInt)
-
-        .option('-c, --currency <name>', 'Name of the currency managed by this node.')
-
-        .option('--nostdout', 'Disable stdout printing for `export-bc` command')
-        .option('--noshuffle', 'Disable peers shuffling for `sync` command')
-
-        .option('--socks-proxy <host:port>', 'Use Socks Proxy')
-        .option('--tor-proxy <host:port>', 'Use Tor Socks Proxy')
-        .option('--reaching-clear-ep <clear|tor|none>', 'method for reaching an clear endpoint')
-        .option('--force-tor', 'force duniter to contact endpoint tor (if you redirect the traffic to tor yourself)')
-        .option('--rm-proxies', 'Remove all proxies')
-
-        .option('--timeout <milliseconds>', 'Timeout to use when contacting peers', parseInt)
-        .option('--httplogs', 'Enable HTTP logs')
-        .option('--nohttplogs', 'Disable HTTP logs')
-        .option('--isolate', 'Avoid the node to send peering or status informations to the network')
-        .option('--forksize <size>', 'Maximum size of fork window', parseInt)
-        .option('--notrim', 'Disable the INDEX trimming.')
-        .option('--notrimc', 'Disable the C_INDEX trimming specifically.')
-        .option('--memory', 'Memory mode')
-        ;
+        .usage("<command> [options]")
+
+        .option(
+          "--home <path>",
+          'Path to Duniter HOME (defaults to "$HOME/.config/duniter").'
+        )
+        .option(
+          "-d, --mdb <name>",
+          'Database name (defaults to "duniter_default").'
+        )
+
+        .option(
+          "--autoconf",
+          "With `config` and `init` commands, will guess the best network and key options without asking for confirmation"
+        )
+        .option(
+          "--addep <endpoint>",
+          "With `config` command, add given endpoint to the list of endpoints of this node"
+        )
+        .option(
+          "--remep <endpoint>",
+          "With `config` command, remove given endpoint from the list of endpoints of this node"
+        )
+
+        .option(
+          "--cpu <percent>",
+          "Percent of CPU usage for proof-of-work computation",
+          parsePercent
+        )
+        .option(
+          "--nb-cores <number>",
+          "Number of cores used for proof-of-work computation",
+          parseInt
+        )
+        .option(
+          "--prefix <nodeId>",
+          "Prefix node id for the first character of nonce",
+          parseInt
+        )
+
+        .option(
+          "-c, --currency <name>",
+          "Name of the currency managed by this node."
+        )
+
+        .option("--nostdout", "Disable stdout printing for `export-bc` command")
+        .option("--noshuffle", "Disable peers shuffling for `sync` command")
+
+        .option("--socks-proxy <host:port>", "Use SOCKS proxy")
+        .option("--tor-proxy <host:port>", "Use Tor SOCKS proxy")
+        .option(
+          "--reaching-clear-ep <clear|tor|none>",
+          "Method for reaching a clear endpoint"
+        )
+        .option(
+          "--force-tor",
+          "Force Duniter to contact Tor endpoints (if you redirect the traffic to Tor yourself)"
+        )
+        .option("--rm-proxies", "Remove all proxies")
+
+        .option(
+          "--timeout <milliseconds>",
+          "Timeout to use when contacting peers",
+          parseInt
+        )
+        .option("--httplogs", "Enable HTTP logs")
+        .option("--nohttplogs", "Disable HTTP logs")
+        .option(
+          "--isolate",
+          "Prevent the node from sending peering or status information to the network"
+        )
+        .option("--forksize <size>", "Maximum size of fork window", parseInt)
+        .option("--notrim", "Disable the INDEX trimming.")
+        .option("--notrimc", "Disable the C_INDEX trimming specifically.")
+        .option("--memory", "Memory mode");
 
       for (const opt of options) {
-        program
-          .option(opt.optFormat, opt.optDesc, opt.optParser);
+        program.option(opt.optFormat, opt.optDesc, opt.optParser);
      }
 
      for (const cmd of commands) {
        program
          .command(cmd.command.name)
          .description(cmd.command.desc)
-          .action(async function() {
+          .action(async function () {
            const args = Array.from(arguments);
            try {
-              const resOfExecution = await cmd.executionCallback.apply(null, [program].concat(args));
+              const resOfExecution = await cmd.executionCallback.apply(
+                null,
+                [program].concat(args)
+              );
              onResolve(resOfExecution);
            } catch (e) {
              onReject(e);
@@ -98,23 +142,25 @@ export const ExecuteCommand = () => {
            }
          });
      }
 
-      program
-        .on('*', function (cmd:any) {
-          console.log("Unknown command '%s'. Try --help for a listing of commands & options.", cmd);
-          onResolve();
-        });
+      program.on("*", function (cmd: any) {
+        console.log(
+          "Unknown command '%s'. Try --help for a listing of commands & options.",
+          cmd
+        );
+        onResolve();
+      });
 
      program.parse(programArgs);
 
      if (programArgs.length <= 2) {
-        onReject('No command given.');
+        onReject("No command given.");
      }
      return currentCommand;
-    }
+    },
  };
};
 
-function parsePercent(s:string) {
+function parsePercent(s: string) {
  const f = parseFloat(s);
  return isNaN(f) ? 
0 : f; } diff --git a/app/lib/blockchain/DuniterBlockchain.ts b/app/lib/blockchain/DuniterBlockchain.ts index cb036850ebfe0859f4c86cdf28340b4635d3c33f..e5659a1d2cfa428f496c62338b7419dcd4d6404d 100644 --- a/app/lib/blockchain/DuniterBlockchain.ts +++ b/app/lib/blockchain/DuniterBlockchain.ts @@ -19,37 +19,40 @@ import { Indexer, MindexEntry, SimpleSindexEntryForWallet, - SimpleUdEntryForWallet -} from "../indexer" -import {ConfDTO} from "../dto/ConfDTO" -import {BlockDTO} from "../dto/BlockDTO" -import {DBHead} from "../db/DBHead" -import {DBBlock} from "../db/DBBlock" -import {CHECK} from "../rules/index" -import {RevocationDTO} from "../dto/RevocationDTO" -import {IdentityDTO} from "../dto/IdentityDTO" -import {CertificationDTO} from "../dto/CertificationDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {TransactionDTO} from "../dto/TransactionDTO" -import {CommonConstants} from "../common-libs/constants" -import {FileDAL} from "../dal/fileDAL" -import {NewLogger} from "../logger" -import {DBTx} from "../db/DBTx" -import {Underscore} from "../common-libs/underscore" -import {OtherConstants} from "../other_constants" -import {MonitorExecutionTime} from "../debug/MonitorExecutionTime" -import {Wot} from "duniteroxyde" -import { Directory } from "../system/directory" + SimpleUdEntryForWallet, +} from "../indexer"; +import { ConfDTO } from "../dto/ConfDTO"; +import { BlockDTO } from "../dto/BlockDTO"; +import { DBHead } from "../db/DBHead"; +import { DBBlock } from "../db/DBBlock"; +import { CHECK } from "../rules/index"; +import { RevocationDTO } from "../dto/RevocationDTO"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { CertificationDTO } from "../dto/CertificationDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { CommonConstants } from "../common-libs/constants"; +import { FileDAL } from "../dal/fileDAL"; +import { NewLogger } from "../logger"; +import { DBTx } from "../db/DBTx"; +import { Underscore } from "../common-libs/underscore"; +import { OtherConstants } from "../other_constants"; +import { MonitorExecutionTime } from "../debug/MonitorExecutionTime"; +import { Wot } from "duniteroxyde"; +import { Directory } from "../system/directory"; export class DuniterBlockchain { - - static async checkBlock(block:BlockDTO, withPoWAndSignature:boolean, conf: ConfDTO, dal:FileDAL) { - const index = Indexer.localIndex(block, conf) + static async checkBlock( + block: BlockDTO, + withPoWAndSignature: boolean, + conf: ConfDTO, + dal: FileDAL + ) { + const index = Indexer.localIndex(block, conf); if (withPoWAndSignature) { - await CHECK.ASYNC.ALL_LOCAL(block, conf, index) - } - else { - await CHECK.ASYNC.ALL_LOCAL_BUT_POW(block, conf, index) + await CHECK.ASYNC.ALL_LOCAL(block, conf, index); + } else { + await CHECK.ASYNC.ALL_LOCAL_BUT_POW(block, conf, index); } const HEAD = await Indexer.completeGlobalScope(block, conf, index, dal); const HEAD_1 = await dal.bindexDAL.head(1); @@ -58,112 +61,150 @@ export class DuniterBlockchain { const sindex = Indexer.sindex(index); const cindex = Indexer.cindex(index); // BR_G49 - if (Indexer.ruleVersion(HEAD, HEAD_1) === false) throw Error('ruleVersion'); + if (Indexer.ruleVersion(HEAD, HEAD_1) === false) throw Error("ruleVersion"); // BR_G50 - if (Indexer.ruleBlockSize(HEAD) === false) throw Error('ruleBlockSize'); + if (Indexer.ruleBlockSize(HEAD) === false) throw Error("ruleBlockSize"); // BR_G98 - if (Indexer.ruleCurrency(block, HEAD) === false) throw 
Error('ruleCurrency'); + if (Indexer.ruleCurrency(block, HEAD) === false) + throw Error("ruleCurrency"); // BR_G51 if (Indexer.ruleNumber(block, HEAD) === false) { - throw Error('ruleNumber') + throw Error("ruleNumber"); } // BR_G52 - if (Indexer.rulePreviousHash(block, HEAD) === false) throw Error('rulePreviousHash'); + if (Indexer.rulePreviousHash(block, HEAD) === false) + throw Error("rulePreviousHash"); // BR_G53 - if (Indexer.rulePreviousIssuer(block, HEAD) === false) throw Error('rulePreviousIssuer'); + if (Indexer.rulePreviousIssuer(block, HEAD) === false) + throw Error("rulePreviousIssuer"); // BR_G101 - if (Indexer.ruleIssuerIsMember(HEAD) === false) throw Error('ruleIssuerIsMember'); + if (Indexer.ruleIssuerIsMember(HEAD) === false) + throw Error("ruleIssuerIsMember"); // BR_G54 - if (Indexer.ruleIssuersCount(block, HEAD) === false) throw Error('ruleIssuersCount'); + if (Indexer.ruleIssuersCount(block, HEAD) === false) + throw Error("ruleIssuersCount"); // BR_G55 - if (Indexer.ruleIssuersFrame(block, HEAD) === false) throw Error('ruleIssuersFrame'); + if (Indexer.ruleIssuersFrame(block, HEAD) === false) + throw Error("ruleIssuersFrame"); // BR_G56 - if (Indexer.ruleIssuersFrameVar(block, HEAD) === false) throw Error('ruleIssuersFrameVar'); + if (Indexer.ruleIssuersFrameVar(block, HEAD) === false) + throw Error("ruleIssuersFrameVar"); // BR_G57 if (Indexer.ruleMedianTime(block, HEAD) === false) { - throw Error('ruleMedianTime') + throw Error("ruleMedianTime"); } // BR_G58 - if (Indexer.ruleDividend(block, HEAD) === false) throw Error('ruleDividend'); + if (Indexer.ruleDividend(block, HEAD) === false) + throw Error("ruleDividend"); // BR_G59 - if (Indexer.ruleUnitBase(block, HEAD) === false) throw Error('ruleUnitBase'); + if (Indexer.ruleUnitBase(block, HEAD) === false) + throw Error("ruleUnitBase"); // BR_G60 - if (Indexer.ruleMembersCount(block, HEAD) === false) throw Error('ruleMembersCount'); + if (Indexer.ruleMembersCount(block, HEAD) === false) + throw Error("ruleMembersCount"); // BR_G61 - if (Indexer.rulePowMin(block, HEAD) === false) throw Error('rulePowMin'); + if (Indexer.rulePowMin(block, HEAD) === false) throw Error("rulePowMin"); if (withPoWAndSignature) { // BR_G62 - if (Indexer.ruleProofOfWork(HEAD) === false) throw Error('ruleProofOfWork'); + if (Indexer.ruleProofOfWork(HEAD) === false) + throw Error("ruleProofOfWork"); } // BR_G63 - if (Indexer.ruleIdentityWritability(iindex, conf) === false) throw Error('ruleIdentityWritability'); + if (Indexer.ruleIdentityWritability(iindex, conf) === false) + throw Error("ruleIdentityWritability"); // BR_G64 - if (Indexer.ruleMembershipWritability(mindex, conf) === false) throw Error('ruleMembershipWritability'); + if (Indexer.ruleMembershipWritability(mindex, conf) === false) + throw Error("ruleMembershipWritability"); // BR_G108 - if (Indexer.ruleMembershipPeriod(mindex) === false) throw Error('ruleMembershipPeriod'); + if (Indexer.ruleMembershipPeriod(mindex) === false) + throw Error("ruleMembershipPeriod"); // BR_G65 - if (Indexer.ruleCertificationWritability(cindex, conf) === false) throw Error('ruleCertificationWritability'); + if (Indexer.ruleCertificationWritability(cindex, conf) === false) + throw Error("ruleCertificationWritability"); // BR_G66 - if (Indexer.ruleCertificationStock(cindex, conf) === false) throw Error('ruleCertificationStock'); + if (Indexer.ruleCertificationStock(cindex, conf) === false) + throw Error("ruleCertificationStock"); // BR_G67 - if (Indexer.ruleCertificationPeriod(cindex) === false) throw 
Error('ruleCertificationPeriod'); + if (Indexer.ruleCertificationPeriod(cindex) === false) + throw Error("ruleCertificationPeriod"); // BR_G68 - if (Indexer.ruleCertificationFromMember(HEAD, cindex) === false) throw Error('ruleCertificationFromMember'); + if (Indexer.ruleCertificationFromMember(HEAD, cindex) === false) + throw Error("ruleCertificationFromMember"); // BR_G69 - if (Indexer.ruleCertificationToMemberOrNewcomer(cindex) === false) throw Error('ruleCertificationToMemberOrNewcomer'); + if (Indexer.ruleCertificationToMemberOrNewcomer(cindex) === false) + throw Error("ruleCertificationToMemberOrNewcomer"); // BR_G70 - if (Indexer.ruleCertificationToLeaver(cindex) === false) throw Error('ruleCertificationToLeaver'); + if (Indexer.ruleCertificationToLeaver(cindex) === false) + throw Error("ruleCertificationToLeaver"); // BR_G71 if (Indexer.ruleCertificationReplay(cindex) === false) { - throw Error('ruleCertificationReplay') + throw Error("ruleCertificationReplay"); } // BR_G72 - if (Indexer.ruleCertificationSignature(cindex) === false) throw Error('ruleCertificationSignature'); + if (Indexer.ruleCertificationSignature(cindex) === false) + throw Error("ruleCertificationSignature"); // BR_G73 - if (Indexer.ruleIdentityUIDUnicity(iindex) === false) throw Error('ruleIdentityUIDUnicity'); + if (Indexer.ruleIdentityUIDUnicity(iindex) === false) + throw Error("ruleIdentityUIDUnicity"); // BR_G74 - if (Indexer.ruleIdentityPubkeyUnicity(iindex) === false) throw Error('ruleIdentityPubkeyUnicity'); + if (Indexer.ruleIdentityPubkeyUnicity(iindex) === false) + throw Error("ruleIdentityPubkeyUnicity"); // BR_G75 - if (Indexer.ruleMembershipSuccession(mindex) === false) throw Error('ruleMembershipSuccession'); + if (Indexer.ruleMembershipSuccession(mindex) === false) + throw Error("ruleMembershipSuccession"); // BR_G76 - if (Indexer.ruleMembershipDistance(HEAD, mindex) === false) throw Error('ruleMembershipDistance'); + if (Indexer.ruleMembershipDistance(HEAD, mindex) === false) + throw Error("ruleMembershipDistance"); // BR_G77 - if (Indexer.ruleMembershipOnRevoked(mindex) === false) throw Error('ruleMembershipOnRevoked'); + if (Indexer.ruleMembershipOnRevoked(mindex) === false) + throw Error("ruleMembershipOnRevoked"); // BR_G78 - if (Indexer.ruleMembershipJoinsTwice(mindex) === false) throw Error('ruleMembershipJoinsTwice'); + if (Indexer.ruleMembershipJoinsTwice(mindex) === false) + throw Error("ruleMembershipJoinsTwice"); // BR_G79 - if (Indexer.ruleMembershipEnoughCerts(mindex) === false) throw Error('ruleMembershipEnoughCerts'); + if (Indexer.ruleMembershipEnoughCerts(mindex) === false) + throw Error("ruleMembershipEnoughCerts"); // BR_G80 - if (Indexer.ruleMembershipLeaverIsMember(mindex) === false) throw Error('ruleMembershipLeaverIsMember'); + if (Indexer.ruleMembershipLeaverIsMember(mindex) === false) + throw Error("ruleMembershipLeaverIsMember"); // BR_G81 if (Indexer.ruleMembershipActiveIsMember(mindex) === false) { - throw Error('ruleMembershipActiveIsMember') + throw Error("ruleMembershipActiveIsMember"); } // BR_G82 - if (Indexer.ruleMembershipRevokedIsMember(mindex) === false) throw Error('ruleMembershipRevokedIsMember'); + if (Indexer.ruleMembershipRevokedIsMember(mindex) === false) + throw Error("ruleMembershipRevokedIsMember"); // BR_G83 - if (Indexer.ruleMembershipRevokedSingleton(mindex) === false) throw Error('ruleMembershipRevokedSingleton'); + if (Indexer.ruleMembershipRevokedSingleton(mindex) === false) + throw Error("ruleMembershipRevokedSingleton"); // BR_G84 - if 
(Indexer.ruleMembershipRevocationSignature(mindex) === false) throw Error('ruleMembershipRevocationSignature'); + if (Indexer.ruleMembershipRevocationSignature(mindex) === false) + throw Error("ruleMembershipRevocationSignature"); // BR_G85 - if (Indexer.ruleMembershipExcludedIsMember(iindex) === false) throw Error('ruleMembershipExcludedIsMember'); + if (Indexer.ruleMembershipExcludedIsMember(iindex) === false) + throw Error("ruleMembershipExcludedIsMember"); // BR_G86 if ((await Indexer.ruleToBeKickedArePresent(iindex, dal)) === false) { - throw Error('ruleToBeKickedArePresent') + throw Error("ruleToBeKickedArePresent"); } // BR_G103 - if (Indexer.ruleTxWritability(sindex) === false) throw Error('ruleTxWritability'); + if (Indexer.ruleTxWritability(sindex) === false) + throw Error("ruleTxWritability"); // BR_G87 - if (Indexer.ruleInputIsAvailable(sindex) === false) throw Error('ruleInputIsAvailable'); + if (Indexer.ruleInputIsAvailable(sindex) === false) + throw Error("ruleInputIsAvailable"); // BR_G88 - if (Indexer.ruleInputIsUnlocked(sindex) === false) throw Error('ruleInputIsUnlocked'); + if (Indexer.ruleInputIsUnlocked(sindex) === false) + throw Error("ruleInputIsUnlocked"); // BR_G89 - if (Indexer.ruleInputIsTimeUnlocked(sindex) === false) throw Error('ruleInputIsTimeUnlocked'); + if (Indexer.ruleInputIsTimeUnlocked(sindex) === false) + throw Error("ruleInputIsTimeUnlocked"); // BR_G90 - if (Indexer.ruleOutputBase(sindex, HEAD_1) === false) throw Error('ruleOutputBase'); + if (Indexer.ruleOutputBase(sindex, HEAD_1) === false) + throw Error("ruleOutputBase"); // Check document's coherence - const matchesList = (regexp:RegExp, list:string[]) => { + const matchesList = (regexp: RegExp, list: string[]) => { let i = 0; let found = ""; while (!found && i < list.length) { @@ -171,16 +212,16 @@ export class DuniterBlockchain { i++; } return found; - } + }; const isMember = await dal.isMember(block.issuer); if (!isMember) { if (block.number == 0) { - if (!matchesList(new RegExp('^' + block.issuer + ':'), block.joiners)) { - throw Error('Block not signed by the root members'); + if (!matchesList(new RegExp("^" + block.issuer + ":"), block.joiners)) { + throw Error("Block not signed by the root members"); } } else { - throw Error('Block must be signed by an existing member'); + throw Error("Block must be signed by an existing member"); } } @@ -188,22 +229,41 @@ export class DuniterBlockchain { // Check the local rules // Enrich with the global index // Check the global rules - return { index, HEAD } + return { index, HEAD }; } - static async pushTheBlock(obj:BlockDTO, index:IndexEntry[], HEAD:DBHead | null, conf:ConfDTO, dal:FileDAL, logger:any, trim = true) { + static async pushTheBlock( + obj: BlockDTO, + index: IndexEntry[], + HEAD: DBHead | null, + conf: ConfDTO, + dal: FileDAL, + logger: any, + trim = true + ) { const start = Date.now(); - const block = BlockDTO.fromJSONObject(obj) + const block = BlockDTO.fromJSONObject(obj); try { const currentBlock = await dal.getCurrentBlockOrNull(); block.fork = false; - const added = await this.saveBlockData(currentBlock, block, conf, dal, logger, index, HEAD, trim); - - logger.info('Block #' + block.number + ' added to the blockchain in %s ms', (Date.now() - start)); - - return BlockDTO.fromJSONObject(added) - } - catch(err) { + const added = await this.saveBlockData( + currentBlock, + block, + conf, + dal, + logger, + index, + HEAD, + trim + ); + + logger.info( + "Block #" + block.number + " added to the blockchain in %s ms", + Date.now() - start 
+ ); + + return BlockDTO.fromJSONObject(added); + } catch (err) { throw err; } @@ -213,7 +273,16 @@ export class DuniterBlockchain { // await supra.recordIndex(index) } - static async saveBlockData(current:DBBlock|null, block:BlockDTO, conf:ConfDTO, dal:FileDAL, logger:any, index:IndexEntry[], HEAD:DBHead | null, trim: boolean) { + static async saveBlockData( + current: DBBlock | null, + block: BlockDTO, + conf: ConfDTO, + dal: FileDAL, + logger: any, + index: IndexEntry[], + HEAD: DBHead | null, + trim: boolean + ) { if (block.number == 0) { await this.saveParametersForRoot(block, conf, dal); } @@ -225,20 +294,20 @@ export class DuniterBlockchain { // Save indexes await dal.bindexDAL.insert(indexes.HEAD); - await dal.flushIndexes(indexes) + await dal.flushIndexes(indexes); // Create/Update nodes in wotb await this.updateMembers(block, dal); // Update the wallets' blances - await this.updateWallets(indexes.sindex, indexes.dividends, dal) + await this.updateWallets(indexes.sindex, indexes.dividends, dal); if (trim) { - await DuniterBlockchain.trimIndexes(dal, indexes.HEAD, conf) + await DuniterBlockchain.trimIndexes(dal, indexes.HEAD, conf); } - const dbb = DBBlock.fromBlockDTO(block) - this.updateBlocksComputedVars(current, dbb) + const dbb = DBBlock.fromBlockDTO(block); + this.updateBlocksComputedVars(current, dbb); // --> Update links await dal.updateWotbLinks(indexes.cindex); @@ -256,19 +325,23 @@ export class DuniterBlockchain { // Saves the block (DAL) await dal.saveBlock(dbb); - + // Save wot file if (!dal.fs.isMemoryOnly()) { let wotbFilepath = await Directory.getWotbFilePath(dal.rootPath); dal.wotb.writeInFile(wotbFilepath); - } + } - return dbb + return dbb; } - static async saveParametersForRoot(block:BlockDTO, conf:ConfDTO, dal:FileDAL) { + static async saveParametersForRoot( + block: BlockDTO, + conf: ConfDTO, + dal: FileDAL + ) { if (block.parameters) { - const bconf = BlockDTO.getConf(block) + const bconf = BlockDTO.getConf(block); conf.c = bconf.c; conf.dt = bconf.dt; conf.ud0 = bconf.ud0; @@ -297,73 +370,128 @@ export class DuniterBlockchain { } @MonitorExecutionTime() - static async createNewcomers(iindex:IindexEntry[], dal:FileDAL, logger:any, instance?: Wot) { - const wotb = instance || dal.wotb + static async createNewcomers( + iindex: IindexEntry[], + dal: FileDAL, + logger: any, + instance?: Wot + ) { + const wotb = instance || dal.wotb; for (const i of iindex) { if (i.op == CommonConstants.IDX_CREATE) { - const entry = i as FullIindexEntry + const entry = i as FullIindexEntry; // Reserves a wotb ID entry.wotb_id = wotb.addNode(); - logger.trace('%s was affected wotb_id %s', entry.uid, entry.wotb_id); + logger.trace("%s was affected wotb_id %s", entry.uid, entry.wotb_id); // Remove from the sandbox any other identity with the same pubkey/uid, since it has now been reserved. 
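      // (Illustration, inferred from the DAL method names: if the sandbox holds
      // the pending identities (pubA, "bob") and (pubB, "alice") and this block
      // writes (pubA, "alice"), both pending entries must be dropped, since the
      // first shares the pubkey and the second shares the uid.)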
- await dal.removeUnWrittenWithPubkey(entry.pub) - await dal.removeUnWrittenWithUID(entry.uid) + await dal.removeUnWrittenWithPubkey(entry.pub); + await dal.removeUnWrittenWithUID(entry.uid); } } } - static async updateMembers(block:BlockDTO, dal:FileDAL, instance?: Wot) { - const wotb = instance || dal.wotb + static async updateMembers(block: BlockDTO, dal: FileDAL, instance?: Wot) { + const wotb = instance || dal.wotb; // Joiners (come back) for (const inlineMS of block.joiners) { - let ms = MembershipDTO.fromInline(inlineMS) + let ms = MembershipDTO.fromInline(inlineMS); const idty = await dal.getWrittenIdtyByPubkeyForWotbID(ms.issuer); wotb.setEnabled(true, idty.wotb_id); - await dal.dividendDAL.setMember(true, ms.issuer) + await dal.dividendDAL.setMember(true, ms.issuer); } // Revoked for (const inlineRevocation of block.revoked) { - let revocation = RevocationDTO.fromInline(inlineRevocation) - await dal.revokeIdentity(revocation.pubkey) + let revocation = RevocationDTO.fromInline(inlineRevocation); + await dal.revokeIdentity(revocation.pubkey); } // Excluded for (const excluded of block.excluded) { const idty = await dal.getWrittenIdtyByPubkeyForWotbID(excluded); wotb.setEnabled(false, idty.wotb_id); - await dal.dividendDAL.setMember(false, excluded) + await dal.dividendDAL.setMember(false, excluded); } } - static async updateWallets(sindex:SimpleSindexEntryForWallet[], dividends:SimpleUdEntryForWallet[], aDal:any, reverse = false, at?: number) { - const differentConditions = Underscore.uniq(sindex.map((entry) => entry.conditions).concat(dividends.map(d => d.conditions))) + static async updateWallets( + sindex: SimpleSindexEntryForWallet[], + dividends: SimpleUdEntryForWallet[], + aDal: any, + reverse = false, + at?: number + ) { + const differentConditions = Underscore.uniq( + sindex + .map((entry) => entry.conditions) + .concat(dividends.map((d) => d.conditions)) + ); for (const conditions of differentConditions) { - const udsOfKey: BasedAmount[] = dividends.filter(d => d.conditions === conditions).map(d => ({ amount: d.amount, base: d.base })) - const creates: BasedAmount[] = sindex.filter(entry => entry.conditions === conditions && entry.op === CommonConstants.IDX_CREATE) - const updates: BasedAmount[] = sindex.filter(entry => entry.conditions === conditions && entry.op === CommonConstants.IDX_UPDATE) - const positives = creates.concat(udsOfKey).reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0) - const negatives = updates.reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0) - const wallet = await aDal.getWallet(conditions) - let variation = positives - negatives + const udsOfKey: BasedAmount[] = dividends + .filter((d) => d.conditions === conditions) + .map((d) => ({ amount: d.amount, base: d.base })); + const creates: BasedAmount[] = sindex.filter( + (entry) => + entry.conditions === conditions && + entry.op === CommonConstants.IDX_CREATE + ); + const updates: BasedAmount[] = sindex.filter( + (entry) => + entry.conditions === conditions && + entry.op === CommonConstants.IDX_UPDATE + ); + const positives = creates + .concat(udsOfKey) + .reduce((sum, src) => sum + src.amount * Math.pow(10, src.base), 0); + const negatives = updates.reduce( + (sum, src) => sum + src.amount * Math.pow(10, src.base), + 0 + ); + const wallet = await aDal.getWallet(conditions); + let variation = positives - negatives; if (reverse) { // To do the opposite operations, for a reverted block - variation *= -1 + variation *= -1; } if (OtherConstants.TRACE_BALANCES) { - if 
(!OtherConstants.TRACE_PARTICULAR_BALANCE || wallet.conditions.match(new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE))) { - NewLogger().trace('Balance of %s: %s (%s %s %s) at #%s', wallet.conditions, wallet.balance + variation, wallet.balance, variation < 0 ? '-' : '+', Math.abs(variation), at) + if ( + !OtherConstants.TRACE_PARTICULAR_BALANCE || + wallet.conditions.match( + new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE) + ) + ) { + NewLogger().trace( + "Balance of %s: %s (%s %s %s) at #%s", + wallet.conditions, + wallet.balance + variation, + wallet.balance, + variation < 0 ? "-" : "+", + Math.abs(variation), + at + ); } } - wallet.balance += variation - if (OtherConstants.TRACE_PARTICULAR_BALANCE && wallet.conditions.match(new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE))) { - NewLogger().trace('>>>>>>>>> WALLET = ', (wallet.balance > 0 ? '+' : '') + wallet.balance) + wallet.balance += variation; + if ( + OtherConstants.TRACE_PARTICULAR_BALANCE && + wallet.conditions.match( + new RegExp(OtherConstants.TRACE_PARTICULAR_BALANCE) + ) + ) { + NewLogger().trace( + ">>>>>>>>> WALLET = ", + (wallet.balance > 0 ? "+" : "") + wallet.balance + ); } - await aDal.saveWallet(wallet) + await aDal.saveWallet(wallet); } } - static async revertBlock(number:number, hash:string, dal:FileDAL, block?: DBBlock) { - - const blockstamp = [number, hash].join('-'); + static async revertBlock( + number: number, + hash: string, + dal: FileDAL, + block?: DBBlock + ) { + const blockstamp = [number, hash].join("-"); // Revert links const writtenOn = await dal.cindexDAL.getWrittenOn(blockstamp); @@ -383,34 +511,42 @@ export class DuniterBlockchain { await this.undoMembersUpdate(blockstamp, dal); // Get the money movements to revert in the balance - const REVERSE_BALANCE = true - const sindexOfBlock = await dal.sindexDAL.getWrittenOnTxs(blockstamp) + const REVERSE_BALANCE = true; + const sindexOfBlock = await dal.sindexDAL.getWrittenOnTxs(blockstamp); await dal.bindexDAL.removeBlock(blockstamp); await dal.mindexDAL.removeBlock(blockstamp); await dal.iindexDAL.removeBlock(blockstamp); await dal.cindexDAL.removeBlock(blockstamp); await dal.sindexDAL.removeBlock(blockstamp); - const { createdUDsDestroyedByRevert, consumedUDsRecoveredByRevert } = await dal.dividendDAL.revertUDs(number) + const { + createdUDsDestroyedByRevert, + consumedUDsRecoveredByRevert, + } = await dal.dividendDAL.revertUDs(number); // Then: normal updates - const previousBlock = await dal.getFullBlockOf(number - 1) + const previousBlock = await dal.getFullBlockOf(number - 1); // Set the block as SIDE block (equivalent to removal from main branch) await dal.blockDAL.setSideBlock(number, previousBlock); // Update the dividends in our wallet - await this.updateWallets([], createdUDsDestroyedByRevert, dal, REVERSE_BALANCE) - await this.updateWallets([], consumedUDsRecoveredByRevert, dal) + await this.updateWallets( + [], + createdUDsDestroyedByRevert, + dal, + REVERSE_BALANCE + ); + await this.updateWallets([], consumedUDsRecoveredByRevert, dal); // Revert the balances variations for this block - await this.updateWallets(sindexOfBlock, [], dal, REVERSE_BALANCE) + await this.updateWallets(sindexOfBlock, [], dal, REVERSE_BALANCE); // Restore block's transaction as incoming transactions if (block) { - await this.undoDeleteTransactions(block, dal) + await this.undoDeleteTransactions(block, dal); } } - static async undoMembersUpdate(blockstamp:string, dal:FileDAL) { + static async undoMembersUpdate(blockstamp: string, dal: FileDAL) { const joiners = 
await dal.iindexDAL.getWrittenOn(blockstamp); for (const entry of joiners) { // Undo 'join' which can be either newcomers or comebackers @@ -418,7 +554,7 @@ export class DuniterBlockchain { if (entry.member === true && entry.op === CommonConstants.IDX_UPDATE) { const idty = await dal.getWrittenIdtyByPubkeyForWotbID(entry.pub); dal.wotb.setEnabled(false, idty.wotb_id); - await dal.dividendDAL.setMember(false, entry.pub) + await dal.dividendDAL.setMember(false, entry.pub); } } const newcomers = await dal.iindexDAL.getWrittenOn(blockstamp); @@ -427,11 +563,11 @@ export class DuniterBlockchain { // => equivalent to i_index.op = 'CREATE' if (entry.op === CommonConstants.IDX_CREATE) { // Does not matter which one it really was, we pop the last X identities - NewLogger().trace('removeNode') + NewLogger().trace("removeNode"); if (dal.wotb.getWoTSize() > 0) { dal.wotb.removeNode(); } - await dal.dividendDAL.deleteMember(entry.pub) + await dal.dividendDAL.deleteMember(entry.pub); } } const excluded = await dal.iindexDAL.getWrittenOn(blockstamp); @@ -441,16 +577,16 @@ export class DuniterBlockchain { if (entry.member === false && entry.op === CommonConstants.IDX_UPDATE) { const idty = await dal.getWrittenIdtyByPubkeyForWotbID(entry.pub); dal.wotb.setEnabled(true, idty.wotb_id); - await dal.dividendDAL.setMember(true, entry.pub) + await dal.dividendDAL.setMember(true, entry.pub); } } } - static async undoDeleteTransactions(block:DBBlock, dal:FileDAL) { + static async undoDeleteTransactions(block: DBBlock, dal: FileDAL) { for (const obj of block.transactions) { obj.currency = block.currency; - let tx = TransactionDTO.fromJSONObject(obj) - await dal.saveTransaction(DBTx.fromTransactionDTO(tx)) + let tx = TransactionDTO.fromJSONObject(obj); + await dal.saveTransaction(DBTx.fromTransactionDTO(tx)); } } @@ -460,9 +596,9 @@ export class DuniterBlockchain { * @param block Block in which are contained the certifications to remove from sandbox. * @param dal The DAL */ - static async removeCertificationsFromSandbox(block:BlockDTO, dal:FileDAL) { + static async removeCertificationsFromSandbox(block: BlockDTO, dal: FileDAL) { for (let inlineCert of block.certifications) { - let cert = CertificationDTO.fromInline(inlineCert) + let cert = CertificationDTO.fromInline(inlineCert); let idty = await dal.getWrittenIdtyByPubkeyForHashing(cert.to); await dal.deleteCert({ from: cert.from, @@ -478,87 +614,112 @@ export class DuniterBlockchain { * @param block Block in which are contained the certifications to remove from sandbox. 
* @param dal The DAL */ - static async removeMembershipsFromSandbox(block:BlockDTO, dal:FileDAL) { + static async removeMembershipsFromSandbox(block: BlockDTO, dal: FileDAL) { const mss = block.joiners.concat(block.actives).concat(block.leavers); for (const inlineMS of mss) { - let ms = MembershipDTO.fromInline(inlineMS) + let ms = MembershipDTO.fromInline(inlineMS); await dal.deleteMS({ issuer: ms.issuer, - signature: ms.signature + signature: ms.signature, }); } } - static async computeToBeRevoked(mindex:MindexEntry[], dal:FileDAL) { - const revocations = Underscore.filter(mindex, (entry:MindexEntry) => !!(entry.revoked_on)) + static async computeToBeRevoked(mindex: MindexEntry[], dal: FileDAL) { + const revocations = Underscore.filter( + mindex, + (entry: MindexEntry) => !!entry.revoked_on + ); for (const revoked of revocations) { - await dal.setRevoked(revoked.pub) + await dal.setRevoked(revoked.pub); } } - static async deleteTransactions(block:BlockDTO, dal:FileDAL) { + static async deleteTransactions(block: BlockDTO, dal: FileDAL) { for (const obj of block.transactions) { obj.currency = block.currency; - const tx = TransactionDTO.fromJSONObject(obj) + const tx = TransactionDTO.fromJSONObject(obj); const txHash = tx.getHash(); await dal.removeTxByHash(txHash); } } static updateBlocksComputedVars( - current:{ unitbase:number, monetaryMass:number }|null, - block:{ number:number, unitbase:number, dividend:number|null, membersCount:number, monetaryMass:number }): void { + current: { unitbase: number; monetaryMass: number } | null, + block: { + number: number; + unitbase: number; + dividend: number | null; + membersCount: number; + monetaryMass: number; + } + ): void { // Unit Base - block.unitbase = (block.dividend && block.unitbase) || (current && current.unitbase) || 0; + block.unitbase = + (block.dividend && block.unitbase) || (current && current.unitbase) || 0; // Monetary Mass update if (current) { - block.monetaryMass = (current.monetaryMass || 0) - + (block.dividend || 0) * Math.pow(10, block.unitbase || 0) * block.membersCount; + block.monetaryMass = + (current.monetaryMass || 0) + + (block.dividend || 0) * + Math.pow(10, block.unitbase || 0) * + block.membersCount; } else { - block.monetaryMass = 0 + block.monetaryMass = 0; } // UD Time update if (block.number == 0) { block.dividend = null; - } - else if (!block.dividend) { + } else if (!block.dividend) { block.dividend = null; } } - static async pushSideBlock(obj:BlockDTO, dal:FileDAL, logger:any) { + static async pushSideBlock(obj: BlockDTO, dal: FileDAL, logger: any) { const start = Date.now(); - const block = DBBlock.fromBlockDTO(BlockDTO.fromJSONObject(obj)) + const block = DBBlock.fromBlockDTO(BlockDTO.fromJSONObject(obj)); block.fork = true; try { // Saves the block (DAL) block.wrong = false; await dal.saveSideBlockInFile(block); - logger.info('SIDE Block #%s-%s added to the blockchain in %s ms', block.number, block.hash.substr(0, 8), (Date.now() - start)); + logger.info( + "SIDE Block #%s-%s added to the blockchain in %s ms", + block.number, + block.hash.substr(0, 8), + Date.now() - start + ); return block; } catch (err) { throw err; } } - public static async trimIndexes(dal: FileDAL, HEAD: { number: number }, conf: ConfDTO) { + public static async trimIndexes( + dal: FileDAL, + HEAD: { number: number }, + conf: ConfDTO + ) { const TAIL = await dal.bindexDAL.tail(); - const MAX_BINDEX_SIZE = requiredBindexSizeForTail(TAIL, conf) - const currentSize = HEAD.number - TAIL.number + 1 + const MAX_BINDEX_SIZE = 
requiredBindexSizeForTail(TAIL, conf); + const currentSize = HEAD.number - TAIL.number + 1; if (currentSize > MAX_BINDEX_SIZE) { await dal.trimIndexes(HEAD.number - MAX_BINDEX_SIZE); } } } -export function requiredBindexSizeForTail(TAIL: { issuersCount: number, issuersFrame: number }, conf: { medianTimeBlocks: number, dtDiffEval: number, forksize: number }) { +export function requiredBindexSizeForTail( + TAIL: { issuersCount: number; issuersFrame: number }, + conf: { medianTimeBlocks: number; dtDiffEval: number; forksize: number } +) { const bindexSize = [ TAIL.issuersCount, TAIL.issuersFrame, conf.medianTimeBlocks, - conf.dtDiffEval + conf.dtDiffEval, ].reduce((max, value) => { return Math.max(max, value); }, 0); - return conf.forksize + bindexSize + return conf.forksize + bindexSize; } diff --git a/app/lib/blockchain/Switcher.ts b/app/lib/blockchain/Switcher.ts index 367fafd4ebe4fcd1ab3abd132df4ffdafed63d16..f600aeedfa7bde0e465df80714c2d3cc6d942250 100644 --- a/app/lib/blockchain/Switcher.ts +++ b/app/lib/blockchain/Switcher.ts @@ -11,75 +11,92 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../dto/BlockDTO" -import {Underscore} from "../common-libs/underscore" +import { BlockDTO } from "../dto/BlockDTO"; +import { Underscore } from "../common-libs/underscore"; export interface SwitchBlock { - - number:number - hash:string - previousHash:string - medianTime:number + number: number; + hash: string; + previousHash: string; + medianTime: number; } export interface SwitcherDao<T extends SwitchBlock> { - - getCurrent(): Promise<T|null> - getPotentials(numberStart:number, timeStart:number, maxNumber:number): Promise<T[]> - getBlockchainBlock(number:number, hash:string): Promise<T|null> - getAbsoluteBlockInForkWindow(number:number, hash:string): Promise<T|null> - revertTo(number:number): Promise<T[]> - addBlock(block:T): Promise<T> + getCurrent(): Promise<T | null>; + getPotentials( + numberStart: number, + timeStart: number, + maxNumber: number + ): Promise<T[]>; + getBlockchainBlock(number: number, hash: string): Promise<T | null>; + getAbsoluteBlockInForkWindow(number: number, hash: string): Promise<T | null>; + revertTo(number: number): Promise<T[]>; + addBlock(block: T): Promise<T>; } export class Switcher<T extends SwitchBlock> { - constructor( - private dao:SwitcherDao<T>, - private invalidForks:string[], - private avgGenTime:number, - private forkWindowSize:number, - private switchOnHeadAdvance:number, - private logger:any = undefined) {} + private dao: SwitcherDao<T>, + private invalidForks: string[], + private avgGenTime: number, + private forkWindowSize: number, + private switchOnHeadAdvance: number, + private logger: any = undefined + ) {} /** * Looks at known blocks in the sandbox and try to follow the longest resulting chain that has at least both 3 blocks of * advance and 3 * avgGenTime of medianTime advancce. 
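   *
   * Worked example, assuming switchOnHeadAdvance = 3 and avgGenTime = 300 s:
   * with the local HEAD at block #100 and medianTime T, a candidate chain is
   * switched to only if its own HEAD reaches at least block #103 with a
   * medianTime of at least T + 900 s (the numberStart/timeStart checks below).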
*/ async tryToFork() { - const current = await this.dao.getCurrent() + const current = await this.dao.getCurrent(); if (current) { - const numberStart = current.number + this.switchOnHeadAdvance - const timeStart = current.medianTime + this.switchOnHeadAdvance * this.avgGenTime + const numberStart = current.number + this.switchOnHeadAdvance; + const timeStart = + current.medianTime + this.switchOnHeadAdvance * this.avgGenTime; // Phase 1: find potential chains - const suites = await this.findPotentialSuites(numberStart, timeStart) + const suites = await this.findPotentialSuites(numberStart, timeStart); if (suites.length) { - this.logger && this.logger.info("Fork resolution: %s potential suite(s) found...", suites.length) + this.logger && + this.logger.info( + "Fork resolution: %s potential suite(s) found...", + suites.length + ); } // Phase 2: select the best chain - let longestChain:null|T[] = await this.findLongestChain(current, suites) + let longestChain: null | T[] = await this.findLongestChain( + current, + suites + ); // Phase 3: a best exist? if (longestChain) { - const chainHEAD = longestChain[longestChain.length - 1] + const chainHEAD = longestChain[longestChain.length - 1]; // apply it if it respects the 3-3 rule - if (chainHEAD.number >= numberStart && chainHEAD.medianTime >= timeStart) { - await this.switchOnChain(longestChain) - return await this.dao.getCurrent() + if ( + chainHEAD.number >= numberStart && + chainHEAD.medianTime >= timeStart + ) { + await this.switchOnChain(longestChain); + return await this.dao.getCurrent(); } } } - return null + return null; } /** * Find all the suites' HEAD that we could potentially fork on, in the current fork window. * @param current */ - async findPotentialSuitesHeads(current:{ number:number, medianTime:number }) { - const numberStart = current.number - this.forkWindowSize - const timeStart = current.medianTime - this.forkWindowSize * this.avgGenTime - const suites = await this.findPotentialSuites(numberStart, timeStart) - return suites.map(suite => suite[suite.length - 1]) + async findPotentialSuitesHeads(current: { + number: number; + medianTime: number; + }) { + const numberStart = current.number - this.forkWindowSize; + const timeStart = + current.medianTime - this.forkWindowSize * this.avgGenTime; + const suites = await this.findPotentialSuites(numberStart, timeStart); + return suites.map((suite) => suite[suite.length - 1]); } /** @@ -89,52 +106,86 @@ export class Switcher<T extends SwitchBlock> { * @param timeStart The minimum medianTime of a fork block. * @returns {SwitchBlock[][]} The suites found. 
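   *
   * For instance, two unrelated candidate forks may come back as
   * [[#103', #102'], [#101'']]: each inner array is one suite, ordered from the
   * candidate HEAD down to the first block whose parent is on the local chain.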
*/ - private async findPotentialSuites(numberStart:number, timeStart:number) { - const suites:T[][] = [] - const potentials:T[] = Underscore.sortBy(await this.dao.getPotentials(numberStart, timeStart, numberStart + this.forkWindowSize), element => -element.number) - const knownForkBlocks:{ [k:string]: boolean } = {} + private async findPotentialSuites(numberStart: number, timeStart: number) { + const suites: T[][] = []; + const potentials: T[] = Underscore.sortBy( + await this.dao.getPotentials( + numberStart, + timeStart, + numberStart + this.forkWindowSize + ), + (element) => -element.number + ); + const knownForkBlocks: { [k: string]: boolean } = {}; for (const candidate of potentials) { - knownForkBlocks[BlockDTO.fromJSONObject(candidate).blockstamp] = true + knownForkBlocks[BlockDTO.fromJSONObject(candidate).blockstamp] = true; } - const invalids: { [hash:string]: T } = {} + const invalids: { [hash: string]: T } = {}; if (potentials.length) { - this.logger && this.logger.info("Fork resolution: %s potential block(s) found...", potentials.length) + this.logger && + this.logger.info( + "Fork resolution: %s potential block(s) found...", + potentials.length + ); } for (const candidate of potentials) { - const suite:T[] = [] + const suite: T[] = []; // Do not process the block if it is already known as invalid (has no fork point with current blockchain or misses // some blocks) or is already contained in a valid chain. - if (!invalids[candidate.hash] && !Switcher.suitesContains(suites, candidate)) { + if ( + !invalids[candidate.hash] && + !Switcher.suitesContains(suites, candidate) + ) { // Tries to build up a full chain that is linked to current chain by a fork point. - let previous:T|null = candidate, commonRootFound = false - let previousNumber:number = previous.number - 1 - let previousHash:string = previous.previousHash - while (previous && previous.number > candidate.number - this.forkWindowSize) { - suite.push(previous) - previousNumber = previous.number - 1 - previousHash = previous.previousHash - previous = null - const previousBlockstamp = [previousNumber, previousHash].join('-') + let previous: T | null = candidate, + commonRootFound = false; + let previousNumber: number = previous.number - 1; + let previousHash: string = previous.previousHash; + while ( + previous && + previous.number > candidate.number - this.forkWindowSize + ) { + suite.push(previous); + previousNumber = previous.number - 1; + previousHash = previous.previousHash; + previous = null; + const previousBlockstamp = [previousNumber, previousHash].join("-"); // We try to look at blockchain if, of course, it is not already known as a fork block // Otherwise it cost a useless DB access if (!knownForkBlocks[previousBlockstamp]) { - previous = await this.dao.getBlockchainBlock(previousNumber, previousHash) + previous = await this.dao.getBlockchainBlock( + previousNumber, + previousHash + ); } if (previous) { // Stop the loop: common block has been found - previous = null - suites.push(suite) - commonRootFound = true + previous = null; + suites.push(suite); + commonRootFound = true; } else { // Have a look in sandboxes - previous = await this.dao.getAbsoluteBlockInForkWindow(previousNumber, previousHash) + previous = await this.dao.getAbsoluteBlockInForkWindow( + previousNumber, + previousHash + ); if (previous) { - knownForkBlocks[BlockDTO.fromJSONObject(previous).blockstamp] = true - const alreadyKnownInvalidBlock = this.invalidForks.indexOf([previous.number, previous.hash].join('-')) !== -1 + knownForkBlocks[ + 
BlockDTO.fromJSONObject(previous).blockstamp + ] = true; + const alreadyKnownInvalidBlock = + this.invalidForks.indexOf( + [previous.number, previous.hash].join("-") + ) !== -1; if (alreadyKnownInvalidBlock) { // Incorrect = not found - this.logger && this.logger.info("Fork resolution: block #%s-%s is known as incorrect. Skipping.", previous.number, previous.hash.substr(0, 8)) - previous = null + this.logger && + this.logger.info( + "Fork resolution: block #%s-%s is known as incorrect. Skipping.", + previous.number, + previous.hash.substr(0, 8) + ); + previous = null; } } } @@ -142,18 +193,32 @@ export class Switcher<T extends SwitchBlock> { // Forget about invalid blocks if (!commonRootFound) { if (!previous) { - this.logger && this.logger.debug("Suite -> %s-%s missing block#%s-%s", candidate.number, candidate.hash.substr(0, 8), previousNumber, previousHash.substr(0, 8)) + this.logger && + this.logger.debug( + "Suite -> %s-%s missing block#%s-%s", + candidate.number, + candidate.hash.substr(0, 8), + previousNumber, + previousHash.substr(0, 8) + ); for (const b of suite) { - invalids[b.hash] = b + invalids[b.hash] = b; } } else { // The chain would be too long, we could not revert correctly the chain. - this.logger && this.logger.debug("Suite #%s-%s -> %s-%s out of fork window", previousNumber, previousHash.substr(0, 8), candidate.number, candidate.hash.substr(0, 8)) + this.logger && + this.logger.debug( + "Suite #%s-%s -> %s-%s out of fork window", + previousNumber, + previousHash.substr(0, 8), + candidate.number, + candidate.hash.substr(0, 8) + ); } } } } - return suites + return suites; } /** @@ -163,66 +228,102 @@ export class Switcher<T extends SwitchBlock> { * @param {SwitchBlock[][]} suites * @returns {SwitchBlock[]} */ - private async findLongestChain(current:T, suites:T[][]) { + private async findLongestChain(current: T, suites: T[][]) { if (suites.length) { - this.logger && this.logger.info("Fork resolution: HEAD = block#%s", current.number) + this.logger && + this.logger.info("Fork resolution: HEAD = block#%s", current.number); } - let longestChain:null|T[] = null - let j = 0 + let longestChain: null | T[] = null; + let j = 0; for (const s of suites) { - j++ - s.reverse() + j++; + s.reverse(); // Revert current blockchain to fork point - const forkPoint = s[0].number - 1 - const forkHead = s[s.length - 1] - this.logger && this.logger.info("Fork resolution: suite %s/%s (-> #%s-%s) revert to fork point block#%s", j, suites.length, forkHead.number, forkHead.hash.substr(0, 6), forkPoint) - const reverted = await this.dao.revertTo(s[0].number - 1) + const forkPoint = s[0].number - 1; + const forkHead = s[s.length - 1]; + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s (-> #%s-%s) revert to fork point block#%s", + j, + suites.length, + forkHead.number, + forkHead.hash.substr(0, 6), + forkPoint + ); + const reverted = await this.dao.revertTo(s[0].number - 1); // Try to add a maximum of blocks - let added = true, i = 0, successfulBlocks:T[] = [] + let added = true, + i = 0, + successfulBlocks: T[] = []; while (added && i < s.length) { try { - await this.dao.addBlock(s[i]) - this.logger && this.logger.info("Fork resolution: suite %s/%s added block#%s-%s", j, suites.length, s[i].number, s[i].hash) - successfulBlocks.push(s[i]) + await this.dao.addBlock(s[i]); + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s added block#%s-%s", + j, + suites.length, + s[i].number, + s[i].hash + ); + successfulBlocks.push(s[i]); } catch (e) { - 
this.invalidForks.push([s[i].number, s[i].hash].join('-')) - this.logger && this.logger.info("Fork resolution: suite %s/%s REFUSED block#%s: %s", j, suites.length, s[0].number + i, e && e.message) - added = false + this.invalidForks.push([s[i].number, s[i].hash].join("-")); + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s REFUSED block#%s: %s", + j, + suites.length, + s[0].number + i, + e && e.message + ); + added = false; } - i++ + i++; } // Pop the successfuly added blocks if (successfulBlocks.length) { - for (const b of successfulBlocks) { - this.invalidForks.push([b.number, b.hash].join('-')) + for (const b of successfulBlocks) { + this.invalidForks.push([b.number, b.hash].join("-")); } - const addedToHeadLevel = successfulBlocks[successfulBlocks.length-1].number - current.number - this.logger && this.logger.info("Fork resolution: suite %s/%s reached HEAD + %s. Now rolling back.", j, suites.length, addedToHeadLevel) - await this.dao.revertTo(forkPoint) + const addedToHeadLevel = + successfulBlocks[successfulBlocks.length - 1].number - current.number; + this.logger && + this.logger.info( + "Fork resolution: suite %s/%s reached HEAD + %s. Now rolling back.", + j, + suites.length, + addedToHeadLevel + ); + await this.dao.revertTo(forkPoint); } // Push back the initial blocks that were temporarily reverted - reverted.reverse() + reverted.reverse(); for (const b of reverted) { - await this.dao.addBlock(b) + await this.dao.addBlock(b); } // Remember the chain if it is the longest (highest HEAD) among tested chains - const longestHEAD = longestChain && longestChain[longestChain.length - 1] - const successHEAD = successfulBlocks && successfulBlocks[successfulBlocks.length - 1] - if ((!longestHEAD && successHEAD) || (longestHEAD && successHEAD && longestHEAD.number < successHEAD.number)) { - longestChain = successfulBlocks + const longestHEAD = longestChain && longestChain[longestChain.length - 1]; + const successHEAD = + successfulBlocks && successfulBlocks[successfulBlocks.length - 1]; + if ( + (!longestHEAD && successHEAD) || + (longestHEAD && successHEAD && longestHEAD.number < successHEAD.number) + ) { + longestChain = successfulBlocks; } } - return longestChain + return longestChain; } /** * Switch current blockchain on another chain, by poping top blocks and replacing them by new ones. 
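   *
   * E.g. switching a local chain ending ...#100, #101, #102 onto the chain
   * [#101', #102', #103'] first reverts down to #100 (revertTo(100)), then adds
   * #101', #102', #103' in ascending order (callers pass the chain
   * lowest-block-first).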
* @param {SwitchBlock[]} chain */ - private async switchOnChain(chain:T[]) { - await this.dao.revertTo(chain[0].number - 1) + private async switchOnChain(chain: T[]) { + await this.dao.revertTo(chain[0].number - 1); for (const b of chain) { - await this.dao.addBlock(b) + await this.dao.addBlock(b); } } @@ -231,14 +332,14 @@ export class Switcher<T extends SwitchBlock> { * @param {SwitchBlock[][]} suites * @param {SwitchBlock} block */ - static suitesContains<T extends SwitchBlock>(suites:T[][], block:T) { + static suitesContains<T extends SwitchBlock>(suites: T[][], block: T) { for (const suite of suites) { for (const b of suite) { if (b.number === block.number && b.hash === block.hash) { - return true + return true; } } } - return false + return false; } } diff --git a/app/lib/common-libs/array-filter.ts b/app/lib/common-libs/array-filter.ts index 134aff471d712cbf6e20d8e62f4305eccb21691c..4c38092bca5bd14b4d3d1ef057c24224bad30557 100644 --- a/app/lib/common-libs/array-filter.ts +++ b/app/lib/common-libs/array-filter.ts @@ -1,3 +1,3 @@ export function uniqFilter<T>(value: T, index: number, self: T[]) { - return self.indexOf(value) === index -} \ No newline at end of file + return self.indexOf(value) === index; +} diff --git a/app/lib/common-libs/array-prune.ts b/app/lib/common-libs/array-prune.ts index 0e262d538bcf44bf7a6727066df6b58bfd073e97..42884cbb246b76a54876c3ce39690c12b5141b81 100644 --- a/app/lib/common-libs/array-prune.ts +++ b/app/lib/common-libs/array-prune.ts @@ -1,14 +1,14 @@ export function arrayPruneAll<T>(array: T[], value: T) { if (!array || array.length === 0) { - return + return; } - let index + let index; do { - index = array.indexOf(value) + index = array.indexOf(value); if (index !== -1) { - array.splice(index, 1) + array.splice(index, 1); } - } while (index !== -1) + } while (index !== -1); } /** @@ -17,13 +17,13 @@ export function arrayPruneAll<T>(array: T[], value: T) { * @param value The value we don't want to see in our copy array. 
*/ export function arrayPruneAllCopy<T>(original: T[], value: T) { - const array = original.slice() - let index + const array = original.slice(); + let index; do { - index = array.indexOf(value) + index = array.indexOf(value); if (index !== -1) { - array.splice(index, 1) + array.splice(index, 1); } - } while (index !== -1) - return array + } while (index !== -1); + return array; } diff --git a/app/lib/common-libs/buid.ts b/app/lib/common-libs/buid.ts index d5d2be517902f352e4bb6654832bc53f57482431..c9a1fc12e617a9631dedee88c9f4117d1c4c200f 100644 --- a/app/lib/common-libs/buid.ts +++ b/app/lib/common-libs/buid.ts @@ -14,36 +14,34 @@ "use strict"; const BLOCK_UID = /^(0|[1-9]\d{0,18})-[A-F0-9]{64}$/; -const buidFunctions:any = function(number:number, hash:string) { +const buidFunctions: any = function (number: number, hash: string) { if (arguments.length === 2) { - return [number, hash].join('-'); + return [number, hash].join("-"); } if (arguments[0]) { - return [arguments[0].number, arguments[0].hash].join('-'); + return [arguments[0].number, arguments[0].hash].join("-"); } - return '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855'; -} + return "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855"; +}; -buidFunctions.fromTS = (line:string) => { - const match = line.match(/TS:(.*)/) - return (match && match[1]) || "" -} -buidFunctions.fromIdty = (idty:any) => { - return buidFunctions(idty.ts_number, idty.ts_hash) -} +buidFunctions.fromTS = (line: string) => { + const match = line.match(/TS:(.*)/); + return (match && match[1]) || ""; +}; +buidFunctions.fromIdty = (idty: any) => { + return buidFunctions(idty.ts_number, idty.ts_hash); +}; export const Buid = { - format: { - - isBuid: (value:any) => { - return (typeof value === 'string') && value.match(BLOCK_UID) ? true : false; + isBuid: (value: any) => { + return typeof value === "string" && value.match(BLOCK_UID) ? 
true : false; }, - buid: buidFunctions + buid: buidFunctions, }, - getBlockstamp: (block:{ number:number, hash:string }) => { - return [block.number, block.hash].join('-') - } + getBlockstamp: (block: { number: number; hash: string }) => { + return [block.number, block.hash].join("-"); + }, }; diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts index 760d8969cd14914f852aa8e18197cca15e29b916..a0b6151cfe8e3c02064c8f402fb3c2908655aced 100755 --- a/app/lib/common-libs/constants.ts +++ b/app/lib/common-libs/constants.ts @@ -13,53 +13,62 @@ "use strict"; -const G1 = "g1" -const GT = "g1-test" -const CURRENCY = "[a-zA-Z0-9-_ ]{2,50}" -const BASE58 = "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]+" -const PUBKEY = "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}" -const SIGNATURE = "[A-Za-z0-9+\\/=]{87,88}" -const USER_ID = "[A-Za-z0-9_-]{2,100}" -const INTEGER = "(0|[1-9]\\d{0,18})" -const FINGERPRINT = "[A-F0-9]{64}" -const BLOCK_VERSION = "(10|11|12)" -const TX_VERSION = "(10)" -const DIVIDEND = "[1-9][0-9]{0,5}" -const ZERO_OR_POSITIVE_INT = "0|[1-9][0-9]{0,18}" -const BLOCK_UID = "(" + ZERO_OR_POSITIVE_INT + ")-" + FINGERPRINT -const RELATIVE_INTEGER = "(0|-?[1-9]\\d{0,18})" -const FLOAT = "\\d+\.\\d+" -const POSITIVE_INT = "[1-9][0-9]{0,18}" -const TIMESTAMP = "[1-9][0-9]{0,18}" -const BOOLEAN = "[01]" -const WS2PID = "[0-9a-f]{8}" -const SOFTWARE = "[a-z0-9._-]{2,15}" -const SOFT_VERSION = "[0-9a-z._-]{2,15}" -const POW_PREFIX = "([1-9]|[1-9][0-9]|[1-8][0-9][0-9])" // 1-899 -const SPECIAL_BLOCK = '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855' -const META_TS = "META:TS:" + BLOCK_UID -const COMMENT = "[ a-zA-Z0-9-_:/;*\\[\\]()?!^\\+=@&~#{}|\\\\<>%.]{0,255}" +const G1 = "g1"; +const GT = "g1-test"; +const CURRENCY = "[a-zA-Z0-9-_ ]{2,50}"; +const BASE58 = "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]+"; +const PUBKEY = + "[123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz]{43,44}"; +const SIGNATURE = "[A-Za-z0-9+\\/=]{87,88}"; +const USER_ID = "[A-Za-z0-9_-]{2,100}"; +const INTEGER = "(0|[1-9]\\d{0,18})"; +const FINGERPRINT = "[A-F0-9]{64}"; +const BLOCK_VERSION = "(10|11|12)"; +const TX_VERSION = "(10)"; +const DIVIDEND = "[1-9][0-9]{0,5}"; +const ZERO_OR_POSITIVE_INT = "0|[1-9][0-9]{0,18}"; +const BLOCK_UID = "(" + ZERO_OR_POSITIVE_INT + ")-" + FINGERPRINT; +const RELATIVE_INTEGER = "(0|-?[1-9]\\d{0,18})"; +const FLOAT = "\\d+.\\d+"; +const POSITIVE_INT = "[1-9][0-9]{0,18}"; +const TIMESTAMP = "[1-9][0-9]{0,18}"; +const BOOLEAN = "[01]"; +const WS2PID = "[0-9a-f]{8}"; +const SOFTWARE = "[a-z0-9._-]{2,15}"; +const SOFT_VERSION = "[0-9a-z._-]{2,15}"; +const POW_PREFIX = "([1-9]|[1-9][0-9]|[1-8][0-9][0-9])"; // 1-899 +const SPECIAL_BLOCK = + "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855"; +const META_TS = "META:TS:" + BLOCK_UID; +const COMMENT = "[ a-zA-Z0-9-_:/;*\\[\\]()?!^\\+=@&~#{}|\\\\<>%.]{0,255}"; const CLTV_INTEGER = "([0-9]{1,10})"; -const CSV_INTEGER = "([0-9]{1,8})"; -const XUNLOCK = "[a-zA-Z0-9]{1,64}"; -const UNLOCK = "(SIG\\(" + INTEGER + "\\)|XHX\\(" + XUNLOCK + "\\))" -const CONDITIONS = "(&&|\\|\\|| |[()]|(SIG\\(" + PUBKEY + "\\)|(XHX\\([A-F0-9]{64}\\)|CLTV\\(" + CLTV_INTEGER + "\\)|CSV\\(" + CSV_INTEGER + "\\))))*" +const CSV_INTEGER = "([0-9]{1,8})"; +const XUNLOCK = "[a-zA-Z0-9]{1,64}"; +const UNLOCK = "(SIG\\(" + INTEGER + "\\)|XHX\\(" + XUNLOCK + "\\))"; +const CONDITIONS = + "(&&|\\|\\|| |[()]|(SIG\\(" + + PUBKEY + + 
"\\)|(XHX\\([A-F0-9]{64}\\)|CLTV\\(" + + CLTV_INTEGER + + "\\)|CSV\\(" + + CSV_INTEGER + + "\\))))*"; -const BMA_REGEXP = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/ -const BMAS_REGEXP = /^BMAS( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))( (\/.+))?$/ -const BMATOR_REGEXP = /^BMATOR( ([a-z0-9]{16})\.onion)( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/ -const WS2P_REGEXP = /^WS2P (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/ -const WS2P_V2_REGEXP = /^WS2P ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/ -const WS2PTOR_REGEXP = /^WS2PTOR (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/ -const WS2PTOR_V2_REGEXP = /^WS2PTOR ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/ -const WS_FULL_ADDRESS_ONION_REGEX = /^(?:wss?:\/\/)(?:www\.)?([0-9a-z]{16}\.onion)(:[0-9]+)?$/ +const BMA_REGEXP = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/; +const BMAS_REGEXP = /^BMAS( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))( (\/.+))?$/; +const BMATOR_REGEXP = /^BMATOR( ([a-z0-9]{16})\.onion)( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/; +const WS2P_REGEXP = /^WS2P (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/; +const WS2P_V2_REGEXP = /^WS2P ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/; +const WS2PTOR_REGEXP = /^WS2PTOR (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/; +const WS2PTOR_V2_REGEXP = /^WS2PTOR ([1-9][0-9]*) ([a-f0-9]{8}) ([a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+.onion) ([0-9]+)(?: (.+))?$/; +const WS_FULL_ADDRESS_ONION_REGEX = /^(?:wss?:\/\/)(?:www\.)?([0-9a-z]{16}\.onion)(:[0-9]+)?$/; const IPV4_REGEXP = /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/; const IPV6_REGEXP = /^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(([0-9A-Fa-f]{1,4}:){0,5}:((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(::([0-9A-Fa-f]{1,4}:){0,5}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/; -const HOST_ONION_REGEX = /^(?:www\.)?([0-9a-z]{16}\.onion)$/ +const HOST_ONION_REGEX = /^(?:www\.)?([0-9a-z]{16}\.onion)$/; -const MAXIMUM_LEN_OF_COMPACT_TX = 100 -const MAXIMUM_LEN_OF_OUTPUT = 2000 -const MAXIMUM_LEN_OF_UNLOCK = MAXIMUM_LEN_OF_OUTPUT +const MAXIMUM_LEN_OF_COMPACT_TX = 100; +const MAXIMUM_LEN_OF_OUTPUT = 2000; +const MAXIMUM_LEN_OF_UNLOCK = MAXIMUM_LEN_OF_OUTPUT; export enum DuniterDocument { ENTITY_NULL, @@ -69,25 +78,31 @@ export enum DuniterDocument { ENTITY_MEMBERSHIP, ENTITY_REVOCATION, ENTITY_TRANSACTION, - ENTITY_PEER + ENTITY_PEER, } -export const duniterDocument2str = (type:DuniterDocument) => { +export const duniterDocument2str = 
(type: DuniterDocument) => {
   switch (type) {
-    case DuniterDocument.ENTITY_BLOCK: return "block"
-    case DuniterDocument.ENTITY_IDENTITY: return "identity"
-    case DuniterDocument.ENTITY_CERTIFICATION: return "certification"
-    case DuniterDocument.ENTITY_REVOCATION: return "revocation"
-    case DuniterDocument.ENTITY_MEMBERSHIP: return "membership"
-    case DuniterDocument.ENTITY_TRANSACTION: return "transaction"
-    case DuniterDocument.ENTITY_PEER: return "peer"
+    case DuniterDocument.ENTITY_BLOCK:
+      return "block";
+    case DuniterDocument.ENTITY_IDENTITY:
+      return "identity";
+    case DuniterDocument.ENTITY_CERTIFICATION:
+      return "certification";
+    case DuniterDocument.ENTITY_REVOCATION:
+      return "revocation";
+    case DuniterDocument.ENTITY_MEMBERSHIP:
+      return "membership";
+    case DuniterDocument.ENTITY_TRANSACTION:
+      return "transaction";
+    case DuniterDocument.ENTITY_PEER:
+      return "peer";
     default:
-      return ""
+      return "";
   }
-}
+};

 export const CommonConstants = {
-
   G1,
   GT,
@@ -103,7 +118,7 @@ export const CommonConstants = {
     SOFT_VERSION,
     POW_PREFIX,
     ZERO_OR_POSITIVE_INT,
-    SIGNATURE
+    SIGNATURE,
   },

   // Version of genesis block
@@ -141,7 +156,8 @@ export const CommonConstants = {
   DOCUMENTS_BLOCK_VERSION_REGEXP: new RegExp("^" + BLOCK_VERSION + "$"),
   DOCUMENTS_TRANSACTION_VERSION_REGEXP: /^(10)$/,
   SPECIAL_BLOCK,
-  SPECIAL_HASH: 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855',
+  SPECIAL_HASH:
+    "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855",
   MAXIMUM_LEN_OF_COMPACT_TX,
   MAXIMUM_LEN_OF_OUTPUT,
   MAXIMUM_LEN_OF_UNLOCK,
@@ -151,64 +167,171 @@ export const CommonConstants = {
   PROOF_OF_WORK: {
     UPPER_BOUND: [
-      '9A-F',
-      '9A-E',
-      '9A-D',
-      '9A-C',
-      '9A-B',
-      '9A',
-      '9',
-      '8',
-      '7',
-      '6',
-      '5',
-      '4',
-      '3',
-      '2',
-      '1',
-      '1' // In case remainder 15 happens for some reason
-    ]
+      "9A-F",
+      "9A-E",
+      "9A-D",
+      "9A-C",
+      "9A-B",
+      "9A",
+      "9",
+      "8",
+      "7",
+      "6",
+      "5",
+      "4",
+      "3",
+      "2",
+      "1",
+      "1", // In case remainder 15 happens for some reason
+    ],
   },

   DocumentError: "documentError",

   ERRORS: {
     // Technical errors
-    WRONG_DOCUMENT: { httpCode: 400, uerr: { ucode: 1005, message: "Document has unkown fields or wrong line ending format" }},
-    DOCUMENT_BEING_TREATED: { httpCode: 400, uerr: { ucode: 1015, message: "Document already under treatment" }},
+    WRONG_DOCUMENT: {
+      httpCode: 400,
+      uerr: {
+        ucode: 1005,
+        message: "Document has unknown fields or wrong line ending format",
+      },
+    },
+    DOCUMENT_BEING_TREATED: {
+      httpCode: 400,
+      uerr: { ucode: 1015, message: "Document already under treatment" },
+    },

     // Business errors
-    WRONG_UNLOCKER: { httpCode: 400, uerr: { ucode: 2013, message: "Wrong unlocker in transaction" }},
-    LOCKTIME_PREVENT: { httpCode: 400, uerr: { ucode: 2014, message: "Locktime not elapsed yet" }},
-    SOURCE_ALREADY_CONSUMED: { httpCode: 400, uerr: { ucode: 2015, message: "Source already consumed" }},
-    WRONG_AMOUNTS: { httpCode: 400, uerr: { ucode: 2016, message: "Sum of inputs must equal sum of outputs" }},
-    WRONG_OUTPUT_BASE: { httpCode: 400, uerr: { ucode: 2017, message: "Wrong unit base for outputs" }},
-    CANNOT_ROOT_BLOCK_NO_MEMBERS: { httpCode: 400, uerr: { ucode: 2018, message: "Wrong new block: cannot make a root block without members" }},
-    IDENTITY_WRONGLY_SIGNED: { httpCode: 400, uerr: { ucode: 2019, message: "Weird, the signature is wrong and in the database." }},
-    TOO_OLD_IDENTITY: { httpCode: 400, uerr: { ucode: 2020, message: "Identity has expired and cannot be written in the blockchain anymore."
}}, - NEWER_PEER_DOCUMENT_AVAILABLE: { httpCode: 409, uerr: { ucode: 2022, message: "A newer peer document is available" }}, - PEER_DOCUMENT_ALREADY_KNOWN: { httpCode: 400, uerr: { ucode: 2023, message: "Peer document already known" }}, - TX_INPUTS_OUTPUTS_NOT_EQUAL: { httpCode: 400, uerr: { ucode: 2024, message: "Transaction inputs sum must equal outputs sum" }}, - TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS: { httpCode: 400, uerr: { ucode: 2025, message: "Transaction output base amount does not equal previous base deltas" }}, - BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK: { httpCode: 400, uerr: { ucode: 2026, message: "Blockstamp does not match a block" }}, - A_TRANSACTION_HAS_A_MAX_SIZE: { httpCode: 400, uerr: { ucode: 2027, message: 'A transaction has a maximum size of ' + MAXIMUM_LEN_OF_COMPACT_TX + ' lines' }}, - TOO_OLD_MEMBERSHIP: { httpCode: 400, uerr: { ucode: 2029, message: "Too old membership." }}, - MAXIMUM_LEN_OF_OUTPUT: { httpCode: 400, uerr: { ucode: 2032, message: 'A transaction output has a maximum size of ' + MAXIMUM_LEN_OF_OUTPUT + ' characters' }}, - MAXIMUM_LEN_OF_UNLOCK: { httpCode: 400, uerr: { ucode: 2033, message: 'A transaction unlock has a maximum size of ' + MAXIMUM_LEN_OF_UNLOCK + ' characters' }}, - WRONG_CURRENCY: { httpCode: 400, uerr: { ucode: 2500, message: 'Wrong currency' }}, - WRONG_POW: { httpCode: 400, uerr: { ucode: 2501, message: 'Wrong proof-of-work' }}, - OUT_OF_FORK_WINDOW: { httpCode: 400, uerr: { ucode: 2501, message: 'Out of fork window' }}, + WRONG_UNLOCKER: { + httpCode: 400, + uerr: { ucode: 2013, message: "Wrong unlocker in transaction" }, + }, + LOCKTIME_PREVENT: { + httpCode: 400, + uerr: { ucode: 2014, message: "Locktime not elapsed yet" }, + }, + SOURCE_ALREADY_CONSUMED: { + httpCode: 400, + uerr: { ucode: 2015, message: "Source already consumed" }, + }, + WRONG_AMOUNTS: { + httpCode: 400, + uerr: { ucode: 2016, message: "Sum of inputs must equal sum of outputs" }, + }, + WRONG_OUTPUT_BASE: { + httpCode: 400, + uerr: { ucode: 2017, message: "Wrong unit base for outputs" }, + }, + CANNOT_ROOT_BLOCK_NO_MEMBERS: { + httpCode: 400, + uerr: { + ucode: 2018, + message: "Wrong new block: cannot make a root block without members", + }, + }, + IDENTITY_WRONGLY_SIGNED: { + httpCode: 400, + uerr: { + ucode: 2019, + message: "Weird, the signature is wrong and in the database.", + }, + }, + TOO_OLD_IDENTITY: { + httpCode: 400, + uerr: { + ucode: 2020, + message: + "Identity has expired and cannot be written in the blockchain anymore.", + }, + }, + NEWER_PEER_DOCUMENT_AVAILABLE: { + httpCode: 409, + uerr: { ucode: 2022, message: "A newer peer document is available" }, + }, + PEER_DOCUMENT_ALREADY_KNOWN: { + httpCode: 400, + uerr: { ucode: 2023, message: "Peer document already known" }, + }, + TX_INPUTS_OUTPUTS_NOT_EQUAL: { + httpCode: 400, + uerr: { + ucode: 2024, + message: "Transaction inputs sum must equal outputs sum", + }, + }, + TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS: { + httpCode: 400, + uerr: { + ucode: 2025, + message: + "Transaction output base amount does not equal previous base deltas", + }, + }, + BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK: { + httpCode: 400, + uerr: { ucode: 2026, message: "Blockstamp does not match a block" }, + }, + A_TRANSACTION_HAS_A_MAX_SIZE: { + httpCode: 400, + uerr: { + ucode: 2027, + message: + "A transaction has a maximum size of " + + MAXIMUM_LEN_OF_COMPACT_TX + + " lines", + }, + }, + TOO_OLD_MEMBERSHIP: { + httpCode: 400, + uerr: { ucode: 2029, message: "Too old membership." 
}, + }, + MAXIMUM_LEN_OF_OUTPUT: { + httpCode: 400, + uerr: { + ucode: 2032, + message: + "A transaction output has a maximum size of " + + MAXIMUM_LEN_OF_OUTPUT + + " characters", + }, + }, + MAXIMUM_LEN_OF_UNLOCK: { + httpCode: 400, + uerr: { + ucode: 2033, + message: + "A transaction unlock has a maximum size of " + + MAXIMUM_LEN_OF_UNLOCK + + " characters", + }, + }, + WRONG_CURRENCY: { + httpCode: 400, + uerr: { ucode: 2500, message: "Wrong currency" }, + }, + WRONG_POW: { + httpCode: 400, + uerr: { ucode: 2501, message: "Wrong proof-of-work" }, + }, + OUT_OF_FORK_WINDOW: { + httpCode: 400, + uerr: { ucode: 2501, message: "Out of fork window" }, + }, - WRONG_SIGNATURE_FOR_CERT: { httpCode: 400, uerr: { ucode: 3000, message: 'Wrong signature for certification' }}, + WRONG_SIGNATURE_FOR_CERT: { + httpCode: 400, + uerr: { ucode: 3000, message: "Wrong signature for certification" }, + }, }, // INDEXES - M_INDEX: 'MINDEX', - I_INDEX: 'IINDEX', - S_INDEX: 'SINDEX', - C_INDEX: 'CINDEX', - IDX_CREATE: 'CREATE', - IDX_UPDATE: 'UPDATE', + M_INDEX: "MINDEX", + I_INDEX: "IINDEX", + S_INDEX: "SINDEX", + C_INDEX: "CINDEX", + IDX_CREATE: "CREATE", + IDX_UPDATE: "UPDATE", // Protocol fixed values NB_DIGITS_UD: 4, @@ -217,96 +340,203 @@ export const CommonConstants = { POW_DIFFICULTY_RANGE_RATIO: 1.189, // deduced from Hexadecimal relation between 2 chars ~= 16^(1/16) ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT: 100, - DOCUMENTS: { - DOC_VERSION: find('Version: (10)'), - DOC_CURRENCY: find('Currency: (' + CURRENCY + ')'), - DOC_ISSUER: find('Issuer: (' + PUBKEY + ')'), - TIMESTAMP: find('Timestamp: (' + BLOCK_UID + ')') + DOC_VERSION: find("Version: (10)"), + DOC_CURRENCY: find("Currency: (" + CURRENCY + ")"), + DOC_ISSUER: find("Issuer: (" + PUBKEY + ")"), + TIMESTAMP: find("Timestamp: (" + BLOCK_UID + ")"), }, IDENTITY: { INLINE: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + USER_ID), - IDTY_TYPE: find('Type: (Identity)'), - IDTY_UID: find('UniqueID: (' + USER_ID + ')') + IDTY_TYPE: find("Type: (Identity)"), + IDTY_UID: find("UniqueID: (" + USER_ID + ")"), }, BLOCK: { - NONCE: find("Nonce: (" + ZERO_OR_POSITIVE_INT + ")"), - VERSION: find("Version: " + BLOCK_VERSION), - TYPE: find("Type: (Block)"), - CURRENCY: find("Currency: (" + CURRENCY + ")"), - BNUMBER: find("Number: (" + ZERO_OR_POSITIVE_INT + ")"), - POWMIN: find("PoWMin: (" + ZERO_OR_POSITIVE_INT + ")"), - TIME: find("Time: (" + TIMESTAMP + ")"), + NONCE: find("Nonce: (" + ZERO_OR_POSITIVE_INT + ")"), + VERSION: find("Version: " + BLOCK_VERSION), + TYPE: find("Type: (Block)"), + CURRENCY: find("Currency: (" + CURRENCY + ")"), + BNUMBER: find("Number: (" + ZERO_OR_POSITIVE_INT + ")"), + POWMIN: find("PoWMin: (" + ZERO_OR_POSITIVE_INT + ")"), + TIME: find("Time: (" + TIMESTAMP + ")"), MEDIAN_TIME: find("MedianTime: (" + TIMESTAMP + ")"), - UD: find("UniversalDividend: (" + DIVIDEND + ")"), - UNIT_BASE: find("UnitBase: (" + INTEGER + ")"), - PREV_HASH: find("PreviousHash: (" + FINGERPRINT + ")"), + UD: find("UniversalDividend: (" + DIVIDEND + ")"), + UNIT_BASE: find("UnitBase: (" + INTEGER + ")"), + PREV_HASH: find("PreviousHash: (" + FINGERPRINT + ")"), PREV_ISSUER: find("PreviousIssuer: (" + PUBKEY + ")"), - MEMBERS_COUNT:find("MembersCount: (" + ZERO_OR_POSITIVE_INT + ")"), - BLOCK_ISSUER:find('Issuer: (' + PUBKEY + ')'), - BLOCK_ISSUERS_FRAME:find('IssuersFrame: (' + INTEGER + ')'), - BLOCK_ISSUERS_FRAME_VAR:find('IssuersFrameVar: (' + RELATIVE_INTEGER + ')'), - DIFFERENT_ISSUERS_COUNT:find('DifferentIssuersCount: (' + INTEGER + ')'), - 
PARAMETERS: find("Parameters: (" + FLOAT + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + FLOAT + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + FLOAT + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ")"), - JOINER: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + BLOCK_UID + ":" + USER_ID), - ACTIVE: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + BLOCK_UID + ":" + USER_ID), - LEAVER: exact(PUBKEY + ":" + SIGNATURE + ":" + BLOCK_UID + ":" + BLOCK_UID + ":" + USER_ID), + MEMBERS_COUNT: find("MembersCount: (" + ZERO_OR_POSITIVE_INT + ")"), + BLOCK_ISSUER: find("Issuer: (" + PUBKEY + ")"), + BLOCK_ISSUERS_FRAME: find("IssuersFrame: (" + INTEGER + ")"), + BLOCK_ISSUERS_FRAME_VAR: find( + "IssuersFrameVar: (" + RELATIVE_INTEGER + ")" + ), + DIFFERENT_ISSUERS_COUNT: find("DifferentIssuersCount: (" + INTEGER + ")"), + PARAMETERS: find( + "Parameters: (" + + FLOAT + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + FLOAT + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + FLOAT + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ")" + ), + JOINER: exact( + PUBKEY + + ":" + + SIGNATURE + + ":" + + BLOCK_UID + + ":" + + BLOCK_UID + + ":" + + USER_ID + ), + ACTIVE: exact( + PUBKEY + + ":" + + SIGNATURE + + ":" + + BLOCK_UID + + ":" + + BLOCK_UID + + ":" + + USER_ID + ), + LEAVER: exact( + PUBKEY + + ":" + + SIGNATURE + + ":" + + BLOCK_UID + + ":" + + BLOCK_UID + + ":" + + USER_ID + ), REVOCATION: exact(PUBKEY + ":" + SIGNATURE), EXCLUDED: exact(PUBKEY), INNER_HASH: find("InnerHash: (" + FINGERPRINT + ")"), - SPECIAL_HASH: 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855', - SPECIAL_BLOCK + SPECIAL_HASH: + "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855", + SPECIAL_BLOCK, }, CERT: { SELF: { UID: exact("UID:" + USER_ID), - META: exact(META_TS) + META: exact(META_TS), }, REVOKE: exact("UID:REVOKE"), OTHER: { META: exact(META_TS), - INLINE: exact(PUBKEY + ":" + PUBKEY + ":" + INTEGER + ":" + SIGNATURE) - } + INLINE: exact(PUBKEY + ":" + PUBKEY + ":" + INTEGER + ":" + SIGNATURE), + }, }, CERTIFICATION: { - CERT_TYPE: find('Type: (Certification)'), - IDTY_ISSUER: find('IdtyIssuer: (' + PUBKEY + ')'), - IDTY_UID: find('IdtyUniqueID: (' + USER_ID + ')'), - IDTY_TIMESTAMP: find('IdtyTimestamp: (' + BLOCK_UID + ')'), - IDTY_SIG: find('IdtySignature: (' + SIGNATURE + ')'), - CERT_TIMESTAMP: find('CertTimestamp: (' + BLOCK_UID + ')') + CERT_TYPE: find("Type: (Certification)"), + IDTY_ISSUER: find("IdtyIssuer: (" + PUBKEY + ")"), + IDTY_UID: find("IdtyUniqueID: (" + USER_ID + ")"), + IDTY_TIMESTAMP: find("IdtyTimestamp: (" + BLOCK_UID + ")"), + IDTY_SIG: find("IdtySignature: (" + SIGNATURE + ")"), + CERT_TIMESTAMP: find("CertTimestamp: (" + BLOCK_UID + ")"), }, REVOCATION: { - REVOC_TYPE: find('Type: (Certification)'), - IDTY_ISSUER: find('IdtyIssuer: (' + PUBKEY + ')'), - IDTY_UID: find('IdtyUniqueID: (' + USER_ID + ')'), - IDTY_TIMESTAMP: find('IdtyTimestamp: (' + BLOCK_UID + ')'), - IDTY_SIG: find('IdtySignature: (' + SIGNATURE + ')') + REVOC_TYPE: find("Type: (Certification)"), + IDTY_ISSUER: find("IdtyIssuer: (" + PUBKEY + ")"), + IDTY_UID: find("IdtyUniqueID: (" + USER_ID + ")"), + IDTY_TIMESTAMP: 
find("IdtyTimestamp: (" + BLOCK_UID + ")"), + IDTY_SIG: find("IdtySignature: (" + SIGNATURE + ")"), }, MEMBERSHIP: { - BLOCK: find('Block: (' + BLOCK_UID + ')'), - VERSION: find('Version: (10)'), - CURRENCY: find('Currency: (' + CURRENCY + ')'), - ISSUER: find('Issuer: (' + PUBKEY + ')'), - MEMBERSHIP: find('Membership: (IN|OUT)'), - USERID: find('UserID: (' + USER_ID + ')'), - CERTTS: find('CertTS: (' + BLOCK_UID + ')') + BLOCK: find("Block: (" + BLOCK_UID + ")"), + VERSION: find("Version: (10)"), + CURRENCY: find("Currency: (" + CURRENCY + ")"), + ISSUER: find("Issuer: (" + PUBKEY + ")"), + MEMBERSHIP: find("Membership: (IN|OUT)"), + USERID: find("UserID: (" + USER_ID + ")"), + CERTTS: find("CertTS: (" + BLOCK_UID + ")"), }, TRANSACTION: { - HEADER: exact("TX:" + TX_VERSION + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + INTEGER + ":" + BOOLEAN + ":" + INTEGER), - SENDER: exact(PUBKEY), - SOURCE_V3: exact("(" + POSITIVE_INT + ":" + INTEGER + ":T:" + FINGERPRINT + ":" + INTEGER + "|" + POSITIVE_INT + ":" + INTEGER + ":D:" + PUBKEY + ":" + POSITIVE_INT + ")"), - UNLOCK: exact(INTEGER + ":" + UNLOCK + "( (" + UNLOCK + "))*"), - TARGET: exact(POSITIVE_INT + ":" + INTEGER + ":" + CONDITIONS), - BLOCKSTAMP:find('Blockstamp: (' + BLOCK_UID + ')'), + HEADER: exact( + "TX:" + + TX_VERSION + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + INTEGER + + ":" + + BOOLEAN + + ":" + + INTEGER + ), + SENDER: exact(PUBKEY), + SOURCE_V3: exact( + "(" + + POSITIVE_INT + + ":" + + INTEGER + + ":T:" + + FINGERPRINT + + ":" + + INTEGER + + "|" + + POSITIVE_INT + + ":" + + INTEGER + + ":D:" + + PUBKEY + + ":" + + POSITIVE_INT + + ")" + ), + UNLOCK: exact(INTEGER + ":" + UNLOCK + "( (" + UNLOCK + "))*"), + TARGET: exact(POSITIVE_INT + ":" + INTEGER + ":" + CONDITIONS), + BLOCKSTAMP: find("Blockstamp: (" + BLOCK_UID + ")"), COMMENT: find("Comment: (" + COMMENT + ")"), - LOCKTIME:find("Locktime: (" + INTEGER + ")"), + LOCKTIME: find("Locktime: (" + INTEGER + ")"), INLINE_COMMENT: exact(COMMENT), - OUTPUT_CONDITION: exact(CONDITIONS) + OUTPUT_CONDITION: exact(CONDITIONS), }, PEER: { BLOCK: find("Block: (" + INTEGER + "-" + FINGERPRINT + ")"), - SPECIAL_BLOCK + SPECIAL_BLOCK, }, BLOCK_MAX_TX_CHAINING_DEPTH: 5, @@ -314,7 +544,7 @@ export const CommonConstants = { SYNC_BLOCKS_CHUNK: 250, MILESTONES_PER_PAGE: 50, - CHUNK_PREFIX: 'chunk_', + CHUNK_PREFIX: "chunk_", BLOCKS_IN_MEMORY_MAX: 288 * 60, // 288 = 1 day MAX_AGE_OF_PEER_IN_BLOCKS: 200, // blocks @@ -325,12 +555,12 @@ export const CommonConstants = { WAIT_P2P_CANDIDATE_HEARTBEAT: 30 * 1000, // Wait X seconds for a node to answer about its state MAX_READING_SLOTS_FOR_FILE_SYNC: 20, // Number of file reading in parallel -} +}; -function exact (regexpContent:string) { +function exact(regexpContent: string) { return new RegExp("^" + regexpContent + "$"); } -function find (regexpContent:string) { +function find(regexpContent: string) { return new RegExp(regexpContent); } diff --git a/app/lib/common-libs/crypto/keyring.ts b/app/lib/common-libs/crypto/keyring.ts index a6b045d49e3b6b8b05d2e094d1d80db49e76b2d1..200e32b2822fca6e48ec02cbde66c4b0af245391 100644 --- a/app/lib/common-libs/crypto/keyring.ts +++ b/app/lib/common-libs/crypto/keyring.ts @@ -11,50 +11,49 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {KeyPairBuilder, generateRandomSeed, seedToSecretKey} from "duniteroxyde" +import { + KeyPairBuilder, + generateRandomSeed, + seedToSecretKey, +} from "duniteroxyde"; export class Key { - - constructor(readonly pub:string, readonly sec:string) { - } + constructor(readonly pub: string, readonly sec: string) {} /***************************** - * - * GENERAL CRYPTO - * - *****************************/ + * + * GENERAL CRYPTO + * + *****************************/ get publicKey() { - return this.pub + return this.pub; } get secretKey() { - return this.sec + return this.sec; } json() { return { pub: this.publicKey, - sec: this.secretKey - } + sec: this.secretKey, + }; } - sign(msg:string) { - return Promise.resolve(this.signSync(msg)) + sign(msg: string) { + return Promise.resolve(this.signSync(msg)); } - signSync(msg:string) { + signSync(msg: string) { const signator = KeyPairBuilder.fromSecretKey(this.secretKey); return signator.sign(msg); - }; + } } export function randomKey() { const seed = generateRandomSeed(); const secretKey = seedToSecretKey(seed); const keypair = KeyPairBuilder.fromSecretKey(secretKey); - return new Key( - keypair.getPublicKey(), - secretKey, - ) + return new Key(keypair.getPublicKey(), secretKey); } diff --git a/app/lib/common-libs/crypto/map.ts b/app/lib/common-libs/crypto/map.ts index a178fde884f70191fa00e56a5e4d2e91ba2f9eaa..ff3754fca058c225030b700ff5240c1bd10bf471 100644 --- a/app/lib/common-libs/crypto/map.ts +++ b/app/lib/common-libs/crypto/map.ts @@ -1,4 +1,3 @@ - export interface Map<T> { - [k:string]: T + [k: string]: T; } diff --git a/app/lib/common-libs/dos2unix.ts b/app/lib/common-libs/dos2unix.ts index e319feace52dead71eec20c430040a523a3e8d39..150a2a75fa800e59a855b105559f603eb3ba71d5 100644 --- a/app/lib/common-libs/dos2unix.ts +++ b/app/lib/common-libs/dos2unix.ts @@ -11,6 +11,6 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-export function dos2unix(str:string) { - return str.replace(/\r\n/g, '\n') +export function dos2unix(str: string) { + return str.replace(/\r\n/g, "\n"); } diff --git a/app/lib/common-libs/errors.ts b/app/lib/common-libs/errors.ts index e905ac699c947e369d8ae46911e241d8dcf7a35e..0deb3e7047d0cd3b4a257f612f88827d9d552285 100755 --- a/app/lib/common-libs/errors.ts +++ b/app/lib/common-libs/errors.ts @@ -31,5 +31,5 @@ export enum DataErrors { CANNOT_REVERT_NO_CURRENT_BLOCK, BLOCK_TO_REVERT_NOT_FOUND, MEMBER_NOT_FOUND, - MILESTONE_BLOCK_NOT_FOUND + MILESTONE_BLOCK_NOT_FOUND, } diff --git a/app/lib/common-libs/filter-async.ts b/app/lib/common-libs/filter-async.ts index d495d54012c0b75d8ba6394e42bcb0cb566113c9..182d78a4b727e7482eb355ab2c0b5f278e188e5c 100644 --- a/app/lib/common-libs/filter-async.ts +++ b/app/lib/common-libs/filter-async.ts @@ -1,9 +1,14 @@ -export async function filterAsync<T>(arr: T[], filter: (t: T) => Promise<boolean>) { - const filtered: T[] = [] - await Promise.all(arr.map(async t => { - if (await filter(t)) { - filtered.push(t) - } - })) - return filtered -} \ No newline at end of file +export async function filterAsync<T>( + arr: T[], + filter: (t: T) => Promise<boolean> +) { + const filtered: T[] = []; + await Promise.all( + arr.map(async (t) => { + if (await filter(t)) { + filtered.push(t); + } + }) + ); + return filtered; +} diff --git a/app/lib/common-libs/index.ts b/app/lib/common-libs/index.ts index ea6df33cb39e30b0d8206d7b242e4661172131fa..3f54df3f900b819153f0e331fd63d8719bd7bd51 100644 --- a/app/lib/common-libs/index.ts +++ b/app/lib/common-libs/index.ts @@ -11,12 +11,8 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as rawer from './rawer' -import {unlock as txunlock} from "./txunlock" -import {hashf} from "../common"; +import * as rawer from "./rawer"; +import { unlock as txunlock } from "./txunlock"; +import { hashf } from "../common"; -export { - rawer, - txunlock, - hashf -} +export { rawer, txunlock, hashf }; diff --git a/app/lib/common-libs/manual-promise.ts b/app/lib/common-libs/manual-promise.ts index ea7f4c436cd80bdc43c48da2aa3ec16f451f572e..24d6382aa5f259c7d778a1f7d94f3a5312209ef8 100644 --- a/app/lib/common-libs/manual-promise.ts +++ b/app/lib/common-libs/manual-promise.ts @@ -1,10 +1,10 @@ -import {Querable} from "./querable" +import { Querable } from "./querable"; -const querablePromise = require('querablep'); +const querablePromise = require("querablep"); export interface ManualPromise<T> extends Querable<T> { - resolve: (data: T) => void - reject: (error: Error) => void + resolve: (data: T) => void; + reject: (error: Error) => void; } /** @@ -12,14 +12,14 @@ export interface ManualPromise<T> extends Querable<T> { * @returns {ManualPromise<T>} */ export function newManualPromise<T>() { - let resolveCb: (data: T) => void = () => {} - let rejectCb: (error: Error) => void = () => {} + let resolveCb: (data: T) => void = () => {}; + let rejectCb: (error: Error) => void = () => {}; const p = new Promise((res, rej) => { - resolveCb = res - rejectCb = rej - }) - const q: ManualPromise<T> = querablePromise(p) - q.resolve = resolveCb - q.reject = rejectCb - return q + resolveCb = res; + rejectCb = rej; + }); + const q: ManualPromise<T> = querablePromise(p); + q.resolve = resolveCb; + q.reject = rejectCb; + return q; } diff --git a/app/lib/common-libs/moment.ts b/app/lib/common-libs/moment.ts index 
ca05144233a3364fc44985f8cee8947870af7521..6f0494a176aeb7cdcc47712c35ad266d0fcb8808 100644 --- a/app/lib/common-libs/moment.ts +++ b/app/lib/common-libs/moment.ts @@ -11,6 +11,6 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -const _moment_ = require("moment") +const _moment_ = require("moment"); -export const moment = _moment_ \ No newline at end of file +export const moment = _moment_; diff --git a/app/lib/common-libs/parsers/GenericParser.ts b/app/lib/common-libs/parsers/GenericParser.ts index a030846f5ad6717e19caf7a9029a04d93f7c1578..621308d46d4b36982d850f8f16f2dd30ca22d9b6 100644 --- a/app/lib/common-libs/parsers/GenericParser.ts +++ b/app/lib/common-libs/parsers/GenericParser.ts @@ -11,34 +11,31 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../../../lib/common-libs/constants" -import * as stream from "stream" -import {hashf} from "../../../lib/common" -import {NewLogger} from "../../logger" +import { CommonConstants } from "../../../lib/common-libs/constants"; +import * as stream from "stream"; +import { hashf } from "../../../lib/common"; +import { NewLogger } from "../../logger"; -const logger = NewLogger() +const logger = NewLogger(); export abstract class GenericParser extends stream.Transform { - - constructor( - private captures:any, - private rawerFunc:any) { - super({ decodeStrings: false, objectMode: true }) + constructor(private captures: any, private rawerFunc: any) { + super({ decodeStrings: false, objectMode: true }); } - abstract _clean(obj:any): void - abstract _verify(obj:any): any + abstract _clean(obj: any): void; + abstract _verify(obj: any): any; - static _simpleLineExtraction(pr:any, rawEntry:string, cap:any) { + static _simpleLineExtraction(pr: any, rawEntry: string, cap: any) { const fieldValue = rawEntry.match(cap.regexp); - if(fieldValue && fieldValue.length >= 2){ + if (fieldValue && fieldValue.length >= 2) { pr[cap.prop] = cap.parser ? cap.parser(fieldValue[1], pr) : fieldValue[1]; } return; } - syncWrite(str:string): any { - let error = "" + syncWrite(str: string): any { + let error = ""; const obj = {}; this._parse(str, obj); this._clean(obj); @@ -51,44 +48,43 @@ export abstract class GenericParser extends stream.Transform { error = CommonConstants.ERRORS.WRONG_DOCUMENT.uerr.message; if (error) { logger && logger.trace(error); - logger && logger.trace('-----------------'); - logger && logger.trace('Written: %s', JSON.stringify({ str: str })); - logger && logger.trace('Extract: %s', JSON.stringify({ raw: raw })); - logger && logger.trace('-----------------'); + logger && logger.trace("-----------------"); + logger && logger.trace("Written: %s", JSON.stringify({ str: str })); + logger && logger.trace("Extract: %s", JSON.stringify({ raw: raw })); + logger && logger.trace("-----------------"); } } - if (error){ + if (error) { logger && logger.trace(error); throw CommonConstants.ERRORS.WRONG_DOCUMENT; } return obj; - }; + } - _parse(str:string, obj:any) { + _parse(str: string, obj: any) { let error; - if(!str){ + if (!str) { error = "No document given"; } else { error = ""; obj.hash = hashf(str).toUpperCase(); // Divide in 2 parts: document & signature - const sp = str.split('\n'); + const sp = str.split("\n"); if (sp.length < 3) { error = "Wrong document: must have at least 2 lines"; - } - else { + } else { const endOffset = str.match(/\n$/) ? 
2 : 1; obj.signature = sp[sp.length - endOffset]; obj.hash = hashf(str).toUpperCase(); - obj.raw = sp.slice(0, sp.length - endOffset).join('\n') + '\n'; + obj.raw = sp.slice(0, sp.length - endOffset).join("\n") + "\n"; const docLF = obj.raw.replace(/\r\n/g, "\n"); - if(docLF.match(/\n$/)){ - this.captures.forEach((cap:any) => { + if (docLF.match(/\n$/)) { + this.captures.forEach((cap: any) => { GenericParser._simpleLineExtraction(obj, docLF, cap); }); - } - else{ - error = "Bad document structure: no new line character at the end of the document."; + } else { + error = + "Bad document structure: no new line character at the end of the document."; } } } diff --git a/app/lib/common-libs/parsers/block.ts b/app/lib/common-libs/parsers/block.ts index 343095f485c87a77a7f6a79c6e52ecf5f9901ed1..438111db3beb1dc85618fb2c365a595a5d436a4e 100644 --- a/app/lib/common-libs/parsers/block.ts +++ b/app/lib/common-libs/parsers/block.ts @@ -11,47 +11,90 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../../../lib/common-libs/constants" -import {GenericParser} from "./GenericParser" -import {hashf} from "../../../lib/common" -import {rawer} from "../../../lib/common-libs/index" -import {BlockDTO} from "../../dto/BlockDTO" +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { GenericParser } from "./GenericParser"; +import { hashf } from "../../../lib/common"; +import { rawer } from "../../../lib/common-libs/index"; +import { BlockDTO } from "../../dto/BlockDTO"; export class BlockParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: CommonConstants.BLOCK.VERSION}, - {prop: "type", regexp: CommonConstants.BLOCK.TYPE}, - {prop: "currency", regexp: CommonConstants.BLOCK.CURRENCY}, - {prop: "number", regexp: CommonConstants.BLOCK.BNUMBER}, - {prop: "powMin", regexp: CommonConstants.BLOCK.POWMIN}, - {prop: "time", regexp: CommonConstants.BLOCK.TIME}, - {prop: "medianTime", regexp: CommonConstants.BLOCK.MEDIAN_TIME}, - {prop: "dividend", regexp: CommonConstants.BLOCK.UD}, - {prop: "unitbase", regexp: CommonConstants.BLOCK.UNIT_BASE}, - {prop: "issuer", regexp: CommonConstants.BLOCK.BLOCK_ISSUER}, - {prop: "issuersFrame", regexp: CommonConstants.BLOCK.BLOCK_ISSUERS_FRAME}, - {prop: "issuersFrameVar", regexp: CommonConstants.BLOCK.BLOCK_ISSUERS_FRAME_VAR}, - {prop: "issuersCount", regexp: CommonConstants.BLOCK.DIFFERENT_ISSUERS_COUNT}, - {prop: "parameters", regexp: CommonConstants.BLOCK.PARAMETERS}, - {prop: "previousHash", regexp: CommonConstants.BLOCK.PREV_HASH}, - {prop: "previousIssuer", regexp: CommonConstants.BLOCK.PREV_ISSUER}, - {prop: "membersCount", regexp: CommonConstants.BLOCK.MEMBERS_COUNT}, - {prop: "identities", regexp: /Identities:\n([\s\S]*)Joiners/, parser: splitAndMatch('\n', CommonConstants.IDENTITY.INLINE)}, - {prop: "joiners", regexp: /Joiners:\n([\s\S]*)Actives/, parser: splitAndMatch('\n', CommonConstants.BLOCK.JOINER)}, - {prop: "actives", regexp: /Actives:\n([\s\S]*)Leavers/, parser: splitAndMatch('\n', CommonConstants.BLOCK.ACTIVE)}, - {prop: "leavers", regexp: /Leavers:\n([\s\S]*)Excluded/, parser: splitAndMatch('\n', CommonConstants.BLOCK.LEAVER)}, - {prop: "revoked", regexp: /Revoked:\n([\s\S]*)Excluded/, parser: splitAndMatch('\n', CommonConstants.BLOCK.REVOCATION)}, - {prop: "excluded", regexp: /Excluded:\n([\s\S]*)Certifications/, parser: splitAndMatch('\n', CommonConstants.PUBLIC_KEY)}, - {prop: "certifications", 
regexp: /Certifications:\n([\s\S]*)Transactions/, parser: splitAndMatch('\n', CommonConstants.CERT.OTHER.INLINE)}, - {prop: "transactions", regexp: /Transactions:\n([\s\S]*)/, parser: extractTransactions}, - {prop: "inner_hash", regexp: CommonConstants.BLOCK.INNER_HASH}, - {prop: "nonce", regexp: CommonConstants.BLOCK.NONCE} - ], rawer.getBlock) + super( + [ + { prop: "version", regexp: CommonConstants.BLOCK.VERSION }, + { prop: "type", regexp: CommonConstants.BLOCK.TYPE }, + { prop: "currency", regexp: CommonConstants.BLOCK.CURRENCY }, + { prop: "number", regexp: CommonConstants.BLOCK.BNUMBER }, + { prop: "powMin", regexp: CommonConstants.BLOCK.POWMIN }, + { prop: "time", regexp: CommonConstants.BLOCK.TIME }, + { prop: "medianTime", regexp: CommonConstants.BLOCK.MEDIAN_TIME }, + { prop: "dividend", regexp: CommonConstants.BLOCK.UD }, + { prop: "unitbase", regexp: CommonConstants.BLOCK.UNIT_BASE }, + { prop: "issuer", regexp: CommonConstants.BLOCK.BLOCK_ISSUER }, + { + prop: "issuersFrame", + regexp: CommonConstants.BLOCK.BLOCK_ISSUERS_FRAME, + }, + { + prop: "issuersFrameVar", + regexp: CommonConstants.BLOCK.BLOCK_ISSUERS_FRAME_VAR, + }, + { + prop: "issuersCount", + regexp: CommonConstants.BLOCK.DIFFERENT_ISSUERS_COUNT, + }, + { prop: "parameters", regexp: CommonConstants.BLOCK.PARAMETERS }, + { prop: "previousHash", regexp: CommonConstants.BLOCK.PREV_HASH }, + { prop: "previousIssuer", regexp: CommonConstants.BLOCK.PREV_ISSUER }, + { prop: "membersCount", regexp: CommonConstants.BLOCK.MEMBERS_COUNT }, + { + prop: "identities", + regexp: /Identities:\n([\s\S]*)Joiners/, + parser: splitAndMatch("\n", CommonConstants.IDENTITY.INLINE), + }, + { + prop: "joiners", + regexp: /Joiners:\n([\s\S]*)Actives/, + parser: splitAndMatch("\n", CommonConstants.BLOCK.JOINER), + }, + { + prop: "actives", + regexp: /Actives:\n([\s\S]*)Leavers/, + parser: splitAndMatch("\n", CommonConstants.BLOCK.ACTIVE), + }, + { + prop: "leavers", + regexp: /Leavers:\n([\s\S]*)Excluded/, + parser: splitAndMatch("\n", CommonConstants.BLOCK.LEAVER), + }, + { + prop: "revoked", + regexp: /Revoked:\n([\s\S]*)Excluded/, + parser: splitAndMatch("\n", CommonConstants.BLOCK.REVOCATION), + }, + { + prop: "excluded", + regexp: /Excluded:\n([\s\S]*)Certifications/, + parser: splitAndMatch("\n", CommonConstants.PUBLIC_KEY), + }, + { + prop: "certifications", + regexp: /Certifications:\n([\s\S]*)Transactions/, + parser: splitAndMatch("\n", CommonConstants.CERT.OTHER.INLINE), + }, + { + prop: "transactions", + regexp: /Transactions:\n([\s\S]*)/, + parser: extractTransactions, + }, + { prop: "inner_hash", regexp: CommonConstants.BLOCK.INNER_HASH }, + { prop: "nonce", regexp: CommonConstants.BLOCK.NONCE }, + ], + rawer.getBlock + ); } - _clean(obj:any) { + _clean(obj: any) { obj.identities = obj.identities || []; obj.joiners = obj.joiners || []; obj.actives = obj.actives || []; @@ -60,126 +103,145 @@ export class BlockParser extends GenericParser { obj.excluded = obj.excluded || []; obj.certifications = obj.certifications || []; obj.transactions = obj.transactions || []; - obj.version = obj.version || ''; - obj.type = obj.type || ''; - obj.hash = hashf(rawer.getBlockInnerHashAndNonceWithSignature(obj)).toUpperCase(); - obj.inner_hash = obj.inner_hash || ''; - obj.currency = obj.currency || ''; - obj.nonce = obj.nonce || ''; - obj.number = obj.number || ''; - obj.time = obj.time || ''; - obj.medianTime = obj.medianTime || ''; + obj.version = obj.version || ""; + obj.type = obj.type || ""; + obj.hash = hashf( + 
rawer.getBlockInnerHashAndNonceWithSignature(obj) + ).toUpperCase(); + obj.inner_hash = obj.inner_hash || ""; + obj.currency = obj.currency || ""; + obj.nonce = obj.nonce || ""; + obj.number = obj.number || ""; + obj.time = obj.time || ""; + obj.medianTime = obj.medianTime || ""; obj.dividend = obj.dividend || null; - obj.unitbase = obj.unitbase || ''; - obj.issuer = obj.issuer || ''; - obj.parameters = obj.parameters || ''; - obj.previousHash = obj.previousHash || ''; - obj.previousIssuer = obj.previousIssuer || ''; - obj.membersCount = obj.membersCount || ''; - obj.transactions.map((tx:any) => { + obj.unitbase = obj.unitbase || ""; + obj.issuer = obj.issuer || ""; + obj.parameters = obj.parameters || ""; + obj.previousHash = obj.previousHash || ""; + obj.previousIssuer = obj.previousIssuer || ""; + obj.membersCount = obj.membersCount || ""; + obj.transactions.map((tx: any) => { tx.currency = obj.currency; tx.hash = hashf(rawer.getTransaction(tx)).toUpperCase(); }); obj.len = BlockDTO.getLen(obj); - }; + } - _verify(obj:any) { + _verify(obj: any) { let err = null; const codes = { - 'BAD_VERSION': 150, - 'BAD_CURRENCY': 151, - 'BAD_NUMBER': 152, - 'BAD_TYPE': 153, - 'BAD_NONCE': 154, - 'BAD_RECIPIENT_OF_NONTRANSFERT': 155, - 'BAD_PREV_HASH_PRESENT': 156, - 'BAD_PREV_HASH_ABSENT': 157, - 'BAD_PREV_ISSUER_PRESENT': 158, - 'BAD_PREV_ISSUER_ABSENT': 159, - 'BAD_DIVIDEND': 160, - 'BAD_TIME': 161, - 'BAD_MEDIAN_TIME': 162, - 'BAD_INNER_HASH': 163, - 'BAD_MEMBERS_COUNT': 164, - 'BAD_UNITBASE': 165, - 'BAD_ISSUER': 166 + BAD_VERSION: 150, + BAD_CURRENCY: 151, + BAD_NUMBER: 152, + BAD_TYPE: 153, + BAD_NONCE: 154, + BAD_RECIPIENT_OF_NONTRANSFERT: 155, + BAD_PREV_HASH_PRESENT: 156, + BAD_PREV_HASH_ABSENT: 157, + BAD_PREV_ISSUER_PRESENT: 158, + BAD_PREV_ISSUER_ABSENT: 159, + BAD_DIVIDEND: 160, + BAD_TIME: 161, + BAD_MEDIAN_TIME: 162, + BAD_INNER_HASH: 163, + BAD_MEMBERS_COUNT: 164, + BAD_UNITBASE: 165, + BAD_ISSUER: 166, }; - if(!err){ + if (!err) { // Version - if(!obj.version || !obj.version.match(CommonConstants.DOCUMENTS_BLOCK_VERSION_REGEXP)) - err = {code: codes.BAD_VERSION, message: "Version unknown"}; + if ( + !obj.version || + !obj.version.match(CommonConstants.DOCUMENTS_BLOCK_VERSION_REGEXP) + ) + err = { code: codes.BAD_VERSION, message: "Version unknown" }; } - if(!err){ + if (!err) { // Type - if(!obj.type || !obj.type.match(/^Block$/)) - err = {code: codes.BAD_TYPE, message: "Not a Block type"}; + if (!obj.type || !obj.type.match(/^Block$/)) + err = { code: codes.BAD_TYPE, message: "Not a Block type" }; } - if(!err){ + if (!err) { // Nonce - if(!obj.nonce || !obj.nonce.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_NONCE, message: "Nonce must be an integer value"}; + if (!obj.nonce || !obj.nonce.match(CommonConstants.INTEGER)) + err = { + code: codes.BAD_NONCE, + message: "Nonce must be an integer value", + }; } - if(!err){ + if (!err) { // Number - if(!obj.number || !obj.number.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_NUMBER, message: "Incorrect Number field"}; + if (!obj.number || !obj.number.match(CommonConstants.INTEGER)) + err = { code: codes.BAD_NUMBER, message: "Incorrect Number field" }; } - if(!err){ + if (!err) { // Time - if(!obj.time || !obj.time.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_TIME, message: "Time must be an integer"}; + if (!obj.time || !obj.time.match(CommonConstants.INTEGER)) + err = { code: codes.BAD_TIME, message: "Time must be an integer" }; } - if(!err){ + if (!err) { // MedianTime - if(!obj.medianTime || 
!obj.medianTime.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_MEDIAN_TIME, message: "MedianTime must be an integer"}; + if (!obj.medianTime || !obj.medianTime.match(CommonConstants.INTEGER)) + err = { + code: codes.BAD_MEDIAN_TIME, + message: "MedianTime must be an integer", + }; } - if(!err){ - if(obj.dividend && !obj.dividend.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_DIVIDEND, message: "Incorrect UniversalDividend field"}; + if (!err) { + if (obj.dividend && !obj.dividend.match(CommonConstants.INTEGER)) + err = { + code: codes.BAD_DIVIDEND, + message: "Incorrect UniversalDividend field", + }; } - if(!err){ - if(obj.unitbase && !obj.unitbase.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_UNITBASE, message: "Incorrect UnitBase field"}; + if (!err) { + if (obj.unitbase && !obj.unitbase.match(CommonConstants.INTEGER)) + err = { code: codes.BAD_UNITBASE, message: "Incorrect UnitBase field" }; } - if(!err){ - if(!obj.issuer || !obj.issuer.match(CommonConstants.BASE58)) - err = {code: codes.BAD_ISSUER, message: "Incorrect Issuer field"}; + if (!err) { + if (!obj.issuer || !obj.issuer.match(CommonConstants.BASE58)) + err = { code: codes.BAD_ISSUER, message: "Incorrect Issuer field" }; } - if(!err){ + if (!err) { // MembersCount - if(!obj.nonce || !obj.nonce.match(CommonConstants.INTEGER)) - err = {code: codes.BAD_MEMBERS_COUNT, message: "MembersCount must be an integer value"}; + if (!obj.nonce || !obj.nonce.match(CommonConstants.INTEGER)) + err = { + code: codes.BAD_MEMBERS_COUNT, + message: "MembersCount must be an integer value", + }; } - if(!err){ + if (!err) { // InnerHash - if(!obj.inner_hash || !obj.inner_hash.match(CommonConstants.FINGERPRINT)) - err = {code: codes.BAD_INNER_HASH, message: "InnerHash must be a hash value"}; + if (!obj.inner_hash || !obj.inner_hash.match(CommonConstants.FINGERPRINT)) + err = { + code: codes.BAD_INNER_HASH, + message: "InnerHash must be a hash value", + }; } return err && err.message; - }; + } } -function splitAndMatch (separator:string, regexp:RegExp) { - return function (raw:string) { +function splitAndMatch(separator: string, regexp: RegExp) { + return function (raw: string) { const lines = raw.split(new RegExp(separator)); - const kept:string[] = []; - lines.forEach(function(line){ - if (line.match(regexp)) - kept.push(line); + const kept: string[] = []; + lines.forEach(function (line) { + if (line.match(regexp)) kept.push(line); }); return kept; }; } -function extractTransactions(raw:string) { - const regexps:any = { - "issuers": CommonConstants.TRANSACTION.SENDER, - "inputs": CommonConstants.TRANSACTION.SOURCE_V3, - "unlocks": CommonConstants.TRANSACTION.UNLOCK, - "outputs": CommonConstants.TRANSACTION.TARGET, - "comments": CommonConstants.TRANSACTION.INLINE_COMMENT, - "signatures": CommonConstants.SIG +function extractTransactions(raw: string) { + const regexps: any = { + issuers: CommonConstants.TRANSACTION.SENDER, + inputs: CommonConstants.TRANSACTION.SOURCE_V3, + unlocks: CommonConstants.TRANSACTION.UNLOCK, + outputs: CommonConstants.TRANSACTION.TARGET, + comments: CommonConstants.TRANSACTION.INLINE_COMMENT, + signatures: CommonConstants.SIG, }; const transactions = []; const lines = raw.split(/\n/); @@ -188,8 +250,8 @@ function extractTransactions(raw:string) { // On each header if (line.match(CommonConstants.TRANSACTION.HEADER)) { // Parse the transaction - const currentTX:any = { raw: line + '\n' }; - const sp = line.split(':'); + const currentTX: any = { raw: line + "\n" }; + const sp = line.split(":"); 
const version = parseInt(sp[1]); const nbIssuers = parseInt(sp[2]); const nbInputs = parseInt(sp[3]); @@ -199,40 +261,66 @@ function extractTransactions(raw:string) { const start = 2; currentTX.version = version; currentTX.blockstamp = lines[i + 1]; - currentTX.raw += currentTX.blockstamp + '\n'; + currentTX.raw += currentTX.blockstamp + "\n"; currentTX.locktime = parseInt(sp[7]); - const linesToExtract:any = { + const linesToExtract: any = { issuers: { start: start, - end: (start - 1) + nbIssuers + end: start - 1 + nbIssuers, }, inputs: { start: start + nbIssuers, - end: (start - 1) + nbIssuers + nbInputs + end: start - 1 + nbIssuers + nbInputs, }, unlocks: { start: start + nbIssuers + nbInputs, - end: (start - 1) + nbIssuers + nbInputs + nbUnlocks + end: start - 1 + nbIssuers + nbInputs + nbUnlocks, }, outputs: { start: start + nbIssuers + nbInputs + nbUnlocks, - end: (start - 1) + nbIssuers + nbInputs + nbUnlocks + nbOutputs + end: start - 1 + nbIssuers + nbInputs + nbUnlocks + nbOutputs, }, comments: { start: start + nbIssuers + nbInputs + nbUnlocks + nbOutputs, - end: (start - 1) + nbIssuers + nbInputs + nbUnlocks + nbOutputs + hasComment + end: + start - + 1 + + nbIssuers + + nbInputs + + nbUnlocks + + nbOutputs + + hasComment, }, signatures: { - start: start + nbIssuers + nbInputs + nbUnlocks + nbOutputs + hasComment, - end: (start - 1) + 2 * nbIssuers + nbInputs + nbUnlocks + nbOutputs + hasComment - } + start: + start + nbIssuers + nbInputs + nbUnlocks + nbOutputs + hasComment, + end: + start - + 1 + + 2 * nbIssuers + + nbInputs + + nbUnlocks + + nbOutputs + + hasComment, + }, }; - ['issuers', 'inputs', 'unlocks', 'outputs', 'comments', 'signatures'].forEach((prop) => { + [ + "issuers", + "inputs", + "unlocks", + "outputs", + "comments", + "signatures", + ].forEach((prop) => { currentTX[prop] = currentTX[prop] || []; - for (let j = linesToExtract[prop].start; j <= linesToExtract[prop].end; j++) { + for ( + let j = linesToExtract[prop].start; + j <= linesToExtract[prop].end; + j++ + ) { const line = lines[i + j]; if (line.match(regexps[prop])) { - currentTX.raw += line + '\n'; + currentTX.raw += line + "\n"; currentTX[prop].push(line); } } @@ -241,7 +329,7 @@ function extractTransactions(raw:string) { if (hasComment) { currentTX.comment = currentTX.comments[0]; } else { - currentTX.comment = ''; + currentTX.comment = ""; } currentTX.hash = hashf(rawer.getTransaction(currentTX)).toUpperCase(); // Add to txs array diff --git a/app/lib/common-libs/parsers/certification.ts b/app/lib/common-libs/parsers/certification.ts index ed7f517cf6face9884189bc11000fd3449d65d5e..3f97a103f65100a4e157bc8142f845acba9634d4 100644 --- a/app/lib/common-libs/parsers/certification.ts +++ b/app/lib/common-libs/parsers/certification.ts @@ -11,41 +11,59 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {CommonConstants} from "../../../lib/common-libs/constants" -import {GenericParser} from "./GenericParser" -import {rawer} from "../../../lib/common-libs/index" +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { GenericParser } from "./GenericParser"; +import { rawer } from "../../../lib/common-libs/index"; export class CertificationParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: CommonConstants.DOCUMENTS.DOC_VERSION }, - {prop: "type", regexp: CommonConstants.CERTIFICATION.CERT_TYPE }, - {prop: "currency", regexp: CommonConstants.DOCUMENTS.DOC_CURRENCY }, - {prop: "issuer", regexp: CommonConstants.DOCUMENTS.DOC_ISSUER }, - {prop: "idty_issuer", regexp: CommonConstants.CERTIFICATION.IDTY_ISSUER }, - {prop: "idty_sig", regexp: CommonConstants.CERTIFICATION.IDTY_SIG }, - {prop: "idty_buid", regexp: CommonConstants.CERTIFICATION.IDTY_TIMESTAMP}, - {prop: "idty_uid", regexp: CommonConstants.CERTIFICATION.IDTY_UID }, - {prop: "buid", regexp: CommonConstants.CERTIFICATION.CERT_TIMESTAMP } - ], rawer.getOfficialCertification) + super( + [ + { prop: "version", regexp: CommonConstants.DOCUMENTS.DOC_VERSION }, + { prop: "type", regexp: CommonConstants.CERTIFICATION.CERT_TYPE }, + { prop: "currency", regexp: CommonConstants.DOCUMENTS.DOC_CURRENCY }, + { prop: "issuer", regexp: CommonConstants.DOCUMENTS.DOC_ISSUER }, + { + prop: "idty_issuer", + regexp: CommonConstants.CERTIFICATION.IDTY_ISSUER, + }, + { prop: "idty_sig", regexp: CommonConstants.CERTIFICATION.IDTY_SIG }, + { + prop: "idty_buid", + regexp: CommonConstants.CERTIFICATION.IDTY_TIMESTAMP, + }, + { prop: "idty_uid", regexp: CommonConstants.CERTIFICATION.IDTY_UID }, + { prop: "buid", regexp: CommonConstants.CERTIFICATION.CERT_TIMESTAMP }, + ], + rawer.getOfficialCertification + ); } - _clean(obj:any) { + _clean(obj: any) { obj.sig = obj.signature; obj.block = obj.buid; if (obj.block) { - obj.number = obj.block.split('-')[0]; - obj.fpr = obj.block.split('-')[1]; + obj.number = obj.block.split("-")[0]; + obj.fpr = obj.block.split("-")[1]; } else { - obj.number = '0'; - obj.fpr = ''; + obj.number = "0"; + obj.fpr = ""; } } - _verify(obj:any): string { - return ["version", "type", "currency", "issuer", "idty_issuer", "idty_sig", "idty_buid", "idty_uid", "block"].reduce((p, field) => { - return p || (!obj[field] && "Wrong format for certification") || "" - }, "") + _verify(obj: any): string { + return [ + "version", + "type", + "currency", + "issuer", + "idty_issuer", + "idty_sig", + "idty_buid", + "idty_uid", + "block", + ].reduce((p, field) => { + return p || (!obj[field] && "Wrong format for certification") || ""; + }, ""); } -} \ No newline at end of file +} diff --git a/app/lib/common-libs/parsers/identity.ts b/app/lib/common-libs/parsers/identity.ts index 309f7722a0828def4d51dddb60ecf069872f834e..1b9cfadebbf47f2adf7a4afe992493e6e0c8165f 100644 --- a/app/lib/common-libs/parsers/identity.ts +++ b/app/lib/common-libs/parsers/identity.ts @@ -11,32 +11,34 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {GenericParser} from "./GenericParser" -import {CommonConstants} from "../../../lib/common-libs/constants" -import {hashf} from "../../../lib/common" -import {rawer} from "../../../lib/common-libs/index" +import { GenericParser } from "./GenericParser"; +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { hashf } from "../../../lib/common"; +import { rawer } from "../../../lib/common-libs/index"; export class IdentityParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: CommonConstants.DOCUMENTS.DOC_VERSION }, - {prop: "type", regexp: CommonConstants.IDENTITY.IDTY_TYPE}, - {prop: "currency", regexp: CommonConstants.DOCUMENTS.DOC_CURRENCY }, - {prop: "pubkey", regexp: CommonConstants.DOCUMENTS.DOC_ISSUER }, - {prop: "uid", regexp: CommonConstants.IDENTITY.IDTY_UID }, - {prop: "buid", regexp: CommonConstants.DOCUMENTS.TIMESTAMP } - ], rawer.getOfficialIdentity) + super( + [ + { prop: "version", regexp: CommonConstants.DOCUMENTS.DOC_VERSION }, + { prop: "type", regexp: CommonConstants.IDENTITY.IDTY_TYPE }, + { prop: "currency", regexp: CommonConstants.DOCUMENTS.DOC_CURRENCY }, + { prop: "pubkey", regexp: CommonConstants.DOCUMENTS.DOC_ISSUER }, + { prop: "uid", regexp: CommonConstants.IDENTITY.IDTY_UID }, + { prop: "buid", regexp: CommonConstants.DOCUMENTS.TIMESTAMP }, + ], + rawer.getOfficialIdentity + ); } - _clean(obj:any) { + _clean(obj: any) { obj.sig = obj.signature; if (obj.uid && obj.buid && obj.pubkey) { obj.hash = hashf(obj.uid + obj.buid + obj.pubkey).toUpperCase(); } } - _verify(obj:any) { + _verify(obj: any) { if (!obj.pubkey) { return "No pubkey found"; } @@ -49,6 +51,6 @@ export class IdentityParser extends GenericParser { if (!obj.sig) { return "No signature found for self-certification"; } - return "" + return ""; } } diff --git a/app/lib/common-libs/parsers/index.ts b/app/lib/common-libs/parsers/index.ts index 520a96a603ede3056b4bf28d4d0cd8b4e84a7188..0fb8a31f1dedc4741d3a18d5106b9fcf24fe8b6d 100644 --- a/app/lib/common-libs/parsers/index.ts +++ b/app/lib/common-libs/parsers/index.ts @@ -11,20 +11,20 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
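The identity hash computed in _clean above is a SHA-256 of uid + buid + pubkey, uppercased (hashf wraps sha256; see app/lib/common.ts further down in this diff). A standalone sketch using node's built-in crypto instead of the duniteroxyde binding:

import { createHash } from "crypto";

function identityHash(uid: string, buid: string, pubkey: string): string {
  // Same concatenation order as IdentityParser._clean
  return createHash("sha256").update(uid + buid + pubkey).digest("hex").toUpperCase();
}
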
-import {BlockParser} from "./block" -import {CertificationParser} from "./certification" -import {IdentityParser} from "./identity" -import {MembershipParser} from "./membership" -import {PeerParser} from "./peer" -import {RevocationParser} from "./revocation" -import {TransactionParser} from "./transaction" +import { BlockParser } from "./block"; +import { CertificationParser } from "./certification"; +import { IdentityParser } from "./identity"; +import { MembershipParser } from "./membership"; +import { PeerParser } from "./peer"; +import { RevocationParser } from "./revocation"; +import { TransactionParser } from "./transaction"; export const parsers = { - parseIdentity: new IdentityParser(), + parseIdentity: new IdentityParser(), parseCertification: new CertificationParser(), - parseRevocation: new RevocationParser(), - parseTransaction: new TransactionParser(), - parsePeer: new PeerParser(), - parseMembership: new MembershipParser(), - parseBlock: new BlockParser() -} + parseRevocation: new RevocationParser(), + parseTransaction: new TransactionParser(), + parsePeer: new PeerParser(), + parseMembership: new MembershipParser(), + parseBlock: new BlockParser(), +}; diff --git a/app/lib/common-libs/parsers/membership.ts b/app/lib/common-libs/parsers/membership.ts index c05224931ae23695be6c9bd98ce8de11e5a1ee02..abe7d78639c0a525f45e69133b3eea38f10dd3da 100644 --- a/app/lib/common-libs/parsers/membership.ts +++ b/app/lib/common-libs/parsers/membership.ts @@ -11,71 +11,89 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../../../lib/common-libs/constants" -import {GenericParser} from "./GenericParser" -import {rawer} from "../../../lib/common-libs/index" -import {Buid} from "../../../lib/common-libs/buid" +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { GenericParser } from "./GenericParser"; +import { rawer } from "../../../lib/common-libs/index"; +import { Buid } from "../../../lib/common-libs/buid"; export class MembershipParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: CommonConstants.MEMBERSHIP.VERSION }, - {prop: "currency", regexp: CommonConstants.MEMBERSHIP.CURRENCY }, - {prop: "issuer", regexp: CommonConstants.MEMBERSHIP.ISSUER }, - {prop: "membership", regexp: CommonConstants.MEMBERSHIP.MEMBERSHIP }, - {prop: "userid", regexp: CommonConstants.MEMBERSHIP.USERID }, - {prop: "block", regexp: CommonConstants.MEMBERSHIP.BLOCK}, - {prop: "certts", regexp: CommonConstants.MEMBERSHIP.CERTTS} - ], rawer.getMembership) + super( + [ + { prop: "version", regexp: CommonConstants.MEMBERSHIP.VERSION }, + { prop: "currency", regexp: CommonConstants.MEMBERSHIP.CURRENCY }, + { prop: "issuer", regexp: CommonConstants.MEMBERSHIP.ISSUER }, + { prop: "membership", regexp: CommonConstants.MEMBERSHIP.MEMBERSHIP }, + { prop: "userid", regexp: CommonConstants.MEMBERSHIP.USERID }, + { prop: "block", regexp: CommonConstants.MEMBERSHIP.BLOCK }, + { prop: "certts", regexp: CommonConstants.MEMBERSHIP.CERTTS }, + ], + rawer.getMembership + ); } - _clean(obj:any) { + _clean(obj: any) { if (obj.block) { - obj.number = obj.block.split('-')[0]; - obj.fpr = obj.block.split('-')[1]; + obj.number = obj.block.split("-")[0]; + obj.fpr = obj.block.split("-")[1]; } else { - obj.number = '0'; - obj.fpr = ''; + obj.number = "0"; + obj.fpr = ""; } } - _verify(obj:any) { + _verify(obj: any) { let err = null; const codes = { - 'BAD_VERSION': 150, - 
'BAD_CURRENCY': 151, - 'BAD_ISSUER': 152, - 'BAD_MEMBERSHIP': 153, - 'BAD_REGISTRY_TYPE': 154, - 'BAD_BLOCK': 155, - 'BAD_USERID': 156, - 'BAD_CERTTS': 157 + BAD_VERSION: 150, + BAD_CURRENCY: 151, + BAD_ISSUER: 152, + BAD_MEMBERSHIP: 153, + BAD_REGISTRY_TYPE: 154, + BAD_BLOCK: 155, + BAD_USERID: 156, + BAD_CERTTS: 157, }; - if(!err){ - if(!obj.version || !obj.version.match(CommonConstants.DOCUMENTS_VERSION_REGEXP)) - err = {code: codes.BAD_VERSION, message: "Version unknown"}; + if (!err) { + if ( + !obj.version || + !obj.version.match(CommonConstants.DOCUMENTS_VERSION_REGEXP) + ) + err = { code: codes.BAD_VERSION, message: "Version unknown" }; } - if(!err){ - if(obj.issuer && !obj.issuer.match(CommonConstants.BASE58)) - err = {code: codes.BAD_ISSUER, message: "Incorrect issuer field"}; + if (!err) { + if (obj.issuer && !obj.issuer.match(CommonConstants.BASE58)) + err = { code: codes.BAD_ISSUER, message: "Incorrect issuer field" }; } - if(!err){ - if(!(obj.membership || "").match(/^(IN|OUT)$/)) - err = {code: codes.BAD_MEMBERSHIP, message: "Incorrect Membership field: must be either IN or OUT"}; + if (!err) { + if (!(obj.membership || "").match(/^(IN|OUT)$/)) + err = { + code: codes.BAD_MEMBERSHIP, + message: "Incorrect Membership field: must be either IN or OUT", + }; } - if(!err){ - if(obj.block && !obj.block.match(CommonConstants.BLOCK_UID)) - err = {code: codes.BAD_BLOCK, message: "Incorrect Block field: must be a positive or zero integer, a dash and an uppercased SHA1 hash"}; + if (!err) { + if (obj.block && !obj.block.match(CommonConstants.BLOCK_UID)) + err = { + code: codes.BAD_BLOCK, + message: + "Incorrect Block field: must be a positive or zero integer, a dash and an uppercased SHA1 hash", + }; } - if(!err){ - if(obj.userid && !obj.userid.match(CommonConstants.USER_ID)) - err = {code: codes.BAD_USERID, message: "UserID must match udid2 format"}; + if (!err) { + if (obj.userid && !obj.userid.match(CommonConstants.USER_ID)) + err = { + code: codes.BAD_USERID, + message: "UserID must match udid2 format", + }; } - if(!err){ - if(!Buid.format.isBuid(obj.certts)) - err = {code: codes.BAD_CERTTS, message: "CertTS must be a valid timestamp"}; + if (!err) { + if (!Buid.format.isBuid(obj.certts)) + err = { + code: codes.BAD_CERTTS, + message: "CertTS must be a valid timestamp", + }; } return err && err.message; - }; -} \ No newline at end of file + } +} diff --git a/app/lib/common-libs/parsers/peer.ts b/app/lib/common-libs/parsers/peer.ts index 67ddb9ad327f87d825494886555df8bd241ee28d..13263b0ee4c6bf982ba9dc417a2ddc1ca8aa0647 100644 --- a/app/lib/common-libs/parsers/peer.ts +++ b/app/lib/common-libs/parsers/peer.ts @@ -11,112 +11,146 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
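The chain of if (!err) blocks above implements "run the checks in order, keep the first failure, report its message". Distilled into a standalone sketch with hypothetical rules:

type Check = { code: number; message: string; ok: (obj: any) => boolean };

function firstError(obj: any, checks: Check[]): string | null {
  for (const c of checks) {
    if (!c.ok(obj)) return c.message; // mirrors "return err && err.message"
  }
  return null;
}

const checks: Check[] = [
  { code: 150, message: "Version unknown", ok: (o) => !!o.version },
  { code: 153, message: "Incorrect Membership field: must be either IN or OUT", ok: (o) => /^(IN|OUT)$/.test(o.membership || "") },
];
console.log(firstError({ version: "10", membership: "MAYBE" }, checks)); // -> the membership message
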
-import {GenericParser} from "./GenericParser" -import {CommonConstants} from "../../../lib/common-libs/constants" -import {rawer} from "../../../lib/common-libs/index" +import { GenericParser } from "./GenericParser"; +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { rawer } from "../../../lib/common-libs/index"; export class PeerParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: /Version: (.*)/}, - {prop: "currency", regexp: /Currency: (.*)/}, - {prop: "pubkey", regexp: /PublicKey: (.*)/}, - {prop: "block", regexp: CommonConstants.PEER.BLOCK}, - { - prop: "endpoints", regexp: /Endpoints:\n([\s\S]*)/, parser: (str:string) => str.split("\n") - } - ], rawer.getPeer) + super( + [ + { prop: "version", regexp: /Version: (.*)/ }, + { prop: "currency", regexp: /Currency: (.*)/ }, + { prop: "pubkey", regexp: /PublicKey: (.*)/ }, + { prop: "block", regexp: CommonConstants.PEER.BLOCK }, + { + prop: "endpoints", + regexp: /Endpoints:\n([\s\S]*)/, + parser: (str: string) => str.split("\n"), + }, + ], + rawer.getPeer + ); } - _clean(obj:any) { + _clean(obj: any) { obj.endpoints = obj.endpoints || []; // Removes trailing space if (obj.endpoints.length > 0) obj.endpoints.splice(obj.endpoints.length - 1, 1); - obj.getBMAOrNull = function() { - let bma:any = null; - obj.endpoints.forEach((ep:string) => { + obj.getBMAOrNull = function () { + let bma: any = null; + obj.endpoints.forEach((ep: string) => { let matches = !bma && ep.match(CommonConstants.BMA_REGEXP); if (matches) { bma = { - "dns": matches[2] || '', - "ipv4": matches[4] || '', - "ipv6": matches[6] || '', - "port": matches[8] || 9101 + dns: matches[2] || "", + ipv4: matches[4] || "", + ipv6: matches[6] || "", + port: matches[8] || 9101, }; } }); - return bma || null + return bma || null; }; } - _verify(obj:any) { + _verify(obj: any) { let err = null; const codes = { - 'BAD_VERSION': 150, - 'BAD_CURRENCY': 151, - 'BAD_DNS': 152, - 'BAD_IPV4': 153, - 'BAD_IPV6': 154, - 'BAD_PORT': 155, - 'BAD_FINGERPRINT': 156, - 'BAD_BLOCK': 157, - 'NO_IP_GIVEN': 158, - 'TOO_LONG_ENDPOINT': 159 + BAD_VERSION: 150, + BAD_CURRENCY: 151, + BAD_DNS: 152, + BAD_IPV4: 153, + BAD_IPV6: 154, + BAD_PORT: 155, + BAD_FINGERPRINT: 156, + BAD_BLOCK: 157, + NO_IP_GIVEN: 158, + TOO_LONG_ENDPOINT: 159, }; - if(!err){ + if (!err) { // Version - if(!obj.version || !obj.version.match(CommonConstants.DOCUMENTS_VERSION_REGEXP)) - err = {code: codes.BAD_VERSION, message: "Version unknown"}; + if ( + !obj.version || + !obj.version.match(CommonConstants.DOCUMENTS_VERSION_REGEXP) + ) + err = { code: codes.BAD_VERSION, message: "Version unknown" }; } - if(!err){ + if (!err) { // PublicKey - if(!obj.pubkey || !obj.pubkey.match(CommonConstants.BASE58)) - err = {code: codes.BAD_FINGERPRINT, message: "Incorrect PublicKey field"}; + if (!obj.pubkey || !obj.pubkey.match(CommonConstants.BASE58)) + err = { + code: codes.BAD_FINGERPRINT, + message: "Incorrect PublicKey field", + }; } - if(!err){ + if (!err) { // Block - if(!obj.block) - err = {code: codes.BAD_BLOCK, message: "Incorrect Block field"}; + if (!obj.block) + err = { code: codes.BAD_BLOCK, message: "Incorrect Block field" }; } - if(!err){ + if (!err) { // Endpoint length - for (const ep of (obj.endpoints || [])) { + for (const ep of obj.endpoints || []) { if (!err && ep.length > 255) { - err = {code: codes.TOO_LONG_ENDPOINT, message: "An endpoint has maximum 255 characters length."} + err = { + code: codes.TOO_LONG_ENDPOINT, + message: "An endpoint has maximum 255 
characters length.", + }; } } } // Basic Merkled API requirements - let bma = obj.getBMAOrNull() + let bma = obj.getBMAOrNull(); if (bma) { - if(!err){ + if (!err) { // DNS - if(bma.dns && !bma.dns.match(/^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/)) - err = {code: codes.BAD_DNS, message: "Incorrect Dns field"}; + if ( + bma.dns && + !bma.dns.match( + /^(([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]*[a-zA-Z0-9])\.)*([A-Za-z0-9]|[A-Za-z0-9][A-Za-z0-9\-]*[A-Za-z0-9])$/ + ) + ) + err = { code: codes.BAD_DNS, message: "Incorrect Dns field" }; } - if(!err){ + if (!err) { // IPv4 - if(bma.ipv4 && !bma.ipv4.match(/^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/)) - err = {code: codes.BAD_IPV4, message: "Incorrect IPv4 field"}; + if ( + bma.ipv4 && + !bma.ipv4.match( + /^(([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])\.){3}([0-9]|[1-9][0-9]|1[0-9]{2}|2[0-4][0-9]|25[0-5])$/ + ) + ) + err = { code: codes.BAD_IPV4, message: "Incorrect IPv4 field" }; } - if(!err){ + if (!err) { // IPv6 - if(bma.ipv6 && !bma.ipv6.match(/^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(([0-9A-Fa-f]{1,4}:){0,5}:((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(::([0-9A-Fa-f]{1,4}:){0,5}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/)) - err = {code: codes.BAD_IPV6, message: "Incorrect IPv6 field"}; + if ( + bma.ipv6 && + !bma.ipv6.match( + /^((([0-9A-Fa-f]{1,4}:){7}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}:[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){5}:([0-9A-Fa-f]{1,4}:)?[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){4}:([0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){3}:([0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){2}:([0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){6}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(([0-9A-Fa-f]{1,4}:){0,5}:((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|(::([0-9A-Fa-f]{1,4}:){0,5}((b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b).){3}(b((25[0-5])|(1d{2})|(2[0-4]d)|(d{1,2}))b))|([0-9A-Fa-f]{1,4}::([0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})|(::([0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})|(([0-9A-Fa-f]{1,4}:){1,7}:))$/ + ) + ) + err = { code: codes.BAD_IPV6, message: "Incorrect IPv6 field" }; } - if(!err){ + if (!err) { // IP - if(!bma.dns && !bma.ipv4 && !bma.ipv6) - err = {code: codes.NO_IP_GIVEN, message: "It must be given at least DNS or one IP, either v4 or v6"}; + if (!bma.dns && !bma.ipv4 && !bma.ipv6) + err = { + code: codes.NO_IP_GIVEN, + message: "It must be given at least DNS or one IP, either v4 or v6", + }; } - if(!err){ + if (!err) { // Port - if(bma.port && !(bma.port + "").match(/^\d+$/)) - err = {code: codes.BAD_PORT, message: "Port must be provided and match an integer format"}; + if (bma.port && !(bma.port + 
"").match(/^\d+$/)) + err = { + code: codes.BAD_PORT, + message: "Port must be provided and match an integer format", + }; } } return err && err.message; - }; -} \ No newline at end of file + } +} diff --git a/app/lib/common-libs/parsers/revocation.ts b/app/lib/common-libs/parsers/revocation.ts index 9f253c6b787683081b6b189190e966ccb69ce277..d051b8ed2eb12f46f5c3fd7f308030d385da70d4 100644 --- a/app/lib/common-libs/parsers/revocation.ts +++ b/app/lib/common-libs/parsers/revocation.ts @@ -11,26 +11,28 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../../../lib/common-libs/constants" -import {GenericParser} from "./GenericParser" -import {hashf} from "../../../lib/common" -import {rawer} from "../../../lib/common-libs/index" +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { GenericParser } from "./GenericParser"; +import { hashf } from "../../../lib/common"; +import { rawer } from "../../../lib/common-libs/index"; export class RevocationParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: CommonConstants.DOCUMENTS.DOC_VERSION }, - {prop: "type", regexp: CommonConstants.REVOCATION.REVOC_TYPE }, - {prop: "currency", regexp: CommonConstants.DOCUMENTS.DOC_CURRENCY }, - {prop: "issuer", regexp: CommonConstants.DOCUMENTS.DOC_ISSUER }, - {prop: "sig", regexp: CommonConstants.REVOCATION.IDTY_SIG }, - {prop: "buid", regexp: CommonConstants.REVOCATION.IDTY_TIMESTAMP}, - {prop: "uid", regexp: CommonConstants.REVOCATION.IDTY_UID } - ], rawer.getOfficialRevocation) + super( + [ + { prop: "version", regexp: CommonConstants.DOCUMENTS.DOC_VERSION }, + { prop: "type", regexp: CommonConstants.REVOCATION.REVOC_TYPE }, + { prop: "currency", regexp: CommonConstants.DOCUMENTS.DOC_CURRENCY }, + { prop: "issuer", regexp: CommonConstants.DOCUMENTS.DOC_ISSUER }, + { prop: "sig", regexp: CommonConstants.REVOCATION.IDTY_SIG }, + { prop: "buid", regexp: CommonConstants.REVOCATION.IDTY_TIMESTAMP }, + { prop: "uid", regexp: CommonConstants.REVOCATION.IDTY_UID }, + ], + rawer.getOfficialRevocation + ); } - _clean(obj:any) { + _clean(obj: any) { obj.pubkey = obj.issuer; obj.revocation = obj.signature; if (obj.uid && obj.buid && obj.pubkey) { @@ -38,7 +40,7 @@ export class RevocationParser extends GenericParser { } } - _verify(obj:any) { + _verify(obj: any) { if (!obj.pubkey) { return "No pubkey found"; } @@ -54,6 +56,6 @@ export class RevocationParser extends GenericParser { if (!obj.revocation) { return "No revocation signature found"; } - return "" + return ""; } -} \ No newline at end of file +} diff --git a/app/lib/common-libs/parsers/transaction.ts b/app/lib/common-libs/parsers/transaction.ts index fc8bf5033541eb986c995c732a7845498ed707d7..d5ed8c20709210d560d3feb4b571705b39fb3682 100644 --- a/app/lib/common-libs/parsers/transaction.ts +++ b/app/lib/common-libs/parsers/transaction.ts @@ -11,67 +11,96 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {CommonConstants} from "../constants" -import {GenericParser} from "./GenericParser" -import {rawer} from "../../../lib/common-libs/index" -import {checkGrammar} from '../txunlock'; +import { CommonConstants } from "../constants"; +import { GenericParser } from "./GenericParser"; +import { rawer } from "../../../lib/common-libs/index"; +import { checkGrammar } from "../txunlock"; export class TransactionParser extends GenericParser { - constructor() { - super([ - {prop: "version", regexp: /Version: (.*)/}, - {prop: "currency", regexp: /Currency: (.*)/}, - {prop: "issuers", regexp: /Issuers:\n([\s\S]*)Inputs/, parser: extractIssuers }, - {prop: "inputs", regexp: /Inputs:\n([\s\S]*)Unlocks/, parser: extractInputs }, - {prop: "unlocks", regexp: /Unlocks:\n([\s\S]*)Outputs/,parser: extractUnlocks }, - {prop: "outputs", regexp: /Outputs:\n([\s\S]*)/, parser: extractOutputs }, - {prop: "comment", regexp: CommonConstants.TRANSACTION.COMMENT }, - {prop: "locktime", regexp: CommonConstants.TRANSACTION.LOCKTIME }, - {prop: "blockstamp", regexp: CommonConstants.TRANSACTION.BLOCKSTAMP }, - {prop: "signatures", regexp: /Outputs:\n([\s\S]*)/, parser: extractSignatures } - ], rawer.getTransaction) + super( + [ + { prop: "version", regexp: /Version: (.*)/ }, + { prop: "currency", regexp: /Currency: (.*)/ }, + { + prop: "issuers", + regexp: /Issuers:\n([\s\S]*)Inputs/, + parser: extractIssuers, + }, + { + prop: "inputs", + regexp: /Inputs:\n([\s\S]*)Unlocks/, + parser: extractInputs, + }, + { + prop: "unlocks", + regexp: /Unlocks:\n([\s\S]*)Outputs/, + parser: extractUnlocks, + }, + { + prop: "outputs", + regexp: /Outputs:\n([\s\S]*)/, + parser: extractOutputs, + }, + { prop: "comment", regexp: CommonConstants.TRANSACTION.COMMENT }, + { prop: "locktime", regexp: CommonConstants.TRANSACTION.LOCKTIME }, + { prop: "blockstamp", regexp: CommonConstants.TRANSACTION.BLOCKSTAMP }, + { + prop: "signatures", + regexp: /Outputs:\n([\s\S]*)/, + parser: extractSignatures, + }, + ], + rawer.getTransaction + ); } - _clean(obj:any) { + _clean(obj: any) { obj.comment = obj.comment || ""; obj.locktime = parseInt(obj.locktime) || 0; - obj.signatures = obj.signatures || [] - obj.issuers = obj.issuers || [] - obj.inputs = obj.inputs || [] - obj.outputs = obj.outputs || [] + obj.signatures = obj.signatures || []; + obj.issuers = obj.issuers || []; + obj.inputs = obj.inputs || []; + obj.outputs = obj.outputs || []; obj.signatures.push(obj.signature); - const compactSize = 2 // Header + blockstamp - + obj.issuers.length - + obj.inputs.length - + obj.unlocks.length - + obj.outputs.length - + (obj.comment ? 1 : 0) - + obj.signatures; + const compactSize = + 2 + // Header + blockstamp + obj.issuers.length + + obj.inputs.length + + obj.unlocks.length + + obj.outputs.length + + (obj.comment ? 
1 : 0) +
+      obj.signatures.length; // count of signature lines (the bare array would concatenate into a string)
     if (compactSize > 100) {
-      throw 'A transaction has a maximum size of 100 lines';
+      throw "A transaction has a maximum size of 100 lines";
     }
   }

-  _verify(obj:any) {
+  _verify(obj: any) {
     let err = null;
     const codes = {
-      'BAD_VERSION': 150,
-      'NO_BLOCKSTAMP': 151
+      BAD_VERSION: 150,
+      NO_BLOCKSTAMP: 151,
     };
-    if(!err){
+    if (!err) {
       // Version
-      if(!obj.version || !obj.version.match(CommonConstants.DOCUMENTS_TRANSACTION_VERSION_REGEXP))
-        err = {code: codes.BAD_VERSION, message: "Version unknown"};
+      if (
+        !obj.version ||
+        !obj.version.match(CommonConstants.DOCUMENTS_TRANSACTION_VERSION_REGEXP)
+      )
+        err = { code: codes.BAD_VERSION, message: "Version unknown" };
       // Blockstamp
-      if(!obj.blockstamp || !obj.blockstamp.match(CommonConstants.BLOCKSTAMP_REGEXP))
-        err = {code: codes.BAD_VERSION, message: "Blockstamp is required"};
+      if (
+        !obj.blockstamp ||
+        !obj.blockstamp.match(CommonConstants.BLOCKSTAMP_REGEXP)
+      )
+        err = { code: codes.NO_BLOCKSTAMP, message: "Blockstamp is required" };
     }
     return err && err.message;
   }
 }

-function extractIssuers(raw:string) {
+function extractIssuers(raw: string) {
   const issuers = [];
   const lines = raw.split(/\n/);
   for (const line of lines) {
@@ -85,7 +114,7 @@ function extractIssuers(raw:string) {
   return issuers;
 }

-function extractInputs(raw:string) {
+function extractInputs(raw: string) {
   const inputs = [];
   const lines = raw.split(/\n/);
   for (const line of lines) {
@@ -99,7 +128,7 @@ function extractInputs(raw:string) {
   return inputs;
 }

-function extractUnlocks(raw:string) {
+function extractUnlocks(raw: string) {
   const unlocks = [];
   const lines = raw.split(/\n/);
   for (const line of lines) {
@@ -113,15 +142,15 @@ function extractUnlocks(raw:string) {
   return unlocks;
 }

-function extractOutputs(raw:string) {
+function extractOutputs(raw: string) {
   const outputs = [];
   const lines = raw.split(/\n/);
   for (const line of lines) {
     if (line.match(CommonConstants.TRANSACTION.TARGET)) {
       outputs.push(line);
-      const unlocked = checkGrammar(line.split(':')[2])
+      const unlocked = checkGrammar(line.split(":")[2]);
       if (unlocked === null) {
-        throw Error("Wrong output format")
+        throw Error("Wrong output format");
       }
     } else {
       // Not a transaction input, stop reading
@@ -131,7 +160,7 @@ function extractOutputs(raw:string) {
   return outputs;
 }

-function extractSignatures(raw:string) {
+function extractSignatures(raw: string) {
   const signatures = [];
   const lines = raw.split(/\n/);
   for (const line of lines) {
@@ -140,4 +169,4 @@ function extractSignatures(raw:string) {
     }
   }
   return signatures;
-}
\ No newline at end of file
+}
diff --git a/app/lib/common-libs/pint.ts b/app/lib/common-libs/pint.ts
index f95518f80e47013bfd67bf6c9393ef2947c4cf3d..69b5e574090a71a4824421d3f98db16a054cfec3 100644
--- a/app/lib/common-libs/pint.ts
+++ b/app/lib/common-libs/pint.ts
@@ -1,6 +1,6 @@
 export function pint(value: string | number): number {
-  if (typeof value === 'string') {
-    return parseInt(value, 10)
+  if (typeof value === "string") {
+    return parseInt(value, 10);
   }
-  return value
+  return value;
 }
diff --git a/app/lib/common-libs/programOptions.ts b/app/lib/common-libs/programOptions.ts
index a845061ed951fbdedad44e81d56b9f50a5b91208..501cc4c1a1c91289b44740d60910388322ace304 100644
--- a/app/lib/common-libs/programOptions.ts
+++ b/app/lib/common-libs/programOptions.ts
@@ -11,25 +11,25 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
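For scale, the 100-line ceiling enforced in TransactionParser._clean above, with hypothetical counts (each section contributes one line per element, plus header, blockstamp and an optional comment line):

const tx = { issuers: 2, inputs: 3, unlocks: 3, outputs: 4, comment: "CHANGE", signatures: 2 };
const compactSize =
  2 + tx.issuers + tx.inputs + tx.unlocks + tx.outputs + (tx.comment ? 1 : 0) + tx.signatures;
console.log(compactSize); // 17, well under the 100-line ceiling
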
-const opts = require('optimist').argv
+const opts = require("optimist").argv;

 export interface ProgramOptions {
-  mdb?: string
-  home?: string
-  notrim?: boolean
-  notrimc?: boolean // Specificaly disable for c_index
-  nosbx?: boolean
-  nopeers?: boolean
-  nop2p?: boolean
-  syncTrace?: string
-  isSync: boolean
-  noSources: boolean
-  slow?: boolean
-  loglevel?: string
-  sqlTraces?: boolean
-  memory?: boolean
-  storeTxs?: boolean
-  storeWw?: boolean
+  mdb?: string;
+  home?: string;
+  notrim?: boolean;
+  notrimc?: boolean; // Specifically disable for c_index
+  nosbx?: boolean;
+  nopeers?: boolean;
+  nop2p?: boolean;
+  syncTrace?: string;
+  isSync: boolean;
+  noSources: boolean;
+  slow?: boolean;
+  loglevel?: string;
+  sqlTraces?: boolean;
+  memory?: boolean;
+  storeTxs?: boolean;
+  storeWw?: boolean;
 }

 export const cliprogram: ProgramOptions = {
@@ -40,7 +40,7 @@ export const cliprogram: ProgramOptions = {
   nopeers: opts.nopeers,
   nop2p: opts.nop2p,
   noSources: !!opts.nosources,
-  syncTrace: opts['sync-trace'],
-  isSync: opts._[0] === 'sync',
+  syncTrace: opts["sync-trace"],
+  isSync: opts._[0] === "sync",
   slow: opts.slow,
-}
+};
diff --git a/app/lib/common-libs/querable.ts b/app/lib/common-libs/querable.ts
index b5cba5da23170c599d46d7dadd138376b0d9f20f..c4f4398ba1f61d6567d11a22c7fda60e78ec6255 100644
--- a/app/lib/common-libs/querable.ts
+++ b/app/lib/common-libs/querable.ts
@@ -1,14 +1,14 @@
-const querablePromise = require('querablep');
+const querablePromise = require("querablep");

 export interface Querable<T> extends Promise<T> {
-  isFulfilled(): boolean
-  isResolved(): boolean
-  isRejected(): boolean
-  startedOn: number
+  isFulfilled(): boolean;
+  isResolved(): boolean;
+  isRejected(): boolean;
+  startedOn: number;
 }

 export function querablep<T>(p: Promise<T>): Querable<T> {
-  const qp = querablePromise(p)
-  qp.startedOn = Date.now()
-  return qp
+  const qp = querablePromise(p);
+  qp.startedOn = Date.now();
+  return qp;
 }
diff --git a/app/lib/common-libs/randomPick.ts b/app/lib/common-libs/randomPick.ts
index ccd912754e15840a089ef5ca35d641d4e1be8bf1..8368d9ef284e7ae2f1745a57520803cf1bc53995 100644
--- a/app/lib/common-libs/randomPick.ts
+++ b/app/lib/common-libs/randomPick.ts
@@ -11,14 +11,16 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.

-
-export const randomPick = <T>(elements:T[], max:number) => {
-  const chosen:T[] = []
-  const nbElements = elements.length
+export const randomPick = <T>(elements: T[], max: number) => {
+  const chosen: T[] = [];
+  const nbElements = elements.length;
   for (let i = 0; i < Math.min(nbElements, max); i++) {
-    const randIndex = Math.max(Math.floor(Math.random() * 10) - (10 - nbElements) - i, 0)
-    chosen.push(elements[randIndex])
-    elements.splice(randIndex, 1)
+    const randIndex = Math.max(
+      Math.floor(Math.random() * 10) - (10 - nbElements) - i,
+      0
+    );
+    chosen.push(elements[randIndex]);
+    elements.splice(randIndex, 1);
   }
-  return chosen
-}
\ No newline at end of file
+  return chosen;
+};
diff --git a/app/lib/common-libs/rawer.ts b/app/lib/common-libs/rawer.ts
index 9ed172071cad9d086bc01db5d13e3df19440d02b..018addc59fa948c23e1f09d617d0160880795d77 100644
--- a/app/lib/common-libs/rawer.ts
+++ b/app/lib/common-libs/rawer.ts
@@ -11,101 +11,104 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
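Note that randomPick above draws from a fixed 0..9 range and clamps at 0, which skews picks toward the head of the list whenever the pool is not exactly ten elements. If uniform sampling were ever wanted instead, the conventional form would be:

export const uniformRandomPick = <T>(elements: T[], max: number): T[] => {
  const chosen: T[] = [];
  for (let i = 0; i < max && elements.length > 0; i++) {
    const randIndex = Math.floor(Math.random() * elements.length); // uniform over what remains
    chosen.push(elements[randIndex]);
    elements.splice(randIndex, 1);
  }
  return chosen;
};
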
-import {dos2unix} from "./dos2unix" -import {PeerDTO} from "../dto/PeerDTO" -import {IdentityDTO} from "../dto/IdentityDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {TransactionDTO} from "../dto/TransactionDTO" -import {BlockDTO} from "../dto/BlockDTO" +import { dos2unix } from "./dos2unix"; +import { PeerDTO } from "../dto/PeerDTO"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { BlockDTO } from "../dto/BlockDTO"; const DOCUMENTS_VERSION = 10; -const SIGNED = false -const UNSIGNED = true +const SIGNED = false; +const UNSIGNED = true; -export const getOfficialIdentity = (json:any, withSig = true) => { - const dto = IdentityDTO.fromJSONObject(json) +export const getOfficialIdentity = (json: any, withSig = true) => { + const dto = IdentityDTO.fromJSONObject(json); if (withSig !== false) { - return dto.getRawSigned() + return dto.getRawSigned(); } else { - return dto.rawWithoutSig() + return dto.rawWithoutSig(); } -} - -export const getOfficialCertification = (json:{ - version?:number - currency:string - issuer:string - idty_issuer:string - idty_uid:string - idty_buid:string - idty_sig:string - buid:string - sig?:string +}; + +export const getOfficialCertification = (json: { + version?: number; + currency: string; + issuer: string; + idty_issuer: string; + idty_uid: string; + idty_buid: string; + idty_sig: string; + buid: string; + sig?: string; }) => { - let raw = getNormalHeader('Certification', json); - raw += "IdtyIssuer: " + json.idty_issuer + '\n'; - raw += "IdtyUniqueID: " + json.idty_uid + '\n'; - raw += "IdtyTimestamp: " + json.idty_buid + '\n'; - raw += "IdtySignature: " + json.idty_sig + '\n'; - raw += "CertTimestamp: " + json.buid + '\n'; + let raw = getNormalHeader("Certification", json); + raw += "IdtyIssuer: " + json.idty_issuer + "\n"; + raw += "IdtyUniqueID: " + json.idty_uid + "\n"; + raw += "IdtyTimestamp: " + json.idty_buid + "\n"; + raw += "IdtySignature: " + json.idty_sig + "\n"; + raw += "CertTimestamp: " + json.buid + "\n"; if (json.sig) { - raw += json.sig + '\n'; + raw += json.sig + "\n"; } return dos2unix(raw); -} +}; -export const getOfficialRevocation = (json:any) => { - let raw = getNormalHeader('Revocation', json); - raw += "IdtyUniqueID: " + json.uid + '\n'; - raw += "IdtyTimestamp: " + json.buid + '\n'; - raw += "IdtySignature: " + json.sig + '\n'; +export const getOfficialRevocation = (json: any) => { + let raw = getNormalHeader("Revocation", json); + raw += "IdtyUniqueID: " + json.uid + "\n"; + raw += "IdtyTimestamp: " + json.buid + "\n"; + raw += "IdtySignature: " + json.sig + "\n"; if (json.revocation) { - raw += json.revocation + '\n'; + raw += json.revocation + "\n"; } return dos2unix(raw); -} +}; -export const getPeerWithoutSignature = (json:any) => { - return PeerDTO.fromJSONObject(json).getRawUnsigned() -} +export const getPeerWithoutSignature = (json: any) => { + return PeerDTO.fromJSONObject(json).getRawUnsigned(); +}; -export const getPeer = (json:any) => { - return PeerDTO.fromJSONObject(json).getRawSigned() -} +export const getPeer = (json: any) => { + return PeerDTO.fromJSONObject(json).getRawSigned(); +}; -export const getMembershipWithoutSignature = (json:any) => { - return MembershipDTO.fromJSONObject(json).getRaw() -} +export const getMembershipWithoutSignature = (json: any) => { + return MembershipDTO.fromJSONObject(json).getRaw(); +}; -export const getMembership = (json:any) => { +export const 
getMembership = (json: any) => { return dos2unix(signed(getMembershipWithoutSignature(json), json)); -} +}; -export const getBlockInnerPart = (json:any) => { - return BlockDTO.fromJSONObject(json).getRawInnerPart() -} +export const getBlockInnerPart = (json: any) => { + return BlockDTO.fromJSONObject(json).getRawInnerPart(); +}; -export const getBlockWithInnerHashAndNonce = (json:any) => { - return BlockDTO.fromJSONObject(json).getRawUnSigned() -} +export const getBlockWithInnerHashAndNonce = (json: any) => { + return BlockDTO.fromJSONObject(json).getRawUnSigned(); +}; -export const getBlockInnerHashAndNonceWithSignature = (json:any) => { - return BlockDTO.fromJSONObject(json).getSignedPartSigned() -} +export const getBlockInnerHashAndNonceWithSignature = (json: any) => { + return BlockDTO.fromJSONObject(json).getSignedPartSigned(); +}; -export const getBlock = (json:any) => { +export const getBlock = (json: any) => { return dos2unix(signed(getBlockWithInnerHashAndNonce(json), json)); -} - -export const getTransaction = (json:any) => { - return TransactionDTO.toRAW(json) -} - -function getNormalHeader(doctype:string, json:{ - version?:number - currency:string - issuer:string -}) { +}; + +export const getTransaction = (json: any) => { + return TransactionDTO.toRAW(json); +}; + +function getNormalHeader( + doctype: string, + json: { + version?: number; + currency: string; + issuer: string; + } +) { let raw = ""; raw += "Version: " + (json.version || DOCUMENTS_VERSION) + "\n"; raw += "Type: " + doctype + "\n"; @@ -114,7 +117,7 @@ function getNormalHeader(doctype:string, json:{ return raw; } -function signed(raw:string, json:any) { - raw += json.signature + '\n'; +function signed(raw: string, json: any) { + raw += json.signature + "\n"; return raw; } diff --git a/app/lib/common-libs/reduce.ts b/app/lib/common-libs/reduce.ts index 2d674e4ed585c738463f83e1b0c9b181b434287a..6c52a1355bf58b52f389af4007ddc24ffb5aa756 100644 --- a/app/lib/common-libs/reduce.ts +++ b/app/lib/common-libs/reduce.ts @@ -1,16 +1,19 @@ export function reduceConcat<T>(cumulated: T[], arr: T[]) { - return cumulated.concat(arr) + return cumulated.concat(arr); } -export type GroupResult<T> = { [k:string]: T[] } +export type GroupResult<T> = { [k: string]: T[] }; -export function reduceGroupBy<T, K extends keyof T>(arr: T[], k: K): GroupResult<T> { +export function reduceGroupBy<T, K extends keyof T>( + arr: T[], + k: K +): GroupResult<T> { return arr.reduce((cumulated: GroupResult<T>, t: T) => { - const key: string = String(t[k]) + const key: string = String(t[k]); if (!cumulated[key]) { - cumulated[key] = [] + cumulated[key] = []; } - cumulated[key].push(t) - return cumulated - }, {} as GroupResult<T>) + cumulated[key].push(t); + return cumulated; + }, {} as GroupResult<T>); } diff --git a/app/lib/common-libs/timeout-promise.ts b/app/lib/common-libs/timeout-promise.ts index 57c6e3daad65f7e3296b47bff61abe660fcb9368..eab7d91f93ab0e2efe82905358e71b44d1b10694 100644 --- a/app/lib/common-libs/timeout-promise.ts +++ b/app/lib/common-libs/timeout-promise.ts @@ -13,12 +13,15 @@ export function newRejectTimeoutPromise(timeout: number) { return new Promise((res, rej) => { - setTimeout(rej, timeout) - }) + setTimeout(rej, timeout); + }); } -export function newResolveTimeoutPromise<T>(timeout: number, value: T): Promise<T> { - return new Promise(res => { - setTimeout(() => res(value), timeout) - }) +export function newResolveTimeoutPromise<T>( + timeout: number, + value: T +): Promise<T> { + return new Promise((res) => { + setTimeout(() 
=> res(value), timeout); + }); } diff --git a/app/lib/common-libs/txunlock.ts b/app/lib/common-libs/txunlock.ts index bde87663a909cbd715f953032a88ece16632b3b5..8450e0263363533edfb996ab17481ea309399daa 100644 --- a/app/lib/common-libs/txunlock.ts +++ b/app/lib/common-libs/txunlock.ts @@ -11,122 +11,134 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {hashf} from "../common" -import {evalParams} from "../rules/global_rules" -import {TxSignatureResult} from "../dto/TransactionDTO" +import { hashf } from "../common"; +import { evalParams } from "../rules/global_rules"; +import { TxSignatureResult } from "../dto/TransactionDTO"; -let Parser = require("jison").Parser +let Parser = require("jison").Parser; let grammar = { - "lex": { - "rules": [ - ["\\s+", "/* skip whitespace */"], - ["\\&\\&", "return 'AND';"], - ["\\|\\|", "return 'OR';"], - ["\\(", "return '(';"], - ["\\)", "return ')';"], - ["[0-9A-Za-z]{40,64}", "return 'PARAMETER';"], - ["[0-9]{1,10}", "return 'PARAMETER';"], - ["SIG", "return 'SIG';"], - ["XHX", "return 'XHX';"], - ["CLTV", "return 'CLTV';"], - ["CSV", "return 'CSV';"], - ["$", "return 'EOF';"] - ] + lex: { + rules: [ + ["\\s+", "/* skip whitespace */"], + ["\\&\\&", "return 'AND';"], + ["\\|\\|", "return 'OR';"], + ["\\(", "return '(';"], + ["\\)", "return ')';"], + ["[0-9A-Za-z]{40,64}", "return 'PARAMETER';"], + ["[0-9]{1,10}", "return 'PARAMETER';"], + ["SIG", "return 'SIG';"], + ["XHX", "return 'XHX';"], + ["CLTV", "return 'CLTV';"], + ["CSV", "return 'CSV';"], + ["$", "return 'EOF';"], + ], }, - "operators": [ - ["left", "AND", "OR"] - ], + operators: [["left", "AND", "OR"]], - "bnf": { - "expressions" :[ - [ "e EOF", "return $1;" ] - ], + bnf: { + expressions: [["e EOF", "return $1;"]], - "e" :[ - [ "e AND e", "$$ = $1 && $3;" ], - [ "e OR e", "$$ = $1 || $3;" ], - [ "SIG ( e )","$$ = yy.sig($3);"], - [ "XHX ( e )","$$ = yy.xHx($3);"], - [ "CLTV ( e )","$$ = yy.cltv($3);"], - [ "CSV ( e )","$$ = yy.csv($3);"], - [ "PARAMETER", "$$ = $1;" ], - [ "( e )", "$$ = $2;" ] - ] - } -} + e: [ + ["e AND e", "$$ = $1 && $3;"], + ["e OR e", "$$ = $1 || $3;"], + ["SIG ( e )", "$$ = yy.sig($3);"], + ["XHX ( e )", "$$ = yy.xHx($3);"], + ["CLTV ( e )", "$$ = yy.cltv($3);"], + ["CSV ( e )", "$$ = yy.csv($3);"], + ["PARAMETER", "$$ = $1;"], + ["( e )", "$$ = $2;"], + ], + }, +}; export interface UnlockMetadata { - currentTime?:number - elapsedTime?:number + currentTime?: number; + elapsedTime?: number; } -export function unlock(conditionsStr:string, unlockParams:string[], sigResult:TxSignatureResult, metadata?:UnlockMetadata): boolean|null { - - const params = evalParams(unlockParams, conditionsStr, sigResult) - let parser = new Parser(grammar) - let nbFunctions = 0 +export function unlock( + conditionsStr: string, + unlockParams: string[], + sigResult: TxSignatureResult, + metadata?: UnlockMetadata +): boolean | null { + const params = evalParams(unlockParams, conditionsStr, sigResult); + let parser = new Parser(grammar); + let nbFunctions = 0; parser.yy = { i: 0, - sig: function (pubkey:string) { + sig: function (pubkey: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - let success = false - let i = 0 + let success = false; + let i = 0; while (!success && i < params.length) { - const p = params[i] - success = p.successful && p.funcName === 'SIG' && p.parameter === pubkey - i++ + const p = params[i]; + success = + p.successful && p.funcName === "SIG" && p.parameter === 
pubkey; + i++; } - return success + return success; }, - xHx: function(hash:string) { + xHx: function (hash: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - let success = false - let i = 0 + let success = false; + let i = 0; while (!success && i < params.length) { - const p = params[i] - success = p.successful && p.funcName === 'XHX' && hashf(p.parameter) === hash - i++ + const p = params[i]; + success = + p.successful && p.funcName === "XHX" && hashf(p.parameter) === hash; + i++; } - return success + return success; }, - cltv: function(deadline:string) { + cltv: function (deadline: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - return metadata && metadata.currentTime && metadata.currentTime >= parseInt(deadline) + return ( + metadata && + metadata.currentTime && + metadata.currentTime >= parseInt(deadline) + ); }, - csv: function(amountToWait:string) { + csv: function (amountToWait: string) { // Counting functions - nbFunctions++ + nbFunctions++; // Make the test - return metadata && metadata.elapsedTime && metadata.elapsedTime >= parseInt(amountToWait) - } - } + return ( + metadata && + metadata.elapsedTime && + metadata.elapsedTime >= parseInt(amountToWait) + ); + }, + }; try { - const areAllValidParameters = params.reduce((success, p) => success && !!(p.successful), true) + const areAllValidParameters = params.reduce( + (success, p) => success && !!p.successful, + true + ); if (!areAllValidParameters) { - throw "All parameters must be successful" + throw "All parameters must be successful"; } - const unlocked = parser.parse(conditionsStr) + const unlocked = parser.parse(conditionsStr); if (unlockParams.length > nbFunctions) { - throw "There must be at most as much params as function calls" + throw "There must be at most as much params as function calls"; } - return unlocked - } catch(e) { - return null + return unlocked; + } catch (e) { + return null; } } -export function checkGrammar(conditionsStr:string): boolean|null { - +export function checkGrammar(conditionsStr: string): boolean | null { let parser = new Parser(grammar); parser.yy = { @@ -134,12 +146,12 @@ export function checkGrammar(conditionsStr:string): boolean|null { sig: () => true, xHx: () => true, cltv: () => true, - csv: () => true - } + csv: () => true, + }; try { - return parser.parse(conditionsStr) - } catch(e) { - return null + return parser.parse(conditionsStr); + } catch (e) { + return null; } } diff --git a/app/lib/common-libs/underscore.ts b/app/lib/common-libs/underscore.ts index 2592220474e242b23f4e9af5b59609f132c6e8eb..3f567e3d81296ec7161300d45a01d39a0fd080af 100644 --- a/app/lib/common-libs/underscore.ts +++ b/app/lib/common-libs/underscore.ts @@ -1,87 +1,102 @@ -import {Map} from "./crypto/map" +import { Map } from "./crypto/map"; -const _underscore_ = require("underscore") +const _underscore_ = require("underscore"); export interface UnderscoreClass<T> { - filter(filterFunc: (t: T) => boolean): UnderscoreClass<T> - where(props: { [k in keyof T]?: T[k] }): UnderscoreClass<T> - sortBy(sortFunc:(element:T) => number): UnderscoreClass<T> - pluck<K extends keyof T>(k:K): UnderscoreClass<T> - uniq<K extends keyof T>(isSorted?:boolean, iteratee?:(t:T) => K): UnderscoreClass<T> - value(): T[] + filter(filterFunc: (t: T) => boolean): UnderscoreClass<T>; + where(props: { [k in keyof T]?: T[k] }): UnderscoreClass<T>; + sortBy(sortFunc: (element: T) => number): UnderscoreClass<T>; + pluck<K extends keyof T>(k: K): UnderscoreClass<T>; + uniq<K 
extends keyof T>( + isSorted?: boolean, + iteratee?: (t: T) => K + ): UnderscoreClass<T>; + value(): T[]; } export const Underscore = { - - filter: <T>(elements:T[], filterFunc: (t:T) => boolean): T[] => { - return _underscore_.filter(elements, filterFunc) + filter: <T>(elements: T[], filterFunc: (t: T) => boolean): T[] => { + return _underscore_.filter(elements, filterFunc); }, - where: <T>(elements:T[], props: { [k in keyof T]?: T[k] }): T[] => { - return _underscore_.where(elements, props) + where: <T>(elements: T[], props: { [k in keyof T]?: T[k] }): T[] => { + return _underscore_.where(elements, props); }, - findWhere: <T>(elements:T[], props: { [k in keyof T]?: T[k] }): T|null => { - return _underscore_.findWhere(elements, props) + findWhere: <T>(elements: T[], props: { [k in keyof T]?: T[k] }): T | null => { + return _underscore_.findWhere(elements, props); }, - keys: <T>(map:T): (keyof T)[] => { - return _underscore_.keys(map) + keys: <T>(map: T): (keyof T)[] => { + return _underscore_.keys(map); }, - values: <T>(map:{ [k:string]: T }): T[] => { - return _underscore_.values(map) + values: <T>(map: { [k: string]: T }): T[] => { + return _underscore_.values(map); }, - pluck: <T, K extends keyof T>(elements:T[], k:K): T[K][] => { - return _underscore_.pluck(elements, k) + pluck: <T, K extends keyof T>(elements: T[], k: K): T[K][] => { + return _underscore_.pluck(elements, k); }, - pick: <T, K extends keyof T>(elements:T, ...k:K[]): T[K][] => { - return _underscore_.pick(elements, ...k) + pick: <T, K extends keyof T>(elements: T, ...k: K[]): T[K][] => { + return _underscore_.pick(elements, ...k); }, - omit: <T, K extends keyof T>(element:T, ...k:K[]): T[K][] => { - return _underscore_.omit(element, ...k) + omit: <T, K extends keyof T>(element: T, ...k: K[]): T[K][] => { + return _underscore_.omit(element, ...k); }, - uniq: <T, K>(elements:T[], isSorted:boolean = false, iteratee?:(t:T) => K): T[] => { - return _underscore_.uniq(elements, isSorted, iteratee) + uniq: <T, K>( + elements: T[], + isSorted: boolean = false, + iteratee?: (t: T) => K + ): T[] => { + return _underscore_.uniq(elements, isSorted, iteratee); }, - clone: <T>(t:T): T => { - return _underscore_.clone(t) + clone: <T>(t: T): T => { + return _underscore_.clone(t); }, - mapObject: <T, K extends keyof T, L extends keyof (T[K])>(t:T, cb:(k:K) => (T[K])[L]): Map<T[K][L]> => { - return _underscore_.mapObject(t, cb) + mapObject: <T, K extends keyof T, L extends keyof T[K]>( + t: T, + cb: (k: K) => T[K][L] + ): Map<T[K][L]> => { + return _underscore_.mapObject(t, cb); }, - mapObjectByProp: <T, K extends keyof T, L extends keyof (T[K])>(t:T, prop:L): Map<T[K][L]> => { - return _underscore_.mapObject(t, (o:T[K]) => o[prop]) + mapObjectByProp: <T, K extends keyof T, L extends keyof T[K]>( + t: T, + prop: L + ): Map<T[K][L]> => { + return _underscore_.mapObject(t, (o: T[K]) => o[prop]); }, - sortBy: <T, K extends keyof T>(elements:T[], sortFunc:((element:T) => number|string)|K): T[] => { - return _underscore_.sortBy(elements, sortFunc) + sortBy: <T, K extends keyof T>( + elements: T[], + sortFunc: ((element: T) => number | string) | K + ): T[] => { + return _underscore_.sortBy(elements, sortFunc); }, - difference: <T>(array1:T[], array2:T[]): T[] => { - return _underscore_.difference(array1, array2) + difference: <T>(array1: T[], array2: T[]): T[] => { + return _underscore_.difference(array1, array2); }, - shuffle: <T>(elements:T[]): T[] => { - return _underscore_.shuffle(elements) + shuffle: <T>(elements: T[]): T[] => { + return 
_underscore_.shuffle(elements); }, - extend: <T, U>(t1:T, t2:U): T|U => { - return _underscore_.extend(t1, t2) + extend: <T, U>(t1: T, t2: U): T | U => { + return _underscore_.extend(t1, t2); }, - range: (count:number, end?:number): number[] => { - return _underscore_.range(count, end) + range: (count: number, end?: number): number[] => { + return _underscore_.range(count, end); }, - chain: <T>(element:T[]): UnderscoreClass<T> => { - return _underscore_.chain(element) + chain: <T>(element: T[]): UnderscoreClass<T> => { + return _underscore_.chain(element); }, -} +}; diff --git a/app/lib/common-libs/websocket.ts b/app/lib/common-libs/websocket.ts index a9589d2aba1efa73ac575be99124f706f49f17f3..bc28b44b8843af5824fa287abfc334ce3b677d2f 100644 --- a/app/lib/common-libs/websocket.ts +++ b/app/lib/common-libs/websocket.ts @@ -11,28 +11,28 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as WS from 'ws' +import * as WS from "ws"; export class WebSocket extends WS { constructor(url: string, options?: { agent: any }) { super(url, { agent: options && options.agent, - }) + }); } } export class WebSocketServer extends WS.Server { constructor(options: { - server?: any, - host?: string, - port?: number, - path?: string + server?: any; + host?: string; + port?: number; + path?: string; }) { super({ server: options.server, path: options.path, host: options.host, port: options.port, - }) + }); } } diff --git a/app/lib/common.ts b/app/lib/common.ts index 8501d067a94403799229ec5f08cd397de5e7a705..db15a6444e6d6c428eeaf84d803ebf6d541fb583 100644 --- a/app/lib/common.ts +++ b/app/lib/common.ts @@ -11,8 +11,8 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {sha256} from 'duniteroxyde' +import { sha256 } from "duniteroxyde"; -export const hashf = function hashf(str:string) { - return sha256(str) -} +export const hashf = function hashf(str: string) { + return sha256(str); +}; diff --git a/app/lib/common/Tristamp.ts b/app/lib/common/Tristamp.ts index 6c3232a66b55197c7c03ce3f49b11a17bbd92015..9b3d7f807909198b66dac976da185c99d33c5e77 100644 --- a/app/lib/common/Tristamp.ts +++ b/app/lib/common/Tristamp.ts @@ -12,7 +12,7 @@ // GNU Affero General Public License for more details. export interface Tristamp { - number: number - hash: string - medianTime: number + number: number; + hash: string; + medianTime: number; } diff --git a/app/lib/common/package.ts b/app/lib/common/package.ts index 2fdfc677f7e3872593e1c86046a66b66279bd6b3..19fe8d339bf974ef2245d4b3c18ef6f3b4621b88 100644 --- a/app/lib/common/package.ts +++ b/app/lib/common/package.ts @@ -11,25 +11,23 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
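A usage sketch for the typed Underscore wrapper above, with toy data (the underlying calls are plain underscore.js, the types only constrain the call sites):

import { Underscore } from "./underscore";

interface Peer { pubkey: string; blockNumber: number }
const peers: Peer[] = [
  { pubkey: "AAA", blockNumber: 12 },
  { pubkey: "BBB", blockNumber: 7 },
  { pubkey: "CCC", blockNumber: 7 },
];

const byHeightDesc = Underscore.sortBy(peers, (p) => -p.blockNumber); // AAA first
const pubkeys = Underscore.pluck(peers, "pubkey"); // ["AAA", "BBB", "CCC"]
const sameHeight = Underscore.chain(peers).where({ blockNumber: 7 }).value(); // BBB, CCC
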
-
 export class Package {
-
-  private json:{ version:string }
+  private json: { version: string };

   private constructor() {
-    this.json = require('../../../package.json')
+    this.json = require("../../../package.json");
   }

   get version() {
-    return this.json.version
+    return this.json.version;
   }

-  private static instance:Package
+  private static instance: Package;

   static getInstance() {
     if (!Package.instance) {
-      Package.instance = new Package()
+      Package.instance = new Package();
     }
-    return Package.instance
+    return Package.instance;
   }
-}
\ No newline at end of file
+}
diff --git a/app/lib/computation/BlockchainContext.ts b/app/lib/computation/BlockchainContext.ts
index 899b4b266336e9763a6ec2bc0f87654c3b71af71..3348fa911860bc0c8b96a0e906ea410f893e1062 100644
--- a/app/lib/computation/BlockchainContext.ts
+++ b/app/lib/computation/BlockchainContext.ts
@@ -11,33 +11,32 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.

-import {BlockDTO} from "../dto/BlockDTO"
-import {DuniterBlockchain} from "../blockchain/DuniterBlockchain"
-import {DBHead} from "../db/DBHead"
-import {FileDAL} from "../dal/fileDAL"
-import {DBBlock} from "../db/DBBlock"
-import {Underscore} from "../common-libs/underscore"
-import {DataErrors} from "../common-libs/errors"
+import { BlockDTO } from "../dto/BlockDTO";
+import { DuniterBlockchain } from "../blockchain/DuniterBlockchain";
+import { DBHead } from "../db/DBHead";
+import { FileDAL } from "../dal/fileDAL";
+import { DBBlock } from "../db/DBBlock";
+import { Underscore } from "../common-libs/underscore";
+import { DataErrors } from "../common-libs/errors";

-const indexer = require('../indexer').Indexer
-const constants = require('../constants');
+const indexer = require("../indexer").Indexer;
+const constants = require("../constants");

 export class BlockchainContext {
-
-  private conf:any
-  private dal:FileDAL
-  private logger:any
+  private conf: any;
+  private dal: FileDAL;
+  private logger: any;

   /**
    * The virtual next HEAD. Computed each time a new block is added, because a lot of HEAD variables are deterministic
    * and can be computed once, just after a block is added for later controls.
    */
-  private vHEAD:any
+  private vHEAD: any;

   /**
    * The currently written HEAD, aka. HEAD_1 relatively to incoming HEAD.
*/ - private vHEAD_1:any + private vHEAD_1: any; private HEADrefreshed: Promise<void> = Promise.resolve(); @@ -58,13 +57,18 @@ export class BlockchainContext { powMin: this.conf.powMin || 0, powZeros: 0, powRemainder: 0, - avgBlockSize: 0 + avgBlockSize: 0, }; } else { block = { version: this.vHEAD_1.version }; } - this.vHEAD = await indexer.completeGlobalScope(BlockDTO.fromJSONObject(block), this.conf, [], this.dal); - })() + this.vHEAD = await indexer.completeGlobalScope( + BlockDTO.fromJSONObject(block), + this.conf, + [], + this.dal + ); + })(); return this.HEADrefreshed; } @@ -92,7 +96,7 @@ export class BlockchainContext { if (!this.vHEAD) { await this.refreshHead(); } - return this.vHEAD_1 + return this.vHEAD_1; } /** @@ -101,71 +105,112 @@ export class BlockchainContext { */ async getIssuerPersonalizedDifficulty(issuer: string): Promise<any> { const local_vHEAD = await this.getvHeadCopy({ issuer }); - await indexer.preparePersonalizedPoW(local_vHEAD, this.vHEAD_1, (n:number, m:number, p:string = "") => this.dal.range(n,m,p), this.conf) + await indexer.preparePersonalizedPoW( + local_vHEAD, + this.vHEAD_1, + (n: number, m: number, p: string = "") => this.dal.range(n, m, p), + this.conf + ); return local_vHEAD.issuerDiff; } setConfDAL(newConf: any, newDAL: any): void { this.dal = newDAL; this.conf = newConf; - this.logger = require('../logger').NewLogger(this.dal.profile); + this.logger = require("../logger").NewLogger(this.dal.profile); } - async checkBlock(block: BlockDTO, withPoWAndSignature:boolean): Promise<any> { - return DuniterBlockchain.checkBlock(block, withPoWAndSignature, this.conf, this.dal) + async checkBlock( + block: BlockDTO, + withPoWAndSignature: boolean + ): Promise<any> { + return DuniterBlockchain.checkBlock( + block, + withPoWAndSignature, + this.conf, + this.dal + ); } - private async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null, trim: boolean): Promise<BlockDTO> { - const block = await DuniterBlockchain.pushTheBlock(obj, index, HEAD, this.conf, this.dal, this.logger, trim) - this.vHEAD_1 = this.vHEAD = null - return block + private async addBlock( + obj: BlockDTO, + index: any = null, + HEAD: DBHead | null = null, + trim: boolean + ): Promise<BlockDTO> { + const block = await DuniterBlockchain.pushTheBlock( + obj, + index, + HEAD, + this.conf, + this.dal, + this.logger, + trim + ); + this.vHEAD_1 = this.vHEAD = null; + return block; } - async addSideBlock(obj:BlockDTO): Promise<BlockDTO> { - const dbb = await DuniterBlockchain.pushSideBlock(obj, this.dal, this.logger) - return dbb.toBlockDTO() + async addSideBlock(obj: BlockDTO): Promise<BlockDTO> { + const dbb = await DuniterBlockchain.pushSideBlock( + obj, + this.dal, + this.logger + ); + return dbb.toBlockDTO(); } async revertCurrentBlock(): Promise<DBBlock> { const head_1 = await this.dal.bindexDAL.head(1); - this.logger.debug('Reverting block #%s...', head_1.number); - const block = await this.dal.getAbsoluteValidBlockInForkWindow(head_1.number, head_1.hash) + this.logger.debug("Reverting block #%s...", head_1.number); + const block = await this.dal.getAbsoluteValidBlockInForkWindow( + head_1.number, + head_1.hash + ); if (!block) { - throw DataErrors[DataErrors.BLOCK_TO_REVERT_NOT_FOUND] + throw DataErrors[DataErrors.BLOCK_TO_REVERT_NOT_FOUND]; } - await DuniterBlockchain.revertBlock(head_1.number, head_1.hash, this.dal, block) + await DuniterBlockchain.revertBlock( + head_1.number, + head_1.hash, + this.dal, + block + ); // Invalidates the head, since it has changed. 
await this.refreshHead(); - return block + return block; } async revertCurrentHead() { const head_1 = await this.dal.bindexDAL.head(1); - this.logger.debug('Reverting HEAD~1... (b#%s)', head_1.number); - await DuniterBlockchain.revertBlock(head_1.number, head_1.hash, this.dal) + this.logger.debug("Reverting HEAD~1... (b#%s)", head_1.number); + await DuniterBlockchain.revertBlock(head_1.number, head_1.hash, this.dal); // Invalidates the head, since it has changed. await this.refreshHead(); } async applyNextAvailableFork(): Promise<any> { const current = await this.current(); - this.logger.debug('Find next potential block #%s...', current.number + 1); + this.logger.debug("Find next potential block #%s...", current.number + 1); const forks = await this.dal.getForkBlocksFollowing(current); if (!forks.length) { throw constants.ERRORS.NO_POTENTIAL_FORK_AS_NEXT; } const block = forks[0]; - await this.checkAndAddBlock(BlockDTO.fromJSONObject(block)) - this.logger.debug('Applied block #%s', block.number); + await this.checkAndAddBlock(BlockDTO.fromJSONObject(block)); + this.logger.debug("Applied block #%s", block.number); } - async checkAndAddBlock(block:BlockDTO, trim = true) { - const { index, HEAD } = await this.checkBlock(block, constants.WITH_SIGNATURES_AND_POW); + async checkAndAddBlock(block: BlockDTO, trim = true) { + const { index, HEAD } = await this.checkBlock( + block, + constants.WITH_SIGNATURES_AND_POW + ); return await this.addBlock(block, index, HEAD, trim); } current(): Promise<any> { - return this.dal.getCurrentBlockOrNull() + return this.dal.getCurrentBlockOrNull(); } async checkHaveEnoughLinks(target: string, newLinks: any): Promise<any> { @@ -175,7 +220,15 @@ export class BlockchainContext { count += newLinks[target].length; } if (count < this.conf.sigQty) { - throw 'Key ' + target + ' does not have enough links (' + count + '/' + this.conf.sigQty + ')'; + throw ( + "Key " + + target + + " does not have enough links (" + + count + + "/" + + this.conf.sigQty + + ")" + ); } } } diff --git a/app/lib/constants.ts b/app/lib/constants.ts index ac93b5935dc58715c9b7f83585a7925ae11d2728..fc3d9a200082bb5ef05fa086d4df0422eb1d3c8f 100644 --- a/app/lib/constants.ts +++ b/app/lib/constants.ts @@ -12,82 +12,192 @@ // GNU Affero General Public License for more details. 
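The rule enforced by checkHaveEnoughLinks above, in isolation: a key needs at least conf.sigQty certifications, counting already-written links plus those carried by the incoming block. A hypothetical standalone check:

function hasEnoughLinks(existing: number, incoming: number, sigQty: number): boolean {
  return existing + incoming >= sigQty;
}

console.log(hasEnoughLinks(3, 1, 5)); // false -> "Key ... does not have enough links (4/5)"
console.log(hasEnoughLinks(4, 1, 5)); // true
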
"use strict"; -import {CommonConstants} from "./common-libs/constants" -import {OtherConstants} from "./other_constants" -import {ProverConstants} from '../modules/prover/lib/constants'; +import { CommonConstants } from "./common-libs/constants"; +import { OtherConstants } from "./other_constants"; +import { ProverConstants } from "../modules/prover/lib/constants"; -const UDID2 = "udid2;c;([A-Z-]*);([A-Z-]*);(\\d{4}-\\d{2}-\\d{2});(e\\+\\d{2}\\.\\d{2}(\\+|-)\\d{3}\\.\\d{2});(\\d+)(;?)"; -const PUBKEY = CommonConstants.FORMATS.PUBKEY -const TIMESTAMP = CommonConstants.FORMATS.TIMESTAMP +const UDID2 = + "udid2;c;([A-Z-]*);([A-Z-]*);(\\d{4}-\\d{2}-\\d{2});(e\\+\\d{2}\\.\\d{2}(\\+|-)\\d{3}\\.\\d{2});(\\d+)(;?)"; +const PUBKEY = CommonConstants.FORMATS.PUBKEY; +const TIMESTAMP = CommonConstants.FORMATS.TIMESTAMP; module.exports = { - TIME_TO_TURN_ON_BRG_107: 1498860000, ERROR: { - PEER: { - UNKNOWN_REFERENCE_BLOCK: 'Unknown reference block of peer' + UNKNOWN_REFERENCE_BLOCK: "Unknown reference block of peer", }, BLOCK: { - NO_CURRENT_BLOCK: 'No current block' - } + NO_CURRENT_BLOCK: "No current block", + }, }, ERRORS: { - // Technical errors - UNKNOWN: { httpCode: 500, uerr: { ucode: 1001, message: "An unknown error occured" }}, - UNHANDLED: { httpCode: 500, uerr: { ucode: 1002, message: "An unhandled error occured" }}, - SIGNATURE_DOES_NOT_MATCH: { httpCode: 400, uerr: { ucode: 1003, message: "Signature does not match" }}, - ALREADY_UP_TO_DATE: { httpCode: 400, uerr: { ucode: 1004, message: "Already up-to-date" }}, - WRONG_DOCUMENT: CommonConstants.ERRORS.WRONG_DOCUMENT, - SANDBOX_FOR_IDENTITY_IS_FULL: { httpCode: 503, uerr: { ucode: 1007, message: "The identities' sandbox is full. Please retry with another document or retry later." }}, - SANDBOX_FOR_CERT_IS_FULL: { httpCode: 503, uerr: { ucode: 1008, message: "The certifications' sandbox is full. Please retry with another document or retry later." }}, - SANDBOX_FOR_MEMERSHIP_IS_FULL: { httpCode: 503, uerr: { ucode: 1009, message: "The memberships' sandbox is full. Please retry with another document or retry later." }}, - SANDBOX_FOR_TRANSACTION_IS_FULL: { httpCode: 503, uerr: { ucode: 1010, message: "The transactions' sandbox is full. Please retry with another document or retry later." }}, - NO_POTENTIAL_FORK_AS_NEXT: { httpCode: 503, uerr: { ucode: 1011, message: "No fork block exists in the database as a potential next block." }}, - INCONSISTENT_DB_MULTI_TXS_SAME_HASH: { httpCode: 503, uerr: { ucode: 1012, message: "Several transactions written with the same hash." }}, - CLI_CALLERR_RESET: { httpCode: 503, uerr: { ucode: 1013, message: "Bad command: usage is `reset config`, `reset data`, `reset peers`, `reset stats` or `reset all`" }}, - CLI_CALLERR_CONFIG: { httpCode: 503, uerr: { ucode: 1014, message: "Bad command: usage is `config`." }}, - CLI_CALLERR_WS2P: { httpCode: 503, uerr: { ucode: 1014, message: "Bad command: usage is `ws2p [subcmd]`." }}, + UNKNOWN: { + httpCode: 500, + uerr: { ucode: 1001, message: "An unknown error occured" }, + }, + UNHANDLED: { + httpCode: 500, + uerr: { ucode: 1002, message: "An unhandled error occured" }, + }, + SIGNATURE_DOES_NOT_MATCH: { + httpCode: 400, + uerr: { ucode: 1003, message: "Signature does not match" }, + }, + ALREADY_UP_TO_DATE: { + httpCode: 400, + uerr: { ucode: 1004, message: "Already up-to-date" }, + }, + WRONG_DOCUMENT: CommonConstants.ERRORS.WRONG_DOCUMENT, + SANDBOX_FOR_IDENTITY_IS_FULL: { + httpCode: 503, + uerr: { + ucode: 1007, + message: + "The identities' sandbox is full. 
Please retry with another document or retry later.", + }, + }, + SANDBOX_FOR_CERT_IS_FULL: { + httpCode: 503, + uerr: { + ucode: 1008, + message: + "The certifications' sandbox is full. Please retry with another document or retry later.", + }, + }, + SANDBOX_FOR_MEMERSHIP_IS_FULL: { + httpCode: 503, + uerr: { + ucode: 1009, + message: + "The memberships' sandbox is full. Please retry with another document or retry later.", + }, + }, + SANDBOX_FOR_TRANSACTION_IS_FULL: { + httpCode: 503, + uerr: { + ucode: 1010, + message: + "The transactions' sandbox is full. Please retry with another document or retry later.", + }, + }, + NO_POTENTIAL_FORK_AS_NEXT: { + httpCode: 503, + uerr: { + ucode: 1011, + message: + "No fork block exists in the database as a potential next block.", + }, + }, + INCONSISTENT_DB_MULTI_TXS_SAME_HASH: { + httpCode: 503, + uerr: { + ucode: 1012, + message: "Several transactions written with the same hash.", + }, + }, + CLI_CALLERR_RESET: { + httpCode: 503, + uerr: { + ucode: 1013, + message: + "Bad command: usage is `reset config`, `reset data`, `reset peers`, `reset stats` or `reset all`", + }, + }, + CLI_CALLERR_CONFIG: { + httpCode: 503, + uerr: { ucode: 1014, message: "Bad command: usage is `config`." }, + }, + CLI_CALLERR_WS2P: { + httpCode: 503, + uerr: { ucode: 1014, message: "Bad command: usage is `ws2p [subcmd]`." }, + }, // Business errors - NO_MATCHING_IDENTITY: { httpCode: 404, uerr: { ucode: 2001, message: "No matching identity" }}, - UID_ALREADY_USED: { httpCode: 400, uerr: { ucode: 2002, message: "UID already used in the blockchain" }}, - PUBKEY_ALREADY_USED: { httpCode: 400, uerr: { ucode: 2003, message: "Pubkey already used in the blockchain" }}, - NO_MEMBER_MATCHING_PUB_OR_UID: { httpCode: 404, uerr: { ucode: 2004, message: "No member matching this pubkey or uid" }}, - WRONG_SIGNATURE_MEMBERSHIP: { httpCode: 400, uerr: { ucode: 2006, message: "wrong signature for membership" }}, - ALREADY_RECEIVED_MEMBERSHIP: { httpCode: 400, uerr: { ucode: 2007, message: "Already received membership" }}, - MEMBERSHIP_A_NON_MEMBER_CANNOT_LEAVE: { httpCode: 400, uerr: { ucode: 2008, message: "A non-member cannot leave" }}, - NOT_A_MEMBER: { httpCode: 400, uerr: { ucode: 2009, message: "Not a member" }}, - BLOCK_NOT_FOUND: { httpCode: 404, uerr: { ucode: 2011, message: "Block not found" }}, - WRONG_UNLOCKER: CommonConstants.ERRORS.WRONG_UNLOCKER, - LOCKTIME_PREVENT: CommonConstants.ERRORS.LOCKTIME_PREVENT, - SOURCE_ALREADY_CONSUMED: CommonConstants.ERRORS.SOURCE_ALREADY_CONSUMED, - WRONG_AMOUNTS: CommonConstants.ERRORS.WRONG_AMOUNTS, - WRONG_OUTPUT_BASE: CommonConstants.ERRORS.WRONG_OUTPUT_BASE, - CANNOT_ROOT_BLOCK_NO_MEMBERS: CommonConstants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS, - IDENTITY_WRONGLY_SIGNED: CommonConstants.ERRORS.IDENTITY_WRONGLY_SIGNED, - TOO_OLD_IDENTITY: CommonConstants.ERRORS.TOO_OLD_IDENTITY, - NO_IDTY_MATCHING_PUB_OR_UID: { httpCode: 404, uerr: { ucode: 2021, message: "No identity matching this pubkey or uid" }}, - NEWER_PEER_DOCUMENT_AVAILABLE: CommonConstants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE, - PEER_DOCUMENT_ALREADY_KNOWN: CommonConstants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN, - TX_INPUTS_OUTPUTS_NOT_EQUAL: CommonConstants.ERRORS.TX_INPUTS_OUTPUTS_NOT_EQUAL, - TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS: CommonConstants.ERRORS.TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS, - BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK: CommonConstants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK, - A_TRANSACTION_HAS_A_MAX_SIZE: CommonConstants.ERRORS.A_TRANSACTION_HAS_A_MAX_SIZE, - 
BLOCK_ALREADY_PROCESSED: { httpCode: 400, uerr: { ucode: 2028, message: 'Already processed' }}, - TOO_OLD_MEMBERSHIP: CommonConstants.ERRORS.TOO_OLD_MEMBERSHIP, - TX_ALREADY_PROCESSED: { httpCode: 400, uerr: { ucode: 2030, message: "Transaction already processed" }}, - A_MORE_RECENT_MEMBERSHIP_EXISTS: { httpCode: 400, uerr: { ucode: 2031, message: "A more recent membership already exists" }}, - MAXIMUM_LEN_OF_OUTPUT: CommonConstants.ERRORS.MAXIMUM_LEN_OF_OUTPUT, - MAXIMUM_LEN_OF_UNLOCK: CommonConstants.ERRORS.MAXIMUM_LEN_OF_UNLOCK + NO_MATCHING_IDENTITY: { + httpCode: 404, + uerr: { ucode: 2001, message: "No matching identity" }, + }, + UID_ALREADY_USED: { + httpCode: 400, + uerr: { ucode: 2002, message: "UID already used in the blockchain" }, + }, + PUBKEY_ALREADY_USED: { + httpCode: 400, + uerr: { ucode: 2003, message: "Pubkey already used in the blockchain" }, + }, + NO_MEMBER_MATCHING_PUB_OR_UID: { + httpCode: 404, + uerr: { ucode: 2004, message: "No member matching this pubkey or uid" }, + }, + WRONG_SIGNATURE_MEMBERSHIP: { + httpCode: 400, + uerr: { ucode: 2006, message: "wrong signature for membership" }, + }, + ALREADY_RECEIVED_MEMBERSHIP: { + httpCode: 400, + uerr: { ucode: 2007, message: "Already received membership" }, + }, + MEMBERSHIP_A_NON_MEMBER_CANNOT_LEAVE: { + httpCode: 400, + uerr: { ucode: 2008, message: "A non-member cannot leave" }, + }, + NOT_A_MEMBER: { + httpCode: 400, + uerr: { ucode: 2009, message: "Not a member" }, + }, + BLOCK_NOT_FOUND: { + httpCode: 404, + uerr: { ucode: 2011, message: "Block not found" }, + }, + WRONG_UNLOCKER: CommonConstants.ERRORS.WRONG_UNLOCKER, + LOCKTIME_PREVENT: CommonConstants.ERRORS.LOCKTIME_PREVENT, + SOURCE_ALREADY_CONSUMED: CommonConstants.ERRORS.SOURCE_ALREADY_CONSUMED, + WRONG_AMOUNTS: CommonConstants.ERRORS.WRONG_AMOUNTS, + WRONG_OUTPUT_BASE: CommonConstants.ERRORS.WRONG_OUTPUT_BASE, + CANNOT_ROOT_BLOCK_NO_MEMBERS: + CommonConstants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS, + IDENTITY_WRONGLY_SIGNED: CommonConstants.ERRORS.IDENTITY_WRONGLY_SIGNED, + TOO_OLD_IDENTITY: CommonConstants.ERRORS.TOO_OLD_IDENTITY, + NO_IDTY_MATCHING_PUB_OR_UID: { + httpCode: 404, + uerr: { ucode: 2021, message: "No identity matching this pubkey or uid" }, + }, + NEWER_PEER_DOCUMENT_AVAILABLE: + CommonConstants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE, + PEER_DOCUMENT_ALREADY_KNOWN: + CommonConstants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN, + TX_INPUTS_OUTPUTS_NOT_EQUAL: + CommonConstants.ERRORS.TX_INPUTS_OUTPUTS_NOT_EQUAL, + TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS: + CommonConstants.ERRORS.TX_OUTPUT_SUM_NOT_EQUALS_PREV_DELTAS, + BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK: + CommonConstants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK, + A_TRANSACTION_HAS_A_MAX_SIZE: + CommonConstants.ERRORS.A_TRANSACTION_HAS_A_MAX_SIZE, + BLOCK_ALREADY_PROCESSED: { + httpCode: 400, + uerr: { ucode: 2028, message: "Already processed" }, + }, + TOO_OLD_MEMBERSHIP: CommonConstants.ERRORS.TOO_OLD_MEMBERSHIP, + TX_ALREADY_PROCESSED: { + httpCode: 400, + uerr: { ucode: 2030, message: "Transaction already processed" }, + }, + A_MORE_RECENT_MEMBERSHIP_EXISTS: { + httpCode: 400, + uerr: { ucode: 2031, message: "A more recent membership already exists" }, + }, + MAXIMUM_LEN_OF_OUTPUT: CommonConstants.ERRORS.MAXIMUM_LEN_OF_OUTPUT, + MAXIMUM_LEN_OF_UNLOCK: CommonConstants.ERRORS.MAXIMUM_LEN_OF_UNLOCK, }, DEBUG: { - LONG_DAL_PROCESS: 50 + LONG_DAL_PROCESS: 50, }, BMA_REGEXP: CommonConstants.BMA_REGEXP, @@ -118,21 +228,23 @@ module.exports = { MAX_CONCURRENT_POST: 3, DEFAULT_TIMEOUT: 10 * 1000, // 10 seconds 
SYNC: { - MAX: 20 + MAX: 20, }, STATUS_INTERVAL: { UPDATE: 6 * 12, // Every X blocks - MAX: 20 // MAX Y blocks + MAX: 20, // MAX Y blocks }, - ONION_ENDPOINT_REGEX: new RegExp('(?:https?:\/\/)?(?:www)?(\S*?\.onion)(\/[-\w]*)*') + ONION_ENDPOINT_REGEX: new RegExp( + "(?:https?://)?(?:www)?(S*?.onion)(/[-w]*)*" + ), }, PROOF_OF_WORK: { EVALUATION: 1000, UPPER_BOUND: CommonConstants.PROOF_OF_WORK.UPPER_BOUND.slice(), DEFAULT: { CPU: ProverConstants.DEFAULT_CPU, - PREFIX: ProverConstants.DEFAULT_PEER_ID - } + PREFIX: ProverConstants.DEFAULT_PEER_ID, + }, }, DEFAULT_CURRENCY_NAME: "no_currency", @@ -161,14 +273,14 @@ module.exports = { DTDIFFEVAL: 10, MEDIANTIMEBLOCKS: 20, IDTYWINDOW: 3600 * 24 * 7, // a week - MSWINDOW: 3600 * 24 * 7 // a week + MSWINDOW: 3600 * 24 * 7, // a week }, - DSEN_P: 1.2 // dSen proportional factor + DSEN_P: 1.2, // dSen proportional factor }, BRANCHES: { - DEFAULT_WINDOW_SIZE: 100 + DEFAULT_WINDOW_SIZE: 100, }, INVALIDATE_CORE_CACHE: true, @@ -186,9 +298,9 @@ module.exports = { SANDBOX_SIZE_MEMBERSHIPS: 5000, // With `logs` command, the number of tail lines to show - NB_INITIAL_LINES_TO_SHOW: 100 + NB_INITIAL_LINES_TO_SHOW: 100, }; -function exact (regexpContent:string) { +function exact(regexpContent: string) { return new RegExp("^" + regexpContent + "$"); } diff --git a/app/lib/dal/drivers/LevelDBDriver.ts b/app/lib/dal/drivers/LevelDBDriver.ts index c1d5f9ddb7a64116b09b4b6a726364861dcdd9f1..7959c3f1e632a97cb6ea8e069ace16587cb8816f 100644 --- a/app/lib/dal/drivers/LevelDBDriver.ts +++ b/app/lib/dal/drivers/LevelDBDriver.ts @@ -11,32 +11,30 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as levelup from 'levelup' -import {LevelUp} from 'levelup' -import {AbstractLevelDOWN, ErrorCallback} from 'abstract-leveldown' -import * as leveldown from 'leveldown' -import * as memdown from 'memdown' +import * as levelup from "levelup"; +import { LevelUp } from "levelup"; +import { AbstractLevelDOWN, ErrorCallback } from "abstract-leveldown"; +import * as leveldown from "leveldown"; +import * as memdown from "memdown"; export const LevelDBDriver = { - newMemoryInstance: (): Promise<LevelUp> => { - const impl: any = memdown.default() + const impl: any = memdown.default(); return new Promise((res, rej) => { const db: LevelUp = levelup.default(impl, undefined, (err: Error) => { - if (err) return rej(err) - res(db) - }) - }) + if (err) return rej(err); + res(db); + }); + }); }, newFileInstance: (path: string): Promise<LevelUp> => { - const impl: any = leveldown.default(path) + const impl: any = leveldown.default(path); return new Promise((res, rej) => { const db: LevelUp = levelup.default(impl, undefined, (err: Error) => { - if (err) return rej(err) - res(db) - }) - }) - } - -} + if (err) return rej(err); + res(db); + }); + }); + }, +}; diff --git a/app/lib/dal/drivers/SQLiteDriver.ts b/app/lib/dal/drivers/SQLiteDriver.ts index 1b2c3896d7867e8ddee0b304e2684d13980f0e33..5a8b0bb068129563c43142329b44216a8d26838c 100644 --- a/app/lib/dal/drivers/SQLiteDriver.ts +++ b/app/lib/dal/drivers/SQLiteDriver.ts @@ -11,110 +11,126 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
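Reviewer note: SQLiteDriver below memoizes its connection — `getDB()` stores a promise in `dbPromise`, so concurrent callers share a single sqlite3 handle, and `closeConnection()` resets the cache so a later call reopens the file. A minimal usage sketch under that contract (the `:memory:` path and the query are illustrative only, not part of this diff):

    import { SQLiteDriver } from "./app/lib/dal/drivers/SQLiteDriver";

    async function demo() {
      const driver = new SQLiteDriver(":memory:"); // sqlite3's in-memory path
      // The first call lazily opens the database and caches the promise;
      // every later call reuses the same underlying connection.
      const rows = await driver.executeAll("SELECT 1 AS one", []);
      console.log(rows); // [ { one: 1 } ]
      await driver.closeConnection(); // clears dbPromise; next getDB() reopens
    }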

-import {OtherConstants} from "../../other_constants"
-import {RealFS} from "../../system/directory"
+import { OtherConstants } from "../../other_constants";
+import { RealFS } from "../../system/directory";

-const sqlite3 = require("sqlite3").verbose()
+const sqlite3 = require("sqlite3").verbose();

-const MEMORY_PATH = ':memory:'
+const MEMORY_PATH = ":memory:";

 export class SQLiteDriver {
+  private logger: any;
+  private dbPromise: Promise<any> | null = null;

-  private logger:any
-  private dbPromise: Promise<any> | null = null
-
-  constructor(
-    private path:string
-  ) {
-    this.logger = require('../../logger').NewLogger('driver')
+  constructor(private path: string) {
+    this.logger = require("../../logger").NewLogger("driver");
   }

   getDB(): Promise<any> {
     if (!this.dbPromise) {
       this.dbPromise = (async () => {
-        this.logger.debug('Opening SQLite database "%s"...', this.path)
-        let sqlite = new sqlite3.Database(this.path)
-        await new Promise<any>((resolve) => sqlite.once('open', resolve))
+        this.logger.debug('Opening SQLite database "%s"...', this.path);
+        let sqlite = new sqlite3.Database(this.path);
+        await new Promise<any>((resolve) => sqlite.once("open", resolve));
         // Database is opened
         if (OtherConstants.SQL_TRACES) {
-          sqlite.on('trace', (trace:any) => {
-            this.logger.trace(trace)
-          })
+          sqlite.on("trace", (trace: any) => {
+            this.logger.trace(trace);
+          });
         }
         // Force case sensitiveness on LIKE operator
-        const sql = 'PRAGMA case_sensitive_like=ON'
-        await new Promise<any>((resolve, reject) => sqlite.exec(sql, (err:any) => {
-          if (err) return reject(Error('SQL error "' + err.message + '" on INIT queries "' + sql + '"'))
-          return resolve()
-        }))
+        const sql = "PRAGMA case_sensitive_like=ON";
+        await new Promise<any>((resolve, reject) =>
+          sqlite.exec(sql, (err: any) => {
+            if (err)
+              return reject(
+                Error(
+                  'SQL error "' +
+                    err.message +
+                    '" on INIT queries "' +
+                    sql +
+                    '"'
+                )
+              );
+            return resolve();
+          })
+        );
         // Database is ready
-        return sqlite
-      })()
+        return sqlite;
+      })();
     }
-    return this.dbPromise
+    return this.dbPromise;
   }

-  async executeAll(sql:string, params:any[]): Promise<any[]> {
-    const db = await this.getDB()
-    return new Promise<any>((resolve, reject) => db.all(sql, params, (err:any, rows:any[]) => {
-      if (err) {
-        return reject(Error('SQL error "' + err.message + '" on query "' + sql + '"'))
-      } else {
-        return resolve(rows)
-      }
-    }))
+  async executeAll(sql: string, params: any[]): Promise<any[]> {
+    const db = await this.getDB();
+    return new Promise<any>((resolve, reject) =>
+      db.all(sql, params, (err: any, rows: any[]) => {
+        if (err) {
+          return reject(
+            Error('SQL error "' + err.message + '" on query "' + sql + '"')
+          );
+        } else {
+          return resolve(rows);
+        }
+      })
+    );
   }

-  async executeSql(sql:string): Promise<void> {
-    const db = await this.getDB()
-    return new Promise<void>((resolve, reject) => db.exec(sql, (err:any) => {
-      if (err) {
-        return reject(Error('SQL error "' + err.message + '" on query "' + sql + '"'))
-      } else {
-        return resolve()
-      }
-    }))
+  async executeSql(sql: string): Promise<void> {
+    const db = await this.getDB();
+    return new Promise<void>((resolve, reject) =>
+      db.exec(sql, (err: any) => {
+        if (err) {
+          return reject(
+            Error('SQL error "' + err.message + '" on query "' + sql + '"')
+          );
+        } else {
+          return resolve();
+        }
+      })
+    );
   }

   async destroyDatabase(): Promise<void> {
-    this.logger.debug('Removing SQLite database...')
-    await this.closeConnection()
+    this.logger.debug("Removing SQLite database...");
+    await this.closeConnection();
     if (this.path !== MEMORY_PATH) {
-      await RealFS().fsUnlink(this.path)
+      await RealFS().fsUnlink(this.path);
     }
-    this.logger.debug('Database removed')
+    this.logger.debug("Database removed");
   }

   async closeConnection(): Promise<void> {
     if (!this.dbPromise) {
-      return
+      return;
     }
-    const db = await this.getDB()
-    if (process.platform === 'win32') {
-      db.open // For an unknown reason, we need this line.
+    const db = await this.getDB();
+    if (process.platform === "win32") {
+      db.open; // For an unknown reason, we need this line.
     }
     await new Promise((resolve, reject) => {
-      this.logger.debug('Trying to close SQLite...')
-      db.on('close', () => {
-        this.logger.info('Database closed.')
-        this.dbPromise = null
-        resolve()
-      })
-      db.on('error', (err:any) => {
-        if (err && err.message === 'SQLITE_MISUSE: Database is closed') {
-          this.dbPromise = null
-          return resolve()
+      this.logger.debug("Trying to close SQLite...");
+      db.on("close", () => {
+        this.logger.info("Database closed.");
+        this.dbPromise = null;
+        resolve();
+      });
+      db.on("error", (err: any) => {
+        if (err && err.message === "SQLITE_MISUSE: Database is closed") {
+          this.dbPromise = null;
+          return resolve();
        }
-        reject(err)
-      })
+        reject(err);
+      });
       try {
-        db.close()
+        db.close();
       } catch (e) {
-        this.logger.error(e)
-        throw e
+        this.logger.error(e);
+        throw e;
       }
-    })
+    });
   }
 }
diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts
index 8827044ed24718a27fdc94be2d949ee9e7a4a036..f846346ecbc52615a21f04f8bac2d8e42991be99 100644
--- a/app/lib/dal/fileDAL.ts
+++ b/app/lib/dal/fileDAL.ts
@@ -11,14 +11,14 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
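Reviewer note: fileDAL below repeatedly splits blockstamps of the form `NUMBER-HASH` (see `getAbsoluteBlockInForkWindowByBlockstamp` and `getAbsoluteBlockByBlockstamp`). A minimal sketch of that convention — `parseBlockstamp` is a hypothetical helper mirroring the inline parsing, not something this diff adds:

    // Hypothetical helper: a blockstamp is "NUMBER-HASH".
    function parseBlockstamp(blockstamp: string): { number: number; hash: string } {
      const sp = blockstamp.split("-");
      return { number: parseInt(sp[0]), hash: sp[1] };
    }

    parseBlockstamp("42-00AF"); // => { number: 42, hash: "00AF" }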
{IdentityForRequirements} from "../../service/BlockchainService" -import {NewLogger} from "../logger" -import {BlockchainDAO} from "./indexDAL/abstract/BlockchainDAO" -import {TxsDAO} from "./indexDAL/abstract/TxsDAO" -import {WalletDAO} from "./indexDAL/abstract/WalletDAO" -import {PeerDAO} from "./indexDAL/abstract/PeerDAO" -import {DBTx} from "../db/DBTx" -import {DBWallet} from "../db/DBWallet" -import {Tristamp} from "../common/Tristamp" -import {CFSCore} from "./fileDALs/CFSCore" -import {Underscore} from "../common-libs/underscore" -import {DBPeer} from "../db/DBPeer" -import {MonitorFlushedIndex} from "../debug/MonitorFlushedIndex" -import {cliprogram} from "../common-libs/programOptions" -import {DividendDAO, UDSource} from "./indexDAL/abstract/DividendDAO" -import {HttpSource, HttpUD} from "../../modules/bma/lib/dtos" -import {GenericDAO} from "./indexDAL/abstract/GenericDAO" -import {MonitorExecutionTime} from "../debug/MonitorExecutionTime" -import {LevelDBDividend} from "./indexDAL/leveldb/LevelDBDividend" -import {LevelDBBindex} from "./indexDAL/leveldb/LevelDBBindex" - -import {LevelUp} from 'levelup'; -import {LevelDBBlockchain} from "./indexDAL/leveldb/LevelDBBlockchain" -import {LevelDBSindex} from "./indexDAL/leveldb/LevelDBSindex" -import {SqliteTransactions} from "./indexDAL/sqlite/SqliteTransactions" -import {SqlitePeers} from "./indexDAL/sqlite/SqlitePeers" -import {LevelDBWallet} from "./indexDAL/leveldb/LevelDBWallet" -import {LevelDBCindex} from "./indexDAL/leveldb/LevelDBCindex" -import {LevelDBIindex} from "./indexDAL/leveldb/LevelDBIindex" -import {LevelDBMindex} from "./indexDAL/leveldb/LevelDBMindex" -import {ConfDAO} from "./indexDAL/abstract/ConfDAO" -import {ServerDAO} from "./server-dao" - -const readline = require('readline') -const indexer = require('../indexer').Indexer -const logger = require('../logger').NewLogger('filedal'); -const constants = require('../constants'); + SindexEntry, +} from "../indexer"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { CertDAL, DBCert } from "./sqliteDAL/CertDAL"; +import { DBBlock } from "../db/DBBlock"; +import { DBMembership, MembershipDAL } from "./sqliteDAL/MembershipDAL"; +import { MerkleDTO } from "../dto/MerkleDTO"; +import { CommonConstants } from "../common-libs/constants"; +import { PowDAL } from "./fileDALs/PowDAL"; +import { Initiable } from "./sqliteDAL/Initiable"; +import { MetaDAL } from "./sqliteDAL/MetaDAL"; +import { DataErrors } from "../common-libs/errors"; +import { BasicRevocableIdentity, IdentityDTO } from "../dto/IdentityDTO"; +import { FileSystem } from "../system/directory"; +import { Wot } from "duniteroxyde"; +import { IIndexDAO } from "./indexDAL/abstract/IIndexDAO"; +import { BIndexDAO } from "./indexDAL/abstract/BIndexDAO"; +import { MIndexDAO } from "./indexDAL/abstract/MIndexDAO"; +import { SIndexDAO } from "./indexDAL/abstract/SIndexDAO"; +import { CIndexDAO } from "./indexDAL/abstract/CIndexDAO"; +import { IdentityForRequirements } from "../../service/BlockchainService"; +import { NewLogger } from "../logger"; +import { BlockchainDAO } from "./indexDAL/abstract/BlockchainDAO"; +import { TxsDAO } from "./indexDAL/abstract/TxsDAO"; +import { WalletDAO } from "./indexDAL/abstract/WalletDAO"; +import { PeerDAO } from "./indexDAL/abstract/PeerDAO"; +import { DBTx } from "../db/DBTx"; +import { DBWallet } from "../db/DBWallet"; +import { Tristamp } from "../common/Tristamp"; +import { CFSCore } from "./fileDALs/CFSCore"; +import { Underscore } from 
"../common-libs/underscore"; +import { DBPeer } from "../db/DBPeer"; +import { MonitorFlushedIndex } from "../debug/MonitorFlushedIndex"; +import { cliprogram } from "../common-libs/programOptions"; +import { DividendDAO, UDSource } from "./indexDAL/abstract/DividendDAO"; +import { HttpSource, HttpUD } from "../../modules/bma/lib/dtos"; +import { GenericDAO } from "./indexDAL/abstract/GenericDAO"; +import { MonitorExecutionTime } from "../debug/MonitorExecutionTime"; +import { LevelDBDividend } from "./indexDAL/leveldb/LevelDBDividend"; +import { LevelDBBindex } from "./indexDAL/leveldb/LevelDBBindex"; + +import { LevelUp } from "levelup"; +import { LevelDBBlockchain } from "./indexDAL/leveldb/LevelDBBlockchain"; +import { LevelDBSindex } from "./indexDAL/leveldb/LevelDBSindex"; +import { SqliteTransactions } from "./indexDAL/sqlite/SqliteTransactions"; +import { SqlitePeers } from "./indexDAL/sqlite/SqlitePeers"; +import { LevelDBWallet } from "./indexDAL/leveldb/LevelDBWallet"; +import { LevelDBCindex } from "./indexDAL/leveldb/LevelDBCindex"; +import { LevelDBIindex } from "./indexDAL/leveldb/LevelDBIindex"; +import { LevelDBMindex } from "./indexDAL/leveldb/LevelDBMindex"; +import { ConfDAO } from "./indexDAL/abstract/ConfDAO"; +import { ServerDAO } from "./server-dao"; + +const readline = require("readline"); +const indexer = require("../indexer").Indexer; +const logger = require("../logger").NewLogger("filedal"); +const constants = require("../constants"); export interface FileDALParams { - home:string - fs:FileSystem - dbf:() => SQLiteDriver - wotbf:() => Wot + home: string; + fs: FileSystem; + dbf: () => SQLiteDriver; + wotbf: () => Wot; } export interface IndexBatch { - mindex: MindexEntry[] - iindex: IindexEntry[] - sindex: SindexEntry[] - cindex: CindexEntry[] + mindex: MindexEntry[]; + iindex: IindexEntry[]; + sindex: SindexEntry[]; + cindex: CindexEntry[]; } export class FileDAL implements ServerDAO { - - rootPath:string - fs: FileSystem - sqliteDriver:SQLiteDriver - wotb:Wot - profile:string + rootPath: string; + fs: FileSystem; + sqliteDriver: SQLiteDriver; + wotb: Wot; + profile: string; // Simple file accessors - powDAL:PowDAL - coreFS:CFSCore - confDAL:ConfDAO + powDAL: PowDAL; + coreFS: CFSCore; + confDAL: ConfDAO; // SQLite DALs - metaDAL:MetaDAL - idtyDAL:IdentityDAL - certDAL:CertDAL - msDAL:MembershipDAL + metaDAL: MetaDAL; + idtyDAL: IdentityDAL; + certDAL: CertDAL; + msDAL: MembershipDAL; // New DAO entities - blockDAL:BlockchainDAO - txsDAL:TxsDAO - peerDAL:PeerDAO - walletDAL:WalletDAO - bindexDAL:BIndexDAO - mindexDAL:MIndexDAO - iindexDAL:IIndexDAO - sindexDAL:SIndexDAO - cindexDAL:CIndexDAO - dividendDAL:DividendDAO - newDals:{ [k:string]: Initiable } - private dals:(PeerDAO|WalletDAO|GenericDAO<any>)[] - - loadConfHook: (conf:ConfDTO) => Promise<void> - saveConfHook: (conf:ConfDTO) => Promise<ConfDTO> - - constructor(private params:FileDALParams, - public getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>, - public getLevelDB: (dbName: string)=> Promise<LevelUp>, - ) { - this.rootPath = params.home - this.sqliteDriver = params.dbf() - this.profile = 'DAL' - this.fs = params.fs + blockDAL: BlockchainDAO; + txsDAL: TxsDAO; + peerDAL: PeerDAO; + walletDAL: WalletDAO; + bindexDAL: BIndexDAO; + mindexDAL: MIndexDAO; + iindexDAL: IIndexDAO; + sindexDAL: SIndexDAO; + cindexDAL: CIndexDAO; + dividendDAL: DividendDAO; + newDals: { [k: string]: Initiable }; + private dals: (PeerDAO | WalletDAO | GenericDAO<any>)[]; + + loadConfHook: (conf: ConfDTO) => Promise<void>; 
+ saveConfHook: (conf: ConfDTO) => Promise<ConfDTO>; + + constructor( + private params: FileDALParams, + public getSqliteDB: (dbName: string) => Promise<SQLiteDriver>, + public getLevelDB: (dbName: string) => Promise<LevelUp> + ) { + this.rootPath = params.home; + this.sqliteDriver = params.dbf(); + this.profile = "DAL"; + this.fs = params.fs; // DALs - this.coreFS = new CFSCore(this.rootPath, params.fs) - this.powDAL = new PowDAL(this.rootPath, params.fs) - this.confDAL = new ConfDAL(this.rootPath, params.fs) - this.metaDAL = new (require('./sqliteDAL/MetaDAL').MetaDAL)(this.sqliteDriver); - this.idtyDAL = new (require('./sqliteDAL/IdentityDAL').IdentityDAL)(this.sqliteDriver); - this.certDAL = new (require('./sqliteDAL/CertDAL').CertDAL)(this.sqliteDriver); - this.msDAL = new (require('./sqliteDAL/MembershipDAL').MembershipDAL)(this.sqliteDriver); - - this.blockDAL = new LevelDBBlockchain(getLevelDB) - this.txsDAL = new SqliteTransactions(getSqliteDB) - this.peerDAL = new SqlitePeers(getSqliteDB) - this.walletDAL = new LevelDBWallet(getLevelDB) - this.bindexDAL = new LevelDBBindex(getLevelDB) - this.mindexDAL = new LevelDBMindex(getLevelDB) - this.iindexDAL = new LevelDBIindex(getLevelDB) - this.sindexDAL = new LevelDBSindex(getLevelDB) - this.cindexDAL = new LevelDBCindex(getLevelDB) - this.dividendDAL = new LevelDBDividend(getLevelDB) + this.coreFS = new CFSCore(this.rootPath, params.fs); + this.powDAL = new PowDAL(this.rootPath, params.fs); + this.confDAL = new ConfDAL(this.rootPath, params.fs); + this.metaDAL = new (require("./sqliteDAL/MetaDAL").MetaDAL)( + this.sqliteDriver + ); + this.idtyDAL = new (require("./sqliteDAL/IdentityDAL").IdentityDAL)( + this.sqliteDriver + ); + this.certDAL = new (require("./sqliteDAL/CertDAL").CertDAL)( + this.sqliteDriver + ); + this.msDAL = new (require("./sqliteDAL/MembershipDAL").MembershipDAL)( + this.sqliteDriver + ); + + this.blockDAL = new LevelDBBlockchain(getLevelDB); + this.txsDAL = new SqliteTransactions(getSqliteDB); + this.peerDAL = new SqlitePeers(getSqliteDB); + this.walletDAL = new LevelDBWallet(getLevelDB); + this.bindexDAL = new LevelDBBindex(getLevelDB); + this.mindexDAL = new LevelDBMindex(getLevelDB); + this.iindexDAL = new LevelDBIindex(getLevelDB); + this.sindexDAL = new LevelDBSindex(getLevelDB); + this.cindexDAL = new LevelDBCindex(getLevelDB); + this.dividendDAL = new LevelDBDividend(getLevelDB); this.newDals = { - 'powDAL': this.powDAL, - 'metaDAL': this.metaDAL, - 'blockDAL': this.blockDAL, - 'certDAL': this.certDAL, - 'msDAL': this.msDAL, - 'idtyDAL': this.idtyDAL, - 'txsDAL': this.txsDAL, - 'peerDAL': this.peerDAL, - 'confDAL': this.confDAL, - 'walletDAL': this.walletDAL, - 'bindexDAL': this.bindexDAL, - 'mindexDAL': this.mindexDAL, - 'iindexDAL': this.iindexDAL, - 'sindexDAL': this.sindexDAL, - 'cindexDAL': this.cindexDAL, - 'dividendDAL': this.dividendDAL, - } + powDAL: this.powDAL, + metaDAL: this.metaDAL, + blockDAL: this.blockDAL, + certDAL: this.certDAL, + msDAL: this.msDAL, + idtyDAL: this.idtyDAL, + txsDAL: this.txsDAL, + peerDAL: this.peerDAL, + confDAL: this.confDAL, + walletDAL: this.walletDAL, + bindexDAL: this.bindexDAL, + mindexDAL: this.mindexDAL, + iindexDAL: this.iindexDAL, + sindexDAL: this.sindexDAL, + cindexDAL: this.cindexDAL, + dividendDAL: this.dividendDAL, + }; } - async init(conf:ConfDTO) { - this.wotb = this.params.wotbf() + async init(conf: ConfDTO) { + this.wotb = this.params.wotbf(); this.dals = [ this.blockDAL, this.txsDAL, @@ -199,9 +207,9 @@ export class FileDAL implements ServerDAO { 
this.sindexDAL, this.cindexDAL, this.dividendDAL, - ] + ]; for (const indexDAL of this.dals) { - indexDAL.triggerInit() + indexDAL.triggerInit(); } const dalNames = Underscore.keys(this.newDals); for (const dalName of dalNames) { @@ -212,126 +220,156 @@ export class FileDAL implements ServerDAO { await this.metaDAL.upgradeDatabase(conf); // Update the maximum certifications count a member can issue into the C++ addon const currencyParams = await this.getParameters(); - if (currencyParams && currencyParams.sigStock !== undefined && currencyParams.sigStock !== null) { + if ( + currencyParams && + currencyParams.sigStock !== undefined && + currencyParams.sigStock !== null + ) { this.wotb.setMaxCert(currencyParams.sigStock); } } getDBVersion() { - return this.metaDAL.getVersion() + return this.metaDAL.getVersion(); } - writeFileOfBlock(block:DBBlock) { - return this.blockDAL.saveBlock(block) + writeFileOfBlock(block: DBBlock) { + return this.blockDAL.saveBlock(block); } - writeSideFileOfBlock(block:DBBlock) { - return this.blockDAL.saveSideBlock(block) + writeSideFileOfBlock(block: DBBlock) { + return this.blockDAL.saveSideBlock(block); } listAllPeers() { - return this.peerDAL.listAll() + return this.peerDAL.listAll(); } - async getPeer(pubkey:string) { + async getPeer(pubkey: string) { try { - return await this.peerDAL.getPeer(pubkey) + return await this.peerDAL.getPeer(pubkey); } catch (err) { - throw Error('Unknown peer ' + pubkey); + throw Error("Unknown peer " + pubkey); } } async getWS2Peers() { - return this.peerDAL.getPeersWithEndpointsLike('WS2P') + return this.peerDAL.getPeersWithEndpointsLike("WS2P"); } - getAbsoluteBlockInForkWindowByBlockstamp(blockstamp:string) { + getAbsoluteBlockInForkWindowByBlockstamp(blockstamp: string) { if (!blockstamp) throw "Blockstamp is required to find the block"; - const sp = blockstamp.split('-'); + const sp = blockstamp.split("-"); const number = parseInt(sp[0]); const hash = sp[1]; - return this.getAbsoluteBlockInForkWindow(number, hash) + return this.getAbsoluteBlockInForkWindow(number, hash); } - getAbsoluteValidBlockInForkWindowByBlockstamp(blockstamp:string) { + getAbsoluteValidBlockInForkWindowByBlockstamp(blockstamp: string) { if (!blockstamp) throw "Blockstamp is required to find the block"; - const sp = blockstamp.split('-'); + const sp = blockstamp.split("-"); const number = parseInt(sp[0]); const hash = sp[1]; - return this.getAbsoluteValidBlockInForkWindow(number, hash) + return this.getAbsoluteValidBlockInForkWindow(number, hash); } - async getBlockWeHaveItForSure(number:number): Promise<DBBlock> { - return (await this.blockDAL.getBlock(number)) as DBBlock + async getBlockWeHaveItForSure(number: number): Promise<DBBlock> { + return (await this.blockDAL.getBlock(number)) as DBBlock; } // Duniter-UI dependency - async getBlock(number: number): Promise<DBBlock|null> { - return this.getFullBlockOf(number) + async getBlock(number: number): Promise<DBBlock | null> { + return this.getFullBlockOf(number); } - async getFullBlockOf(number: number): Promise<DBBlock|null> { - return this.blockDAL.getBlock(number) + async getFullBlockOf(number: number): Promise<DBBlock | null> { + return this.blockDAL.getBlock(number); } - async getBlockstampOf(number: number): Promise<string|null> { - const block = await this.getTristampOf(number) + async getBlockstampOf(number: number): Promise<string | null> { + const block = await this.getTristampOf(number); if (block) { - return [block.number, block.hash].join('-') + return [block.number, 
block.hash].join("-"); } - return null + return null; } - async getTristampOf(number: number): Promise<Tristamp|null> { - return this.blockDAL.getBlock(number) + async getTristampOf(number: number): Promise<Tristamp | null> { + return this.blockDAL.getBlock(number); } - async existsAbsoluteBlockInForkWindow(number:number, hash:string): Promise<boolean> { - return !!(await this.getAbsoluteBlockByNumberAndHash(number, hash, true)) + async existsAbsoluteBlockInForkWindow( + number: number, + hash: string + ): Promise<boolean> { + return !!(await this.getAbsoluteBlockByNumberAndHash(number, hash, true)); } - async getAbsoluteBlockInForkWindow(number:number, hash:string): Promise<DBBlock|null> { - return this.getAbsoluteBlockByNumberAndHash(number, hash) + async getAbsoluteBlockInForkWindow( + number: number, + hash: string + ): Promise<DBBlock | null> { + return this.getAbsoluteBlockByNumberAndHash(number, hash); } - async getAbsoluteValidBlockInForkWindow(number:number, hash:string): Promise<DBBlock|null> { - const block = await this.getAbsoluteBlockByNumberAndHash(number, hash) + async getAbsoluteValidBlockInForkWindow( + number: number, + hash: string + ): Promise<DBBlock | null> { + const block = await this.getAbsoluteBlockByNumberAndHash(number, hash); if (block && !block.fork) { - return block + return block; } - return null + return null; } - async getAbsoluteBlockByNumberAndHash(number:number, hash:string, forceNumberAndHashFinding = false): Promise<DBBlock|null> { + async getAbsoluteBlockByNumberAndHash( + number: number, + hash: string, + forceNumberAndHashFinding = false + ): Promise<DBBlock | null> { if (number > 0 || forceNumberAndHashFinding) { - return await this.blockDAL.getAbsoluteBlock(number, hash) + return await this.blockDAL.getAbsoluteBlock(number, hash); } else { // Block#0 is special - return await this.blockDAL.getBlock(number) + return await this.blockDAL.getBlock(number); } } - async getAbsoluteBlockByBlockstamp(blockstamp: string): Promise<DBBlock|null> { - const sp = blockstamp.split('-') - return this.getAbsoluteBlockByNumberAndHash(parseInt(sp[0]), sp[1]) + async getAbsoluteBlockByBlockstamp( + blockstamp: string + ): Promise<DBBlock | null> { + const sp = blockstamp.split("-"); + return this.getAbsoluteBlockByNumberAndHash(parseInt(sp[0]), sp[1]); } - async existsNonChainableLink(from:string, vHEAD_1:DBHead, sigStock:number) { + async existsNonChainableLink( + from: string, + vHEAD_1: DBHead, + sigStock: number + ) { // Cert period rule const medianTime = vHEAD_1 ? 
vHEAD_1.medianTime : 0; - const linksFrom:FullCindexEntry[] = await this.cindexDAL.reducablesFrom(from) - const unchainables = Underscore.filter(linksFrom, (link:CindexEntry) => link.chainable_on > medianTime); + const linksFrom: FullCindexEntry[] = await this.cindexDAL.reducablesFrom( + from + ); + const unchainables = Underscore.filter( + linksFrom, + (link: CindexEntry) => link.chainable_on > medianTime + ); if (unchainables.length > 0) return true; // Max stock rule - let activeLinks = Underscore.filter(linksFrom, (link:CindexEntry) => !link.expired_on); + let activeLinks = Underscore.filter( + linksFrom, + (link: CindexEntry) => !link.expired_on + ); return activeLinks.length >= sigStock; } - async getCurrentBlockOrNull() { - let current:DBBlock|null = null; + let current: DBBlock | null = null; try { - current = await this.getBlockCurrent() + current = await this.getBlockCurrent(); } catch (e) { if (e != constants.ERROR.BLOCK.NO_CURRENT_BLOCK) { throw e; @@ -340,22 +378,22 @@ export class FileDAL implements ServerDAO { return current; } - getPromoted(number:number) { - return this.getFullBlockOf(number) + getPromoted(number: number) { + return this.getFullBlockOf(number); } // Block getPotentialRootBlocks() { - return this.blockDAL.getPotentialRoots() + return this.blockDAL.getPotentialRoots(); } - lastBlockOfIssuer(issuer:string) { + lastBlockOfIssuer(issuer: string) { return this.blockDAL.lastBlockOfIssuer(issuer); } - - getCountOfPoW(issuer:string) { - return this.blockDAL.getCountOfBlocksIssuedBy(issuer) + + getCountOfPoW(issuer: string) { + return this.blockDAL.getCountOfBlocksIssuedBy(issuer); } /** @@ -363,97 +401,140 @@ export class FileDAL implements ServerDAO { * @param start Lower number bound (included). * @param end Higher number bound (included). 
*/ - async getBlocksBetween (start:number, end:number): Promise<DBBlock[]> { - start = Math.max(0, start) - end= Math.max(0, end) - return this.blockDAL.getBlocks(Math.max(0, start), end) + async getBlocksBetween(start: number, end: number): Promise<DBBlock[]> { + start = Math.max(0, start); + end = Math.max(0, end); + return this.blockDAL.getBlocks(Math.max(0, start), end); } - getForkBlocksFollowing(current:DBBlock) { - return this.blockDAL.getNextForkBlocks(current.number, current.hash) + getForkBlocksFollowing(current: DBBlock) { + return this.blockDAL.getNextForkBlocks(current.number, current.hash); } - getPotentialForkBlocks(numberStart:number, medianTimeStart:number, maxNumber:number) { - return this.blockDAL.getPotentialForkBlocks(numberStart, medianTimeStart, maxNumber) + getPotentialForkBlocks( + numberStart: number, + medianTimeStart: number, + maxNumber: number + ) { + return this.blockDAL.getPotentialForkBlocks( + numberStart, + medianTimeStart, + maxNumber + ); } async getBlockCurrent() { const current = await this.blockDAL.getCurrent(); - if (!current) - throw 'No current block'; + if (!current) throw "No current block"; return current; } - getValidLinksTo(to:string) { - return this.cindexDAL.getValidLinksTo(to) + getValidLinksTo(to: string) { + return this.cindexDAL.getValidLinksTo(to); } - async getAvailableSourcesByPubkey(pubkey:string): Promise<HttpSource[]> { - const txAvailable = await this.sindexDAL.getAvailableForPubkey(pubkey) - const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey) - return sources.map(d => { - return { - type: 'D', - noffset: d.pos, - identifier: pubkey, - amount: d.amount, - base: d.base, - conditions: 'SIG(' + pubkey + ')' - } - }).concat(txAvailable.map(s => { - return { - type: 'T', - noffset: s.pos, - identifier: s.identifier, - amount: s.amount, - base: s.base, - conditions: s.conditions - } - })) - } - - async findByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number, isDividend: boolean): Promise<SimpleTxInput[]> { + async getAvailableSourcesByPubkey(pubkey: string): Promise<HttpSource[]> { + const txAvailable = await this.sindexDAL.getAvailableForPubkey(pubkey); + const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey); + return sources + .map((d) => { + return { + type: "D", + noffset: d.pos, + identifier: pubkey, + amount: d.amount, + base: d.base, + conditions: "SIG(" + pubkey + ")", + }; + }) + .concat( + txAvailable.map((s) => { + return { + type: "T", + noffset: s.pos, + identifier: s.identifier, + amount: s.amount, + base: s.base, + conditions: s.conditions, + }; + }) + ); + } + + async findByIdentifierPosAmountBase( + identifier: string, + pos: number, + amount: number, + base: number, + isDividend: boolean + ): Promise<SimpleTxInput[]> { if (isDividend) { - return this.dividendDAL.findUdSourceByIdentifierPosAmountBase(identifier, pos, amount, base) + return this.dividendDAL.findUdSourceByIdentifierPosAmountBase( + identifier, + pos, + amount, + base + ); } else { - return this.sindexDAL.findTxSourceByIdentifierPosAmountBase(identifier, pos, amount, base) + return this.sindexDAL.findTxSourceByIdentifierPosAmountBase( + identifier, + pos, + amount, + base + ); } } - async getGlobalIdentityByHashForExistence(hash:string): Promise<boolean> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForExistence(hash: string): Promise<boolean> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await 
this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return false + return false; } } - return true + return true; } - async getGlobalIdentityByHashForHashingAndSig(hash:string): Promise<{ pubkey:string, uid:string, buid:string, sig:string }|null> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForHashingAndSig( + hash: string + ): Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } return { pubkey: idty.pub, uid: idty.uid, buid: idty.created_on, - sig: idty.sig - } + sig: idty.sig, + }; } - return pending - } - - async getGlobalIdentityByHashForLookup(hash:string): Promise<{ pubkey:string, uid:string, buid:string, sig:string, member:boolean, wasMember:boolean }|null> { - const pending = await this.idtyDAL.getByHash(hash) + return pending; + } + + async getGlobalIdentityByHashForLookup( + hash: string + ): Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + member: boolean; + wasMember: boolean; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } return { pubkey: idty.pub, @@ -461,20 +542,32 @@ export class FileDAL implements ServerDAO { buid: idty.created_on, sig: idty.sig, member: idty.member, - wasMember: idty.wasMember - } + wasMember: idty.wasMember, + }; } - return pending - } - - async getGlobalIdentityByHashForJoining(hash:string): Promise<{ pubkey:string, uid:string, buid:string, sig:string, member:boolean, wasMember:boolean, revoked:boolean }|null> { - const pending = await this.idtyDAL.getByHash(hash) + return pending; + } + + async getGlobalIdentityByHashForJoining( + hash: string + ): Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + member: boolean; + wasMember: boolean; + revoked: boolean; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } - const membership = await this.mindexDAL.getReducedMSForImplicitRevocation(idty.pub) as FullMindexEntry + const membership = (await this.mindexDAL.getReducedMSForImplicitRevocation( + idty.pub + )) as FullMindexEntry; return { pubkey: idty.pub, uid: idty.uid, @@ -482,38 +575,54 @@ export class FileDAL implements ServerDAO { sig: idty.sig, member: idty.member, wasMember: idty.wasMember, - revoked: !!(membership.revoked_on) - } + revoked: !!membership.revoked_on, + }; } - return pending + return pending; } - async getGlobalIdentityByHashForIsMember(hash:string): Promise<{ pub:string, member:boolean }|null> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForIsMember( + hash: string + ): Promise<{ pub: string; member: boolean } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } return { pub: idty.pub, - member: idty.member - } + 
member: idty.member, + }; } return { pub: pending.pubkey, - member: pending.member - } + member: pending.member, + }; } - async getGlobalIdentityByHashForRevocation(hash:string): Promise<{ pub:string, uid:string, created_on:string, sig:string, member:boolean, wasMember:boolean, revoked:boolean, revocation_sig:string|null, expires_on:number }|null> { - const pending = await this.idtyDAL.getByHash(hash) + async getGlobalIdentityByHashForRevocation( + hash: string + ): Promise<{ + pub: string; + uid: string; + created_on: string; + sig: string; + member: boolean; + wasMember: boolean; + revoked: boolean; + revocation_sig: string | null; + expires_on: number; + } | null> { + const pending = await this.idtyDAL.getByHash(hash); if (!pending) { - const idty = await this.iindexDAL.getFullFromHash(hash) + const idty = await this.iindexDAL.getFullFromHash(hash); if (!idty) { - return null + return null; } - const membership = await this.mindexDAL.getReducedMSForImplicitRevocation(idty.pub) as FullMindexEntry + const membership = (await this.mindexDAL.getReducedMSForImplicitRevocation( + idty.pub + )) as FullMindexEntry; return { pub: idty.pub, uid: idty.uid, @@ -522,9 +631,9 @@ export class FileDAL implements ServerDAO { wasMember: idty.wasMember, expires_on: membership.expires_on, created_on: idty.created_on, - revoked: !!(membership.revoked_on), - revocation_sig: membership.revocation - } + revoked: !!membership.revoked_on, + revocation_sig: membership.revocation, + }; } return { pub: pending.pubkey, @@ -535,150 +644,198 @@ export class FileDAL implements ServerDAO { member: pending.member, wasMember: pending.wasMember, revoked: pending.revoked, - revocation_sig: pending.revocation_sig - } + revocation_sig: pending.revocation_sig, + }; } getMembers() { - return this.iindexDAL.getMembers() + return this.iindexDAL.getMembers(); } - async getWrittenIdtyByPubkeyForHash(pubkey:string): Promise<{ hash:string }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForHash( + pubkey: string + ): Promise<{ hash: string }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForHashing(pubkey:string): Promise<{ uid:string, created_on:string, pub:string }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForHashing( + pubkey: string + ): Promise<{ uid: string; created_on: string; pub: string }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForWotbID(pubkey:string): Promise<{ wotb_id:number }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForWotbID( + pubkey: string + ): Promise<{ wotb_id: number }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForUidAndPubkey(pubkey:string): Promise<{ pub:string, uid:string }> { - return this.getWrittenForSureIdtyByPubkey(pubkey) + async getWrittenIdtyByPubkeyForUidAndPubkey( + pubkey: string + ): Promise<{ pub: string; uid: string }> { + return this.getWrittenForSureIdtyByPubkey(pubkey); } - async getWrittenIdtyByPubkeyForIsMember(pubkey:string): Promise<{ member:boolean }|null> { - return this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForIsMember( + pubkey: string + ): Promise<{ member: boolean } | null> { + return this.iindexDAL.getFromPubkey(pubkey); } - async getWrittenIdtyByPubkeyForUidAndIsMemberAndWasMember(pubkey:string): Promise<{ uid:string, member:boolean, wasMember:boolean }|null> { - return 
this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForUidAndIsMemberAndWasMember( + pubkey: string + ): Promise<{ uid: string; member: boolean; wasMember: boolean } | null> { + return this.iindexDAL.getFromPubkey(pubkey); } - async getWrittenIdtyByPubkeyOrUidForIsMemberAndPubkey(search:string): Promise<{ pub:string, member:boolean }|null> { - return this.iindexDAL.getFromPubkeyOrUid(search) + async getWrittenIdtyByPubkeyOrUidForIsMemberAndPubkey( + search: string + ): Promise<{ pub: string; member: boolean } | null> { + return this.iindexDAL.getFromPubkeyOrUid(search); } - async getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(search:string): Promise<{ uid:string, created_on:string, pub:string, member:boolean }|null> { - return await this.iindexDAL.getFromPubkeyOrUid(search) + async getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember( + search: string + ): Promise<{ + uid: string; + created_on: string; + pub: string; + member: boolean; + } | null> { + return await this.iindexDAL.getFromPubkeyOrUid(search); } - async getWrittenIdtyByPubkeyForRevocationCheck(pubkey:string): Promise<{ pub:string, uid:string, created_on:string, sig:string, revoked_on:string|null }|null> { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForRevocationCheck( + pubkey: string + ): Promise<{ + pub: string; + uid: string; + created_on: string; + sig: string; + revoked_on: string | null; + } | null> { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - return null + return null; } - const membership = await this.mindexDAL.getReducedMSForImplicitRevocation(pubkey) as FullMindexEntry + const membership = (await this.mindexDAL.getReducedMSForImplicitRevocation( + pubkey + )) as FullMindexEntry; return { pub: idty.pub, uid: idty.uid, sig: idty.sig, created_on: idty.created_on, - revoked_on: membership.revoked_on - } + revoked_on: membership.revoked_on, + }; } - async getWrittenIdtyByPubkeyForCertificationCheck(pubkey:string): Promise<{ pub:string, uid:string, created_on:string, sig:string }|null> { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForCertificationCheck( + pubkey: string + ): Promise<{ + pub: string; + uid: string; + created_on: string; + sig: string; + } | null> { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - return null + return null; } return { pub: idty.pub, uid: idty.uid, sig: idty.sig, created_on: idty.created_on, - } + }; } - async getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn(pubkey:string): Promise<{ uid:string, member:boolean, created_on:string }|null> { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + async getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn( + pubkey: string + ): Promise<{ uid: string; member: boolean; created_on: string } | null> { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - return null + return null; } return { uid: idty.uid, member: idty.member, created_on: idty.created_on, - } + }; } - private async getWrittenForSureIdtyByPubkey(pubkey:string) { - const idty = await this.iindexDAL.getFromPubkey(pubkey) + private async getWrittenForSureIdtyByPubkey(pubkey: string) { + const idty = await this.iindexDAL.getFromPubkey(pubkey); if (!idty) { - throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]) + throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]); } - return idty + return idty; } - private async getWrittenForSureIdtyByUid(pubkey:string) { - const idty = (await 
this.iindexDAL.getFullFromUID(pubkey)) + private async getWrittenForSureIdtyByUid(pubkey: string) { + const idty = await this.iindexDAL.getFullFromUID(pubkey); if (!idty) { - throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]) + throw Error(DataErrors[DataErrors.MEMBER_NOT_FOUND]); } - return idty + return idty; } // Duniter-UI dependency - async getWrittenIdtyByPubkey(pub:string): Promise<FullIindexEntry | null> { - return await this.iindexDAL.getFromPubkey(pub) + async getWrittenIdtyByPubkey(pub: string): Promise<FullIindexEntry | null> { + return await this.iindexDAL.getFromPubkey(pub); } - async getWrittenIdtyByPubkeyForExistence(uid:string) { - return !!(await this.iindexDAL.getFromPubkey(uid)) + async getWrittenIdtyByPubkeyForExistence(uid: string) { + return !!(await this.iindexDAL.getFromPubkey(uid)); } - async getWrittenIdtyByUIDForExistence(uid:string) { - return !!(await this.iindexDAL.getFromUID(uid)) + async getWrittenIdtyByUIDForExistence(uid: string) { + return !!(await this.iindexDAL.getFromUID(uid)); } - async getWrittenIdtyByUidForHashing(uid:string): Promise<{ uid:string, created_on:string, pub:string }> { - return this.getWrittenForSureIdtyByUid(uid) + async getWrittenIdtyByUidForHashing( + uid: string + ): Promise<{ uid: string; created_on: string; pub: string }> { + return this.getWrittenForSureIdtyByUid(uid); } - async getWrittenIdtyByUIDForWotbId(uid:string): Promise<{ wotb_id:number }> { - return this.getWrittenForSureIdtyByUid(uid) + async getWrittenIdtyByUIDForWotbId( + uid: string + ): Promise<{ wotb_id: number }> { + return this.getWrittenForSureIdtyByUid(uid); } - async findPeersWhoseHashIsIn(hashes:string[]) { + async findPeersWhoseHashIsIn(hashes: string[]) { const peers = await this.peerDAL.listAll(); - return Underscore.chain(peers).filter((p:DBPeer) => hashes.indexOf(p.hash) !== -1).value() + return Underscore.chain(peers) + .filter((p: DBPeer) => hashes.indexOf(p.hash) !== -1) + .value(); } - getTxByHash(hash:string) { - return this.txsDAL.getTX(hash) + getTxByHash(hash: string) { + return this.txsDAL.getTX(hash); } - removeTxByHash(hash:string) { - return this.txsDAL.removeTX(hash) + removeTxByHash(hash: string) { + return this.txsDAL.removeTX(hash); } getTransactionsPending(versionMin = 0) { - return this.txsDAL.getAllPending(versionMin) + return this.txsDAL.getAllPending(versionMin); } - async getNonWritten(pubkey:string) { + async getNonWritten(pubkey: string) { const pending = await this.idtyDAL.getPendingIdentities(); - return Underscore.chain(pending).where({pubkey: pubkey}).value() + return Underscore.chain(pending).where({ pubkey: pubkey }).value(); } async getRevocatingMembers() { const revoking = await this.idtyDAL.getToRevoke(); const toRevoke = []; for (const pending of revoking) { - const idty = await this.getWrittenIdtyByPubkeyForRevocationCheck(pending.pubkey) + const idty = await this.getWrittenIdtyByPubkeyForRevocationCheck( + pending.pubkey + ); if (idty && !idty.revoked_on) { toRevoke.push(pending); } @@ -687,67 +844,79 @@ export class FileDAL implements ServerDAO { } getToBeKickedPubkeys() { - return this.iindexDAL.getToBeKickedPubkeys() + return this.iindexDAL.getToBeKickedPubkeys(); } getRevokedPubkeys() { - return this.mindexDAL.getRevokedPubkeys() + return this.mindexDAL.getRevokedPubkeys(); } - async searchJustIdentities(search:string): Promise<DBIdentity[]> { + async searchJustIdentities(search: string): Promise<DBIdentity[]> { const pendings = await this.idtyDAL.searchThoseMatching(search); const writtens = await 
this.iindexDAL.searchThoseMatching(search); - const nonPendings = Underscore.filter(writtens, (w:IindexEntry) => { + const nonPendings = Underscore.filter(writtens, (w: IindexEntry) => { return Underscore.where(pendings, { pubkey: w.pub }).length == 0; }); - const found = pendings.concat(nonPendings.map((i:any) => { - // Use the correct field - i.pubkey = i.pub - return i - })); - return await Promise.all<DBIdentity>(found.map(async (f) => { - const ms = await this.mindexDAL.getReducedMSForImplicitRevocation(f.pubkey); - if (ms) { - f.revoked_on = null; - if (ms.revoked_on) { - const blockOfRevocation = (await this.getAbsoluteBlockByBlockstamp(ms.revoked_on)) as DBBlock - f.revoked_on = blockOfRevocation.medianTime + const found = pendings.concat( + nonPendings.map((i: any) => { + // Use the correct field + i.pubkey = i.pub; + return i; + }) + ); + return await Promise.all<DBIdentity>( + found.map(async (f) => { + const ms = await this.mindexDAL.getReducedMSForImplicitRevocation( + f.pubkey + ); + if (ms) { + f.revoked_on = null; + if (ms.revoked_on) { + const blockOfRevocation = (await this.getAbsoluteBlockByBlockstamp( + ms.revoked_on + )) as DBBlock; + f.revoked_on = blockOfRevocation.medianTime; + } + f.revoked = !!f.revoked_on; + f.revocation_sig = f.revocation_sig || ms.revocation || null; } - f.revoked = !!f.revoked_on; - f.revocation_sig = f.revocation_sig || ms.revocation || null; - } - return f; - })) + return f; + }) + ); } - async certsToTarget(pub:string, hash:string) { + async certsToTarget(pub: string, hash: string) { const certs = await this.certDAL.getToTarget(hash); const links = await this.cindexDAL.getValidLinksTo(pub); let matching = certs; - await Promise.all(links.map(async (entry:any) => { - matching.push(await this.cindexEntry2DBCert(entry)) - })) - matching = Underscore.sortBy(matching, (c:DBCert) => -c.block); + await Promise.all( + links.map(async (entry: any) => { + matching.push(await this.cindexEntry2DBCert(entry)); + }) + ); + matching = Underscore.sortBy(matching, (c: DBCert) => -c.block); matching.reverse(); return matching; } - async certsFrom(pubkey:string) { + async certsFrom(pubkey: string) { const certs = await this.certDAL.getFromPubkeyCerts(pubkey); const links = await this.cindexDAL.getValidLinksFrom(pubkey); let matching = certs; - await Promise.all(links.map(async (entry:CindexEntry) => { - matching.push(await this.cindexEntry2DBCert(entry)) - })) - matching = Underscore.sortBy(matching, (c:DBCert) => -c.block); + await Promise.all( + links.map(async (entry: CindexEntry) => { + matching.push(await this.cindexEntry2DBCert(entry)); + }) + ); + matching = Underscore.sortBy(matching, (c: DBCert) => -c.block); matching.reverse(); return matching; } - async cindexEntry2DBCert(entry:CindexEntry): Promise<DBCert> { - const idty = await this.getWrittenIdtyByPubkeyForHash(entry.receiver) - const wbt = entry.written_on.split('-') - const block = (await this.getBlock(entry.created_on)) as DBBlock + async cindexEntry2DBCert(entry: CindexEntry): Promise<DBCert> { + const idty = await this.getWrittenIdtyByPubkeyForHash(entry.receiver); + const wbt = entry.written_on.split("-"); + const block = (await this.getBlock(entry.created_on)) as DBBlock; return { issuers: [entry.issuer], linked: true, @@ -763,10 +932,10 @@ export class FileDAL implements ServerDAO { block: block.number, expired: !!entry.expired_on, expires_on: entry.expires_on, - } + }; } - async isSentry(pubkey:string, conf:ConfDTO) { + async isSentry(pubkey: string, conf: ConfDTO) { const current = 
await this.getCurrentBlockOrNull(); if (current) { const dSen = Math.ceil(Math.pow(current.membersCount, 1 / conf.stepMax)); @@ -779,17 +948,24 @@ export class FileDAL implements ServerDAO { async certsFindNew() { const certs = await this.certDAL.getNotLinked(); - return Underscore.chain(certs).where({linked: false}).sortBy((c:DBCert) => -c.block).value() + return Underscore.chain(certs) + .where({ linked: false }) + .sortBy((c: DBCert) => -c.block) + .value(); } - async certsNotLinkedToTarget(hash:string) { + async certsNotLinkedToTarget(hash: string) { const certs = await this.certDAL.getNotLinkedToTarget(hash); - return Underscore.chain(certs).sortBy((c:any) => -c.block).value(); + return Underscore.chain(certs) + .sortBy((c: any) => -c.block) + .value(); } - async getMostRecentMembershipNumberForIssuer(issuer:string) { + async getMostRecentMembershipNumberForIssuer(issuer: string) { const mss = await this.msDAL.getMembershipsOfIssuer(issuer); - const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation(issuer); + const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation( + issuer + ); let max = reduced ? parseInt(reduced.created_on) : -1; for (const ms of mss) { max = Math.max(ms.number, max); @@ -797,64 +973,100 @@ export class FileDAL implements ServerDAO { return max; } - async lastJoinOfIdentity(target:string) { + async lastJoinOfIdentity(target: string) { let pending = await this.msDAL.getPendingINOfTarget(target); - return Underscore.sortBy(pending, (ms:any) => -ms.number)[0]; + return Underscore.sortBy(pending, (ms: any) => -ms.number)[0]; } async findNewcomers(blockMedianTime = 0): Promise<DBMembership[]> { - const pending = await this.msDAL.getPendingIN() - const mss: DBMembership[] = await Promise.all<DBMembership>(pending.map(async (p:any) => { - const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation(p.issuer) - if (!reduced || !reduced.chainable_on || blockMedianTime >= reduced.chainable_on || blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107) { - return p - } - return null - })) - return Underscore.chain(Underscore.filter(mss, ms => !!ms) as DBMembership[]) - .sortBy((ms:DBMembership) => -ms.blockNumber) - .value() + const pending = await this.msDAL.getPendingIN(); + const mss: DBMembership[] = await Promise.all<DBMembership>( + pending.map(async (p: any) => { + const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation( + p.issuer + ); + if ( + !reduced || + !reduced.chainable_on || + blockMedianTime >= reduced.chainable_on || + blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107 + ) { + return p; + } + return null; + }) + ); + return Underscore.chain( + Underscore.filter(mss, (ms) => !!ms) as DBMembership[] + ) + .sortBy((ms: DBMembership) => -ms.blockNumber) + .value(); } async findLeavers(blockMedianTime = 0): Promise<DBMembership[]> { const pending = await this.msDAL.getPendingOUT(); - const mss = await Promise.all<DBMembership|null>(pending.map(async p => { - const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation(p.issuer) - if (!reduced || !reduced.chainable_on || blockMedianTime >= reduced.chainable_on || blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107) { - return p - } - return null - })) - return Underscore.chain(Underscore.filter(mss, ms => !!ms) as DBMembership[]) - .sortBy(ms => -ms.blockNumber) + const mss = await Promise.all<DBMembership | null>( + pending.map(async (p) => { + const reduced = await this.mindexDAL.getReducedMSForImplicitRevocation( + p.issuer + ); + if ( + !reduced || 
+ !reduced.chainable_on || + blockMedianTime >= reduced.chainable_on || + blockMedianTime < constants.TIME_TO_TURN_ON_BRG_107 + ) { + return p; + } + return null; + }) + ); + return Underscore.chain( + Underscore.filter(mss, (ms) => !!ms) as DBMembership[] + ) + .sortBy((ms) => -ms.blockNumber) .value(); } - existsNonReplayableLink(from:string, to:string, medianTime: number, version: number) { - return this.cindexDAL.existsNonReplayableLink(from, to, medianTime, version) - } - - async getSource(identifier:string, pos:number, isDividend: boolean): Promise<SimpleTxInput | null> { + existsNonReplayableLink( + from: string, + to: string, + medianTime: number, + version: number + ) { + return this.cindexDAL.existsNonReplayableLink( + from, + to, + medianTime, + version + ); + } + + async getSource( + identifier: string, + pos: number, + isDividend: boolean + ): Promise<SimpleTxInput | null> { if (isDividend) { - return this.dividendDAL.getUDSource(identifier, pos) + return this.dividendDAL.getUDSource(identifier, pos); } else { - return this.sindexDAL.getTxSource(identifier, pos) + return this.sindexDAL.getTxSource(identifier, pos); } } - async isMember(pubkey:string):Promise<boolean> { + async isMember(pubkey: string): Promise<boolean> { try { const idty = await this.iindexDAL.getFromPubkey(pubkey); if (idty && idty.member) { - return true + return true; } - return false + return false; } catch (err) { return false; } } - async isMemberAndNonLeaver(pubkey:string) { + async isMemberAndNonLeaver(pubkey: string) { try { const idty = await this.iindexDAL.getFromPubkey(pubkey); if (idty && idty.member) { @@ -866,44 +1078,49 @@ export class FileDAL implements ServerDAO { } } - async isLeaving(pubkey:string) { + async isLeaving(pubkey: string) { const ms = await this.mindexDAL.getReducedMSForImplicitRevocation(pubkey); return (ms && ms.leaving) || false; } - async existsCert(cert: DBCert, current: DBBlock|null) { + async existsCert(cert: DBCert, current: DBBlock | null) { const existing = await this.certDAL.existsGivenCert(cert); if (existing) return existing; if (!current) { - return false + return false; } - const existsLink = await this.cindexDAL.existsNonReplayableLink(cert.from, cert.to, current.medianTime, current.version) + const existsLink = await this.cindexDAL.existsNonReplayableLink( + cert.from, + cert.to, + current.medianTime, + current.version + ); return !!existsLink; } - deleteCert(cert:any) { - return this.certDAL.deleteCert(cert) + deleteCert(cert: any) { + return this.certDAL.deleteCert(cert); } - deleteMS(ms:any) { - return this.msDAL.deleteMS(ms) + deleteMS(ms: any) { + return this.msDAL.deleteMS(ms); } - async setRevoked(pubkey:string) { - return await this.idtyDAL.setRevoked(pubkey) + async setRevoked(pubkey: string) { + return await this.idtyDAL.setRevoked(pubkey); } - setRevocating = (idty:BasicRevocableIdentity, revocation_sig:string) => { - const dbIdentity = IdentityDTO.fromBasicIdentity(idty) - dbIdentity.member = idty.member - dbIdentity.wasMember = idty.wasMember - dbIdentity.expires_on = idty.expires_on - dbIdentity.revocation_sig = revocation_sig - dbIdentity.revoked = false - return this.idtyDAL.saveIdentity(dbIdentity) - } + setRevocating = (idty: BasicRevocableIdentity, revocation_sig: string) => { + const dbIdentity = IdentityDTO.fromBasicIdentity(idty); + dbIdentity.member = idty.member; + dbIdentity.wasMember = idty.wasMember; + dbIdentity.expires_on = idty.expires_on; + dbIdentity.revocation_sig = revocation_sig; + dbIdentity.revoked = false; + return 
this.idtyDAL.saveIdentity(dbIdentity); + }; - async getPeerOrNull(pubkey:string) { + async getPeerOrNull(pubkey: string) { let peer = null; try { peer = await this.getPeer(pubkey); @@ -915,29 +1132,34 @@ export class FileDAL implements ServerDAO { return peer; } - async removePeerByPubkey(pubkey:string) { - return this.peerDAL.removePeerByPubkey(pubkey) + async removePeerByPubkey(pubkey: string) { + return this.peerDAL.removePeerByPubkey(pubkey); } - async findAllPeersBut(pubkeys:string[]) { + async findAllPeersBut(pubkeys: string[]) { const peers = await this.listAllPeers(); - return peers.filter((peer:DBPeer) => pubkeys.indexOf(peer.pubkey) == -1 - && ['UP'].indexOf(peer.status) !== -1); + return peers.filter( + (peer: DBPeer) => + pubkeys.indexOf(peer.pubkey) == -1 && ["UP"].indexOf(peer.status) !== -1 + ); } async listAllPeersWithStatusNewUP() { const peers = await this.peerDAL.listAll(); return Underscore.chain(peers) - .filter((p:DBPeer) => ['UP'] - .indexOf(p.status) !== -1).value(); + .filter((p: DBPeer) => ["UP"].indexOf(p.status) !== -1) + .value(); } - async listAllPeersWithStatusNewUPWithtout(pub:string) { + async listAllPeersWithStatusNewUPWithtout(pub: string) { const peers = await this.peerDAL.listAll(); - return Underscore.chain(peers).filter((p:DBPeer) => p.status == 'UP').filter((p:DBPeer) => p.pubkey !== pub).value(); + return Underscore.chain(peers) + .filter((p: DBPeer) => p.status == "UP") + .filter((p: DBPeer) => p.pubkey !== pub) + .value(); } - async findPeers(pubkey:string): Promise<DBPeer[]> { + async findPeers(pubkey: string): Promise<DBPeer[]> { try { const peer = await this.getPeer(pubkey); return [peer]; @@ -946,15 +1168,15 @@ export class FileDAL implements ServerDAO { } } - async getRandomlyUPsWithout(pubkeys:string[]): Promise<DBPeer[]> { + async getRandomlyUPsWithout(pubkeys: string[]): Promise<DBPeer[]> { const peers = await this.listAllPeersWithStatusNewUP(); - return peers.filter(peer => pubkeys.indexOf(peer.pubkey) == -1) + return peers.filter((peer) => pubkeys.indexOf(peer.pubkey) == -1); } - async setPeerUP(pubkey:string) { + async setPeerUP(pubkey: string) { try { - const p = await this.getPeer(pubkey) - p.status = 'UP'; + const p = await this.getPeer(pubkey); + p.status = "UP"; p.first_down = null; p.last_try = null; return this.peerDAL.savePeer(p); @@ -963,19 +1185,19 @@ export class FileDAL implements ServerDAO { } } - async setPeerDown(pubkey:string) { + async setPeerDown(pubkey: string) { try { // We do not set mirror peers as down (ex. 
of mirror: 'M1_HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk') if (!pubkey.match(/_/)) { - const p = await this.getPeer(pubkey) + const p = await this.getPeer(pubkey); if (p) { - const now = (new Date()).getTime(); - p.status = 'DOWN'; + const now = new Date().getTime(); + p.status = "DOWN"; if (!p.first_down) { p.first_down = now; } p.last_try = now; - await this.peerDAL.savePeer(p) + await this.peerDAL.savePeer(p); } } } catch (err) { @@ -983,41 +1205,64 @@ export class FileDAL implements ServerDAO { } } - async saveBlock(dbb:DBBlock) { + async saveBlock(dbb: DBBlock) { dbb.wrong = false; await Promise.all([ this.saveBlockInFile(dbb), - this.saveTxsInFiles(dbb.transactions, dbb.number, dbb.medianTime) - ]) + this.saveTxsInFiles(dbb.transactions, dbb.number, dbb.medianTime), + ]); } - async generateIndexes(block:BlockDTO, conf:ConfDTO, index:IndexEntry[], aHEAD:DBHead|null) { + async generateIndexes( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[], + aHEAD: DBHead | null + ) { // We need to recompute the indexes for block#0 - let HEAD:DBHead + let HEAD: DBHead; if (!index || !aHEAD || aHEAD.number == 0) { - index = indexer.localIndex(block, conf) - HEAD = await indexer.completeGlobalScope(block, conf, index, this) + index = indexer.localIndex(block, conf); + HEAD = await indexer.completeGlobalScope(block, conf, index, this); } else { - HEAD = aHEAD + HEAD = aHEAD; } let mindex = indexer.mindex(index); let iindex = indexer.iindex(index); let sindex = indexer.sindex(index); let cindex = indexer.cindex(index); - const dividends = await indexer.ruleIndexGenDividend(HEAD, iindex, this) // Requires that newcomers are already in DividendDAO - sindex = sindex.concat(await indexer.ruleIndexGarbageSmallAccounts(HEAD, sindex, dividends, this)); - cindex = cindex.concat(await indexer.ruleIndexGenCertificationExpiry(HEAD, this)); - mindex = mindex.concat(await indexer.ruleIndexGenMembershipExpiry(HEAD, this)); - iindex = iindex.concat(await indexer.ruleIndexGenExclusionByMembership(HEAD, mindex, this)); - iindex = iindex.concat(await indexer.ruleIndexGenExclusionByCertificatons(HEAD, cindex, iindex, conf, this)); - mindex = mindex.concat(await indexer.ruleIndexGenImplicitRevocation(HEAD, this)); + const dividends = await indexer.ruleIndexGenDividend(HEAD, iindex, this); // Requires that newcomers are already in DividendDAO + sindex = sindex.concat( + await indexer.ruleIndexGarbageSmallAccounts(HEAD, sindex, dividends, this) + ); + cindex = cindex.concat( + await indexer.ruleIndexGenCertificationExpiry(HEAD, this) + ); + mindex = mindex.concat( + await indexer.ruleIndexGenMembershipExpiry(HEAD, this) + ); + iindex = iindex.concat( + await indexer.ruleIndexGenExclusionByMembership(HEAD, mindex, this) + ); + iindex = iindex.concat( + await indexer.ruleIndexGenExclusionByCertificatons( + HEAD, + cindex, + iindex, + conf, + this + ) + ); + mindex = mindex.concat( + await indexer.ruleIndexGenImplicitRevocation(HEAD, this) + ); await indexer.ruleIndexCorrectMembershipExpiryDate(HEAD, mindex, this); await indexer.ruleIndexCorrectCertificationExpiryDate(HEAD, cindex, this); return { HEAD, mindex, iindex, sindex, cindex, dividends }; } - async updateWotbLinks(cindex:CindexEntry[], instance?: Wot) { - const wotb = instance || this.wotb + async updateWotbLinks(cindex: CindexEntry[], instance?: Wot) { + const wotb = instance || this.wotb; for (const entry of cindex) { const from = await this.getWrittenIdtyByPubkeyForWotbID(entry.issuer); const to = await 
this.getWrittenIdtyByPubkeyForWotbID(entry.receiver); @@ -1026,109 +1271,124 @@ export class FileDAL implements ServerDAO { wotb.addLink(from.wotb_id, to.wotb_id); } else { // Update = removal - NewLogger().trace('removeLink %s -> %s', from.wotb_id, to.wotb_id) + NewLogger().trace("removeLink %s -> %s", from.wotb_id, to.wotb_id); wotb.removeLink(from.wotb_id, to.wotb_id); } } } @MonitorExecutionTime() - async trimIndexes(maxNumber:number) { + async trimIndexes(maxNumber: number) { if (!cliprogram.notrim) { - await this.bindexDAL.trimBlocks(maxNumber) - await this.iindexDAL.trimRecords(maxNumber) - await this.mindexDAL.trimRecords(maxNumber) + await this.bindexDAL.trimBlocks(maxNumber); + await this.iindexDAL.trimRecords(maxNumber); + await this.mindexDAL.trimRecords(maxNumber); if (!cliprogram.notrimc) { - await this.cindexDAL.trimExpiredCerts(maxNumber) + await this.cindexDAL.trimExpiredCerts(maxNumber); } } - await this.sindexDAL.trimConsumedSource(maxNumber) - await this.dividendDAL.trimConsumedUDs(maxNumber) + await this.sindexDAL.trimConsumedSource(maxNumber); + await this.dividendDAL.trimConsumedUDs(maxNumber); } - async trimSandboxes(block:{ medianTime: number }) { + async trimSandboxes(block: { medianTime: number }) { await this.certDAL.trimExpiredCerts(block.medianTime); await this.msDAL.trimExpiredMemberships(block.medianTime); await this.idtyDAL.trimExpiredIdentities(block.medianTime); - await this.txsDAL.trimExpiredNonWrittenTxs(block.medianTime - CommonConstants.TX_WINDOW) + await this.txsDAL.trimExpiredNonWrittenTxs( + block.medianTime - CommonConstants.TX_WINDOW + ); return true; } - savePendingMembership(ms:DBMembership) { - return this.msDAL.savePendingMembership(ms) + savePendingMembership(ms: DBMembership) { + return this.msDAL.savePendingMembership(ms); } - async saveBlockInFile(block:DBBlock) { - await this.writeFileOfBlock(block) + async saveBlockInFile(block: DBBlock) { + await this.writeFileOfBlock(block); } - saveSideBlockInFile(block:DBBlock) { - return this.writeSideFileOfBlock(block) + saveSideBlockInFile(block: DBBlock) { + return this.writeSideFileOfBlock(block); } - async saveTxsInFiles(txs:TransactionDTO[], block_number:number, medianTime:number) { - return Promise.all(txs.map(async (tx) => { - const sp = tx.blockstamp.split('-'); - const basedBlock = (await this.getAbsoluteBlockByNumberAndHash(parseInt(sp[0]), sp[1])) as DBBlock - tx.blockstampTime = basedBlock.medianTime; - const txEntity = TransactionDTO.fromJSONObject(tx) - txEntity.computeAllHashes(); - return this.txsDAL.addLinked(TransactionDTO.fromJSONObject(txEntity), block_number, medianTime); - })) + async saveTxsInFiles( + txs: TransactionDTO[], + block_number: number, + medianTime: number + ) { + return Promise.all( + txs.map(async (tx) => { + const sp = tx.blockstamp.split("-"); + const basedBlock = (await this.getAbsoluteBlockByNumberAndHash( + parseInt(sp[0]), + sp[1] + )) as DBBlock; + tx.blockstampTime = basedBlock.medianTime; + const txEntity = TransactionDTO.fromJSONObject(tx); + txEntity.computeAllHashes(); + return this.txsDAL.addLinked( + TransactionDTO.fromJSONObject(txEntity), + block_number, + medianTime + ); + }) + ); } async merkleForPeers() { let peers = await this.listAllPeersWithStatusNewUP(); - const leaves = peers.map((peer:DBPeer) => peer.hash); + const leaves = peers.map((peer: DBPeer) => peer.hash); const merkle = new MerkleDTO(); merkle.initialize(leaves); return merkle; } - savePendingIdentity(idty:DBIdentity) { - return this.idtyDAL.saveIdentity(idty) + 
savePendingIdentity(idty: DBIdentity) { + return this.idtyDAL.saveIdentity(idty); } - revokeIdentity(pubkey:string) { - return this.idtyDAL.revokeIdentity(pubkey) + revokeIdentity(pubkey: string) { + return this.idtyDAL.revokeIdentity(pubkey); } - async removeUnWrittenWithPubkey(pubkey:string) { - return await this.idtyDAL.removeUnWrittenWithPubkey(pubkey) + async removeUnWrittenWithPubkey(pubkey: string) { + return await this.idtyDAL.removeUnWrittenWithPubkey(pubkey); } - async removeUnWrittenWithUID(pubkey:string) { + async removeUnWrittenWithUID(pubkey: string) { return await this.idtyDAL.removeUnWrittenWithUID(pubkey); } - registerNewCertification(cert:DBCert) { - return this.certDAL.saveNewCertification(cert) + registerNewCertification(cert: DBCert) { + return this.certDAL.saveNewCertification(cert); } - saveTransaction(tx:DBTx) { - return this.txsDAL.addPending(tx) + saveTransaction(tx: DBTx) { + return this.txsDAL.addPending(tx); } - async getTransactionsHistory(pubkey:string) { - const history:{ - sent: DBTx[] - received: DBTx[] - sending: DBTx[] - receiving: DBTx[] - pending: DBTx[] + async getTransactionsHistory(pubkey: string) { + const history: { + sent: DBTx[]; + received: DBTx[]; + sending: DBTx[]; + receiving: DBTx[]; + pending: DBTx[]; } = { sent: [], received: [], sending: [], receiving: [], - pending: [] + pending: [], }; const res = await Promise.all([ this.txsDAL.getLinkedWithIssuer(pubkey), this.txsDAL.getLinkedWithRecipient(pubkey), this.txsDAL.getPendingWithIssuer(pubkey), - this.txsDAL.getPendingWithRecipient(pubkey) - ]) + this.txsDAL.getPendingWithRecipient(pubkey), + ]); history.sent = res[0] || []; history.received = res[1] || []; history.sending = res[2] || []; @@ -1136,32 +1396,34 @@ export class FileDAL implements ServerDAO { return history; } - async getUDHistory(pubkey:string): Promise<{ history: HttpUD[] }> { - const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey) + async getUDHistory(pubkey: string): Promise<{ history: HttpUD[] }> { + const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey); return { - history: (await Promise.all<HttpUD>(sources.map(async (src) => { - const block = await this.getBlockWeHaveItForSure(src.pos) - return { - block_number: src.pos, - time: block.medianTime, - consumed: src.consumed, - amount: src.amount, - base: src.base - } - }))) - } + history: await Promise.all<HttpUD>( + sources.map(async (src) => { + const block = await this.getBlockWeHaveItForSure(src.pos); + return { + block_number: src.pos, + time: block.medianTime, + consumed: src.consumed, + amount: src.amount, + base: src.base, + }; + }) + ), + }; } - savePeer(peer:DBPeer) { - return this.peerDAL.savePeer(peer) + savePeer(peer: DBPeer) { + return this.peerDAL.savePeer(peer); } - async getUniqueIssuersBetween(start:number, end:number) { - const current = (await this.blockDAL.getCurrent()) as DBBlock + async getUniqueIssuersBetween(start: number, end: number) { + const current = (await this.blockDAL.getCurrent()) as DBBlock; const firstBlock = Math.max(0, start); const lastBlock = Math.max(0, Math.min(current.number, end)); const blocks = await this.blockDAL.getBlocks(firstBlock, lastBlock); - return Underscore.uniq(blocks.map(b => b.issuer)) + return Underscore.uniq(blocks.map((b) => b.issuer)); } /** @@ -1170,11 +1432,11 @@ export class FileDAL implements ServerDAO { * @param end The ending entry (max. BINDEX length) * @param property If provided, transforms the range of entries into an array of the asked property. 
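   * e.g. `range(1, 10, "issuer")` would return only the `issuer` field of entries 1..10 (illustrative call).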
*/ - async range(start:number, end:number, property:string) { + async range(start: number, end: number, property: string) { const range = await this.bindexDAL.range(start, end); if (property) { // Filter on a particular property - return range.map((b:any) => b[property]); + return range.map((b: any) => b[property]); } else { return range; } @@ -1184,8 +1446,8 @@ export class FileDAL implements ServerDAO { * Get the last `n`th entry from the BINDEX. * @param n The entry number (min. 1). */ - head(n:number) { - return this.bindexDAL.head(n) + head(n: number) { + return this.bindexDAL.head(n); } /*********************** @@ -1193,28 +1455,29 @@ export class FileDAL implements ServerDAO { **********************/ getParameters() { - return this.confDAL.getParameters() + return this.confDAL.getParameters(); } - async loadConf(overrideConf:ConfDTO, defaultConf = false) { + async loadConf(overrideConf: ConfDTO, defaultConf = false) { let conf = ConfDTO.complete(overrideConf || {}); if (!defaultConf) { - const savedConf = await this.confDAL.loadConf() - conf = Underscore.extend(savedConf, overrideConf || {}) - if (overrideConf.proxiesConf !== undefined) {} else { + const savedConf = await this.confDAL.loadConf(); + conf = Underscore.extend(savedConf, overrideConf || {}); + if (overrideConf.proxiesConf !== undefined) { + } else { } } if (this.loadConfHook) { - await this.loadConfHook(conf) + await this.loadConfHook(conf); } return conf; } - async saveConf(confToSave:ConfDTO) { + async saveConf(confToSave: ConfDTO) { // Save the conf in file let theConf = confToSave; if (this.saveConfHook) { - theConf = await this.saveConfHook(theConf) + theConf = await this.saveConfHook(theConf); } return this.confDAL.saveConf(theConf); } @@ -1223,16 +1486,16 @@ export class FileDAL implements ServerDAO { * WALLETS **********************/ - async getWallet(conditions:string) { - let wallet = await this.walletDAL.getWallet(conditions) + async getWallet(conditions: string) { + let wallet = await this.walletDAL.getWallet(conditions); if (!wallet) { - wallet = { conditions, balance: 0 } + wallet = { conditions, balance: 0 }; } - return wallet + return wallet; } - saveWallet(wallet:DBWallet) { - return this.walletDAL.saveWallet(wallet) + saveWallet(wallet: DBWallet) { + return this.walletDAL.saveWallet(wallet); } /*********************** @@ -1242,74 +1505,81 @@ export class FileDAL implements ServerDAO { getStat(name: StatName) { switch (name) { case "newcomers": - return this.blockDAL.findWithIdentities() + return this.blockDAL.findWithIdentities(); case "certs": - return this.blockDAL.findWithCertifications() + return this.blockDAL.findWithCertifications(); case "joiners": - return this.blockDAL.findWithJoiners() + return this.blockDAL.findWithJoiners(); case "actives": - return this.blockDAL.findWithActives() + return this.blockDAL.findWithActives(); case "leavers": - return this.blockDAL.findWithLeavers() + return this.blockDAL.findWithLeavers(); case "excluded": - return this.blockDAL.findWithExcluded() + return this.blockDAL.findWithExcluded(); case "revoked": - return this.blockDAL.findWithRevoked() + return this.blockDAL.findWithRevoked(); case "ud": - return this.blockDAL.findWithUD() + return this.blockDAL.findWithUD(); case "tx": - return this.blockDAL.findWithTXs() + return this.blockDAL.findWithTXs(); default: - throw DataErrors[DataErrors.WRONG_STAT_NAME] + throw DataErrors[DataErrors.WRONG_STAT_NAME]; } } async cleanCaches() { - await Underscore.values(this.newDals).map((dal:Initiable) => dal.cleanCache && 
dal.cleanCache()) + await Underscore.values(this.newDals).map( + (dal: Initiable) => dal.cleanCache && dal.cleanCache() + ); } async close() { - await Promise.all(Underscore.values(this.newDals).map(async (dal:Initiable) => { - dal.cleanCache() - await dal.close() - })) + await Promise.all( + Underscore.values(this.newDals).map(async (dal: Initiable) => { + dal.cleanCache(); + await dal.close(); + }) + ); await this.sqliteDriver.closeConnection(); } async resetPeers() { await this.peerDAL.removeAll(); - return await this.close() + return await this.close(); } - getLogContent(linesQuantity:number) { + getLogContent(linesQuantity: number) { return new Promise((resolve, reject) => { try { - let lines:string[] = [], i = 0; - const logPath = path.join(this.rootPath, 'duniter.log'); + let lines: string[] = [], + i = 0; + const logPath = path.join(this.rootPath, "duniter.log"); const readStream = fs.createReadStream(logPath); - readStream.on('error', (err:any) => reject(err)); + readStream.on("error", (err: any) => reject(err)); const lineReader = readline.createInterface({ - input: readStream + input: readStream, }); - lineReader.on('line', (line:string) => { + lineReader.on("line", (line: string) => { line = "\n" + line; lines.push(line); i++; if (i >= linesQuantity) lines.shift(); }); - lineReader.on('close', () => resolve(lines)); - lineReader.on('error', (err:any) => reject(err)); + lineReader.on("close", () => resolve(lines)); + lineReader.on("error", (err: any) => reject(err)); } catch (e) { reject(e); } - }) + }); } async findReceiversAbove(minsig: number) { - const receiversAbove:string[] = await this.cindexDAL.getReceiversAbove(minsig) - const members:IdentityForRequirements[] = [] + const receiversAbove: string[] = await this.cindexDAL.getReceiversAbove( + minsig + ); + const members: IdentityForRequirements[] = []; for (const r of receiversAbove) { - const i = await this.iindexDAL.getFullFromPubkey(r) + const i = await this.iindexDAL.getFullFromPubkey(r); members.push({ hash: i.hash || "", member: i.member || false, @@ -1320,39 +1590,58 @@ export class FileDAL implements ServerDAO { sig: i.sig || "", revocation_sig: "", revoked: false, - revoked_on: 0 - }) + revoked_on: 0, + }); } - return members + return members; } @MonitorFlushedIndex() async flushIndexes(indexes: IndexBatch) { if (indexes.mindex.length) { - await this.mindexDAL.insertBatch(indexes.mindex) + await this.mindexDAL.insertBatch(indexes.mindex); } if (indexes.iindex.length) { - await this.iindexDAL.insertBatch(indexes.iindex) + await this.iindexDAL.insertBatch(indexes.iindex); } - const sindex_txs = indexes.sindex.filter(s => s.srcType === 'T') + const sindex_txs = indexes.sindex.filter((s) => s.srcType === "T"); if (sindex_txs.length) { - await this.sindexDAL.insertBatch(sindex_txs) // We don't store dividends in SINDEX + await this.sindexDAL.insertBatch(sindex_txs); // We don't store dividends in SINDEX } - const sindex_uds = indexes.sindex.filter(s => s.srcType === 'D') + const sindex_uds = indexes.sindex.filter((s) => s.srcType === "D"); if (sindex_uds.length) { - await this.dividendDAL.consume(sindex_uds) + await this.dividendDAL.consume(sindex_uds); } if (indexes.cindex.length) { - await this.cindexDAL.insertBatch(indexes.cindex) + await this.cindexDAL.insertBatch(indexes.cindex); } } - async updateDividend(blockNumber: number, dividend: number|null, unitbase: number, local_iindex: IindexEntry[]): Promise<SimpleUdEntryForWallet[]> { + async updateDividend( + blockNumber: number, + dividend: number | null, + 
unitbase: number, + local_iindex: IindexEntry[] + ): Promise<SimpleUdEntryForWallet[]> { if (dividend) { - return this.dividendDAL.produceDividend(blockNumber, dividend, unitbase, local_iindex) + return this.dividendDAL.produceDividend( + blockNumber, + dividend, + unitbase, + local_iindex + ); } - return [] + return []; } } -export type StatName = 'newcomers'|'certs'|'joiners'|'actives'|'leavers'|'revoked'|'excluded'|'ud'|'tx' +export type StatName = + | "newcomers" + | "certs" + | "joiners" + | "actives" + | "leavers" + | "revoked" + | "excluded" + | "ud" + | "tx"; diff --git a/app/lib/dal/fileDALs/AbstractCFS.ts b/app/lib/dal/fileDALs/AbstractCFS.ts index 128a441af4ad55135ca8425267ff347a4388e713..b3ced8d5482ba086fce88c7e741321d584634811 100644 --- a/app/lib/dal/fileDALs/AbstractCFS.ts +++ b/app/lib/dal/fileDALs/AbstractCFS.ts @@ -11,21 +11,19 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CFSCore} from "./CFSCore"; -import {Initiable} from "../sqliteDAL/Initiable" -import {FileSystem} from "../../system/directory" -import {FileDAL} from "../fileDAL" +import { CFSCore } from "./CFSCore"; +import { Initiable } from "../sqliteDAL/Initiable"; +import { FileSystem } from "../../system/directory"; +import { FileDAL } from "../fileDAL"; export abstract class AbstractCFS extends Initiable { + public coreFS: CFSCore; + protected dal: FileDAL; - public coreFS:CFSCore - protected dal:FileDAL - - constructor(rootPath:string, qioFS:FileSystem) { - super() - this.coreFS = new CFSCore(rootPath, qioFS) + constructor(rootPath: string, qioFS: FileSystem) { + super(); + this.coreFS = new CFSCore(rootPath, qioFS); } - cleanCache() { - } + cleanCache() {} } diff --git a/app/lib/dal/fileDALs/CFSCore.ts b/app/lib/dal/fileDALs/CFSCore.ts index 019155abe5149360d5199937a4de0f9a48732919..c42bc9891acc3ab87f2087b6bc2199d9cf8e3dfe 100644 --- a/app/lib/dal/fileDALs/CFSCore.ts +++ b/app/lib/dal/fileDALs/CFSCore.ts @@ -13,28 +13,29 @@ "use strict"; -import {FileSystem} from "../../system/directory" -import {Underscore} from "../../common-libs/underscore" +import { FileSystem } from "../../system/directory"; +import { Underscore } from "../../common-libs/underscore"; -const path = require('path'); +const path = require("path"); const DEEP_WRITE = true; export class CFSCore { + private deletedFolder: string; + private deletionFolderPromise: Promise<any> | null; + private createDeletionFolder: () => Promise<any> | null; - private deletedFolder:string - private deletionFolderPromise: Promise<any> | null - private createDeletionFolder: () => Promise<any> | null - - constructor(private rootPath:string, private qfs:FileSystem) { - this.deletedFolder = path.join(rootPath, '.deleted') - this.deletionFolderPromise = null + constructor(private rootPath: string, private qfs: FileSystem) { + this.deletedFolder = path.join(rootPath, ".deleted"); + this.deletionFolderPromise = null; /** * Creates the deletion folder before effective deletion. * @returns {*|any|Promise<void>} Promise of creation. */ - this.createDeletionFolder = () => this.deletionFolderPromise || (this.deletionFolderPromise = this.makeTree('.deleted')) + this.createDeletionFolder = () => + this.deletionFolderPromise || + (this.deletionFolderPromise = this.makeTree(".deleted")); } /** @@ -42,16 +43,18 @@ export class CFSCore { * @param filePath Path to the file. * @returns {*} Promise for file content. 
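   * Resolves to `null` when a deletion marker exists under `.deleted/` (logical deletion) or when the read fails.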
   */
-  async read(filePath:string): Promise<string | null> {
+  async read(filePath: string): Promise<string | null> {
     try {
-      const isDeleted = await this.qfs.fsExists(path.join(this.deletedFolder, this.toRemoveFileName(filePath)));
+      const isDeleted = await this.qfs.fsExists(
+        path.join(this.deletedFolder, this.toRemoveFileName(filePath))
+      );
       if (isDeleted) {
         // A deleted file must be considered non-existent
         return null;
       }
       return await this.qfs.fsReadFile(path.join(this.rootPath, filePath));
     } catch (e) {
-      return null
+      return null;
     }
   }
 
@@ -60,16 +63,18 @@ export class CFSCore {
    * @param filePath Path to the file.
    * @returns {*} Promise of existence.
    */
-  async exists(filePath:string): Promise<boolean | null> {
+  async exists(filePath: string): Promise<boolean | null> {
     try {
-      const isDeleted = await this.qfs.fsExists(path.join(this.deletedFolder, this.toRemoveFileName(filePath)));
+      const isDeleted = await this.qfs.fsExists(
+        path.join(this.deletedFolder, this.toRemoveFileName(filePath))
+      );
       if (isDeleted) {
         // A deleted file must be considered non-existent
         return false;
       }
-      return await this.qfs.fsExists(path.join(this.rootPath, filePath))
+      return await this.qfs.fsExists(path.join(this.rootPath, filePath));
     } catch (e) {
-      return null
+      return null;
     }
   }
 
@@ -79,9 +84,10 @@ export class CFSCore {
    * @param ofPath Path of the folder to list.
    * @returns {*} Promise of file names.
    */
-  async list(ofPath:string): Promise<string[]> {
+  async list(ofPath: string): Promise<string[]> {
     const dirPath = path.normalize(ofPath);
-    let files: string[] = [], folder = path.join(this.rootPath, dirPath);
+    let files: string[] = [],
+      folder = path.join(this.rootPath, dirPath);
     const hasDir = await this.qfs.fsExists(folder);
     if (hasDir) {
       files = files.concat(await this.qfs.fsList(folder));
@@ -89,13 +95,16 @@ export class CFSCore {
     const hasDeletedFiles = await this.qfs.fsExists(this.deletedFolder);
     if (hasDeletedFiles) {
       const deletedFiles = await this.qfs.fsList(this.deletedFolder);
-      const deletedOfThisPath = deletedFiles.filter((f:string) => f.match(new RegExp('^' + this.toRemoveDirName(dirPath))));
-      const locallyDeletedFiles = deletedOfThisPath.map((f:string) => f.replace(this.toRemoveDirName(dirPath), '')
-        .replace(/^__/, ''));
-      files = Underscore.difference(files, locallyDeletedFiles)
+      const deletedOfThisPath = deletedFiles.filter((f: string) =>
+        f.match(new RegExp("^" + this.toRemoveDirName(dirPath)))
+      );
+      const locallyDeletedFiles = deletedOfThisPath.map((f: string) =>
+        f.replace(this.toRemoveDirName(dirPath), "").replace(/^__/, "")
+      );
+      files = Underscore.difference(files, locallyDeletedFiles);
     }
-    return Underscore.uniq(files)
-  };
+    return Underscore.uniq(files);
+  }
 
   /**
    * WRITE operation of CFS. Writes the file in local Core.
    * @param content String content to write.
    * @param deep Whether to make a deep write or not.
    */
-  async write(filePath:string, content:string, deep:boolean): Promise<void> {
+  async write(filePath: string, content: string, deep: boolean): Promise<void> {
     return this.qfs.fsWrite(path.join(this.rootPath, filePath), content);
-  };
+  }
 
   /**
    * REMOVE operation of CFS. Sets the given file as removed. Logical deletion since physical won't work due to the algorithm of CFS.
    * @param deep Whether to remove the file in the root core or not.
    * @returns {*} Promise of removal.
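   * NB: the copy under the root core is physically unlinked via `fsUnlink` (see body below).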
   */
-  async remove(filePath:string, deep = false): Promise<void> {
+  async remove(filePath: string, deep = false): Promise<void> {
     // Make a deep physical deletion
     // Root core: physical deletion
     await this.qfs.fsUnlink(path.join(this.rootPath, filePath));
@@ -124,15 +133,15 @@ export class CFSCore {
    * @param filePath File to set as removed.
    * @returns {*} Promise of removal.
    */
-  removeDeep(filePath:string) {
-    return this.remove(filePath, DEEP_WRITE)
+  removeDeep(filePath: string) {
+    return this.remove(filePath, DEEP_WRITE);
   }
 
   /**
    * Create a directory tree.
    * @param treePath Tree path to create.
    */
-  async makeTree(treePath:string) {
+  async makeTree(treePath: string) {
     // Note: qfs.makeTree does not work on windows, so we implement it manually
     try {
       let normalized = path.normalize(treePath);
@@ -156,8 +165,8 @@ export class CFSCore {
    * @param content JSON content to stringify and write.
    * @param deep Whether to make a deep write or not.
    */
-  writeJSON(filePath:string, content:any, deep:boolean = false) {
-    return this.write(filePath, JSON.stringify(content, null, ' '), deep)
+  writeJSON(filePath: string, content: any, deep: boolean = false) {
+    return this.write(filePath, JSON.stringify(content, null, " "), deep);
   }
 
   /**
@@ -165,23 +174,23 @@
    * @param filePath File path.
    * @param content JSON content to stringify and write.
    */
-  writeJSONDeep(filePath:string, content:any) {
-    return this.writeJSON(filePath, content, DEEP_WRITE)
+  writeJSONDeep(filePath: string, content: any) {
+    return this.writeJSON(filePath, content, DEEP_WRITE);
   }
 
   /**
    * Read a file and parse its content as JSON.
    * @param filePath File to read.
    */
-  async readJSON(filePath:string) {
-    let data:any;
+  async readJSON(filePath: string) {
+    let data: any;
     try {
       data = await this.read(filePath);
       return JSON.parse(data);
-    } catch(err) {
+    } catch (err) {
       if (data && err.message.match(/^Unexpected token {/)) {
         // This is a bug thrown during Unit Tests with MEMORY_MODE true...
-        return JSON.parse(data.match(/^(.*)}{.*/)[1] + '}');
+        return JSON.parse(data.match(/^(.*)}{.*/)[1] + "}");
       } else if (err.message.match(/^Unexpected end of input/)) {
         // Could not read, return empty object
         return {};
@@ -195,16 +204,20 @@
    * @param dirPath Path to get the files' contents.
    * @param localLevel Whether to read only the local level or not.
    */
-  listJSON(dirPath:string) {
-    return this.list(dirPath).then(async (files) => Promise.all(files.map((f:string) => this.readJSON(path.join(dirPath, f)))))
+  listJSON(dirPath: string) {
+    return this.list(dirPath).then(async (files) =>
+      Promise.all(
+        files.map((f: string) => this.readJSON(path.join(dirPath, f)))
+      )
+    );
   }
 
   /**
    * Read contents of files at given LOCAL path and parse them as JSON.
    * @param dirPath Path to get the files' contents.
    */
-  listJSONLocal(dirPath:string) {
-    return this.listJSON(dirPath)
+  listJSONLocal(dirPath: string) {
+    return this.listJSON(dirPath);
   }
 
   /**
@@ -212,11 +225,11 @@
    * @param dirPath Directory path to normalize.
    * @returns {string|ng.ILocationService|XML} Normalized dir path.
    */
-  private toRemoveDirName(dirPath:string) {
+  private toRemoveDirName(dirPath: string) {
     if (!dirPath.match(/\/$/)) {
-      dirPath += '/';
+      dirPath += "/";
     }
-    return path.normalize(dirPath).replace(/\//g, '__').replace(/\\/g, '__');
+    return path.normalize(dirPath).replace(/\//g, "__").replace(/\\/g, "__");
   }
 
   /**
@@ -224,11 +237,11 @@
    * @param filePath Full path of the file, file name included.
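   * e.g. `a/b/c.json` is flattened to `a__b__c.json` (slashes become `__`; illustrative).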
* @returns {string|ng.ILocationService|XML} Normalized file name. */ - private toRemoveFileName(filePath:string) { - return path.normalize(filePath).replace(/\//g, '__').replace(/\\/g, '__'); + private toRemoveFileName(filePath: string) { + return path.normalize(filePath).replace(/\//g, "__").replace(/\\/g, "__"); } fsStreamTo(filename: string, iterator: IterableIterator<string>) { - return this.qfs.fsStreamTo(path.join(this.rootPath, filename), iterator) + return this.qfs.fsStreamTo(path.join(this.rootPath, filename), iterator); } } diff --git a/app/lib/dal/fileDALs/ConfDAL.ts b/app/lib/dal/fileDALs/ConfDAL.ts index 617c91aa1c2a9ebed60edc131f897e76a6d18f30..c4e95848694d96d456e6b46a825af10534d4b092 100644 --- a/app/lib/dal/fileDALs/ConfDAL.ts +++ b/app/lib/dal/fileDALs/ConfDAL.ts @@ -11,74 +11,71 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractCFS} from "./AbstractCFS" -import {ConfDTO} from "../../dto/ConfDTO" -import {CommonConstants} from "../../common-libs/constants"; -import {FileSystem} from "../../system/directory" -import {Underscore} from "../../common-libs/underscore" -import {ConfDAO} from "../indexDAL/abstract/ConfDAO" +import { AbstractCFS } from "./AbstractCFS"; +import { ConfDTO } from "../../dto/ConfDTO"; +import { CommonConstants } from "../../common-libs/constants"; +import { FileSystem } from "../../system/directory"; +import { Underscore } from "../../common-libs/underscore"; +import { ConfDAO } from "../indexDAL/abstract/ConfDAO"; export class ConfDAL extends AbstractCFS implements ConfDAO { + private logger: any; - private logger:any - - constructor(rootPath:string, qioFS:FileSystem) { - super(rootPath, qioFS) - this.logger = require('../../logger').NewLogger() + constructor(rootPath: string, qioFS: FileSystem) { + super(rootPath, qioFS); + this.logger = require("../../logger").NewLogger(); } - async init() { - } + async init() {} - async close() { - } + async close() {} async getParameters() { - const conf = await this.loadConf() + const conf = await this.loadConf(); return { - "currency": conf.currency, - "c": parseFloat(conf.c), - "dt": parseInt(conf.dt,10), - "ud0": parseInt(conf.ud0,10), - "sigPeriod": parseInt(conf.sigPeriod,10), - "sigStock": parseInt(conf.sigStock,10), - "sigWindow": parseInt(conf.sigWindow,10), - "sigValidity": parseInt(conf.sigValidity,10), - "sigQty": parseInt(conf.sigQty,10), - "sigReplay": parseInt(conf.sigReplay,10), - "idtyWindow": parseInt(conf.idtyWindow,10), - "msWindow": parseInt(conf.msWindow,10), - "msPeriod": parseInt(conf.msPeriod,10), - "xpercent": parseFloat(conf.xpercent), - "msValidity": parseInt(conf.msValidity,10), - "stepMax": parseInt(conf.stepMax,10), - "medianTimeBlocks": parseInt(conf.medianTimeBlocks,10), - "avgGenTime": parseInt(conf.avgGenTime,10), - "dtDiffEval": parseInt(conf.dtDiffEval,10), - "percentRot": parseFloat(conf.percentRot), - "udTime0": parseInt(conf.udTime0), - "udReevalTime0": parseInt(conf.udReevalTime0), - "dtReeval": parseInt(conf.dtReeval), - "switchOnHeadAdvance": CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS - } + currency: conf.currency, + c: parseFloat(conf.c), + dt: parseInt(conf.dt, 10), + ud0: parseInt(conf.ud0, 10), + sigPeriod: parseInt(conf.sigPeriod, 10), + sigStock: parseInt(conf.sigStock, 10), + sigWindow: parseInt(conf.sigWindow, 10), + sigValidity: parseInt(conf.sigValidity, 10), + sigQty: parseInt(conf.sigQty, 10), + sigReplay: parseInt(conf.sigReplay, 10), + idtyWindow: 
parseInt(conf.idtyWindow, 10), + msWindow: parseInt(conf.msWindow, 10), + msPeriod: parseInt(conf.msPeriod, 10), + xpercent: parseFloat(conf.xpercent), + msValidity: parseInt(conf.msValidity, 10), + stepMax: parseInt(conf.stepMax, 10), + medianTimeBlocks: parseInt(conf.medianTimeBlocks, 10), + avgGenTime: parseInt(conf.avgGenTime, 10), + dtDiffEval: parseInt(conf.dtDiffEval, 10), + percentRot: parseFloat(conf.percentRot), + udTime0: parseInt(conf.udTime0), + udReevalTime0: parseInt(conf.udReevalTime0), + dtReeval: parseInt(conf.dtReeval), + switchOnHeadAdvance: CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS, + }; } async readRawConfFile() { - return this.coreFS.read('conf.json') + return this.coreFS.read("conf.json"); } async loadConf() { - const data = await this.coreFS.readJSON('conf.json'); + const data = await this.coreFS.readJSON("conf.json"); if (data) { - return Underscore.extend(ConfDTO.defaultConf(), data) + return Underscore.extend(ConfDTO.defaultConf(), data); } else { // Silent error - this.logger.warn('No configuration loaded'); + this.logger.warn("No configuration loaded"); return {}; } } - async saveConf(confToSave:ConfDTO) { - await this.coreFS.writeJSONDeep('conf.json', confToSave) + async saveConf(confToSave: ConfDTO) { + await this.coreFS.writeJSONDeep("conf.json", confToSave); } } diff --git a/app/lib/dal/fileDALs/PowDAL.ts b/app/lib/dal/fileDALs/PowDAL.ts index 11df5b13af1e805e7d76a6e9935d8d8b30561861..dc034976efac5981c3372dea058f4ebb55eab5c6 100644 --- a/app/lib/dal/fileDALs/PowDAL.ts +++ b/app/lib/dal/fileDALs/PowDAL.ts @@ -11,29 +11,27 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractCFS} from "./AbstractCFS" -import {FileSystem} from "../../system/directory" +import { AbstractCFS } from "./AbstractCFS"; +import { FileSystem } from "../../system/directory"; export class PowDAL extends AbstractCFS { + private static POW_FILE = "pow.txt"; - private static POW_FILE = "pow.txt" - - constructor(rootPath:string, qioFS:FileSystem) { - super(rootPath, qioFS) + constructor(rootPath: string, qioFS: FileSystem) { + super(rootPath, qioFS); } init() { - return this.coreFS.remove(PowDAL.POW_FILE, false).catch(() => {}) + return this.coreFS.remove(PowDAL.POW_FILE, false).catch(() => {}); } - async close() { - } + async close() {} async getCurrent() { return await this.coreFS.read(PowDAL.POW_FILE); } - async writeCurrent(current:string) { + async writeCurrent(current: string) { await this.coreFS.write(PowDAL.POW_FILE, current, false); } } diff --git a/app/lib/dal/indexDAL/abstract/BIndexDAO.ts b/app/lib/dal/indexDAL/abstract/BIndexDAO.ts index 8ca8ff55a0f6b0d737a7000b8fcf07ea44ae551b..9989c774bbacbb606709911f0d04e68295b00fff 100644 --- a/app/lib/dal/indexDAL/abstract/BIndexDAO.ts +++ b/app/lib/dal/indexDAL/abstract/BIndexDAO.ts @@ -1,13 +1,12 @@ -import {GenericDAO} from "./GenericDAO" -import {DBHead} from "../../../db/DBHead" +import { GenericDAO } from "./GenericDAO"; +import { DBHead } from "../../../db/DBHead"; export interface BIndexDAO extends GenericDAO<DBHead> { + head(n: number): Promise<DBHead>; // TODO: possibly null? - head(n:number): Promise<DBHead> // TODO: possibly null? + tail(): Promise<DBHead>; // TODO: possibly null? - tail(): Promise<DBHead> // TODO: possibly null? 
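+  // NB: consumed by FileDAL.range(), which can additionally project a single property of each entry.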
+ range(n: number, m: number): Promise<DBHead[]>; - range(n:number, m:number): Promise<DBHead[]> - - trimBlocks(maxnumber:number): Promise<void> + trimBlocks(maxnumber: number): Promise<void>; } diff --git a/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts b/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts index 2e55efdeaddc465b7a91c18c521d28462af35f7f..cbc15a3ffb2fa43f576dd82772d6bfc2d906ff7f 100644 --- a/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts +++ b/app/lib/dal/indexDAL/abstract/BlockchainDAO.ts @@ -1,34 +1,33 @@ -import {GenericDAO} from "./GenericDAO" -import {DBBlock} from "../../../db/DBBlock" -import {ForksDAO} from "./software/ForksDAO" +import { GenericDAO } from "./GenericDAO"; +import { DBBlock } from "../../../db/DBBlock"; +import { ForksDAO } from "./software/ForksDAO"; export interface BlockchainDAO extends GenericDAO<DBBlock>, ForksDAO { + getCurrent(): Promise<DBBlock | null>; - getCurrent(): Promise<DBBlock|null> + getBlock(number: string | number): Promise<DBBlock | null>; - getBlock(number:string | number): Promise<DBBlock|null> + getAbsoluteBlock(number: number, hash: string): Promise<DBBlock | null>; - getAbsoluteBlock(number:number, hash:string): Promise<DBBlock|null> + saveBlock(block: DBBlock): Promise<DBBlock>; - saveBlock(block:DBBlock): Promise<DBBlock> + getBlocks(start: number, end: number): Promise<DBBlock[]>; - getBlocks(start:number, end:number): Promise<DBBlock[]> + lastBlockOfIssuer(issuer: string): Promise<DBBlock | null>; - lastBlockOfIssuer(issuer:string): Promise<DBBlock|null> + lastBlockWithDividend(): Promise<DBBlock | null>; - lastBlockWithDividend(): Promise<DBBlock|null> + getCountOfBlocksIssuedBy(issuer: string): Promise<number>; - getCountOfBlocksIssuedBy(issuer:string): Promise<number> + dropNonForkBlocksAbove(number: number): Promise<void>; - dropNonForkBlocksAbove(number: number): Promise<void> - - findWithIdentities(): Promise<number[]> - findWithCertifications(): Promise<number[]> - findWithJoiners(): Promise<number[]> - findWithActives(): Promise<number[]> - findWithLeavers(): Promise<number[]> - findWithExcluded(): Promise<number[]> - findWithRevoked(): Promise<number[]> - findWithUD(): Promise<number[]> - findWithTXs(): Promise<number[]> + findWithIdentities(): Promise<number[]>; + findWithCertifications(): Promise<number[]>; + findWithJoiners(): Promise<number[]>; + findWithActives(): Promise<number[]>; + findWithLeavers(): Promise<number[]>; + findWithExcluded(): Promise<number[]>; + findWithRevoked(): Promise<number[]>; + findWithUD(): Promise<number[]>; + findWithTXs(): Promise<number[]>; } diff --git a/app/lib/dal/indexDAL/abstract/CIndexDAO.ts b/app/lib/dal/indexDAL/abstract/CIndexDAO.ts index 2998c6b41f7b43cc8b9cb87235f45390c6206c23..f970270abe69b686564f2e02a2fcd74ec7802468 100644 --- a/app/lib/dal/indexDAL/abstract/CIndexDAO.ts +++ b/app/lib/dal/indexDAL/abstract/CIndexDAO.ts @@ -1,27 +1,40 @@ -import {CindexEntry, FullCindexEntry} from "../../../indexer" -import {ReduceableDAO} from "./ReduceableDAO" +import { CindexEntry, FullCindexEntry } from "../../../indexer"; +import { ReduceableDAO } from "./ReduceableDAO"; export interface CIndexDAO extends ReduceableDAO<CindexEntry> { + getValidLinksTo(receiver: string): Promise<CindexEntry[]>; - getValidLinksTo(receiver:string): Promise<CindexEntry[]> + getValidLinksFrom(issuer: string): Promise<CindexEntry[]>; - getValidLinksFrom(issuer:string): Promise<CindexEntry[]> + findExpiresOnLteNotExpiredYet(medianTime: number): Promise<CindexEntry[]>; - 
findExpiresOnLteNotExpiredYet(medianTime:number): Promise<CindexEntry[]> + findByIssuerAndReceiver( + issuer: string, + receiver: string + ): Promise<CindexEntry[]>; - findByIssuerAndReceiver(issuer: string, receiver: string): Promise<CindexEntry[]> + findByIssuerAndChainableOnGt( + issuer: string, + medianTime: number + ): Promise<CindexEntry[]>; - findByIssuerAndChainableOnGt(issuer: string, medianTime: number): Promise<CindexEntry[]> + findByReceiverAndExpiredOn( + pub: string, + expired_on: number + ): Promise<CindexEntry[]>; - findByReceiverAndExpiredOn(pub: string, expired_on: number): Promise<CindexEntry[]> + existsNonReplayableLink( + issuer: string, + receiver: string, + medianTime: number, + version: number + ): Promise<boolean>; - existsNonReplayableLink(issuer:string, receiver:string, medianTime: number, version: number): Promise<boolean> + getReceiversAbove(minsig: number): Promise<string[]>; - getReceiversAbove(minsig: number): Promise<string[]> + reducablesFrom(from: string): Promise<FullCindexEntry[]>; - reducablesFrom(from:string): Promise<FullCindexEntry[]> + trimExpiredCerts(belowNumber: number): Promise<void>; - trimExpiredCerts(belowNumber:number): Promise<void> - - findByIssuer(issuer: string): Promise<CindexEntry[]> + findByIssuer(issuer: string): Promise<CindexEntry[]>; } diff --git a/app/lib/dal/indexDAL/abstract/ConfDAO.ts b/app/lib/dal/indexDAL/abstract/ConfDAO.ts index 03fab78b9136145344f5b8f999cd088b3e970ca7..6f6fd527875e70b4aa94dae8ed01d1509d82916e 100644 --- a/app/lib/dal/indexDAL/abstract/ConfDAO.ts +++ b/app/lib/dal/indexDAL/abstract/ConfDAO.ts @@ -1,17 +1,16 @@ -import {ConfDTO, CurrencyConfDTO} from "../../../dto/ConfDTO" -import {Initiable} from "../../sqliteDAL/Initiable" +import { ConfDTO, CurrencyConfDTO } from "../../../dto/ConfDTO"; +import { Initiable } from "../../sqliteDAL/Initiable"; export interface ConfDAO extends Initiable { + init(): Promise<void>; - init(): Promise<void> + close(): Promise<void>; - close(): Promise<void> + getParameters(): Promise<CurrencyConfDTO>; - getParameters(): Promise<CurrencyConfDTO> + readRawConfFile(): Promise<string | null>; - readRawConfFile(): Promise<string|null> + loadConf(): Promise<ConfDTO | {}>; - loadConf(): Promise<ConfDTO|{}> - - saveConf(confToSave:ConfDTO): Promise<void> + saveConf(confToSave: ConfDTO): Promise<void>; } diff --git a/app/lib/dal/indexDAL/abstract/DividendDAO.ts b/app/lib/dal/indexDAL/abstract/DividendDAO.ts index fb8caf4c7bd4c49e60b4a56b3ab84b30673ca481..9065378892e7b663643dbc3d2e400a8380618f7a 100644 --- a/app/lib/dal/indexDAL/abstract/DividendDAO.ts +++ b/app/lib/dal/indexDAL/abstract/DividendDAO.ts @@ -1,56 +1,75 @@ -import {GenericDAO} from "./GenericDAO" -import {IindexEntry, SimpleTxInput, SimpleUdEntryForWallet, SindexEntry} from "../../../indexer" +import { GenericDAO } from "./GenericDAO"; +import { + IindexEntry, + SimpleTxInput, + SimpleUdEntryForWallet, + SindexEntry, +} from "../../../indexer"; export interface DividendEntry { - pub: string - member: boolean - availables: number[] - consumed: number[] + pub: string; + member: boolean; + availables: number[]; + consumed: number[]; consumedUDs: { - dividendNumber: number, - txHash: string, - txCreatedOn: string, - txLocktime: number, + dividendNumber: number; + txHash: string; + txCreatedOn: string; + txLocktime: number; dividend: { - amount: number, - base: number - } - }[] - dividends: { amount: number, base: number }[] + amount: number; + base: number; + }; + }[]; + dividends: { amount: number; base: number }[]; } export 
interface UDSource {
-  consumed: boolean
-  pos: number
-  amount: number
-  base: number
+  consumed: boolean;
+  pos: number;
+  amount: number;
+  base: number;
 }
 
 export interface DividendDAO extends GenericDAO<DividendEntry> {
+  setMember(member: boolean, pub: string): Promise<void>;
-  setMember(member: boolean, pub: string): Promise<void>
 
+  produceDividend(
+    blockNumber: number,
+    dividend: number,
+    unitbase: number,
+    local_iindex: IindexEntry[]
+  ): Promise<SimpleUdEntryForWallet[]>;
-  produceDividend(blockNumber: number, dividend: number, unitbase: number, local_iindex: IindexEntry[]): Promise<SimpleUdEntryForWallet[]>
 
+  getUDSources(pub: string): Promise<UDSource[]>;
-  getUDSources(pub: string): Promise<UDSource[]>
 
+  findUdSourceByIdentifierPosAmountBase(
+    identifier: string,
+    pos: number,
+    amount: number,
+    base: number
+  ): Promise<SimpleTxInput[]>;
-  findUdSourceByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number): Promise<SimpleTxInput[]>
 
+  getUDSource(identifier: string, pos: number): Promise<SimpleTxInput | null>;
-  getUDSource(identifier: string, pos: number): Promise<SimpleTxInput|null>
 
+  createMember(pub: string): Promise<void>;
-  createMember(pub: string): Promise<void>
 
+  consume(filter: SindexEntry[]): Promise<void>;
-  consume(filter: SindexEntry[]): Promise<void>
 
+  deleteMember(pub: string): Promise<void>;
-  deleteMember(pub: string): Promise<void>
 
+  getWrittenOnUDs(number: number): Promise<SimpleUdEntryForWallet[]>;
-  getWrittenOnUDs(number: number): Promise<SimpleUdEntryForWallet[]>
 
+  revertUDs(
+    number: number
+  ): Promise<{
+    createdUDsDestroyedByRevert: SimpleUdEntryForWallet[];
+    consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[];
+  }>;
-  revertUDs(number: number): Promise<{ createdUDsDestroyedByRevert: SimpleUdEntryForWallet[], consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] }>
 
+  findForDump(criterion: any): Promise<SindexEntry[]>;
-  findForDump(criterion: any): Promise<SindexEntry[]>
 
+  trimConsumedUDs(belowNumber: number): Promise<void>;
-  trimConsumedUDs(belowNumber:number): Promise<void>
-
-  listAll(): Promise<DividendEntry[]>
+  listAll(): Promise<DividendEntry[]>;
 }
diff --git a/app/lib/dal/indexDAL/abstract/GenericDAO.ts b/app/lib/dal/indexDAL/abstract/GenericDAO.ts
index f43907cd7eca776be7071593d7ea5884f22baf4a..b7df7ad288e7130df8bfee6d5f89bd0ffc7a9f69 100644
--- a/app/lib/dal/indexDAL/abstract/GenericDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/GenericDAO.ts
@@ -1,11 +1,10 @@
-import {Initiable} from "../../sqliteDAL/Initiable"
+import { Initiable } from "../../sqliteDAL/Initiable";
 
 export interface GenericDAO<T> extends Initiable {
-
   /**
    * Trigger the initialization of the DAO. Called when the underlying DB is ready.
    */
-  triggerInit(): void
+  triggerInit(): void;
 
   /**
    * Make a generic find with some ordering.
@@ -13,35 +12,38 @@ export interface GenericDAO<T> extends Initiable {
   * @param sort A LokiJS compound sort object.
   * @returns {Promise<any>} A set of records.
   */
-  findRawWithOrder(criterion: {
-    pub?: string
-  }, sort:((string|((string|boolean)[]))[])): Promise<T[]>
+  findRawWithOrder(
+    criterion: {
+      pub?: string;
+    },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<T[]>;
 
   /**
    * Make a single insert.
    * @param record The record to insert.
    */
-  insert(record:T): Promise<void>
+  insert(record: T): Promise<void>;
 
   /**
    * Make a batch insert.
   * @param records The records to insert as a batch.
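   * e.g. FileDAL.flushIndexes() relies on this to persist a whole block's index entries in one call.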
*/ - insertBatch(records:T[]): Promise<void> + insertBatch(records: T[]): Promise<void>; /** * Get the set of records written on a particular blockstamp. * @param {string} blockstamp The blockstamp we want the records written at. * @returns {Promise<T[]>} The records (array). */ - getWrittenOn(blockstamp:string): Promise<T[]> + getWrittenOn(blockstamp: string): Promise<T[]>; /** * Remove all entries written at given `blockstamp`, if these entries are still in the index. * @param {string} blockstamp Blockstamp of the entries we want to remove. * @returns {Promise<void>} */ - removeBlock(blockstamp:string): Promise<void> + removeBlock(blockstamp: string): Promise<void>; - count(): Promise<number> + count(): Promise<number>; } diff --git a/app/lib/dal/indexDAL/abstract/IIndexDAO.ts b/app/lib/dal/indexDAL/abstract/IIndexDAO.ts index bec034fdf2ff650d4d13edcbb52beeae53529588..1c1a08cd8052ec1a003e478b563ea2ef5562dc15 100644 --- a/app/lib/dal/indexDAL/abstract/IIndexDAO.ts +++ b/app/lib/dal/indexDAL/abstract/IIndexDAO.ts @@ -1,30 +1,29 @@ -import {FullIindexEntry, IindexEntry} from "../../../indexer" -import {ReduceableDAO} from "./ReduceableDAO" -import {OldIindexEntry} from "../../../db/OldIindexEntry" +import { FullIindexEntry, IindexEntry } from "../../../indexer"; +import { ReduceableDAO } from "./ReduceableDAO"; +import { OldIindexEntry } from "../../../db/OldIindexEntry"; export interface IIndexDAO extends ReduceableDAO<IindexEntry> { + reducable(pub: string): Promise<IindexEntry[]>; - reducable(pub:string): Promise<IindexEntry[]> + findByPub(pub: string): Promise<IindexEntry[]>; - findByPub(pub:string): Promise<IindexEntry[]> + findByUid(uid: string): Promise<IindexEntry[]>; - findByUid(uid:string): Promise<IindexEntry[]> + getMembers(): Promise<{ pubkey: string; uid: string | null }[]>; - getMembers(): Promise<{ pubkey:string, uid:string|null }[]> + getFromPubkey(pub: string): Promise<FullIindexEntry | null>; - getFromPubkey(pub:string): Promise<FullIindexEntry|null> + getFromUID(uid: string): Promise<FullIindexEntry | null>; - getFromUID(uid:string): Promise<FullIindexEntry|null> + getFromPubkeyOrUid(search: string): Promise<FullIindexEntry | null>; - getFromPubkeyOrUid(search:string): Promise<FullIindexEntry|null> + searchThoseMatching(search: string): Promise<OldIindexEntry[]>; - searchThoseMatching(search:string): Promise<OldIindexEntry[]> + getFullFromUID(uid: string): Promise<FullIindexEntry>; - getFullFromUID(uid:string): Promise<FullIindexEntry> + getFullFromPubkey(pub: string): Promise<FullIindexEntry>; - getFullFromPubkey(pub:string): Promise<FullIindexEntry> + getFullFromHash(hash: string): Promise<FullIindexEntry | null>; - getFullFromHash(hash:string): Promise<FullIindexEntry|null> - - getToBeKickedPubkeys(): Promise<string[]> + getToBeKickedPubkeys(): Promise<string[]>; } diff --git a/app/lib/dal/indexDAL/abstract/MIndexDAO.ts b/app/lib/dal/indexDAL/abstract/MIndexDAO.ts index 2f79c3850fb6a91d028fa1124a75305ff89b1881..2980a180ed5ac5442dc69413dabcac09bed03bb5 100644 --- a/app/lib/dal/indexDAL/abstract/MIndexDAO.ts +++ b/app/lib/dal/indexDAL/abstract/MIndexDAO.ts @@ -1,21 +1,27 @@ -import {FullMindexEntry, MindexEntry} from "../../../indexer" -import {ReduceableDAO} from "./ReduceableDAO" +import { FullMindexEntry, MindexEntry } from "../../../indexer"; +import { ReduceableDAO } from "./ReduceableDAO"; -export interface MIndexDAO extends ReduceableDAO<MindexEntry> { +export interface MIndexDAO extends ReduceableDAO<MindexEntry> { + reducable(pub: string): 
Promise<MindexEntry[]>; - reducable(pub:string): Promise<MindexEntry[]> + getRevokedPubkeys(): Promise<string[]>; - getRevokedPubkeys(): Promise<string[]> + findByPubAndChainableOnGt( + pub: string, + medianTime: number + ): Promise<MindexEntry[]>; - findByPubAndChainableOnGt(pub:string, medianTime:number): Promise<MindexEntry[]> + findRevokesOnLteAndRevokedOnIsNull(medianTime: number): Promise<string[]>; - findRevokesOnLteAndRevokedOnIsNull(medianTime:number): Promise<string[]> + findExpiresOnLteAndRevokesOnGt(medianTime: number): Promise<string[]>; - findExpiresOnLteAndRevokesOnGt(medianTime:number): Promise<string[]> + getReducedMS(pub: string): Promise<FullMindexEntry | null>; - getReducedMS(pub:string): Promise<FullMindexEntry|null> + findPubkeysThatShouldExpire( + medianTime: number + ): Promise<{ pub: string; created_on: string }[]>; - findPubkeysThatShouldExpire(medianTime:number): Promise<{ pub: string, created_on: string }[]> - - getReducedMSForImplicitRevocation(pub:string): Promise<FullMindexEntry|null> + getReducedMSForImplicitRevocation( + pub: string + ): Promise<FullMindexEntry | null>; } diff --git a/app/lib/dal/indexDAL/abstract/PeerDAO.ts b/app/lib/dal/indexDAL/abstract/PeerDAO.ts index 0ef09d6d8ed08c34d2776fe3b89cb375c8bdc83c..0432e2c15d551ab0944e7cd8e18aaec610915470 100644 --- a/app/lib/dal/indexDAL/abstract/PeerDAO.ts +++ b/app/lib/dal/indexDAL/abstract/PeerDAO.ts @@ -1,67 +1,66 @@ -import {DBPeer} from "../../../db/DBPeer" -import {Initiable} from "../../sqliteDAL/Initiable" +import { DBPeer } from "../../../db/DBPeer"; +import { Initiable } from "../../sqliteDAL/Initiable"; export interface PeerDAO extends Initiable { - /** * Trigger the initialization of the DAO. Called when the underlying DB is ready. */ - triggerInit(): void + triggerInit(): void; - listAll(): Promise<DBPeer[]> + listAll(): Promise<DBPeer[]>; - withUPStatus(): Promise<DBPeer[]> + withUPStatus(): Promise<DBPeer[]>; /** * Saves a peer. * @param {DBPeer} peer * @returns {Promise<DBPeer>} */ - savePeer(peer:DBPeer): Promise<DBPeer> + savePeer(peer: DBPeer): Promise<DBPeer>; /** * Find a peer by its pubkey. * @param {string} pubkey * @returns {Promise<DBPeer>} */ - getPeer(pubkey:string): Promise<DBPeer> + getPeer(pubkey: string): Promise<DBPeer>; /** * Find all peers with at least one endpoint matching given parameter. * @param {string} ep * @returns {Promise<DBPeer[]>} */ - getPeersWithEndpointsLike(ep:string): Promise<DBPeer[]> + getPeersWithEndpointsLike(ep: string): Promise<DBPeer[]>; /** * Make a batch insert. * @param records The records to insert as a batch. */ - insertBatch(records:DBPeer[]): Promise<void> + insertBatch(records: DBPeer[]): Promise<void>; /** * Remove a peer by its pubkey. * @param {string} pubkey * @returns {Promise<void>} */ - removePeerByPubkey(pubkey:string): Promise<void> + removePeerByPubkey(pubkey: string): Promise<void>; /** * Remove all the peers. * @returns {Promise<void>} */ - removeAll(): Promise<void> + removeAll(): Promise<void>; /** * Count the number of non-WoT peers known in the DB. * @returns {Promise<number>} The number of non-WoT peers. */ - countNonWoTPeers(): Promise<number> + countNonWoTPeers(): Promise<number>; /** * Remove all **non-WoT** peers whose last contact is above the given time (timestamp in seconds). 
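 * (Illustrative only: a hypothetical cleanup job could compute a cutoff such as
 * `Math.floor(Date.now() / 1000) - 7 * 86400` and pass it as `threshold`; the
 * exact comparison against the peer's last contact is left to implementations.)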
* @param {number} threshold * @returns {Promise<void>} */ - deleteNonWotPeersWhoseLastContactIsAbove(threshold: number): Promise<void> + deleteNonWotPeersWhoseLastContactIsAbove(threshold: number): Promise<void>; } diff --git a/app/lib/dal/indexDAL/abstract/ReduceableDAO.ts b/app/lib/dal/indexDAL/abstract/ReduceableDAO.ts index 5eaf55f7bb066381b9596b739f856c416b3def89..0dcc731169380a88bcbf2fdfc7753363981e33c3 100644 --- a/app/lib/dal/indexDAL/abstract/ReduceableDAO.ts +++ b/app/lib/dal/indexDAL/abstract/ReduceableDAO.ts @@ -1,11 +1,10 @@ -import {GenericDAO} from "./GenericDAO" +import { GenericDAO } from "./GenericDAO"; export interface ReduceableDAO<T> extends GenericDAO<T> { - /** * Reduce all records sharing the same reduction key that were written before the given block number. * @param {number} belowNumber All records written strictly under `belowNumber` have to be reduced on the reduction key. * @returns {Promise<void>} */ - trimRecords(belowNumber:number): Promise<void> + trimRecords(belowNumber: number): Promise<void>; } diff --git a/app/lib/dal/indexDAL/abstract/SIndexDAO.ts b/app/lib/dal/indexDAL/abstract/SIndexDAO.ts index 2a4e3f729cc704293264d5370b52c683ae2003b4..ee312da7d0555dd4509c16f54b5538090627f594 100644 --- a/app/lib/dal/indexDAL/abstract/SIndexDAO.ts +++ b/app/lib/dal/indexDAL/abstract/SIndexDAO.ts @@ -1,28 +1,47 @@ -import {FullSindexEntry, SimpleTxEntryForWallet, SimpleTxInput, SindexEntry} from "../../../indexer" -import {ReduceableDAO} from "./ReduceableDAO" +import { + FullSindexEntry, + SimpleTxEntryForWallet, + SimpleTxInput, + SindexEntry, +} from "../../../indexer"; +import { ReduceableDAO } from "./ReduceableDAO"; export interface UDSource { - consumed: boolean - pos: number - amount: number - base: number + consumed: boolean; + pos: number; + amount: number; + base: number; } export interface SIndexDAO extends ReduceableDAO<SindexEntry> { + findTxSourceByIdentifierPosAmountBase( + identifier: string, + pos: number, + amount: number, + base: number + ): Promise<SimpleTxInput[]>; - findTxSourceByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number): Promise<SimpleTxInput[]> + getTxSource(identifier: string, pos: number): Promise<FullSindexEntry | null>; - getTxSource(identifier:string, pos:number): Promise<FullSindexEntry|null> + getAvailableForPubkey( + pubkey: string + ): Promise< + { + amount: number; + base: number; + conditions: string; + identifier: string; + pos: number; + }[] + >; - getAvailableForPubkey(pubkey:string): Promise<{ amount:number, base:number, conditions: string, identifier: string, pos: number }[]> + getAvailableForConditions(conditionsStr: string): Promise<SindexEntry[]>; - getAvailableForConditions(conditionsStr:string): Promise<SindexEntry[]> + trimConsumedSource(belowNumber: number): Promise<void>; - trimConsumedSource(belowNumber:number): Promise<void> + getWrittenOnTxs(blockstamp: string): Promise<SimpleTxEntryForWallet[]>; - getWrittenOnTxs(blockstamp: string): Promise<SimpleTxEntryForWallet[]> + findByIdentifier(identifier: string): Promise<SindexEntry[]>; - findByIdentifier(identifier: string): Promise<SindexEntry[]> - - findByPos(pos: number): Promise<SindexEntry[]> + findByPos(pos: number): Promise<SindexEntry[]>; } diff --git a/app/lib/dal/indexDAL/abstract/TxsDAO.ts b/app/lib/dal/indexDAL/abstract/TxsDAO.ts index ff626bb656591938a505fc8c8453dec6260ec521..3003b44582f2faa67b2461aa7c19b89d6720d02f 100644 --- a/app/lib/dal/indexDAL/abstract/TxsDAO.ts +++ b/app/lib/dal/indexDAL/abstract/TxsDAO.ts @@ -1,35 
+1,42 @@ -import {GenericDAO} from "./GenericDAO" -import {TransactionDTO} from "../../../dto/TransactionDTO" -import {SandBox} from "../../sqliteDAL/SandBox" -import {DBTx} from "../../../db/DBTx" +import { GenericDAO } from "./GenericDAO"; +import { TransactionDTO } from "../../../dto/TransactionDTO"; +import { SandBox } from "../../sqliteDAL/SandBox"; +import { DBTx } from "../../../db/DBTx"; export interface TxsDAO extends GenericDAO<DBTx> { + trimExpiredNonWrittenTxs(limitTime: number): Promise<void>; - trimExpiredNonWrittenTxs(limitTime:number): Promise<void> + getAllPending(versionMin: number): Promise<DBTx[]>; - getAllPending(versionMin:number): Promise<DBTx[]> + getTX(hash: string): Promise<DBTx>; - getTX(hash:string): Promise<DBTx> + addLinked( + tx: TransactionDTO, + block_number: number, + time: number + ): Promise<DBTx>; - addLinked(tx:TransactionDTO, block_number:number, time:number): Promise<DBTx> + addPending(dbTx: DBTx): Promise<DBTx>; - addPending(dbTx:DBTx): Promise<DBTx> + getLinkedWithIssuer(pubkey: string): Promise<DBTx[]>; - getLinkedWithIssuer(pubkey:string): Promise<DBTx[]> + getLinkedWithRecipient(pubkey: string): Promise<DBTx[]>; - getLinkedWithRecipient(pubkey:string): Promise<DBTx[]> + getPendingWithIssuer(pubkey: string): Promise<DBTx[]>; - getPendingWithIssuer(pubkey:string): Promise<DBTx[]> + getPendingWithRecipient(pubkey: string): Promise<DBTx[]>; - getPendingWithRecipient(pubkey:string): Promise<DBTx[]> + removeTX(hash: string): Promise<void>; - removeTX(hash:string): Promise<void> + removeAll(): Promise<void>; - removeAll(): Promise<void> + sandbox: SandBox<{ + issuers: string[]; + output_base: number; + output_amount: number; + }>; - sandbox:SandBox<{ issuers: string[], output_base:number, output_amount:number }> + getSandboxRoom(): Promise<number>; - getSandboxRoom(): Promise<number> - - setSandboxSize(size:number): void + setSandboxSize(size: number): void; } diff --git a/app/lib/dal/indexDAL/abstract/WalletDAO.ts b/app/lib/dal/indexDAL/abstract/WalletDAO.ts index 069c85d1120b1940e5520484982ff15ce4540f40..3e076f2ebcffe622680cf1654d32bbf0ee120d3d 100644 --- a/app/lib/dal/indexDAL/abstract/WalletDAO.ts +++ b/app/lib/dal/indexDAL/abstract/WalletDAO.ts @@ -1,36 +1,35 @@ -import {Initiable} from "../../sqliteDAL/Initiable" -import {DBWallet} from "../../../db/DBWallet" +import { Initiable } from "../../sqliteDAL/Initiable"; +import { DBWallet } from "../../../db/DBWallet"; export interface WalletDAO extends Initiable { - /** * Trigger the initialization of the DAO. Called when the underlying DB is ready. */ - triggerInit(): void + triggerInit(): void; /** * Saves a wallet. * @param {DBWallet} wallet * @returns {Promise<DBWallet>} */ - saveWallet(wallet:DBWallet): Promise<DBWallet> + saveWallet(wallet: DBWallet): Promise<DBWallet>; /** * Find a wallet based on conditions. * @param {string} conditions * @returns {Promise<DBWallet>} */ - getWallet(conditions:string): Promise<DBWallet|null> + getWallet(conditions: string): Promise<DBWallet | null>; /** * Make a batch insert. * @param records The records to insert as a batch. */ - insertBatch(records:DBWallet[]): Promise<void> + insertBatch(records: DBWallet[]): Promise<void>; /** * Lists all the wallets. 
* @returns {Promise<DBWallet[]>} */ - listAll(): Promise<DBWallet[]> + listAll(): Promise<DBWallet[]>; } diff --git a/app/lib/dal/indexDAL/abstract/software/ForksDAO.ts b/app/lib/dal/indexDAL/abstract/software/ForksDAO.ts index b7689eb87a2ff125b39026b171305b8496a764ac..27bb1ca2b3b4b29e874a622a2e7c01e2e1c9afed 100644 --- a/app/lib/dal/indexDAL/abstract/software/ForksDAO.ts +++ b/app/lib/dal/indexDAL/abstract/software/ForksDAO.ts @@ -11,17 +11,16 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {DBBlock} from "../../../../db/DBBlock" +import { DBBlock } from "../../../../db/DBBlock"; export interface ForksDAO { + saveSideBlock(block: DBBlock): Promise<DBBlock>; - saveSideBlock(block:DBBlock): Promise<DBBlock> + setSideBlock(number: number, previousBlock: DBBlock | null): Promise<void>; - setSideBlock(number:number, previousBlock:DBBlock|null): Promise<void> + getPotentialRoots(): Promise<DBBlock[]>; - getPotentialRoots(): Promise<DBBlock[]> - - getNextForkBlocks(number:number, hash:string): Promise<DBBlock[]> + getNextForkBlocks(number: number, hash: string): Promise<DBBlock[]>; /** * Find any fork block whose number is between `numberStart` and `maxNumber`, both included, and whose medianTime is @@ -30,9 +29,13 @@ export interface ForksDAO { * @param medianTimeStart * @param maxNumber */ - getPotentialForkBlocks(numberStart:number, medianTimeStart:number, maxNumber:number): Promise<DBBlock[]> + getPotentialForkBlocks( + numberStart: number, + medianTimeStart: number, + maxNumber: number + ): Promise<DBBlock[]>; - removeForkBlock(number:number): Promise<void> + removeForkBlock(number: number): Promise<void>; - removeForkBlockAboveOrEqual(number:number): Promise<void> + removeForkBlockAboveOrEqual(number: number): Promise<void>; } diff --git a/app/lib/dal/indexDAL/common/DividendDaoHandler.ts b/app/lib/dal/indexDAL/common/DividendDaoHandler.ts index 71649a379585a83c89d28ee1d8d7fd988d122812..5f55298ce4d2063b0a27273203f9979647685347 100644 --- a/app/lib/dal/indexDAL/common/DividendDaoHandler.ts +++ b/app/lib/dal/indexDAL/common/DividendDaoHandler.ts @@ -1,242 +1,283 @@ -import {DividendEntry, UDSource} from "../abstract/DividendDAO" -import {SimpleUdEntryForWallet, SindexEntry} from "../../../indexer" +import { DividendEntry, UDSource } from "../abstract/DividendDAO"; +import { SimpleUdEntryForWallet, SindexEntry } from "../../../indexer"; export class DividendDaoHandler { - static getNewDividendEntry(pub: string): DividendEntry { - return { pub, member: true, availables: [], dividends: [], consumed: [], consumedUDs: [] } + return { + pub, + member: true, + availables: [], + dividends: [], + consumed: [], + consumedUDs: [], + }; } - static produceDividend(r: DividendEntry, blockNumber: number, dividend: number, unitbase: number, dividends: SimpleUdEntryForWallet[] = []) { - r.availables.push(blockNumber) - r.dividends.push({ amount: dividend, base: unitbase }) + static produceDividend( + r: DividendEntry, + blockNumber: number, + dividend: number, + unitbase: number, + dividends: SimpleUdEntryForWallet[] = [] + ) { + r.availables.push(blockNumber); + r.dividends.push({ amount: dividend, base: unitbase }); dividends.push({ - srcType: 'D', + srcType: "D", amount: dividend, base: unitbase, - conditions: 'SIG(' + r.pub + ')', - op: 'CREATE', + conditions: "SIG(" + r.pub + ")", + op: "CREATE", identifier: r.pub, - pos: blockNumber - }) + pos: blockNumber, + }); } static consume(m: DividendEntry, dividendToConsume: 
SindexEntry) { - const index = m.availables.indexOf(dividendToConsume.pos) + const index = m.availables.indexOf(dividendToConsume.pos); // We add it to the consumption history - m.consumed.push(dividendToConsume.writtenOn) // `writtenOn` is the date (block#) of consumption + m.consumed.push(dividendToConsume.writtenOn); // `writtenOn` is the date (block#) of consumption m.consumedUDs.push({ dividendNumber: dividendToConsume.pos, dividend: m.dividends[index], txCreatedOn: dividendToConsume.created_on as string, txLocktime: dividendToConsume.locktime, txHash: dividendToConsume.tx as string, - }) + }); // We remove it from available dividends - m.availables.splice(index, 1) - m.dividends.splice(index, 1) + m.availables.splice(index, 1); + m.dividends.splice(index, 1); } static udSources(member: DividendEntry) { - return member.availables.map(pos => this.toUDSource(member, pos) as UDSource) + return member.availables.map( + (pos) => this.toUDSource(member, pos) as UDSource + ); } - private static toUDSource(entry: DividendEntry, pos: number): UDSource|null { - const index = entry.availables.indexOf(pos) + private static toUDSource( + entry: DividendEntry, + pos: number + ): UDSource | null { + const index = entry.availables.indexOf(pos); if (index === -1) { - return null + return null; } - const src = entry.dividends[index] + const src = entry.dividends[index]; return { consumed: false, pos, amount: src.amount, base: src.base, - } + }; } - static getUDSourceByIdPosAmountBase(member: DividendEntry|null, identifier: string, pos: number, amount: number, base: number) { - let src: UDSource|null = null + static getUDSourceByIdPosAmountBase( + member: DividendEntry | null, + identifier: string, + pos: number, + amount: number, + base: number + ) { + let src: UDSource | null = null; if (member) { - const udSrc = this.toUDSource(member, pos) + const udSrc = this.toUDSource(member, pos); if (udSrc && udSrc.amount === amount && udSrc.base === base) { - src = udSrc + src = udSrc; } } - return [{ - written_time: 0, - conditions: 'SIG(' + identifier + ')', - consumed: !src, - amount, - base - }] + return [ + { + written_time: 0, + conditions: "SIG(" + identifier + ")", + consumed: !src, + amount, + base, + }, + ]; } - static getUDSource(member: DividendEntry|null, identifier: string, pos: number) { - let src: UDSource|null = null + static getUDSource( + member: DividendEntry | null, + identifier: string, + pos: number + ) { + let src: UDSource | null = null; if (member) { - src = this.toUDSource(member, pos) + src = this.toUDSource(member, pos); } if (!src) { - return null + return null; } return { written_time: 0, - conditions: 'SIG(' + identifier + ')', + conditions: "SIG(" + identifier + ")", consumed: !src, amount: src.amount, - base: src.base - } + base: src.base, + }; } - static getWrittenOnUDs(m: DividendEntry, number: number, res: SimpleUdEntryForWallet[]) { - const s = this.toUDSource(m, number) as UDSource + static getWrittenOnUDs( + m: DividendEntry, + number: number, + res: SimpleUdEntryForWallet[] + ) { + const s = this.toUDSource(m, number) as UDSource; res.push({ - srcType: 'D', - op: 'CREATE', - conditions: 'SIG(' + m.pub + ')', + srcType: "D", + op: "CREATE", + conditions: "SIG(" + m.pub + ")", amount: s.amount, base: s.base, identifier: m.pub, - pos: s.pos - }) + pos: s.pos, + }); } - static removeDividendsProduced(m: DividendEntry, number: number, createdUDsDestroyedByRevert: SimpleUdEntryForWallet[]) { - const index = m.availables.indexOf(number) - const src = m.dividends[index] + 
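// Used by the revert logic: the UD produced at block `number` is removed from
// `availables`/`dividends` and reported through `createdUDsDestroyedByRevert`
// (as `SimpleUdEntryForWallet` entries, presumably for wallet-side bookkeeping).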
static removeDividendsProduced( + m: DividendEntry, + number: number, + createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] + ) { + const index = m.availables.indexOf(number); + const src = m.dividends[index]; createdUDsDestroyedByRevert.push({ - conditions: 'SIG(' + m.pub + ')', + conditions: "SIG(" + m.pub + ")", pos: number, identifier: m.pub, amount: src.amount, base: src.base, - srcType: 'D', - op: 'CREATE' - }) - m.availables.splice(index, 1) - m.dividends.splice(index, 1) + srcType: "D", + op: "CREATE", + }); + m.availables.splice(index, 1); + m.dividends.splice(index, 1); } - static unconsumeDividends(m: DividendEntry, number: number, consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[]) { + static unconsumeDividends( + m: DividendEntry, + number: number, + consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] + ) { let index; do { - index = m.consumed.indexOf(number) + index = m.consumed.indexOf(number); if (index !== -1) { - const src = m.consumedUDs[index].dividend + const src = m.consumedUDs[index].dividend; consumedUDsRecoveredByRevert.push({ - conditions: 'SIG(' + m.pub + ')', + conditions: "SIG(" + m.pub + ")", pos: m.consumedUDs[index].dividendNumber, identifier: m.pub, amount: src.amount, base: src.base, - srcType: 'D', - op: 'CREATE' - }) + srcType: "D", + op: "CREATE", + }); // We put it back as available - m.availables.push(m.consumedUDs[index].dividendNumber) - m.dividends.push(m.consumedUDs[index].dividend) + m.availables.push(m.consumedUDs[index].dividendNumber); + m.dividends.push(m.consumedUDs[index].dividend); // We remove it from consumed - m.consumed.splice(index, 1) - m.consumedUDs.splice(index, 1) + m.consumed.splice(index, 1); + m.consumedUDs.splice(index, 1); } } while (index !== -1); } static trimConsumed(m: DividendEntry, belowNumber: number) { - let updated = false + let updated = false; for (let i = 0; i < m.consumed.length; i++) { - const consumedBlockNumber = m.consumed[i] + const consumedBlockNumber = m.consumed[i]; if (consumedBlockNumber < belowNumber) { // We trim this entry as it can't be reverted now - m.consumed.splice(i, 1) - m.consumedUDs.splice(i, 1) - i-- // The array changed, we loop back before i++ - updated = true + m.consumed.splice(i, 1); + m.consumedUDs.splice(i, 1); + i--; // The array changed, we loop back before i++ + updated = true; } } - return updated + return updated; } static toDump(rows: DividendEntry[]) { - const entries: SindexEntry[] = [] + const entries: SindexEntry[] = []; for (const m of rows) { // Generate for unspent UDs for (let i = 0; i < m.availables.length; i++) { - const writtenOn = m.availables[i] - const ud = m.dividends[i] + const writtenOn = m.availables[i]; + const ud = m.dividends[i]; entries.push({ - op: 'CREATE', - index: 'SINDEX', - srcType: 'D', + op: "CREATE", + index: "SINDEX", + srcType: "D", tx: null, identifier: m.pub, writtenOn, pos: writtenOn, - created_on: 'NULL', // TODO - written_on: writtenOn + '', // TODO + created_on: "NULL", // TODO + written_on: writtenOn + "", // TODO written_time: 0, // TODO amount: ud.amount, base: ud.base, locktime: null as any, consumed: false, - conditions: 'SIG(' + m.pub + ')', + conditions: "SIG(" + m.pub + ")", unlock: null, txObj: null as any, // TODO age: 0, - }) + }); } // Generate for spent UDs for (let i = 0; i < m.consumed.length; i++) { - const writtenOn = m.consumed[i] - const ud = m.consumedUDs[i] + const writtenOn = m.consumed[i]; + const ud = m.consumedUDs[i]; entries.push({ - op: 'CREATE', - index: 'SINDEX', - srcType: 'D', + op: "CREATE", + 
index: "SINDEX", + srcType: "D", tx: null, identifier: m.pub, writtenOn: ud.dividendNumber, pos: ud.dividendNumber, - created_on: 'NULL', // TODO - written_on: writtenOn + '', // TODO + created_on: "NULL", // TODO + written_on: writtenOn + "", // TODO written_time: 0, // TODO amount: ud.dividend.amount, base: ud.dividend.base, locktime: null as any, consumed: false, - conditions: 'SIG(' + m.pub + ')', + conditions: "SIG(" + m.pub + ")", unlock: null, txObj: null as any, // TODO age: 0, - }) + }); entries.push({ - op: 'UPDATE', - index: 'SINDEX', - srcType: 'D', + op: "UPDATE", + index: "SINDEX", + srcType: "D", tx: ud.txHash, identifier: m.pub, writtenOn, pos: ud.dividendNumber, created_on: ud.txCreatedOn, - written_on: writtenOn + '', // TODO + written_on: writtenOn + "", // TODO written_time: 0, // TODO amount: ud.dividend.amount, base: ud.dividend.base, locktime: ud.txLocktime, consumed: true, - conditions: 'SIG(' + m.pub + ')', + conditions: "SIG(" + m.pub + ")", unlock: null, txObj: null as any, // TODO age: 0, - }) + }); } } - return entries + return entries; } } diff --git a/app/lib/dal/indexDAL/common/OldTransformer.ts b/app/lib/dal/indexDAL/common/OldTransformer.ts index 644396068347a3d6c7f14329f56094929a89a597..5d4ed13c831f25c7461eab1b6d9a323050c41e03 100644 --- a/app/lib/dal/indexDAL/common/OldTransformer.ts +++ b/app/lib/dal/indexDAL/common/OldTransformer.ts @@ -1,9 +1,8 @@ -import {IindexEntry, Indexer} from "../../../indexer" -import {OldIindexEntry} from "../../../db/OldIindexEntry" +import { IindexEntry, Indexer } from "../../../indexer"; +import { OldIindexEntry } from "../../../db/OldIindexEntry"; export const OldTransformers = { - - toOldIindexEntry(row:IindexEntry): OldIindexEntry { + toOldIindexEntry(row: IindexEntry): OldIindexEntry { // Old field return { pubkey: row.pub, @@ -22,14 +21,18 @@ export const OldTransformers = { index: row.index, op: row.op, writtenOn: row.writtenOn, - written_on: row.written_on - } + written_on: row.written_on, + }; }, - iindexEntityOrNull: async (reducable:IindexEntry[]): Promise<OldIindexEntry|null> => { + iindexEntityOrNull: async ( + reducable: IindexEntry[] + ): Promise<OldIindexEntry | null> => { if (reducable.length) { - return OldTransformers.toOldIindexEntry(Indexer.DUP_HELPERS.reduce(reducable)) + return OldTransformers.toOldIindexEntry( + Indexer.DUP_HELPERS.reduce(reducable) + ); } - return null - } -} + return null; + }, +}; diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBBindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBBindex.ts index 13fda53f8cfec02feaec87a8d728f93c8c7d20b2..a2f567380fe3068e4028b1f57de2c102546858b6 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBBindex.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBBindex.ts @@ -1,14 +1,13 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {BIndexDAO} from "../abstract/BIndexDAO" -import {DBHead} from "../../../db/DBHead" -import {Underscore} from "../../../common-libs/underscore" +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { BIndexDAO } from "../abstract/BIndexDAO"; +import { DBHead } from "../../../db/DBHead"; +import { Underscore } from "../../../common-libs/underscore"; export class LevelDBBindex extends LevelDBTable<DBHead> implements BIndexDAO { - - constructor(getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_bindex', 
getLevelDB) + constructor(getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_bindex", getLevelDB); } /** @@ -17,66 +16,81 @@ export class LevelDBBindex extends LevelDBTable<DBHead> implements BIndexDAO { @MonitorExecutionTime() async insert(record: DBHead): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: DBHead[]): Promise<void> { // Update the max headNumber - await this.batchInsertWithKeyComputing(records, r => LevelDBBindex.trimKey(r.number)) + await this.batchInsertWithKeyComputing(records, (r) => + LevelDBBindex.trimKey(r.number) + ); } - findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<DBHead[]> { - return this.findAllValues() + findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<DBHead[]> { + return this.findAllValues(); } async getWrittenOn(blockstamp: string): Promise<DBHead[]> { - return [(await this.get(LevelDBBindex.trimKey(parseInt(blockstamp))))] + return [await this.get(LevelDBBindex.trimKey(parseInt(blockstamp)))]; } async head(n: number): Promise<DBHead> { - return (await this.findAllValues({ - limit: n, - reverse: true - }))[n - 1] || null + return ( + ( + await this.findAllValues({ + limit: n, + reverse: true, + }) + )[n - 1] || null + ); } async range(n: number, m: number): Promise<DBHead[]> { - const head = await this.head(1) + const head = await this.head(1); if (!head) { - return [] + return []; } - const from = head.number - n + 2 - const to = head.number - m + const from = head.number - n + 2; + const to = head.number - m; return this.findAllValues({ gt: LevelDBBindex.trimKey(to), lt: LevelDBBindex.trimKey(from), - reverse: true - }) + reverse: true, + }); } async removeBlock(blockstamp: string): Promise<void> { - await this.del(LevelDBBindex.trimKey(parseInt(blockstamp))) + await this.del(LevelDBBindex.trimKey(parseInt(blockstamp))); } async tail(): Promise<DBHead> { - return (await this.findAllValues({ - limit: 1 - }))[0] || null + return ( + ( + await this.findAllValues({ + limit: 1, + }) + )[0] || null + ); } async trimBlocks(maxnumber: number): Promise<void> { - const tail = await this.tail() + const tail = await this.tail(); if (!tail) { - return + return; } - const from = Math.max(tail.number, 0) - await Promise.all(Underscore.range(from, maxnumber).map(async k => { - await this.del(LevelDBBindex.trimKey(k)) - })) + const from = Math.max(tail.number, 0); + await Promise.all( + Underscore.range(from, maxnumber).map(async (k) => { + await this.del(LevelDBBindex.trimKey(k)); + }) + ); } private static trimKey(number: number) { - return String(number).padStart(10, '0') + return String(number).padStart(10, "0"); } } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts b/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts index b177128c4c3345ee29fee350af979d7dbd0899ac..ffcefc1478acf1ac02caaf584798bf725819dfe3 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts @@ -1,62 +1,113 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {DBBlock} from "../../../db/DBBlock" -import {BlockchainDAO} from "../abstract/BlockchainDAO" -import {LevelIndexBlockIdentities} from "./indexers/block/LevelIndexBlockIdentities" -import {uniqFilter} from "../../../common-libs/array-filter" -import 
{LevelIndexBlockCertifications} from "./indexers/block/LevelIndexBlockCertifications" -import {LDBIndex_ALL, LevelIndexBlock} from "./indexers/block/LevelIndexBlock" -import {NewLogger} from "../../../logger" -import {LevelIndexBlockTX} from "./indexers/block/LevelIndexBlockTX" -import {LevelIndexBlockUD} from "./indexers/block/LevelIndexBlockUD" -import {LevelIndexBlockRevoked} from "./indexers/block/LevelIndexBlockRevoked" -import {LevelIndexBlockExcluded} from "./indexers/block/LevelIndexBlockExcluded" -import {LevelIndexBlockJoiners} from "./indexers/block/LevelIndexBlockJoiners" -import {LevelIndexBlockActives} from "./indexers/block/LevelIndexBlockActives" -import {LevelIndexBlockLeavers} from "./indexers/block/LevelIndexBlockLeavers" - -export class LevelDBBlockchain extends LevelDBTable<DBBlock> implements BlockchainDAO { - - private forks: LevelDBTable<DBBlock> - private indexOfIdentities: LevelIndexBlock - private indexOfCertifications: LevelIndexBlock - private indexOfJoiners: LevelIndexBlock - private indexOfActives: LevelIndexBlock - private indexOfLeavers: LevelIndexBlock - private indexOfExcluded: LevelIndexBlock - private indexOfRevoked: LevelIndexBlock - private indexOfDividends: LevelIndexBlock - private indexOfTransactions: LevelIndexBlock - private indexers: LevelIndexBlock[] = [] - - constructor(protected getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_blockchain', getLevelDB) +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { DBBlock } from "../../../db/DBBlock"; +import { BlockchainDAO } from "../abstract/BlockchainDAO"; +import { LevelIndexBlockIdentities } from "./indexers/block/LevelIndexBlockIdentities"; +import { uniqFilter } from "../../../common-libs/array-filter"; +import { LevelIndexBlockCertifications } from "./indexers/block/LevelIndexBlockCertifications"; +import { + LDBIndex_ALL, + LevelIndexBlock, +} from "./indexers/block/LevelIndexBlock"; +import { NewLogger } from "../../../logger"; +import { LevelIndexBlockTX } from "./indexers/block/LevelIndexBlockTX"; +import { LevelIndexBlockUD } from "./indexers/block/LevelIndexBlockUD"; +import { LevelIndexBlockRevoked } from "./indexers/block/LevelIndexBlockRevoked"; +import { LevelIndexBlockExcluded } from "./indexers/block/LevelIndexBlockExcluded"; +import { LevelIndexBlockJoiners } from "./indexers/block/LevelIndexBlockJoiners"; +import { LevelIndexBlockActives } from "./indexers/block/LevelIndexBlockActives"; +import { LevelIndexBlockLeavers } from "./indexers/block/LevelIndexBlockLeavers"; + +export class LevelDBBlockchain extends LevelDBTable<DBBlock> + implements BlockchainDAO { + private forks: LevelDBTable<DBBlock>; + private indexOfIdentities: LevelIndexBlock; + private indexOfCertifications: LevelIndexBlock; + private indexOfJoiners: LevelIndexBlock; + private indexOfActives: LevelIndexBlock; + private indexOfLeavers: LevelIndexBlock; + private indexOfExcluded: LevelIndexBlock; + private indexOfRevoked: LevelIndexBlock; + private indexOfDividends: LevelIndexBlock; + private indexOfTransactions: LevelIndexBlock; + private indexers: LevelIndexBlock[] = []; + + constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_blockchain", getLevelDB); } async init(): Promise<void> { - await super.init() + await super.init(); if (this.indexers.length === 0) { - this.forks = new LevelDBTable<DBBlock>('level_blockchain/forks', this.getLevelDB) - 
this.indexers.push(this.indexOfIdentities = new LevelIndexBlockIdentities('level_blockchain/idty', this.getLevelDB)) - this.indexers.push(this.indexOfCertifications = new LevelIndexBlockCertifications('level_blockchain/certs', this.getLevelDB)) - this.indexers.push(this.indexOfJoiners = new LevelIndexBlockJoiners('level_blockchain/joiners', this.getLevelDB)) - this.indexers.push(this.indexOfActives = new LevelIndexBlockActives('level_blockchain/actives', this.getLevelDB)) - this.indexers.push(this.indexOfLeavers = new LevelIndexBlockLeavers('level_blockchain/leavers', this.getLevelDB)) - this.indexers.push(this.indexOfExcluded = new LevelIndexBlockExcluded('level_blockchain/excluded', this.getLevelDB)) - this.indexers.push(this.indexOfRevoked = new LevelIndexBlockRevoked('level_blockchain/revoked', this.getLevelDB)) - this.indexers.push(this.indexOfDividends = new LevelIndexBlockUD('level_blockchain/dividends', this.getLevelDB)) - this.indexers.push(this.indexOfTransactions = new LevelIndexBlockTX('level_blockchain/transactions', this.getLevelDB)) + this.forks = new LevelDBTable<DBBlock>( + "level_blockchain/forks", + this.getLevelDB + ); + this.indexers.push( + (this.indexOfIdentities = new LevelIndexBlockIdentities( + "level_blockchain/idty", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfCertifications = new LevelIndexBlockCertifications( + "level_blockchain/certs", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfJoiners = new LevelIndexBlockJoiners( + "level_blockchain/joiners", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfActives = new LevelIndexBlockActives( + "level_blockchain/actives", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfLeavers = new LevelIndexBlockLeavers( + "level_blockchain/leavers", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfExcluded = new LevelIndexBlockExcluded( + "level_blockchain/excluded", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfRevoked = new LevelIndexBlockRevoked( + "level_blockchain/revoked", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfDividends = new LevelIndexBlockUD( + "level_blockchain/dividends", + this.getLevelDB + )) + ); + this.indexers.push( + (this.indexOfTransactions = new LevelIndexBlockTX( + "level_blockchain/transactions", + this.getLevelDB + )) + ); } - await this.forks.init() - NewLogger().debug(`Now open indexers...`) - await Promise.all(this.indexers.map(i => i.init())) + await this.forks.init(); + NewLogger().debug(`Now open indexers...`); + await Promise.all(this.indexers.map((i) => i.init())); } async close(): Promise<void> { - await super.close() - await this.forks.close() - await Promise.all(this.indexers.map(i => i.close())) + await super.close(); + await this.forks.close(); + await Promise.all(this.indexers.map((i) => i.close())); } /** @@ -65,229 +116,266 @@ export class LevelDBBlockchain extends LevelDBTable<DBBlock> implements Blockcha @MonitorExecutionTime() async insert(record: DBBlock): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: DBBlock[]): Promise<void> { // Indexation - await Promise.all(this.indexers.map(i => i.onInsert(records))) + await Promise.all(this.indexers.map((i) => i.onInsert(records))); // Update the max headNumber - await this.batchInsertWithKeyComputing(records, r => { - return LevelDBBlockchain.trimKey(r.number) - }) + await this.batchInsertWithKeyComputing(records, (r) => { + 
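// trimKey() zero-pads the block number to a fixed width, so LevelDB's
// lexicographic key ordering coincides with numeric block order.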
return LevelDBBlockchain.trimKey(r.number); + }); } async dropNonForkBlocksAbove(number: number): Promise<void> { - await this.applyAllKeyValue(async kv => { - // console.log(`DROPPING FORK ${kv.key}`) - return this.del(kv.key) - }, { - gt: LevelDBBlockchain.trimKey(number) - }) + await this.applyAllKeyValue( + async (kv) => { + // console.log(`DROPPING FORK ${kv.key}`) + return this.del(kv.key); + }, + { + gt: LevelDBBlockchain.trimKey(number), + } + ); } // Never used - async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<DBBlock[]> { - return [] - } - - async getAbsoluteBlock(number: number, hash: string): Promise<DBBlock | null> { - const block = await this.getBlock(number) + async findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<DBBlock[]> { + return []; + } + + async getAbsoluteBlock( + number: number, + hash: string + ): Promise<DBBlock | null> { + const block = await this.getBlock(number); if (block && block.hash === hash) { - return block + return block; } - const fork = await this.forks.getOrNull(LevelDBBlockchain.trimForkKey(number, hash)) + const fork = await this.forks.getOrNull( + LevelDBBlockchain.trimForkKey(number, hash) + ); if (!fork) { - return null + return null; } - fork.fork = true - return fork + fork.fork = true; + return fork; } getBlock(number: string | number): Promise<DBBlock | null> { - return this.getOrNull(LevelDBBlockchain.trimKey(parseInt(String(number)))) + return this.getOrNull(LevelDBBlockchain.trimKey(parseInt(String(number)))); } getBlocks(start: number, end: number): Promise<DBBlock[]> { return this.findAllValues({ gt: LevelDBBlockchain.trimKey(start - 1), - lt: LevelDBBlockchain.trimKey(end + 1) - }) + lt: LevelDBBlockchain.trimKey(end + 1), + }); } // Used by DuniterUI async getCountOfBlocksIssuedBy(issuer: string): Promise<number> { - let nb = 0 - await this.readAllKeyValue(kv => { + let nb = 0; + await this.readAllKeyValue((kv) => { if (kv.value.issuer === issuer) { - nb++ + nb++; } - }) - return nb + }); + return nb; } async getCurrent(): Promise<DBBlock | null> { - return (await this.findAllValues({ - limit: 1, - reverse: true - }))[0] + return ( + await this.findAllValues({ + limit: 1, + reverse: true, + }) + )[0]; } async getNextForkBlocks(number: number, hash: string): Promise<DBBlock[]> { - const potentialForks = await this.findBetween(this.forks, number + 1, number + 1) - return potentialForks.filter(f => f.previousHash === hash) - } - - - async getPotentialForkBlocks(numberStart: number, medianTimeStart: number, maxNumber: number): Promise<DBBlock[]> { - const potentialForks = await this.findBetween(this.forks, numberStart, maxNumber) - return potentialForks.filter(f => f.medianTime >= medianTimeStart) + const potentialForks = await this.findBetween( + this.forks, + number + 1, + number + 1 + ); + return potentialForks.filter((f) => f.previousHash === hash); + } + + async getPotentialForkBlocks( + numberStart: number, + medianTimeStart: number, + maxNumber: number + ): Promise<DBBlock[]> { + const potentialForks = await this.findBetween( + this.forks, + numberStart, + maxNumber + ); + return potentialForks.filter((f) => f.medianTime >= medianTimeStart); } getPotentialRoots(): Promise<DBBlock[]> { - return this.findBetween(this.forks, 0, 0) + return this.findBetween(this.forks, 0, 0); } // TODO: potentially never called? 
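// A blockstamp has the form `NUMBER-HASH`, so parseInt() below yields the
// block number; the result merges the fork blocks at that height with the
// main-chain block, when present.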
async getWrittenOn(blockstamp: string): Promise<DBBlock[]> { - const number = parseInt(blockstamp) - const blocks = await this.findBetween(this.forks, number, number) - const block = await this.getOrNull(LevelDBBlockchain.trimKey(parseInt(blockstamp))) - return block ? blocks.concat(block) : blocks + const number = parseInt(blockstamp); + const blocks = await this.findBetween(this.forks, number, number); + const block = await this.getOrNull( + LevelDBBlockchain.trimKey(parseInt(blockstamp)) + ); + return block ? blocks.concat(block) : blocks; } // TODO: Unused? potentially costly because of full scan async lastBlockOfIssuer(issuer: string): Promise<DBBlock | null> { - let theLast: DBBlock | null = null - await this.readAllKeyValue(kv => { + let theLast: DBBlock | null = null; + await this.readAllKeyValue((kv) => { if (!theLast && kv.value.issuer === issuer) { - theLast = kv.value + theLast = kv.value; } - }) - return theLast + }); + return theLast; } // TODO: Unused? potentially costly because of full scan async lastBlockWithDividend(): Promise<DBBlock | null> { - let theLast: DBBlock | null = null - await this.readAllKeyValue(kv => { + let theLast: DBBlock | null = null; + await this.readAllKeyValue((kv) => { if (!theLast && kv.value.dividend) { - theLast = kv.value + theLast = kv.value; } - }) - return theLast + }); + return theLast; } async removeBlock(blockstamp: string): Promise<void> { - await this.del(LevelDBBlockchain.trimKey(parseInt(blockstamp))) + await this.del(LevelDBBlockchain.trimKey(parseInt(blockstamp))); } async removeForkBlock(number: number): Promise<void> { - await this.forks.applyAllKeyValue(async kv => this.forks.del(kv.key), { + await this.forks.applyAllKeyValue(async (kv) => this.forks.del(kv.key), { gt: LevelDBBlockchain.trimKey(number - 1), - lt: LevelDBBlockchain.trimKey(number + 1) - }) + lt: LevelDBBlockchain.trimKey(number + 1), + }); } async removeForkBlockAboveOrEqual(number: number): Promise<void> { - await this.forks.applyAllKeyValue(async kv => this.forks.del(kv.key), { - gt: LevelDBBlockchain.trimKey(number - 1) - }) + await this.forks.applyAllKeyValue(async (kv) => this.forks.del(kv.key), { + gt: LevelDBBlockchain.trimKey(number - 1), + }); } async saveBlock(block: DBBlock): Promise<DBBlock> { // We add the new block into the legit blockchain - await this.insert(block) - block.fork = false + await this.insert(block); + block.fork = false; // We remove any fork version of this block - const forkKey = LevelDBBlockchain.trimForkKey(block.number, block.hash) + const forkKey = LevelDBBlockchain.trimForkKey(block.number, block.hash); if (await this.forks.getOrNull(forkKey)) { - await this.forks.del(forkKey) + await this.forks.del(forkKey); } // We return the saved block - return this.get(LevelDBBlockchain.trimKey(block.number)) + return this.get(LevelDBBlockchain.trimKey(block.number)); } async saveSideBlock(block: DBBlock): Promise<DBBlock> { - const k = LevelDBBlockchain.trimForkKey(block.number, block.hash) - block.fork = true - await this.forks.put(k, block) - return this.forks.get(k) - } - - async setSideBlock(number: number, previousBlock: DBBlock | null): Promise<void> { - const k = LevelDBBlockchain.trimKey(number) - const block = await this.get(k) - block.fork = true + const k = LevelDBBlockchain.trimForkKey(block.number, block.hash); + block.fork = true; + await this.forks.put(k, block); + return this.forks.get(k); + } + + async setSideBlock( + number: number, + previousBlock: DBBlock | null + ): Promise<void> { + const k = LevelDBBlockchain.trimKey(number); + const 
block = await this.get(k); + block.fork = true; // Indexation - await Promise.all(this.indexers.map(i => i.onRemove([block]))) - await this.del(k) - await this.forks.put(LevelDBBlockchain.trimForkKey(block.number, block.hash), block) - } - - async findBetween(db: LevelDBTable<DBBlock>, start: number, end: number): Promise<DBBlock[]> { + await Promise.all(this.indexers.map((i) => i.onRemove([block]))); + await this.del(k); + await this.forks.put( + LevelDBBlockchain.trimForkKey(block.number, block.hash), + block + ); + } + + async findBetween( + db: LevelDBTable<DBBlock>, + start: number, + end: number + ): Promise<DBBlock[]> { return await db.findAllValues({ gte: LevelDBBlockchain.trimKey(start), - lt: LevelDBBlockchain.trimKey(end + 1) - }) + lt: LevelDBBlockchain.trimKey(end + 1), + }); } async findWithIdentities(): Promise<number[]> { - return this.findIndexed(this.indexOfIdentities) + return this.findIndexed(this.indexOfIdentities); } async findWithCertifications(): Promise<number[]> { - return this.findIndexed(this.indexOfCertifications) + return this.findIndexed(this.indexOfCertifications); } async findWithJoiners(): Promise<number[]> { - return this.findIndexed(this.indexOfJoiners) + return this.findIndexed(this.indexOfJoiners); } async findWithActives(): Promise<number[]> { - return this.findIndexed(this.indexOfActives) + return this.findIndexed(this.indexOfActives); } async findWithLeavers(): Promise<number[]> { - return this.findIndexed(this.indexOfLeavers) + return this.findIndexed(this.indexOfLeavers); } async findWithExcluded(): Promise<number[]> { - return this.findIndexed(this.indexOfExcluded) + return this.findIndexed(this.indexOfExcluded); } async findWithRevoked(): Promise<number[]> { - return this.findIndexed(this.indexOfRevoked) + return this.findIndexed(this.indexOfRevoked); } async findWithUD(): Promise<number[]> { - return this.findIndexed(this.indexOfDividends) + return this.findIndexed(this.indexOfDividends); } async findWithTXs(): Promise<number[]> { - return this.findIndexed(this.indexOfTransactions) + return this.findIndexed(this.indexOfTransactions); } private async findIndexed(indexer: LevelIndexBlock): Promise<number[]> { - const found = await indexer.getOrNull(LDBIndex_ALL) + const found = await indexer.getOrNull(LDBIndex_ALL); if (!found) { // When the entry does not exist (may occur for 'ALL' key) - return [] + return []; } // Otherwise: return the records - return Promise.all(found - .reduce((all, some) => all.concat(some), [] as number[]) - .filter(uniqFilter) - .sort((b, a) => b - a) - ) + return Promise.all( + found + .reduce((all, some) => all.concat(some), [] as number[]) + .filter(uniqFilter) + .sort((b, a) => b - a) + ); } private static trimKey(number: number) { - return String(number).padStart(10, '0') + return String(number).padStart(10, "0"); } private static trimForkKey(number: number, hash: string) { - return `${String(number).padStart(10, '0')}-${hash}` + return `${String(number).padStart(10, "0")}-${hash}`; } } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBCindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBCindex.ts index cdc57532eb168939e6e87a2169a53466a29d4f53..bfa05669f9ea3eed182f1d5797a0e9eea91fd477 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBCindex.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBCindex.ts @@ -1,25 +1,31 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {CindexEntry, FullCindexEntry, Indexer, reduce, reduceBy} from "../../../indexer" -import {LevelUp} from 'levelup' -import 
{LevelDBTable} from "./LevelDBTable" -import {Underscore} from "../../../common-libs/underscore" -import {pint} from "../../../common-libs/pint" -import {CIndexDAO} from "../abstract/CIndexDAO" -import {reduceConcat} from "../../../common-libs/reduce" -import {AbstractIteratorOptions} from "abstract-leveldown" +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { + CindexEntry, + FullCindexEntry, + Indexer, + reduce, + reduceBy, +} from "../../../indexer"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { Underscore } from "../../../common-libs/underscore"; +import { pint } from "../../../common-libs/pint"; +import { CIndexDAO } from "../abstract/CIndexDAO"; +import { reduceConcat } from "../../../common-libs/reduce"; +import { AbstractIteratorOptions } from "abstract-leveldown"; export interface LevelDBCindexEntry { - received: string[] - issued: CindexEntry[] + received: string[]; + issued: CindexEntry[]; } -export class LevelDBCindex extends LevelDBTable<LevelDBCindexEntry> implements CIndexDAO { +export class LevelDBCindex extends LevelDBTable<LevelDBCindexEntry> + implements CIndexDAO { + private indexForExpiresOn: LevelDBTable<string[]>; + private indexForWrittenOn: LevelDBTable<string[]>; - private indexForExpiresOn: LevelDBTable<string[]> - private indexForWrittenOn: LevelDBTable<string[]> - - constructor(protected getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_cindex', getLevelDB) + constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_cindex", getLevelDB); } /** @@ -27,17 +33,23 @@ export class LevelDBCindex extends LevelDBTable<LevelDBCindexEntry> implements C */ async init(): Promise<void> { - await super.init() - this.indexForExpiresOn = new LevelDBTable<string[]>('level_cindex/expiresOn', this.getLevelDB) - this.indexForWrittenOn = new LevelDBTable<string[]>('level_cindex/writtenOn', this.getLevelDB) - await this.indexForExpiresOn.init() - await this.indexForWrittenOn.init() + await super.init(); + this.indexForExpiresOn = new LevelDBTable<string[]>( + "level_cindex/expiresOn", + this.getLevelDB + ); + this.indexForWrittenOn = new LevelDBTable<string[]>( + "level_cindex/writtenOn", + this.getLevelDB + ); + await this.indexForExpiresOn.init(); + await this.indexForWrittenOn.init(); } async close(): Promise<void> { - await super.close() - await this.indexForExpiresOn.close() - await this.indexForWrittenOn.close() + await super.close(); + await this.indexForExpiresOn.close(); + await this.indexForWrittenOn.close(); } /** @@ -46,227 +58,372 @@ export class LevelDBCindex extends LevelDBTable<LevelDBCindexEntry> implements C @MonitorExecutionTime() async insert(record: CindexEntry): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: CindexEntry[]): Promise<void> { for (const r of records) { - const existingIssuer = await this.getOrNull(r.issuer) - const existingReceiver = await this.getOrNull(r.receiver) + const existingIssuer = await this.getOrNull(r.issuer); + const existingReceiver = await this.getOrNull(r.receiver); let newValue4Issuer = existingIssuer || { received: [], - issued: [] - } + issued: [], + }; let newValue4Receiver = existingReceiver || { received: [], - issued: [] - } - newValue4Issuer.issued.push(r) - if (!newValue4Receiver.received.includes(r.issuer) && r.op === 'CREATE') { - newValue4Receiver.received.push(r.issuer) + issued: [], + }; + 
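// Each certification is materialized twice: the full entry is appended under
// the issuer's key, while the receiver's key only accumulates the set of
// issuer pubkeys (see LevelDBCindexEntry above).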
newValue4Issuer.issued.push(r); + if (!newValue4Receiver.received.includes(r.issuer) && r.op === "CREATE") { + newValue4Receiver.received.push(r.issuer); } await Promise.all([ this.put(r.issuer, newValue4Issuer), - this.put(r.receiver, newValue4Receiver) - ]) + this.put(r.receiver, newValue4Receiver), + ]); } - await this.indexRecords(records) + await this.indexRecords(records); } - /** * Reduceable DAO */ async trimRecords(belowNumber: number): Promise<void> { // Trim writtenOn: we remove from the index the blocks below `belowNumber`, and keep track of the deleted values - let issuers: string[] = Underscore.uniq((await this.indexForWrittenOn.deleteWhere({ lt: LevelDBCindex.trimWrittenOnKey(belowNumber) })) - .map(kv => kv.value) - .reduce(reduceConcat, [])) - // Trim expired certs that won't be rolled back + we remember the max value of expired_on that was trimmed - let maxExpired = 0 - issuers = Underscore.uniq(issuers) - await Promise.all(issuers.map(async issuer => { - const entry = await this.get(issuer) - const fullEntries = reduceBy(entry.issued, ['issuer', 'receiver']) - const toRemove: string[] = [] - // We remember the maximum value of expired_on, for efficient trimming search - fullEntries - .filter(f => f.expired_on && f.writtenOn < belowNumber) - .forEach(f => { - maxExpired = Math.max(maxExpired, f.expired_on) - // We must remove **all** the remaining entries for this issuer + receiver - entry.issued - .filter(e => e.issuer === f.issuer && e.receiver === f.receiver) - .forEach(e => toRemove.push(LevelDBCindex.trimFullKey(e.issuer, e.receiver, e.created_on))) + let issuers: string[] = Underscore.uniq( + ( + await this.indexForWrittenOn.deleteWhere({ + lt: LevelDBCindex.trimWrittenOnKey(belowNumber), }) - if (toRemove.length) { - // Trim the expired certs that won't be rolled back ever - entry.issued = entry.issued.filter(entry => !toRemove.includes(LevelDBCindex.trimFullKey(entry.issuer, entry.receiver, entry.created_on))) - await this.put(issuer, entry) - } - })) + ) + .map((kv) => kv.value) + .reduce(reduceConcat, []) + ); + // Trim expired certs that won't be rolled back + we remember the max value of expired_on that was trimmed + let maxExpired = 0; + issuers = Underscore.uniq(issuers); + await Promise.all( + issuers.map(async (issuer) => { + const entry = await this.get(issuer); + const fullEntries = reduceBy(entry.issued, ["issuer", "receiver"]); + const toRemove: string[] = []; + // We remember the maximum value of expired_on, for efficient trimming search + fullEntries + .filter((f) => f.expired_on && f.writtenOn < belowNumber) + .forEach((f) => { + maxExpired = Math.max(maxExpired, f.expired_on); + // We must remove **all** the remaining entries for this issuer + receiver + entry.issued + .filter((e) => e.issuer === f.issuer && e.receiver === f.receiver) + .forEach((e) => + toRemove.push( + LevelDBCindex.trimFullKey(e.issuer, e.receiver, e.created_on) + ) + ); + }); + if (toRemove.length) { + // Trim the expired certs that won't be rolled back ever + entry.issued = entry.issued.filter( + (entry) => + !toRemove.includes( + LevelDBCindex.trimFullKey( + entry.issuer, + entry.receiver, + entry.created_on + ) + ) + ); + await this.put(issuer, entry); + } + }) + ); // Finally, we trim the expiredOn index - await this.indexForExpiresOn.deleteWhere({ lte: LevelDBCindex.trimExpiredOnKey(maxExpired) }) + await this.indexForExpiresOn.deleteWhere({ + lte: LevelDBCindex.trimExpiredOnKey(maxExpired), + }); } /** * Generic DAO */ - async findRawWithOrder(criterion: { pub?: string }, 
sort: (string | (string | boolean)[])[]): Promise<CindexEntry[]> { - const rows: CindexEntry[] = (await this.findAllValues()).map(r => r.issued).reduce(reduceConcat, []) - return Underscore.sortBy(rows, r => LevelDBCindex.trimDumpSortKey(r.written_on, r.issuer, r.receiver)) + async findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<CindexEntry[]> { + const rows: CindexEntry[] = (await this.findAllValues()) + .map((r) => r.issued) + .reduce(reduceConcat, []); + return Underscore.sortBy(rows, (r) => + LevelDBCindex.trimDumpSortKey(r.written_on, r.issuer, r.receiver) + ); } async getWrittenOn(blockstamp: string): Promise<CindexEntry[]> { - const ids = (await this.indexForWrittenOn.getOrNull(LevelDBCindex.trimWrittenOnKey(pint(blockstamp)))) || [] - return (await Promise.all(ids.map(async id => (await this.get(id)).issued))).reduce(reduceConcat, []).filter(e => e.written_on === blockstamp) + const ids = + (await this.indexForWrittenOn.getOrNull( + LevelDBCindex.trimWrittenOnKey(pint(blockstamp)) + )) || []; + return ( + await Promise.all(ids.map(async (id) => (await this.get(id)).issued)) + ) + .reduce(reduceConcat, []) + .filter((e) => e.written_on === blockstamp); } async removeBlock(blockstamp: string): Promise<void> { - const writtenOn = pint(blockstamp) - const issuers = (await this.indexForWrittenOn.getOrNull(LevelDBCindex.trimWrittenOnKey(writtenOn))) || [] - const toRemove: CindexEntry[] = [] + const writtenOn = pint(blockstamp); + const issuers = + (await this.indexForWrittenOn.getOrNull( + LevelDBCindex.trimWrittenOnKey(writtenOn) + )) || []; + const toRemove: CindexEntry[] = []; for (const issuer of issuers) { // Remove the entries - const entry = await this.get(issuer) - const previousLength = entry.issued.length - entry.issued = entry.issued.filter(e => { - const shouldBeDeleted = e.written_on === blockstamp + const entry = await this.get(issuer); + const previousLength = entry.issued.length; + entry.issued = entry.issued.filter((e) => { + const shouldBeDeleted = e.written_on === blockstamp; if (shouldBeDeleted) { - toRemove.push(e) + toRemove.push(e); } - return !shouldBeDeleted - }) + return !shouldBeDeleted; + }); if (entry.issued.length !== previousLength) { // Update the entry - await this.put(issuer, entry) + await this.put(issuer, entry); } } // Remove the "received" arrays for (const e of toRemove) { - const receiver = await this.get(e.receiver) - const issuer = await this.get(e.issuer) - const certification = reduce(issuer.issued.filter(i => i.receiver === e.receiver)) + const receiver = await this.get(e.receiver); + const issuer = await this.get(e.issuer); + const certification = reduce( + issuer.issued.filter((i) => i.receiver === e.receiver) + ); // We remove ONLY IF no valid link still exists, i.e. we remove if the link **has expired** (we may be here because // of a certification replay before term that is being reverted ==> in such a case, even after the revert, the link // between issuer and receiver is still valid. So don't remove it. 
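// Hypothetical illustration: A certifies B at block 100 and replays the
// certification at block 200; reverting block 200 must keep A inside B's
// `received` array, because the block-100 link has not expired.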
if (certification.expired_on) { // Remove the certification - receiver.received = receiver.received.filter(issuer => issuer !== e.issuer) + receiver.received = receiver.received.filter( + (issuer) => issuer !== e.issuer + ); // Persist - await this.put(e.receiver, receiver) + await this.put(e.receiver, receiver); } } // Remove the expires_on index entries - const expires = Underscore.uniq(toRemove.filter(e => e.expires_on).map(e => e.expires_on)) - await Promise.all(expires.map(async e => this.indexForExpiresOn.del(LevelDBCindex.trimExpiredOnKey(e)))) + const expires = Underscore.uniq( + toRemove.filter((e) => e.expires_on).map((e) => e.expires_on) + ); + await Promise.all( + expires.map(async (e) => + this.indexForExpiresOn.del(LevelDBCindex.trimExpiredOnKey(e)) + ) + ); } private static trimExpiredOnKey(writtenOn: number) { - return String(writtenOn).padStart(10, '0') + return String(writtenOn).padStart(10, "0"); } private static trimWrittenOnKey(writtenOn: number) { - return String(writtenOn).padStart(10, '0') + return String(writtenOn).padStart(10, "0"); } - private static trimFullKey(issuer: string, receiver: string, created_on: number) { - return `${issuer}-${receiver}-${String(created_on).padStart(10, '0')}` + private static trimFullKey( + issuer: string, + receiver: string, + created_on: number + ) { + return `${issuer}-${receiver}-${String(created_on).padStart(10, "0")}`; } - private static trimDumpSortKey(written_on: string, issuer: string, receiver: string) { - return `${written_on.padStart(100, '0')}-${issuer}-${receiver}` + private static trimDumpSortKey( + written_on: string, + issuer: string, + receiver: string + ) { + return `${written_on.padStart(100, "0")}-${issuer}-${receiver}`; } private async indexRecords(records: CindexEntry[]) { - const byExpiresOn: { [k: number]: CindexEntry[] } = {} - const byWrittenOn: { [k: number]: CindexEntry[] } = {} + const byExpiresOn: { [k: number]: CindexEntry[] } = {}; + const byWrittenOn: { [k: number]: CindexEntry[] } = {}; records - .filter(r => r.expires_on) - .forEach(r => (byExpiresOn[r.expires_on] || (byExpiresOn[r.expires_on] = [])).push(r)) - records - .forEach(r => (byWrittenOn[r.writtenOn] || (byWrittenOn[r.writtenOn] = [])).push(r)) + .filter((r) => r.expires_on) + .forEach((r) => + (byExpiresOn[r.expires_on] || (byExpiresOn[r.expires_on] = [])).push(r) + ); + records.forEach((r) => + (byWrittenOn[r.writtenOn] || (byWrittenOn[r.writtenOn] = [])).push(r) + ); // Index expires_on => issuers for (const k of Underscore.keys(byExpiresOn)) { - const issuers: string[] = ((await this.indexForExpiresOn.getOrNull(LevelDBCindex.trimExpiredOnKey(k))) || []) - .concat(byExpiresOn[k].map(r => r.issuer)) - await this.indexForExpiresOn.put(LevelDBCindex.trimExpiredOnKey(k), issuers) + const issuers: string[] = ( + (await this.indexForExpiresOn.getOrNull( + LevelDBCindex.trimExpiredOnKey(k) + )) || [] + ).concat(byExpiresOn[k].map((r) => r.issuer)); + await this.indexForExpiresOn.put( + LevelDBCindex.trimExpiredOnKey(k), + issuers + ); } // Index writtenOn => issuers for (const k of Underscore.keys(byWrittenOn)) { - await this.indexForWrittenOn.put(LevelDBCindex.trimWrittenOnKey(k), byWrittenOn[k].map(r => r.issuer)) + await this.indexForWrittenOn.put( + LevelDBCindex.trimWrittenOnKey(k), + byWrittenOn[k].map((r) => r.issuer) + ); } } - async existsNonReplayableLink(issuer: string, receiver: string, medianTime: number, version: number): Promise<boolean> { - const entries = await this.findByIssuer(issuer) - const reduced = 
Indexer.DUP_HELPERS.reduceBy(entries, ['issuer', 'receiver']) - return reduced.filter(e => e.receiver === receiver && (version <= 10 || e.replayable_on >= medianTime)).length > 0 + async existsNonReplayableLink( + issuer: string, + receiver: string, + medianTime: number, + version: number + ): Promise<boolean> { + const entries = await this.findByIssuer(issuer); + const reduced = Indexer.DUP_HELPERS.reduceBy(entries, [ + "issuer", + "receiver", + ]); + return ( + reduced.filter( + (e) => + e.receiver === receiver && + (version <= 10 || e.replayable_on >= medianTime) + ).length > 0 + ); } async findByIssuer(issuer: string): Promise<CindexEntry[]> { - return (await this.getOrNull(issuer) || { issued: [], received: [] }).issued + return ((await this.getOrNull(issuer)) || { issued: [], received: [] }) + .issued; } - async findByIssuerAndChainableOnGt(issuer: string, medianTime: number): Promise<CindexEntry[]> { - return (await this.findByIssuer(issuer)).filter(e => e.chainable_on > medianTime) + async findByIssuerAndChainableOnGt( + issuer: string, + medianTime: number + ): Promise<CindexEntry[]> { + return (await this.findByIssuer(issuer)).filter( + (e) => e.chainable_on > medianTime + ); } - async findByIssuerAndReceiver(issuer: string, receiver: string): Promise<CindexEntry[]> { - return (await this.findByIssuer(issuer)).filter(e => e.receiver === receiver) + async findByIssuerAndReceiver( + issuer: string, + receiver: string + ): Promise<CindexEntry[]> { + return (await this.findByIssuer(issuer)).filter( + (e) => e.receiver === receiver + ); } - async findByReceiverAndExpiredOn(pub: string, expired_on: number): Promise<CindexEntry[]> { - const receiver = (await this.getOrNull(pub)) || { issued: [], received: [] } - const issuers = receiver.received - return (await Promise.all(issuers.map(async issuer => { - const fullEntries = Indexer.DUP_HELPERS.reduceBy((await this.get(issuer)).issued, ['issuer', 'receiver']) - return fullEntries.filter(e => e.receiver === pub && e.expired_on === 0) - }))).reduce(reduceConcat, []) + async findByReceiverAndExpiredOn( + pub: string, + expired_on: number + ): Promise<CindexEntry[]> { + const receiver = (await this.getOrNull(pub)) || { + issued: [], + received: [], + }; + const issuers = receiver.received; + return ( + await Promise.all( + issuers.map(async (issuer) => { + const fullEntries = Indexer.DUP_HELPERS.reduceBy( + (await this.get(issuer)).issued, + ["issuer", "receiver"] + ); + return fullEntries.filter( + (e) => e.receiver === pub && e.expired_on === 0 + ); + }) + ) + ).reduce(reduceConcat, []); } - async findExpiresOnLteNotExpiredYet(medianTime: number): Promise<CindexEntry[]> { - const issuers: string[] = Underscore.uniq((await this.indexForExpiresOn.findAllValues({ lte: LevelDBCindex.trimExpiredOnKey(medianTime) })).reduce(reduceConcat, [])) - return (await Promise.all(issuers.map(async issuer => { - const fullEntries = Indexer.DUP_HELPERS.reduceBy((await this.get(issuer)).issued, ['issuer', 'receiver']) - return fullEntries.filter(e => e.expires_on <= medianTime && !e.expired_on) - }))).reduce(reduceConcat, []) + async findExpiresOnLteNotExpiredYet( + medianTime: number + ): Promise<CindexEntry[]> { + const issuers: string[] = Underscore.uniq( + ( + await this.indexForExpiresOn.findAllValues({ + lte: LevelDBCindex.trimExpiredOnKey(medianTime), + }) + ).reduce(reduceConcat, []) + ); + return ( + await Promise.all( + issuers.map(async (issuer) => { + const fullEntries = Indexer.DUP_HELPERS.reduceBy( + (await this.get(issuer)).issued, + 
["issuer", "receiver"] + ); + return fullEntries.filter( + (e) => e.expires_on <= medianTime && !e.expired_on + ); + }) + ) + ).reduce(reduceConcat, []); } async getReceiversAbove(minsig: number): Promise<string[]> { - return this.findWhereTransform(i => i.received.length >= minsig, i => i.key) + return this.findWhereTransform( + (i) => i.received.length >= minsig, + (i) => i.key + ); } async getValidLinksFrom(issuer: string): Promise<CindexEntry[]> { - const fullEntries = Indexer.DUP_HELPERS.reduceBy(((await this.getOrNull(issuer)) || { issued: [] }).issued, ['issuer', 'receiver']) - return fullEntries.filter(e => !e.expired_on) + const fullEntries = Indexer.DUP_HELPERS.reduceBy( + ((await this.getOrNull(issuer)) || { issued: [] }).issued, + ["issuer", "receiver"] + ); + return fullEntries.filter((e) => !e.expired_on); } async getValidLinksTo(receiver: string): Promise<CindexEntry[]> { - const issuers: string[] = ((await this.getOrNull(receiver)) || { issued: [], received: [] }).received - return (await Promise.all(issuers.map(async issuer => { - const fullEntries = Indexer.DUP_HELPERS.reduceBy((await this.get(issuer)).issued, ['issuer', 'receiver']) - return fullEntries.filter(e => e.receiver === receiver && !e.expired_on) - }))).reduce(reduceConcat, []) + const issuers: string[] = ( + (await this.getOrNull(receiver)) || { issued: [], received: [] } + ).received; + return ( + await Promise.all( + issuers.map(async (issuer) => { + const fullEntries = Indexer.DUP_HELPERS.reduceBy( + (await this.get(issuer)).issued, + ["issuer", "receiver"] + ); + return fullEntries.filter( + (e) => e.receiver === receiver && !e.expired_on + ); + }) + ) + ).reduce(reduceConcat, []); } async reducablesFrom(from: string): Promise<FullCindexEntry[]> { - const entries = ((await this.getOrNull(from)) || { issued: [], received: [] }).issued - return Indexer.DUP_HELPERS.reduceBy(entries, ['issuer', 'receiver']) + const entries = ( + (await this.getOrNull(from)) || { issued: [], received: [] } + ).issued; + return Indexer.DUP_HELPERS.reduceBy(entries, ["issuer", "receiver"]); } trimExpiredCerts(belowNumber: number): Promise<void> { - return this.trimRecords(belowNumber) + return this.trimRecords(belowNumber); } async count(options?: AbstractIteratorOptions): Promise<number> { - let count = 0 - await this.readAllKeyValue(entry => { - count += entry.value.issued.length - }) - return count + let count = 0; + await this.readAllKeyValue((entry) => { + count += entry.value.issued.length; + }); + return count; } } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts b/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts index 6d6f4bebcff10cb45791d63480035b8e778ffa18..4d1afc8e8beab4867fc4c5287da5e7b156854a72 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts @@ -1,47 +1,52 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {DividendDAO, DividendEntry, UDSource} from "../abstract/DividendDAO" -import {IindexEntry, SimpleTxInput, SimpleUdEntryForWallet, SindexEntry} from "../../../indexer" -import {DividendDaoHandler} from "../common/DividendDaoHandler" -import {DataErrors} from "../../../common-libs/errors" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {Underscore} from "../../../common-libs/underscore" -import {AbstractIteratorOptions} from "abstract-leveldown" +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { DividendDAO, DividendEntry, UDSource } 
from "../abstract/DividendDAO"; +import { + IindexEntry, + SimpleTxInput, + SimpleUdEntryForWallet, + SindexEntry, +} from "../../../indexer"; +import { DividendDaoHandler } from "../common/DividendDaoHandler"; +import { DataErrors } from "../../../common-libs/errors"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { Underscore } from "../../../common-libs/underscore"; +import { AbstractIteratorOptions } from "abstract-leveldown"; interface Consumption { - writtenOn: number - pub: string + writtenOn: number; + pub: string; } -export class LevelDBDividend extends LevelDBTable<DividendEntry> implements DividendDAO { +export class LevelDBDividend extends LevelDBTable<DividendEntry> + implements DividendDAO { + private indexForTrimming: LevelDBTable<string[]>; + private hasIndexed = false; - private indexForTrimming: LevelDBTable<string[]> - private hasIndexed = false - - constructor(protected getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_dividend', getLevelDB) + constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_dividend", getLevelDB); } /** * TECHNICAL */ - cleanCache(): void { - } - - triggerInit(): void { - } + cleanCache(): void {} + triggerInit(): void {} async init(): Promise<void> { - await super.init() - this.indexForTrimming = new LevelDBTable<string[]>('level_dividend/level_dividend_trim_index', this.getLevelDB) - await this.indexForTrimming.init() + await super.init(); + this.indexForTrimming = new LevelDBTable<string[]>( + "level_dividend/level_dividend_trim_index", + this.getLevelDB + ); + await this.indexForTrimming.init(); } async close(): Promise<void> { - await super.close() - await this.indexForTrimming.close() + await super.close(); + await this.indexForTrimming.close(); } /** @@ -50,192 +55,261 @@ export class LevelDBDividend extends LevelDBTable<DividendEntry> implements Divi @MonitorExecutionTime() async insert(record: DividendEntry): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: DividendEntry[]): Promise<void> { - await this.batchInsert(records, 'pub') + await this.batchInsert(records, "pub"); } private async indexConsumptions(consumptions: Consumption[]) { // Index the operations by write date, for future trimming - const consumedPerWrittenOn: { [k: number]: string[] } = {} - consumptions.forEach(f => { + const consumedPerWrittenOn: { [k: number]: string[] } = {}; + consumptions.forEach((f) => { if (!consumedPerWrittenOn[f.writtenOn]) { - consumedPerWrittenOn[f.writtenOn] = [] + consumedPerWrittenOn[f.writtenOn] = []; } - consumedPerWrittenOn[f.writtenOn].push(f.pub) - }) - const writtenOns = Underscore.keys(consumedPerWrittenOn) - await Promise.all(writtenOns.map(async writtenOn => { - const existing: string[] = (await (this.indexForTrimming.getOrNull(LevelDBDividend.trimKey(writtenOn)))) || [] - const toBeStored = Underscore.uniq(existing.concat(consumedPerWrittenOn[writtenOn])) - await this.indexForTrimming.put(LevelDBDividend.trimKey(writtenOn), toBeStored) - })) + consumedPerWrittenOn[f.writtenOn].push(f.pub); + }); + const writtenOns = Underscore.keys(consumedPerWrittenOn); + await Promise.all( + writtenOns.map(async (writtenOn) => { + const existing: string[] = + (await this.indexForTrimming.getOrNull( + LevelDBDividend.trimKey(writtenOn) + )) || []; + const toBeStored = Underscore.uniq( + existing.concat(consumedPerWrittenOn[writtenOn]) + ); + await 
this.indexForTrimming.put( + LevelDBDividend.trimKey(writtenOn), + toBeStored + ); + }) + ); } async consume(filter: SindexEntry[]): Promise<void> { for (const dividendToConsume of filter) { - const row = await this.get(dividendToConsume.identifier) - DividendDaoHandler.consume(row, dividendToConsume) - await this.put(row.pub, row) + const row = await this.get(dividendToConsume.identifier); + DividendDaoHandler.consume(row, dividendToConsume); + await this.put(row.pub, row); } - await this.indexConsumptions(filter.map(f => ({ writtenOn: f.writtenOn, pub: f.identifier }))) + await this.indexConsumptions( + filter.map((f) => ({ writtenOn: f.writtenOn, pub: f.identifier })) + ); } async createMember(pub: string): Promise<void> { - const existing = await this.getOrNull(pub) + const existing = await this.getOrNull(pub); if (!existing) { - await this.insert(DividendDaoHandler.getNewDividendEntry(pub)) - } - else { - await this.setMember(true, pub) + await this.insert(DividendDaoHandler.getNewDividendEntry(pub)); + } else { + await this.setMember(true, pub); } } async deleteMember(pub: string): Promise<void> { - await this.del(pub) + await this.del(pub); } async findForDump(criterion: any): Promise<SindexEntry[]> { - const entries: DividendEntry[] = [] - await this.readAll(entry => entries.push(entry)) - return DividendDaoHandler.toDump(entries) + const entries: DividendEntry[] = []; + await this.readAll((entry) => entries.push(entry)); + return DividendDaoHandler.toDump(entries); } - async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<DividendEntry[]> { - const entries: DividendEntry[] = [] - await this.readAll(entry => entries.push(entry)) - return entries + async findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<DividendEntry[]> { + const entries: DividendEntry[] = []; + await this.readAll((entry) => entries.push(entry)); + return entries; } - async findUdSourceByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number): Promise<SimpleTxInput[]> { - const member: DividendEntry|null = await this.get(identifier) - return DividendDaoHandler.getUDSourceByIdPosAmountBase(member, identifier, pos, amount, base) + async findUdSourceByIdentifierPosAmountBase( + identifier: string, + pos: number, + amount: number, + base: number + ): Promise<SimpleTxInput[]> { + const member: DividendEntry | null = await this.get(identifier); + return DividendDaoHandler.getUDSourceByIdPosAmountBase( + member, + identifier, + pos, + amount, + base + ); } - async getUDSource(identifier: string, pos: number): Promise<SimpleTxInput | null> { - const member: DividendEntry|null = await this.get(identifier) - return DividendDaoHandler.getUDSource(member, identifier, pos) + async getUDSource( + identifier: string, + pos: number + ): Promise<SimpleTxInput | null> { + const member: DividendEntry | null = await this.get(identifier); + return DividendDaoHandler.getUDSource(member, identifier, pos); } async getUDSources(pub: string): Promise<UDSource[]> { - const member: DividendEntry|null = await this.getOrNull(pub) + const member: DividendEntry | null = await this.getOrNull(pub); if (!member) { - return [] + return []; } - return DividendDaoHandler.udSources(member) + return DividendDaoHandler.udSources(member); } getWrittenOn(blockstamp: string): Promise<DividendEntry[]> { - throw Error(DataErrors[DataErrors.DIVIDEND_GET_WRITTEN_ON_SHOULD_NOT_BE_USED_DIVIDEND_DAO]) + throw Error( + DataErrors[ + 
DataErrors.DIVIDEND_GET_WRITTEN_ON_SHOULD_NOT_BE_USED_DIVIDEND_DAO + ] + ); } async getWrittenOnUDs(number: number): Promise<SimpleUdEntryForWallet[]> { - const res: SimpleUdEntryForWallet[] = [] - await this.readAll(entry => { + const res: SimpleUdEntryForWallet[] = []; + await this.readAll((entry) => { if (entry.member) { - DividendDaoHandler.getWrittenOnUDs(entry, number, res) + DividendDaoHandler.getWrittenOnUDs(entry, number, res); } - }) - return res + }); + return res; } - async produceDividend(blockNumber: number, dividend: number, unitbase: number, local_iindex: IindexEntry[]): Promise<SimpleUdEntryForWallet[]> { - const dividends: SimpleUdEntryForWallet[] = [] - const updates: Promise<void>[] = [] - await this.readAll(entry => { + async produceDividend( + blockNumber: number, + dividend: number, + unitbase: number, + local_iindex: IindexEntry[] + ): Promise<SimpleUdEntryForWallet[]> { + const dividends: SimpleUdEntryForWallet[] = []; + const updates: Promise<void>[] = []; + await this.readAll((entry) => { if (entry.member) { - DividendDaoHandler.produceDividend(entry, blockNumber, dividend, unitbase, dividends) - updates.push(this.put(entry.pub, entry)) + DividendDaoHandler.produceDividend( + entry, + blockNumber, + dividend, + unitbase, + dividends + ); + updates.push(this.put(entry.pub, entry)); } - }) - await Promise.all(updates) - return dividends + }); + await Promise.all(updates); + return dividends; } removeBlock(blockstamp: string): Promise<void> { - throw Error(DataErrors[DataErrors.DIVIDEND_REMOVE_BLOCK_SHOULD_NOT_BE_USED_BY_DIVIDEND_DAO]) + throw Error( + DataErrors[ + DataErrors.DIVIDEND_REMOVE_BLOCK_SHOULD_NOT_BE_USED_BY_DIVIDEND_DAO + ] + ); } - async revertUDs(number: number): Promise<{ - createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] - consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] + async revertUDs( + number: number + ): Promise<{ + createdUDsDestroyedByRevert: SimpleUdEntryForWallet[]; + consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[]; }> { - const createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] = [] - const consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] = [] - const updates: Promise<void>[] = [] + const createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] = []; + const consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] = []; + const updates: Promise<void>[] = []; // Remove produced dividends at this block - await this.readAll(entry => { + await this.readAll((entry) => { if (entry.availables.includes(number)) { - DividendDaoHandler.removeDividendsProduced(entry, number, createdUDsDestroyedByRevert) - updates.push(this.put(entry.pub, entry)) + DividendDaoHandler.removeDividendsProduced( + entry, + number, + createdUDsDestroyedByRevert + ); + updates.push(this.put(entry.pub, entry)); } if (entry.consumed.includes(number)) { - DividendDaoHandler.unconsumeDividends(entry, number, consumedUDsRecoveredByRevert) - updates.push(this.put(entry.pub, entry)) + DividendDaoHandler.unconsumeDividends( + entry, + number, + consumedUDsRecoveredByRevert + ); + updates.push(this.put(entry.pub, entry)); } - }) - await Promise.all(updates) - await this.indexForTrimming.del(LevelDBDividend.trimKey(number)) // TODO: test + }); + await Promise.all(updates); + await this.indexForTrimming.del(LevelDBDividend.trimKey(number)); // TODO: test return { createdUDsDestroyedByRevert, consumedUDsRecoveredByRevert, - } + }; } async setMember(member: boolean, pub: string): Promise<void> { - const entry = await this.get(pub) - entry.member = member - await 
this.put(pub, entry) + const entry = await this.get(pub); + entry.member = member; + await this.put(pub, entry); } async trimConsumedUDs(belowNumber: number): Promise<void> { - const count = await this.indexForTrimming.count() + const count = await this.indexForTrimming.count(); if (count === 0 && !this.hasIndexed) { - this.hasIndexed = true + this.hasIndexed = true; await this.applyAllKeyValue(async (data) => { - await this.indexConsumptions(data.value.consumed.map(c => ({ writtenOn: c, pub: data.value.pub }))) - }) + await this.indexConsumptions( + data.value.consumed.map((c) => ({ + writtenOn: c, + pub: data.value.pub, + })) + ); + }); } - const updates: Promise<void>[] = [] - const trimmedNumbers: string[] = [] + const updates: Promise<void>[] = []; + const trimmedNumbers: string[] = []; // Remove produced dividends at this block - await this.indexForTrimming.readAllKeyValue(kv => { - updates.push((async () => { - const pubkeys = kv.value - const trimNumber = kv.key - for (const pub of pubkeys) { - const entry = await this.get(pub) - if (DividendDaoHandler.trimConsumed(entry, belowNumber)) { - await this.put(entry.pub, entry) - trimmedNumbers.push(trimNumber) + await this.indexForTrimming.readAllKeyValue( + (kv) => { + updates.push( + (async () => { + const pubkeys = kv.value; + const trimNumber = kv.key; + for (const pub of pubkeys) { + const entry = await this.get(pub); + if (DividendDaoHandler.trimConsumed(entry, belowNumber)) { + await this.put(entry.pub, entry); + trimmedNumbers.push(trimNumber); + } } - } - })()) - }, { - lt: LevelDBDividend.trimKey(belowNumber) - }) - await Promise.all(updates) - await Promise.all(trimmedNumbers.map(trimKey => this.indexForTrimming.del(trimKey))) + })() + ); + }, + { + lt: LevelDBDividend.trimKey(belowNumber), + } + ); + await Promise.all(updates); + await Promise.all( + trimmedNumbers.map((trimKey) => this.indexForTrimming.del(trimKey)) + ); } async listAll(): Promise<DividendEntry[]> { - const entries: DividendEntry[] = [] - await this.readAll(entry => entries.push(entry)) - return entries + const entries: DividendEntry[] = []; + await this.readAll((entry) => entries.push(entry)); + return entries; } private static trimKey(writtenOn: number) { - return String(writtenOn).padStart(10, '0') + return String(writtenOn).padStart(10, "0"); } async count(options?: AbstractIteratorOptions): Promise<number> { - let count = 0 - await this.readAllKeyValue(entry => { - count += entry.value.availables.length - }) - return count + let count = 0; + await this.readAllKeyValue((entry) => { + count += entry.value.availables.length; + }); + return count; } } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts index 1e86f9a760a4861c15b9582a246711a2dc1935f7..c53f3dfc48a11b38ca1e4d721a5d9ef17a59e0c8 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts @@ -1,28 +1,33 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {FullIindexEntry, IindexEntry, reduce, reduceForDBTrimming} from "../../../indexer" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {Underscore} from "../../../common-libs/underscore" -import {pint} from "../../../common-libs/pint" -import {IIndexDAO} from "../abstract/IIndexDAO" -import {LevelIIndexHashIndexer} from "./indexers/LevelIIndexHashIndexer" -import {reduceConcat, reduceGroupBy} from "../../../common-libs/reduce" -import {LevelDBWrittenOnIndexer} from 
"./indexers/LevelDBWrittenOnIndexer" -import {OldIindexEntry} from "../../../db/OldIindexEntry" -import {LevelIIndexUidIndexer} from "./indexers/LevelIIndexUidIndexer" -import {LevelIIndexKickIndexer} from "./indexers/LevelIIndexKickIndexer" -import {DataErrors} from "../../../common-libs/errors" -import {OldTransformers} from "../common/OldTransformer" - -export class LevelDBIindex extends LevelDBTable<IindexEntry[]> implements IIndexDAO { - - private indexForHash: LevelIIndexHashIndexer - private indexForUid: LevelIIndexUidIndexer - private indexForKick: LevelIIndexKickIndexer - private indexForWrittenOn: LevelDBWrittenOnIndexer<IindexEntry> - - constructor(protected getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_iindex', getLevelDB) +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { + FullIindexEntry, + IindexEntry, + reduce, + reduceForDBTrimming, +} from "../../../indexer"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { Underscore } from "../../../common-libs/underscore"; +import { pint } from "../../../common-libs/pint"; +import { IIndexDAO } from "../abstract/IIndexDAO"; +import { LevelIIndexHashIndexer } from "./indexers/LevelIIndexHashIndexer"; +import { reduceConcat, reduceGroupBy } from "../../../common-libs/reduce"; +import { LevelDBWrittenOnIndexer } from "./indexers/LevelDBWrittenOnIndexer"; +import { OldIindexEntry } from "../../../db/OldIindexEntry"; +import { LevelIIndexUidIndexer } from "./indexers/LevelIIndexUidIndexer"; +import { LevelIIndexKickIndexer } from "./indexers/LevelIIndexKickIndexer"; +import { DataErrors } from "../../../common-libs/errors"; +import { OldTransformers } from "../common/OldTransformer"; + +export class LevelDBIindex extends LevelDBTable<IindexEntry[]> + implements IIndexDAO { + private indexForHash: LevelIIndexHashIndexer; + private indexForUid: LevelIIndexUidIndexer; + private indexForKick: LevelIIndexKickIndexer; + private indexForWrittenOn: LevelDBWrittenOnIndexer<IindexEntry>; + + constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_iindex", getLevelDB); } /** @@ -30,23 +35,36 @@ export class LevelDBIindex extends LevelDBTable<IindexEntry[]> implements IIndex */ async init(): Promise<void> { - await super.init() - this.indexForHash = new LevelIIndexHashIndexer('level_iindex/hash', this.getLevelDB) - this.indexForUid = new LevelIIndexUidIndexer('level_iindex/uid', this.getLevelDB) - this.indexForKick = new LevelIIndexKickIndexer('level_iindex/kick', this.getLevelDB) - this.indexForWrittenOn = new LevelDBWrittenOnIndexer('level_iindex/writtenOn', this.getLevelDB, i => i.pub) - await this.indexForHash.init() - await this.indexForUid.init() - await this.indexForKick.init() - await this.indexForWrittenOn.init() + await super.init(); + this.indexForHash = new LevelIIndexHashIndexer( + "level_iindex/hash", + this.getLevelDB + ); + this.indexForUid = new LevelIIndexUidIndexer( + "level_iindex/uid", + this.getLevelDB + ); + this.indexForKick = new LevelIIndexKickIndexer( + "level_iindex/kick", + this.getLevelDB + ); + this.indexForWrittenOn = new LevelDBWrittenOnIndexer( + "level_iindex/writtenOn", + this.getLevelDB, + (i) => i.pub + ); + await this.indexForHash.init(); + await this.indexForUid.init(); + await this.indexForKick.init(); + await this.indexForWrittenOn.init(); } async close(): Promise<void> { - await super.close() - await this.indexForHash.close() - await this.indexForUid.close() - await 
this.indexForKick.close() - await this.indexForWrittenOn.close() + await super.close(); + await this.indexForHash.close(); + await this.indexForUid.close(); + await this.indexForKick.close(); + await this.indexForWrittenOn.close(); } /** @@ -55,176 +73,211 @@ export class LevelDBIindex extends LevelDBTable<IindexEntry[]> implements IIndex @MonitorExecutionTime() async insert(record: IindexEntry): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: IindexEntry[]): Promise<void> { // Database insertion - const recordsByPub = reduceGroupBy(records, 'pub') - await Promise.all(Underscore.keys(recordsByPub).map(String).map(async pub => { - const existing = (await this.getOrNull(pub)) || [] - await this.put(pub, existing.concat(recordsByPub[pub])) - })) + const recordsByPub = reduceGroupBy(records, "pub"); + await Promise.all( + Underscore.keys(recordsByPub) + .map(String) + .map(async (pub) => { + const existing = (await this.getOrNull(pub)) || []; + await this.put(pub, existing.concat(recordsByPub[pub])); + }) + ); // Indexation - await this.indexForHash.onInsert(records) - await this.indexForUid.onInsert(records) - await this.indexForKick.onInsert(records) - await this.indexForWrittenOn.onInsert(records) + await this.indexForHash.onInsert(records); + await this.indexForUid.onInsert(records); + await this.indexForKick.onInsert(records); + await this.indexForWrittenOn.onInsert(records); } - /** * Reduceable DAO */ async trimRecords(belowNumber: number): Promise<void> { // Trim writtenOn: we remove from the index the blocks below `belowNumber`, and keep track of the deleted values - const pubkeys: string[] = Underscore.uniq((await this.indexForWrittenOn.deleteBelow(belowNumber))) + const pubkeys: string[] = Underscore.uniq( + await this.indexForWrittenOn.deleteBelow(belowNumber) + ); // For each entry, we trim the records of our INDEX - await Promise.all(pubkeys.map(async pub => { - const oldEntries = await this.get(pub) - const newEntries = reduceForDBTrimming(oldEntries, belowNumber) - await this.put(pub, newEntries) - })) - await this.indexForHash.onTrimming(belowNumber) - await this.indexForUid.onTrimming(belowNumber) - await this.indexForKick.onTrimming(belowNumber) + await Promise.all( + pubkeys.map(async (pub) => { + const oldEntries = await this.get(pub); + const newEntries = reduceForDBTrimming(oldEntries, belowNumber); + await this.put(pub, newEntries); + }) + ); + await this.indexForHash.onTrimming(belowNumber); + await this.indexForUid.onTrimming(belowNumber); + await this.indexForKick.onTrimming(belowNumber); } /** * Generic DAO */ - async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<IindexEntry[]> { - const rows: IindexEntry[] = (await this.findAllValues()).reduce(reduceConcat, []) - return Underscore.sortBy(rows, r => `${String(r.writtenOn).padStart(10, '0')}-${String(r.wotb_id).padStart(10, '0')}`) + async findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<IindexEntry[]> { + const rows: IindexEntry[] = (await this.findAllValues()).reduce( + reduceConcat, + [] + ); + return Underscore.sortBy( + rows, + (r) => + `${String(r.writtenOn).padStart(10, "0")}-${String(r.wotb_id).padStart( + 10, + "0" + )}` + ); } async getWrittenOn(blockstamp: string): Promise<IindexEntry[]> { - const ids = (await this.indexForWrittenOn.getWrittenOnKeys(pint(blockstamp))) || [] - return (await Promise.all(ids.map(id 
=> this.get(id)))).reduce(reduceConcat, []).filter(e => e.written_on === blockstamp)
+    const ids =
+      (await this.indexForWrittenOn.getWrittenOnKeys(pint(blockstamp))) || [];
+    return (await Promise.all(ids.map((id) => this.get(id))))
+      .reduce(reduceConcat, [])
+      .filter((e) => e.written_on === blockstamp);
   }

   async removeBlock(blockstamp: string): Promise<void> {
-    // Trim writtenOn: we remove from the index the blocks below `belowNumber`, and keep track of the deleted values
-    const writteOn = pint(blockstamp)
-    const pubkeys: string[] = Underscore.uniq((await this.indexForWrittenOn.deleteAt(writteOn)))
-    let removedRecords: IindexEntry[] = []
+    // We remove from the writtenOn index the entries written at this exact block, and keep track of the deleted values
+    const writtenOn = pint(blockstamp);
+    const pubkeys: string[] = Underscore.uniq(
+      await this.indexForWrittenOn.deleteAt(writtenOn)
+    );
+    let removedRecords: IindexEntry[] = [];
     // For each entry, we trim the records of our INDEX
-    await Promise.all(pubkeys.map(async pub => {
-      const records = await this.get(pub)
-      const keptRecords = records.filter(e => e.written_on !== blockstamp)
-      removedRecords = removedRecords.concat(records.filter(e => e.written_on === blockstamp))
-      await this.put(pub, keptRecords)
-    }))
+    await Promise.all(
+      pubkeys.map(async (pub) => {
+        const records = await this.get(pub);
+        const keptRecords = records.filter((e) => e.written_on !== blockstamp);
+        removedRecords = removedRecords.concat(
+          records.filter((e) => e.written_on === blockstamp)
+        );
+        await this.put(pub, keptRecords);
+      })
+    );
     // Update indexes
-    await this.indexForHash.onRemove(removedRecords)
-    await this.indexForUid.onRemove(removedRecords)
-    await this.indexForKick.onRemove(removedRecords)
+    await this.indexForHash.onRemove(removedRecords);
+    await this.indexForUid.onRemove(removedRecords);
+    await this.indexForKick.onRemove(removedRecords);
   }

   async findByPub(pub: string): Promise<IindexEntry[]> {
     if (!pub) {
-      return []
+      return [];
     }
-    return (await this.getOrNull(pub)) || []
+    return (await this.getOrNull(pub)) || [];
   }

   async findByUid(uid: string): Promise<IindexEntry[]> {
-    const pub = await this.indexForUid.getPubByUid(uid)
+    const pub = await this.indexForUid.getPubByUid(uid);
     if (!pub) {
-      return []
+      return [];
     }
-    return this.get(pub)
+    return this.get(pub);
   }

   async getFromPubkey(pub: string): Promise<FullIindexEntry | null> {
-    const entries = (await this.getOrNull(pub)) || []
+    const entries = (await this.getOrNull(pub)) || [];
     if (!entries || entries.length === 0) {
-      return null
+      return null;
     }
-    return reduce(entries) as FullIindexEntry
+    return reduce(entries) as FullIindexEntry;
   }

   async getFromPubkeyOrUid(search: string): Promise<FullIindexEntry | null> {
-    const fromPub = await this.getFromPubkey(search)
-    const fromUid = await this.getFromUID(search)
-    return fromPub || fromUid
+    const fromPub = await this.getFromPubkey(search);
+    const fromUid = await this.getFromUID(search);
+    return fromPub || fromUid;
   }

   async getFromUID(uid: string): Promise<FullIindexEntry | null> {
-    const pub = await this.indexForUid.getPubByUid(uid)
+    const pub = await this.indexForUid.getPubByUid(uid);
     if (!pub) {
-      return null
+      return null;
     }
-    const entries = (await this.getOrNull(pub)) || []
+    const entries = (await this.getOrNull(pub)) || [];
     if (!entries || entries.length === 0) {
-      return null
+      return null;
     }
-    return reduce(entries) as FullIindexEntry
+    return reduce(entries) as FullIindexEntry;
   }

-  async getFullFromHash(hash: string): Promise<FullIindexEntry|null> {
-    const pub = await this.indexForHash.getByHash(hash) as string
+  async 
getFullFromHash(hash: string): Promise<FullIindexEntry | null> { + const pub = (await this.indexForHash.getByHash(hash)) as string; if (!pub) { - return null + return null; } - const entries = await this.get(pub) - return OldTransformers.iindexEntityOrNull(entries) as Promise<FullIindexEntry> + const entries = await this.get(pub); + return OldTransformers.iindexEntityOrNull(entries) as Promise< + FullIindexEntry + >; } async getFullFromPubkey(pub: string): Promise<FullIindexEntry> { - const entries = await this.get(pub) - return reduce(entries) as FullIindexEntry + const entries = await this.get(pub); + return reduce(entries) as FullIindexEntry; } async getFullFromUID(uid: string): Promise<FullIindexEntry> { - const pub = await this.indexForUid.getPubByUid(uid) + const pub = await this.indexForUid.getPubByUid(uid); if (!pub) { - throw Error(DataErrors[DataErrors.IDENTITY_UID_NOT_FOUND]) + throw Error(DataErrors[DataErrors.IDENTITY_UID_NOT_FOUND]); } - const entries = await this.get(pub) - return reduce(entries) as FullIindexEntry + const entries = await this.get(pub); + return reduce(entries) as FullIindexEntry; } // Full scan async getMembers(): Promise<{ pubkey: string; uid: string | null }[]> { - const members: IindexEntry[] = [] - await this.findWhere(e => { + const members: IindexEntry[] = []; + await this.findWhere((e) => { if (reduce(e).member as boolean) { - members.push(e[0]) + members.push(e[0]); } - return false - }) - return members.map(m => ({ + return false; + }); + return members.map((m) => ({ pubkey: m.pub, - uid: m.uid - })) + uid: m.uid, + })); } async getToBeKickedPubkeys(): Promise<string[]> { - return this.indexForKick.getAll() + return this.indexForKick.getAll(); } async reducable(pub: string): Promise<IindexEntry[]> { - return this.findByPub(pub) + return this.findByPub(pub); } // Full scan async searchThoseMatching(search: string): Promise<OldIindexEntry[]> { - const uidKeys = await this.indexForUid.findAllKeys() - const pubKeys = await this.findAllKeys() - const uids = (uidKeys).filter(u => u.includes(search)) - const pubs = (pubKeys).filter(p => p.includes(search)) - const uidIdentities = await Promise.all(uids.map(async uid => OldTransformers.toOldIindexEntry(reduce(await this.findByUid(uid))))) - const pubIdentities = await Promise.all(pubs.map(async pub => OldTransformers.toOldIindexEntry(reduce(await this.findByPub(pub))))) - return uidIdentities - .filter(u => u.pub) - .concat( - pubIdentities - .filter(p => p.pub) + const uidKeys = await this.indexForUid.findAllKeys(); + const pubKeys = await this.findAllKeys(); + const uids = uidKeys.filter((u) => u.includes(search)); + const pubs = pubKeys.filter((p) => p.includes(search)); + const uidIdentities = await Promise.all( + uids.map(async (uid) => + OldTransformers.toOldIindexEntry(reduce(await this.findByUid(uid))) ) + ); + const pubIdentities = await Promise.all( + pubs.map(async (pub) => + OldTransformers.toOldIindexEntry(reduce(await this.findByPub(pub))) + ) + ); + return uidIdentities + .filter((u) => u.pub) + .concat(pubIdentities.filter((p) => p.pub)); } - } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts index f01f699c0142fba99bb71c7be04139e61c0bcdb0..dcadf4ad721d46e668bc6809c47dd91f68334672 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts @@ -1,23 +1,29 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {FullMindexEntry, MindexEntry, reduce, 
reduceForDBTrimming, reduceOrNull} from "../../../indexer" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {Underscore} from "../../../common-libs/underscore" -import {pint} from "../../../common-libs/pint" -import {reduceConcat, reduceGroupBy} from "../../../common-libs/reduce" -import {LevelDBWrittenOnIndexer} from "./indexers/LevelDBWrittenOnIndexer" -import {MIndexDAO} from "../abstract/MIndexDAO" -import {LevelMIndexRevokesOnIndexer} from "./indexers/LevelMIndexRevokesOnIndexer" -import {LevelMIndexExpiresOnIndexer} from "./indexers/LevelMIndexExpiresOnIndexer" - -export class LevelDBMindex extends LevelDBTable<MindexEntry[]> implements MIndexDAO { - - private indexForExpiresOn: LevelMIndexExpiresOnIndexer - private indexForRevokesOn: LevelMIndexRevokesOnIndexer - private indexForWrittenOn: LevelDBWrittenOnIndexer<MindexEntry> - - constructor(protected getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_mindex', getLevelDB) +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { + FullMindexEntry, + MindexEntry, + reduce, + reduceForDBTrimming, + reduceOrNull, +} from "../../../indexer"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { Underscore } from "../../../common-libs/underscore"; +import { pint } from "../../../common-libs/pint"; +import { reduceConcat, reduceGroupBy } from "../../../common-libs/reduce"; +import { LevelDBWrittenOnIndexer } from "./indexers/LevelDBWrittenOnIndexer"; +import { MIndexDAO } from "../abstract/MIndexDAO"; +import { LevelMIndexRevokesOnIndexer } from "./indexers/LevelMIndexRevokesOnIndexer"; +import { LevelMIndexExpiresOnIndexer } from "./indexers/LevelMIndexExpiresOnIndexer"; + +export class LevelDBMindex extends LevelDBTable<MindexEntry[]> + implements MIndexDAO { + private indexForExpiresOn: LevelMIndexExpiresOnIndexer; + private indexForRevokesOn: LevelMIndexRevokesOnIndexer; + private indexForWrittenOn: LevelDBWrittenOnIndexer<MindexEntry>; + + constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_mindex", getLevelDB); } /** @@ -25,20 +31,30 @@ export class LevelDBMindex extends LevelDBTable<MindexEntry[]> implements MIndex */ async init(): Promise<void> { - await super.init() - this.indexForExpiresOn = new LevelMIndexExpiresOnIndexer('level_mindex/expiresOn', this.getLevelDB) - this.indexForRevokesOn = new LevelMIndexRevokesOnIndexer('level_mindex/revokesOn', this.getLevelDB) - this.indexForWrittenOn = new LevelDBWrittenOnIndexer('level_mindex/writtenOn', this.getLevelDB, i => i.pub) - await this.indexForExpiresOn.init() - await this.indexForRevokesOn.init() - await this.indexForWrittenOn.init() + await super.init(); + this.indexForExpiresOn = new LevelMIndexExpiresOnIndexer( + "level_mindex/expiresOn", + this.getLevelDB + ); + this.indexForRevokesOn = new LevelMIndexRevokesOnIndexer( + "level_mindex/revokesOn", + this.getLevelDB + ); + this.indexForWrittenOn = new LevelDBWrittenOnIndexer( + "level_mindex/writtenOn", + this.getLevelDB, + (i) => i.pub + ); + await this.indexForExpiresOn.init(); + await this.indexForRevokesOn.init(); + await this.indexForWrittenOn.init(); } async close(): Promise<void> { - await super.close() - await this.indexForExpiresOn.close() - await this.indexForRevokesOn.close() - await this.indexForWrittenOn.close() + await super.close(); + await this.indexForExpiresOn.close(); + await this.indexForRevokesOn.close(); + await this.indexForWrittenOn.close(); } /** 
@@ -47,122 +63,159 @@ export class LevelDBMindex extends LevelDBTable<MindexEntry[]> implements MIndex @MonitorExecutionTime() async insert(record: MindexEntry): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: MindexEntry[]): Promise<void> { // Database insertion - let prevRecords: MindexEntry[] = [] - const recordsByPub = reduceGroupBy(records, 'pub') - await Promise.all(Underscore.keys(recordsByPub).map(String).map(async pub => { - const existing = (await this.getOrNull(pub)) || [] - prevRecords = prevRecords.concat(existing) - await this.put(pub, existing.concat(recordsByPub[pub])) - })) + let prevRecords: MindexEntry[] = []; + const recordsByPub = reduceGroupBy(records, "pub"); + await Promise.all( + Underscore.keys(recordsByPub) + .map(String) + .map(async (pub) => { + const existing = (await this.getOrNull(pub)) || []; + prevRecords = prevRecords.concat(existing); + await this.put(pub, existing.concat(recordsByPub[pub])); + }) + ); // Indexation - await this.indexForExpiresOn.onInsert(records, prevRecords) - await this.indexForRevokesOn.onInsert(records, prevRecords) - await this.indexForWrittenOn.onInsert(records) + await this.indexForExpiresOn.onInsert(records, prevRecords); + await this.indexForRevokesOn.onInsert(records, prevRecords); + await this.indexForWrittenOn.onInsert(records); } - /** * Reduceable DAO */ async trimRecords(belowNumber: number): Promise<void> { // Trim writtenOn: we remove from the index the blocks below `belowNumber`, and keep track of the deleted values - const pubkeys: string[] = Underscore.uniq((await this.indexForWrittenOn.deleteBelow(belowNumber))) + const pubkeys: string[] = Underscore.uniq( + await this.indexForWrittenOn.deleteBelow(belowNumber) + ); // For each entry, we trim the records of our INDEX - await Promise.all(pubkeys.map(async pub => { - const oldEntries = await this.get(pub) - const newEntries = reduceForDBTrimming(oldEntries, belowNumber) - await this.put(pub, newEntries) - })) - await this.indexForExpiresOn.onTrimming(belowNumber) - await this.indexForRevokesOn.onTrimming(belowNumber) + await Promise.all( + pubkeys.map(async (pub) => { + const oldEntries = await this.get(pub); + const newEntries = reduceForDBTrimming(oldEntries, belowNumber); + await this.put(pub, newEntries); + }) + ); + await this.indexForExpiresOn.onTrimming(belowNumber); + await this.indexForRevokesOn.onTrimming(belowNumber); } /** * Generic DAO */ - async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<MindexEntry[]> { - const rows: MindexEntry[] = (await this.findAllValues()).reduce(reduceConcat, []) - return Underscore.sortBy(rows, r => `${String(r.writtenOn).padStart(10, '0')}-${r.pub}`) + async findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<MindexEntry[]> { + const rows: MindexEntry[] = (await this.findAllValues()).reduce( + reduceConcat, + [] + ); + return Underscore.sortBy( + rows, + (r) => `${String(r.writtenOn).padStart(10, "0")}-${r.pub}` + ); } async getWrittenOn(blockstamp: string): Promise<MindexEntry[]> { - const ids = (await this.indexForWrittenOn.getWrittenOnKeys(pint(blockstamp))) || [] - return (await Promise.all(ids.map(id => this.get(id)))).reduce(reduceConcat, []).filter(e => e.written_on === blockstamp) + const ids = + (await this.indexForWrittenOn.getWrittenOnKeys(pint(blockstamp))) || []; + return (await Promise.all(ids.map((id) => this.get(id)))) 
+      .reduce(reduceConcat, [])
+      .filter((e) => e.written_on === blockstamp);
   }

   async removeBlock(blockstamp: string): Promise<void> {
-    // Trim writtenOn: we remove from the index the blocks below `belowNumber`, and keep track of the deleted values
-    let newStateRecords: MindexEntry[] = []
-    const writteOn = pint(blockstamp)
-    const pubkeys: string[] = Underscore.uniq((await this.indexForWrittenOn.deleteAt(writteOn)))
-    let removedRecords: MindexEntry[] = []
+    // We remove from the writtenOn index the entries written at this exact block, and keep track of the deleted values
+    let newStateRecords: MindexEntry[] = [];
+    const writtenOn = pint(blockstamp);
+    const pubkeys: string[] = Underscore.uniq(
+      await this.indexForWrittenOn.deleteAt(writtenOn)
+    );
+    let removedRecords: MindexEntry[] = [];
     // For each entry, we trim the records of our INDEX
-    await Promise.all(pubkeys.map(async pub => {
-      const records = await this.get(pub)
-      const keptRecords = records.filter(e => e.written_on !== blockstamp)
-      removedRecords = removedRecords.concat(records.filter(e => e.written_on === blockstamp))
-      newStateRecords = newStateRecords.concat(keptRecords)
-      await this.put(pub, keptRecords)
-    }))
+    await Promise.all(
+      pubkeys.map(async (pub) => {
+        const records = await this.get(pub);
+        const keptRecords = records.filter((e) => e.written_on !== blockstamp);
+        removedRecords = removedRecords.concat(
+          records.filter((e) => e.written_on === blockstamp)
+        );
+        newStateRecords = newStateRecords.concat(keptRecords);
+        await this.put(pub, keptRecords);
+      })
+    );
     // Update indexes
-    await this.indexForExpiresOn.onRemove(removedRecords, newStateRecords)
-    await this.indexForRevokesOn.onRemove(removedRecords, newStateRecords)
+    await this.indexForExpiresOn.onRemove(removedRecords, newStateRecords);
+    await this.indexForRevokesOn.onRemove(removedRecords, newStateRecords);
   }

   //------------- DAO QUERIES --------------

-  async findByPubAndChainableOnGt(pub: string, medianTime: number): Promise<MindexEntry[]> {
-    return (await this.reducable(pub)).filter(e => e.chainable_on && e.chainable_on > medianTime)
+  async findByPubAndChainableOnGt(
+    pub: string,
+    medianTime: number
+  ): Promise<MindexEntry[]> {
+    return (await this.reducable(pub)).filter(
+      (e) => e.chainable_on && e.chainable_on > medianTime
+    );
   }

   async findExpiresOnLteAndRevokesOnGt(medianTime: number): Promise<string[]> {
-    return this.indexForExpiresOn.findExpiresOnLte(medianTime)
+    return this.indexForExpiresOn.findExpiresOnLte(medianTime);
   }

-  async findPubkeysThatShouldExpire(medianTime: number): Promise<{ pub: string; created_on: string }[]> {
-    const results: { pub: string; created_on: string }[] = []
-    const pubkeys = await this.findExpiresOnLteAndRevokesOnGt(medianTime)
+  async findPubkeysThatShouldExpire(
+    medianTime: number
+  ): Promise<{ pub: string; created_on: string }[]> {
+    const results: { pub: string; created_on: string }[] = [];
+    const pubkeys = await this.findExpiresOnLteAndRevokesOnGt(medianTime);
     for (const pub of pubkeys) {
-      const MS = await this.getReducedMS(pub) as FullMindexEntry // We are sure because `memberships` already comes from the MINDEX
+      const MS = (await this.getReducedMS(pub)) as FullMindexEntry; // We are sure because `memberships` already comes from the MINDEX
       const hasRenewedSince = MS.expires_on > medianTime;
       if (!MS.expired_on && !hasRenewedSince) {
         results.push({
           pub: MS.pub,
           created_on: MS.created_on,
-        })
+        });
       }
     }
-    return results
+    return results;
   }

-  async findRevokesOnLteAndRevokedOnIsNull(medianTime: number): Promise<string[]> {
-    return this.indexForRevokesOn.findRevokesOnLte(medianTime)
+  async 
findRevokesOnLteAndRevokedOnIsNull( + medianTime: number + ): Promise<string[]> { + return this.indexForRevokesOn.findRevokesOnLte(medianTime); } async getReducedMS(pub: string): Promise<FullMindexEntry | null> { - const reducable = await this.reducable(pub) - return reduceOrNull(reducable) as FullMindexEntry + const reducable = await this.reducable(pub); + return reduceOrNull(reducable) as FullMindexEntry; } - async getReducedMSForImplicitRevocation(pub: string): Promise<FullMindexEntry | null> { - return this.getReducedMS(pub) + async getReducedMSForImplicitRevocation( + pub: string + ): Promise<FullMindexEntry | null> { + return this.getReducedMS(pub); } async getRevokedPubkeys(): Promise<string[]> { - return this.findWhereTransform(v => !!reduce(v).revoked_on, kv => kv.key) + return this.findWhereTransform( + (v) => !!reduce(v).revoked_on, + (kv) => kv.key + ); } async reducable(pub: string): Promise<MindexEntry[]> { - return (await this.getOrNull(pub)) || [] + return (await this.getOrNull(pub)) || []; } - - } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts index 099a565bf2683cd8a3bdf2c7c8da45d998fa31b6..a8b4aceba9bf7314135e8a1a47c41b7ad96f7ffd 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts @@ -1,20 +1,26 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {FullSindexEntry, Indexer, SimpleTxEntryForWallet, SimpleTxInput, SindexEntry} from "../../../indexer" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {SIndexDAO} from "../abstract/SIndexDAO" -import {Underscore} from "../../../common-libs/underscore" -import {pint} from "../../../common-libs/pint" -import {arrayPruneAllCopy} from "../../../common-libs/array-prune" - -export class LevelDBSindex extends LevelDBTable<SindexEntry> implements SIndexDAO { - - private indexForTrimming: LevelDBTable<string[]> - private indexForConsumed: LevelDBTable<string[]> - private indexForConditions: LevelDBTable<string[]> - - constructor(protected getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_sindex', getLevelDB) +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { + FullSindexEntry, + Indexer, + SimpleTxEntryForWallet, + SimpleTxInput, + SindexEntry, +} from "../../../indexer"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { SIndexDAO } from "../abstract/SIndexDAO"; +import { Underscore } from "../../../common-libs/underscore"; +import { pint } from "../../../common-libs/pint"; +import { arrayPruneAllCopy } from "../../../common-libs/array-prune"; + +export class LevelDBSindex extends LevelDBTable<SindexEntry> + implements SIndexDAO { + private indexForTrimming: LevelDBTable<string[]>; + private indexForConsumed: LevelDBTable<string[]>; + private indexForConditions: LevelDBTable<string[]>; + + constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_sindex", getLevelDB); } /** @@ -22,20 +28,29 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry> implements SIndexDA */ async init(): Promise<void> { - await super.init() - this.indexForTrimming = new LevelDBTable<string[]>('level_sindex/written_on', this.getLevelDB) - this.indexForConsumed = new LevelDBTable<string[]>('level_sindex/consumed_on', this.getLevelDB) - this.indexForConditions = new LevelDBTable<string[]>('level_sindex/conditions', this.getLevelDB) - await 
this.indexForTrimming.init() - await this.indexForConsumed.init() - await this.indexForConditions.init() + await super.init(); + this.indexForTrimming = new LevelDBTable<string[]>( + "level_sindex/written_on", + this.getLevelDB + ); + this.indexForConsumed = new LevelDBTable<string[]>( + "level_sindex/consumed_on", + this.getLevelDB + ); + this.indexForConditions = new LevelDBTable<string[]>( + "level_sindex/conditions", + this.getLevelDB + ); + await this.indexForTrimming.init(); + await this.indexForConsumed.init(); + await this.indexForConditions.init(); } async close(): Promise<void> { - await super.close() - await this.indexForTrimming.close() - await this.indexForConsumed.close() - await this.indexForConditions.close() + await super.close(); + await this.indexForTrimming.close(); + await this.indexForConsumed.close(); + await this.indexForConditions.close(); } /** @@ -44,118 +59,158 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry> implements SIndexDA @MonitorExecutionTime() async insert(record: SindexEntry): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: SindexEntry[]): Promise<void> { - await this.batchInsertWithKeyComputing(records, r => { - return LevelDBSindex.trimKey(r.identifier, r.pos, r.consumed) - }) - await this.indexRecords(records) + await this.batchInsertWithKeyComputing(records, (r) => { + return LevelDBSindex.trimKey(r.identifier, r.pos, r.consumed); + }); + await this.indexRecords(records); } findByIdentifier(identifier: string): Promise<SindexEntry[]> { return this.findAllValues({ gte: identifier, - lt: LevelDBSindex.upperIdentifier(identifier) - }) + lt: LevelDBSindex.upperIdentifier(identifier), + }); } - findByIdentifierAndPos(identifier: string, pos: number): Promise<SindexEntry[]> { + findByIdentifierAndPos( + identifier: string, + pos: number + ): Promise<SindexEntry[]> { return this.findAllValues({ gte: LevelDBSindex.trimPartialKey(identifier, pos), - lt: LevelDBSindex.upperIdentifier(LevelDBSindex.trimPartialKey(identifier, pos)) - }) + lt: LevelDBSindex.upperIdentifier( + LevelDBSindex.trimPartialKey(identifier, pos) + ), + }); } // Not used by the protocol: we can accept a full scan async findByPos(pos: number): Promise<SindexEntry[]> { - return (await this.findAllValues()).filter(r => r.pos === pos) + return (await this.findAllValues()).filter((r) => r.pos === pos); } - async findTxSourceByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number): Promise<SimpleTxInput[]> { - return (await this.findByIdentifier(identifier)) - .filter(r => r.pos === pos && r.amount === amount && r.base === base) + async findTxSourceByIdentifierPosAmountBase( + identifier: string, + pos: number, + amount: number, + base: number + ): Promise<SimpleTxInput[]> { + return (await this.findByIdentifier(identifier)).filter( + (r) => r.pos === pos && r.amount === amount && r.base === base + ); } - async getAvailableForConditions(conditionsStr: string): Promise<SindexEntry[]> { - const forConditions = await this.getForConditions(conditionsStr) - const reduced = Indexer.DUP_HELPERS.reduceBy(forConditions, ['identifier', 'pos']) - return reduced.filter(r => !r.consumed) + async getAvailableForConditions( + conditionsStr: string + ): Promise<SindexEntry[]> { + const forConditions = await this.getForConditions(conditionsStr); + const reduced = Indexer.DUP_HELPERS.reduceBy(forConditions, [ + "identifier", + "pos", + ]); + return reduced.filter((r) => 
!r.consumed);
   }

-  async getAvailableForPubkey(pubkey: string): Promise<{ amount: number; base: number; conditions: string; identifier: string; pos: number }[]> {
-    // TODO: very costly: needs a full scan, would be better to change this implementatio
-    const entries = await this.findWhere(e => e.conditions.includes(`SIG(${pubkey})`))
-    const reduced = Indexer.DUP_HELPERS.reduceBy(entries, ['identifier', 'pos'])
-    return reduced.filter(r => !r.consumed)
+  async getAvailableForPubkey(
+    pubkey: string
+  ): Promise<
+    {
+      amount: number;
+      base: number;
+      conditions: string;
+      identifier: string;
+      pos: number;
+    }[]
+  > {
+    // TODO: very costly: needs a full scan, would be better to change this implementation
+    const entries = await this.findWhere((e) =>
+      e.conditions.includes(`SIG(${pubkey})`)
+    );
+    const reduced = Indexer.DUP_HELPERS.reduceBy(entries, [
+      "identifier",
+      "pos",
+    ]);
+    return reduced.filter((r) => !r.consumed);
   }

-  async getTxSource(identifier: string, pos: number): Promise<FullSindexEntry | null> {
-    const entries = (await this.findByIdentifierAndPos(identifier, pos))
-    return Indexer.DUP_HELPERS.reduceOrNull(entries)
+  async getTxSource(
+    identifier: string,
+    pos: number
+  ): Promise<FullSindexEntry | null> {
+    const entries = await this.findByIdentifierAndPos(identifier, pos);
+    return Indexer.DUP_HELPERS.reduceOrNull(entries);
   }

   async getWrittenOnTxs(blockstamp: string): Promise<SimpleTxEntryForWallet[]> {
-    const writtenOn = await this.getWrittenOn(blockstamp)
-    const entries: SimpleTxEntryForWallet[] = []
-    writtenOn.forEach(w => {
+    const writtenOn = await this.getWrittenOn(blockstamp);
+    const entries: SimpleTxEntryForWallet[] = [];
+    writtenOn.forEach((w) => {
       entries.push({
-        srcType: 'T',
+        srcType: "T",
         op: w.op,
         conditions: w.conditions,
         amount: w.amount,
         base: w.base,
         identifier: w.identifier,
         pos: w.pos,
-      })
-    })
-    return entries
+      });
+    });
+    return entries;
   }

   async trimConsumedSource(belowNumber: number): Promise<void> {
-    let belowNumberIds: string[] = []
-    const mapIds: { [k: string]: {
-      conditions: string
-      writtenOn: number
-    }
-    } = {}
-    const mapIds2WrittenOn: { [k: string]: number } = {}
+    let belowNumberIds: string[] = [];
+    const mapIds: {
+      [k: string]: {
+        conditions: string;
+        writtenOn: number;
+      };
+    } = {};
+    const mapIds2WrittenOn: { [k: string]: number } = {};
     // First: we look at what was written before `belowNumber`
-    await this.indexForConsumed.readAllKeyValue(async kv => {
-      belowNumberIds = belowNumberIds.concat(kv.value)
-      for (const id of kv.value) {
-        mapIds2WrittenOn[id] = pint(kv.key)
+    await this.indexForConsumed.readAllKeyValue(
+      async (kv) => {
+        belowNumberIds = belowNumberIds.concat(kv.value);
+        for (const id of kv.value) {
+          mapIds2WrittenOn[id] = pint(kv.key);
+        }
+      },
+      {
+        lt: LevelDBSindex.trimWrittenOnKey(belowNumber),
       }
-    }, {
-      lt: LevelDBSindex.trimWrittenOnKey(belowNumber)
-    })
+    );
     // Second: we identify the corresponding **consumed** sources and remove them.
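+    // Note: ids stored in this index are assumed to be of the form `<identifier>-<pos>`
+    // (see the split("-") below); trimKey(identifier, pos, true) addresses the consumed
+    // (UPDATE) row, while trimKey(identifier, pos, false) addresses the CREATE row.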
for (const id of belowNumberIds) { // Remove consumed sources - const identifier = id.split('-')[0] - const pos = pint(id.split('-')[1]) - const entry = await this.getOrNull(LevelDBSindex.trimKey(identifier, pos, true)) + const identifier = id.split("-")[0]; + const pos = pint(id.split("-")[1]); + const entry = await this.getOrNull( + LevelDBSindex.trimKey(identifier, pos, true) + ); if (entry && entry.writtenOn < belowNumber) { // We remember the trimmed source id to remove it from the writtenOn and conditions index mapIds[id] = { writtenOn: mapIds2WrittenOn[id], - conditions: entry.conditions - } - await this.del(LevelDBSindex.trimKey(identifier, pos, false)) - await this.del(LevelDBSindex.trimKey(identifier, pos, true)) + conditions: entry.conditions, + }; + await this.del(LevelDBSindex.trimKey(identifier, pos, false)); + await this.del(LevelDBSindex.trimKey(identifier, pos, true)); } } // We update indexes for (const id of Underscore.keys(mapIds).map(String)) { - const map = mapIds[id] - await this.trimConditions(map.conditions, id) - await this.trimConsumed(map.writtenOn, id) - await this.trimWrittenOn(map.writtenOn, id) + const map = mapIds[id]; + await this.trimConditions(map.conditions, id); + await this.trimConsumed(map.writtenOn, id); + await this.trimWrittenOn(map.writtenOn, id); } } @@ -164,90 +219,113 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry> implements SIndexDA */ trimRecords(belowNumber: number): Promise<void> { - return this.trimConsumedSource(belowNumber) + return this.trimConsumedSource(belowNumber); } /** * Generic DAO */ - async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<SindexEntry[]> { - const rows = await this.findAllValues() - return Underscore.sortBy(rows, r => 1000 * r.writtenOn + (r.consumed ? 1 : 0)) + async findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<SindexEntry[]> { + const rows = await this.findAllValues(); + return Underscore.sortBy( + rows, + (r) => 1000 * r.writtenOn + (r.consumed ? 
1 : 0)
+    );
   }

   async getWrittenOn(blockstamp: string): Promise<SindexEntry[]> {
-    const ids = Underscore.uniq((await this.indexForTrimming.getOrNull(LevelDBSindex.trimWrittenOnKey(pint(blockstamp)))) || [])
-    const found: SindexEntry[] = []
+    const ids = Underscore.uniq(
+      (await this.indexForTrimming.getOrNull(
+        LevelDBSindex.trimWrittenOnKey(pint(blockstamp))
+      )) || []
+    );
+    const found: SindexEntry[] = [];
     for (const id of ids) {
-      const entries = await this.findByIdentifierAndPos(id.split('-')[0], pint(id.split('-')[1]))
+      const entries = await this.findByIdentifierAndPos(
+        id.split("-")[0],
+        pint(id.split("-")[1])
+      );
       entries
-        .filter(e => e.written_on === blockstamp)
-        .forEach(e => found.push(e))
+        .filter((e) => e.written_on === blockstamp)
+        .forEach((e) => found.push(e));
     }
-    return found
+    return found;
   }

   async getForConditions(conditions: string): Promise<SindexEntry[]> {
-    const ids = (await this.indexForConditions.getOrNull(conditions)) || []
-    const found: SindexEntry[] = []
+    const ids = (await this.indexForConditions.getOrNull(conditions)) || [];
+    const found: SindexEntry[] = [];
     for (const id of ids) {
-      const entries = await this.findByIdentifierAndPos(id.split('-')[0], pint(id.split('-')[1]))
-      entries.forEach(e => found.push(e))
+      const entries = await this.findByIdentifierAndPos(
+        id.split("-")[0],
+        pint(id.split("-")[1])
+      );
+      entries.forEach((e) => found.push(e));
     }
-    return found
+    return found;
   }

   async removeBlock(blockstamp: string): Promise<void> {
-    const writtenOn = pint(blockstamp)
+    const writtenOn = pint(blockstamp);
     // We look at records written on this blockstamp: `indexForTrimming` allows us to get them
-    const ids = (await this.indexForTrimming.getOrNull(LevelDBSindex.trimWrittenOnKey(writtenOn))) || []
+    const ids =
+      (await this.indexForTrimming.getOrNull(
+        LevelDBSindex.trimWrittenOnKey(writtenOn)
+      )) || [];
     // `ids` contains both CREATE and UPDATE sources
     for (const id of ids) {
       // Remove sources
-      const identifier = id.split('-')[0]
-      const pos = parseInt(id.split('-')[1])
-      const conditions: string[] = []
-      const createKey = LevelDBSindex.trimKey(identifier, pos, false)
-      const updateKey = LevelDBSindex.trimKey(identifier, pos, true)
-      const createRecord = await this.getOrNull(createKey)
-      const updateRecord = await this.getOrNull(updateKey)
+      const identifier = id.split("-")[0];
+      const pos = parseInt(id.split("-")[1]);
+      const conditions: string[] = [];
+      const createKey = LevelDBSindex.trimKey(identifier, pos, false);
+      const updateKey = LevelDBSindex.trimKey(identifier, pos, true);
+      const createRecord = await this.getOrNull(createKey);
+      const updateRecord = await this.getOrNull(updateKey);
       // Undo consumption
       if (updateRecord && updateRecord.writtenOn === writtenOn) {
-        conditions.push(updateRecord.conditions)
-        await this.del(updateKey)
+        conditions.push(updateRecord.conditions);
+        await this.del(updateKey);
       }
       // Undo creation?
       if (createRecord && createRecord.writtenOn === writtenOn) {
-        conditions.push(createRecord.conditions)
-        await this.del(createKey)
+        conditions.push(createRecord.conditions);
+        await this.del(createKey);
       }
       // Update balance
       // 1. Conditions
-      const uniqConditions = Underscore.uniq(conditions)
+      const uniqConditions = Underscore.uniq(conditions);
       for (const condition of uniqConditions) {
         // Remove this source from the balance
-        await this.trimConditions(condition, id)
+        await this.trimConditions(condition, id);
       }
     }
     if (ids.length) {
       // 2. WrittenOn
-      await this.indexForTrimming.del(LevelDBSindex.trimWrittenOnKey(writtenOn))
-      await this.indexForConsumed.del(LevelDBSindex.trimWrittenOnKey(writtenOn))
+      await this.indexForTrimming.del(
+        LevelDBSindex.trimWrittenOnKey(writtenOn)
+      );
+      await this.indexForConsumed.del(
+        LevelDBSindex.trimWrittenOnKey(writtenOn)
+      );
     }
   }

   private async trimConditions(condition: string, id: string) {
     // Get all the account's TX sources
-    const existing = (await this.indexForConditions.getOrNull(condition)) || []
+    const existing = (await this.indexForConditions.getOrNull(condition)) || [];
     // Prune the source from the account
-    const trimmed = arrayPruneAllCopy(existing, id)
+    const trimmed = arrayPruneAllCopy(existing, id);
     if (trimmed.length) {
       // If some sources are left for this "account", persist what remains
-      await this.indexForConditions.put(condition, trimmed)
+      await this.indexForConditions.put(condition, trimmed);
     } else {
       // Otherwise just delete the "account"
-      await this.indexForConditions.del(condition)
+      await this.indexForConditions.del(condition);
     }
   }

@@ -257,97 +335,116 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry> implements SIndexDA
    * @param id
    */
   private async trimWrittenOn(writtenOn: number, id: string) {
-    const k = LevelDBSindex.trimWrittenOnKey(writtenOn)
-    const existing = await this.getWrittenOnSourceIds(writtenOn)
-    const trimmed = arrayPruneAllCopy(existing, id)
+    const k = LevelDBSindex.trimWrittenOnKey(writtenOn);
+    const existing = await this.getWrittenOnSourceIds(writtenOn);
+    const trimmed = arrayPruneAllCopy(existing, id);
     if (trimmed.length) {
-      await this.indexForConditions.put(k, trimmed)
+      await this.indexForTrimming.put(k, trimmed);
     } else {
-      await this.indexForConditions.del(k)
+      await this.indexForTrimming.del(k);
     }
   }

   private async trimConsumed(writtenOn: number, id: string) {
-    const k = LevelDBSindex.trimWrittenOnKey(writtenOn)
-    const existing = (await this.indexForConsumed.getOrNull(k)) || []
-    const trimmed = arrayPruneAllCopy(existing, id)
+    const k = LevelDBSindex.trimWrittenOnKey(writtenOn);
+    const existing = (await this.indexForConsumed.getOrNull(k)) || [];
+    const trimmed = arrayPruneAllCopy(existing, id);
     if (trimmed.length) {
-      await this.indexForConsumed.put(k, trimmed)
+      await this.indexForConsumed.put(k, trimmed);
     } else {
-      await this.indexForConsumed.del(k)
+      await this.indexForConsumed.del(k);
     }
   }

   private async getWrittenOnSourceIds(writtenOn: number) {
-    const indexForTrimmingId = LevelDBSindex.trimWrittenOnKey(writtenOn)
-    return (await this.indexForTrimming.getOrNull(indexForTrimmingId)) || []
+    const indexForTrimmingId = LevelDBSindex.trimWrittenOnKey(writtenOn);
+    return (await this.indexForTrimming.getOrNull(indexForTrimmingId)) || [];
   }

   private static trimKey(identifier: string, pos: number, consumed: boolean) {
-    return `${identifier}-${String(pos).padStart(10, '0')}-${consumed ? 1 : 0}`
+    return `${identifier}-${String(pos).padStart(10, "0")}-${consumed ? 1 : 0}`;
   }

   private static trimWrittenOnKey(writtenOn: number) {
-    return String(writtenOn).padStart(10, '0')
+    return String(writtenOn).padStart(10, "0");
   }

   private static trimPartialKey(identifier: string, pos: number) {
-    return `${identifier}-${String(pos).padStart(10, '0')}`
+    return `${identifier}-${String(pos).padStart(10, "0")}`;
   }

   public static upperIdentifier(identifier: string) {
-    let indexOfLastNonFletter = identifier.length - 1
-    let nextLastLetter = String.fromCharCode(identifier.charCodeAt(indexOfLastNonFletter) + 1)
+    let indexOfLastNonFletter = identifier.length - 1;
+    let nextLastLetter = String.fromCharCode(
+      identifier.charCodeAt(indexOfLastNonFletter) + 1
+    );
     // We only use 0-9A-G notation
-    if (nextLastLetter === ':') {
-      nextLastLetter = 'A'
+    if (nextLastLetter === ":") {
+      nextLastLetter = "A";
     }
-    return identifier.substr(0, indexOfLastNonFletter)
-      + nextLastLetter
-      + identifier.substr(indexOfLastNonFletter + 1)
+    return (
+      identifier.substr(0, indexOfLastNonFletter) +
+      nextLastLetter +
+      identifier.substr(indexOfLastNonFletter + 1)
+    );
   }

   private async indexRecords(records: SindexEntry[]) {
-    const byConsumed: { [k: number]: SindexEntry[] } = {}
-    const byWrittenOn: { [k: number]: SindexEntry[] } = {}
-    const byConditions: { [k: string]: SindexEntry[] } = {}
+    const byConsumed: { [k: number]: SindexEntry[] } = {};
+    const byWrittenOn: { [k: number]: SindexEntry[] } = {};
+    const byConditions: { [k: string]: SindexEntry[] } = {};
     records
-      .filter(r => r.consumed)
-      .forEach(r => {
-        // WrittenOn consumed
-        let arrConsumed = byConsumed[r.writtenOn]
-        if (!arrConsumed) {
-          arrConsumed = byConsumed[r.writtenOn] = []
-        }
-        arrConsumed.push(r)
-      })
-    records.forEach(r => {
+      .filter((r) => r.consumed)
+      .forEach((r) => {
+        // WrittenOn consumed
+        let arrConsumed = byConsumed[r.writtenOn];
+        if (!arrConsumed) {
+          arrConsumed = byConsumed[r.writtenOn] = [];
+        }
+        arrConsumed.push(r);
+      });
+    records.forEach((r) => {
       // WrittenOn
-      let arrWO = byWrittenOn[r.writtenOn]
+      let arrWO = byWrittenOn[r.writtenOn];
       if (!arrWO) {
-        arrWO = byWrittenOn[r.writtenOn] = []
+        arrWO = byWrittenOn[r.writtenOn] = [];
       }
-      arrWO.push(r)
+      arrWO.push(r);
       // Conditions
-      let arrCN = byConditions[r.conditions]
+      let arrCN = byConditions[r.conditions];
       if (!arrCN) {
-        arrCN = byConditions[r.conditions] = []
+        arrCN = byConditions[r.conditions] = [];
       }
-      arrCN.push(r)
-    })
+      arrCN.push(r);
+    });
     // Index consumed => (identifier + pos)[]
     for (const k of Underscore.keys(byConsumed)) {
-      await this.indexForConsumed.put(LevelDBSindex.trimWrittenOnKey(k), byConsumed[k].map(r => LevelDBSindex.trimPartialKey(r.identifier, r.pos)))
+      await this.indexForConsumed.put(
+        LevelDBSindex.trimWrittenOnKey(k),
+        byConsumed[k].map((r) =>
+          LevelDBSindex.trimPartialKey(r.identifier, r.pos)
+        )
+      );
     }
     // Index writtenOn => (identifier + pos)[]
     for (const k of Underscore.keys(byWrittenOn)) {
-      await this.indexForTrimming.put(LevelDBSindex.trimWrittenOnKey(k), byWrittenOn[k].map(r => LevelDBSindex.trimPartialKey(r.identifier, r.pos)))
+      await this.indexForTrimming.put(
+        LevelDBSindex.trimWrittenOnKey(k),
+        byWrittenOn[k].map((r) =>
+          LevelDBSindex.trimPartialKey(r.identifier, r.pos)
+        )
+      );
     }
     // Index conditions => (identifier + pos)[]
    for (const k of Underscore.keys(byConditions).map(String)) {
-      const existing = (await this.indexForConditions.getOrNull(k)) || []
-      const newSources = byConditions[k].map(r => LevelDBSindex.trimPartialKey(r.identifier, r.pos))
-      await this.indexForConditions.put(k,
Underscore.uniq(existing.concat(newSources))) + const existing = (await this.indexForConditions.getOrNull(k)) || []; + const newSources = byConditions[k].map((r) => + LevelDBSindex.trimPartialKey(r.identifier, r.pos) + ); + await this.indexForConditions.put( + k, + Underscore.uniq(existing.concat(newSources)) + ); } } } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts b/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts index 0d2b8c1f9bf11200522547ed66725e5cae1d7920..c6b1f6de2c9dab34d2780dae2bc64ce8af44d1af 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts @@ -1,183 +1,199 @@ -import {LevelUp} from "levelup"; -import {AbstractIteratorOptions} from "abstract-leveldown"; -import {NewLogger} from "../../../logger" +import { LevelUp } from "levelup"; +import { AbstractIteratorOptions } from "abstract-leveldown"; +import { NewLogger } from "../../../logger"; export class LevelDBTable<T> { - - private db: LevelUp + private db: LevelUp; constructor( private name: string, - protected getLevelDB: (dbName: string)=> Promise<LevelUp>, - ) { - } + protected getLevelDB: (dbName: string) => Promise<LevelUp> + ) {} - cleanCache(): void { - } + cleanCache(): void {} - triggerInit(): void { - } + triggerInit(): void {} async close() { - await this.db.close() + await this.db.close(); } async init(): Promise<void> { - this.db = await this.getLevelDB(`${this.name}`) + this.db = await this.getLevelDB(`${this.name}`); } public async get(k: string): Promise<T> { - const data = await this.db.get(k) - return JSON.parse(String(data)) as any + const data = await this.db.get(k); + return JSON.parse(String(data)) as any; } - public async getOrNull(k: string): Promise<T|null> { + public async getOrNull(k: string): Promise<T | null> { try { - const data = await this.db.get(k) - return JSON.parse(String(data)) as any + const data = await this.db.get(k); + return JSON.parse(String(data)) as any; } catch (e) { - if (!e || e.type !== 'NotFoundError') { - throw Error(e) + if (!e || e.type !== "NotFoundError") { + throw Error(e); } - return null + return null; } } public async del(k: string): Promise<void> { - return await this.db.del(k) + return await this.db.del(k); } public async put(k: string, record: T): Promise<void> { - return await this.db.put(k, JSON.stringify(record)) + return await this.db.put(k, JSON.stringify(record)); } public async batchInsert(records: T[], key: keyof T) { - const batch = records.map(r => { + const batch = records.map((r) => { return { - type: 'put', + type: "put", key: r[key], - value: JSON.stringify(r) - } - }) as any - await this.db.batch(batch) + value: JSON.stringify(r), + }; + }) as any; + await this.db.batch(batch); } - public async batchInsertWithKeyComputing(records: T[], keyComputing: (record: T) => string) { - const batch = records.map(r => { + public async batchInsertWithKeyComputing( + records: T[], + keyComputing: (record: T) => string + ) { + const batch = records.map((r) => { return { - type: 'put', + type: "put", key: keyComputing(r), - value: JSON.stringify(r) - } - }) as any - await this.db.batch(batch) + value: JSON.stringify(r), + }; + }) as any; + await this.db.batch(batch); } public async count(options?: AbstractIteratorOptions) { - let count = 0 - await new Promise(res => { - this.db.createReadStream(options) - .on('data', () => count++) - .on('close', res) - }) - return count - } - - public async readAll(callback: (entry: T) => void, options?: AbstractIteratorOptions) { - await new Promise(res => { - 
this.db.createReadStream(options) - .on('data', data => callback(JSON.parse(String(data.value)))) - .on('close', res) - }) - } - - public async readAllKeyValue(callback: (entry: { - key: string, - value: T - }) => void, options?: AbstractIteratorOptions) { - await new Promise(res => { - this.db.createReadStream(options) - .on('data', data => callback({ - key: String(data.key), - value: JSON.parse(String(data.value)) - })) - .on('close', res) - }) - } - - public async applyAllKeyValue(callback: (entry: { - key: string, - value: T - }) => Promise<void>, options?: AbstractIteratorOptions) { - const ops: Promise<void>[] = [] - await new Promise(res => { - this.db.createReadStream(options) - .on('data', data => ops.push(callback({ - key: String(data.key), - value: JSON.parse(String(data.value)) - }))) - .on('close', res) - }) - await Promise.all(ops) + let count = 0; + await new Promise((res) => { + this.db + .createReadStream(options) + .on("data", () => count++) + .on("close", res); + }); + return count; + } + + public async readAll( + callback: (entry: T) => void, + options?: AbstractIteratorOptions + ) { + await new Promise((res) => { + this.db + .createReadStream(options) + .on("data", (data) => callback(JSON.parse(String(data.value)))) + .on("close", res); + }); } - public async deleteWhere(options?: AbstractIteratorOptions) { - const deletedKv: { - key: string, - value: T - }[] = [] - await this.applyAllKeyValue(async kv => { - deletedKv.push(kv) - await this.del(kv.key) - }, options) - return deletedKv + public async readAllKeyValue( + callback: (entry: { key: string; value: T }) => void, + options?: AbstractIteratorOptions + ) { + await new Promise((res) => { + this.db + .createReadStream(options) + .on("data", (data) => + callback({ + key: String(data.key), + value: JSON.parse(String(data.value)), + }) + ) + .on("close", res); + }); + } + + public async applyAllKeyValue( + callback: (entry: { key: string; value: T }) => Promise<void>, + options?: AbstractIteratorOptions + ) { + const ops: Promise<void>[] = []; + await new Promise((res) => { + this.db + .createReadStream(options) + .on("data", (data) => + ops.push( + callback({ + key: String(data.key), + value: JSON.parse(String(data.value)), + }) + ) + ) + .on("close", res); + }); + await Promise.all(ops); } - public async findAllKeys(options?: AbstractIteratorOptions): Promise<string[]> { - const data: string[] = [] - await this.readAllKeyValue(kv => { - data.push(kv.key) - }, options) - return data + public async deleteWhere(options?: AbstractIteratorOptions) { + const deletedKv: { + key: string; + value: T; + }[] = []; + await this.applyAllKeyValue(async (kv) => { + deletedKv.push(kv); + await this.del(kv.key); + }, options); + return deletedKv; + } + + public async findAllKeys( + options?: AbstractIteratorOptions + ): Promise<string[]> { + const data: string[] = []; + await this.readAllKeyValue((kv) => { + data.push(kv.key); + }, options); + return data; } public async findAllValues(options?: AbstractIteratorOptions): Promise<T[]> { - const data: T[] = [] - await this.readAllKeyValue(kv => { - data.push(kv.value) - }, options) - return data + const data: T[] = []; + await this.readAllKeyValue((kv) => { + data.push(kv.value); + }, options); + return data; } public async findWhere(filter: (t: T) => boolean): Promise<T[]> { - return this.findWhereTransform<T>(filter, t => t.value) + return this.findWhereTransform<T>(filter, (t) => t.value); } - public async findWhereTransform<R>(filter: (t: T) => boolean, transform: (t: { - key: 
string, - value: T - }) => R): Promise<R[]> { - const data: R[] = [] - await this.readAllKeyValue(kv => { + public async findWhereTransform<R>( + filter: (t: T) => boolean, + transform: (t: { key: string; value: T }) => R + ): Promise<R[]> { + const data: R[] = []; + await this.readAllKeyValue((kv) => { if (!filter || filter(kv.value)) { - data.push(transform(kv)) + data.push(transform(kv)); } - }, {}) - return data - } - - async dump(dumpValue: (t: { - key: string, - value: T - }) => any = (v) => v): Promise<number> { - let count = 0 - await this.readAllKeyValue(entry => { - console.log(entry.key, dumpValue({ - key: entry.key, - value: entry.value - })) - count++ - }) - return count + }, {}); + return data; + } + + async dump( + dumpValue: (t: { key: string; value: T }) => any = (v) => v + ): Promise<number> { + let count = 0; + await this.readAllKeyValue((entry) => { + console.log( + entry.key, + dumpValue({ + key: entry.key, + value: entry.value, + }) + ); + count++; + }); + return count; } } diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBWallet.ts b/app/lib/dal/indexDAL/leveldb/LevelDBWallet.ts index bf7be56e23f1247f478e87da24bc5ee3bc8872ae..f8474ea47068a225084024955de3b7d6be21e289 100644 --- a/app/lib/dal/indexDAL/leveldb/LevelDBWallet.ts +++ b/app/lib/dal/indexDAL/leveldb/LevelDBWallet.ts @@ -1,13 +1,12 @@ -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {LevelUp} from 'levelup' -import {LevelDBTable} from "./LevelDBTable" -import {DBWallet} from "../../../db/DBWallet" -import {WalletDAO} from "../abstract/WalletDAO" +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { LevelUp } from "levelup"; +import { LevelDBTable } from "./LevelDBTable"; +import { DBWallet } from "../../../db/DBWallet"; +import { WalletDAO } from "../abstract/WalletDAO"; export class LevelDBWallet extends LevelDBTable<DBWallet> implements WalletDAO { - - constructor(getLevelDB: (dbName: string)=> Promise<LevelUp>) { - super('level_wallet', getLevelDB) + constructor(getLevelDB: (dbName: string) => Promise<LevelUp>) { + super("level_wallet", getLevelDB); } /** @@ -16,24 +15,24 @@ export class LevelDBWallet extends LevelDBTable<DBWallet> implements WalletDAO { @MonitorExecutionTime() async insert(record: DBWallet): Promise<void> { - await this.insertBatch([record]) + await this.insertBatch([record]); } @MonitorExecutionTime() async insertBatch(records: DBWallet[]): Promise<void> { - await this.batchInsertWithKeyComputing(records, r => r.conditions) + await this.batchInsertWithKeyComputing(records, (r) => r.conditions); } - getWallet(conditions: string): Promise<DBWallet|null> { - return this.getOrNull(conditions) + getWallet(conditions: string): Promise<DBWallet | null> { + return this.getOrNull(conditions); } listAll(): Promise<DBWallet[]> { - return this.findAllValues() + return this.findAllValues(); } async saveWallet(wallet: DBWallet): Promise<DBWallet> { - await this.put(wallet.conditions, wallet) - return wallet + await this.put(wallet.conditions, wallet); + return wallet; } } diff --git a/app/lib/dal/indexDAL/leveldb/generic/LevelDBDataIndex.ts b/app/lib/dal/indexDAL/leveldb/generic/LevelDBDataIndex.ts index 5763232221d496007dc81e8926b58ddbebfc455e..72940fd9b9eacea02f5efeace147a5816ad955d4 100644 --- a/app/lib/dal/indexDAL/leveldb/generic/LevelDBDataIndex.ts +++ b/app/lib/dal/indexDAL/leveldb/generic/LevelDBDataIndex.ts @@ -1,10 +1,9 @@ -import {LevelDBTable} from "../LevelDBTable" +import { LevelDBTable } from "../LevelDBTable"; export 
abstract class LevelDBDataIndex<T, R> extends LevelDBTable<T> { + public abstract onInsert(records: R[], newState: R[]): Promise<void>; - public abstract onInsert(records: R[], newState: R[]): Promise<void> - - public abstract onRemove(records: R[], newState: R[]): Promise<void> + public abstract onRemove(records: R[], newState: R[]): Promise<void>; public async onTrimming(belowNumber: number): Promise<void> {} } diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelDBWrittenOnIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelDBWrittenOnIndexer.ts index bdbd5af51ff7ae4af35da08dfe30892b68bbbce6..ca01ba2d837747ac59524b27c89a280a687e1563 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/LevelDBWrittenOnIndexer.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelDBWrittenOnIndexer.ts @@ -1,55 +1,66 @@ -import {LevelUp} from "levelup"; -import {reduceConcat, reduceGroupBy} from "../../../../common-libs/reduce" -import {Underscore} from "../../../../common-libs/underscore" -import {LevelDBTable} from "../LevelDBTable" -import {pint} from "../../../../common-libs/pint" +import { LevelUp } from "levelup"; +import { reduceConcat, reduceGroupBy } from "../../../../common-libs/reduce"; +import { Underscore } from "../../../../common-libs/underscore"; +import { LevelDBTable } from "../LevelDBTable"; +import { pint } from "../../../../common-libs/pint"; export interface WrittenOnData { - writtenOn: number + writtenOn: number; } -export class LevelDBWrittenOnIndexer<T extends WrittenOnData> extends LevelDBTable<string[]> { - +export class LevelDBWrittenOnIndexer< + T extends WrittenOnData +> extends LevelDBTable<string[]> { constructor( name: string, - getLevelDB: (dbName: string)=> Promise<LevelUp>, - protected toKey: (t: T) => string) { - super(name, getLevelDB) + getLevelDB: (dbName: string) => Promise<LevelUp>, + protected toKey: (t: T) => string + ) { + super(name, getLevelDB); } async onInsert(records: T[]): Promise<void> { - const byWrittenOn = reduceGroupBy(records, 'writtenOn') - await Promise.all(Underscore.keys(byWrittenOn).map(async writtenOn => { - await this.put(LevelDBWrittenOnIndexer.trimWrittenOnKey(pint(writtenOn)), byWrittenOn[writtenOn].map(e => this.toKey(e))) - })) + const byWrittenOn = reduceGroupBy(records, "writtenOn"); + await Promise.all( + Underscore.keys(byWrittenOn).map(async (writtenOn) => { + await this.put( + LevelDBWrittenOnIndexer.trimWrittenOnKey(pint(writtenOn)), + byWrittenOn[writtenOn].map((e) => this.toKey(e)) + ); + }) + ); } - getWrittenOnKeys(writtenOn: number): Promise<string[]|null> { - return this.getOrNull(LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn)) + getWrittenOnKeys(writtenOn: number): Promise<string[] | null> { + return this.getOrNull(LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn)); } trim(writtenOn: number): Promise<void> { - return this.del(LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn)) + return this.del(LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn)); } private static trimWrittenOnKey(writtenOn: number) { - return String(writtenOn).padStart(10, '0') + return String(writtenOn).padStart(10, "0"); } async deleteBelow(writtenOn: number): Promise<string[]> { - return (await this.deleteWhere({ lt: LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn) })) - .map(kv => kv.value) - .reduce(reduceConcat, []) + return ( + await this.deleteWhere({ + lt: LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn), + }) + ) + .map((kv) => kv.value) + .reduce(reduceConcat, []); } async deleteAt(writtenOn: number): Promise<string[]> { - const 
k = LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn)
-    const value = await this.getOrNull(k)
+    const k = LevelDBWrittenOnIndexer.trimWrittenOnKey(writtenOn);
+    const value = await this.getOrNull(k);
     if (!value) {
       // Nothing to delete, nothing to return
-      return []
+      return [];
     }
-    await this.del(k)
-    return value
+    await this.del(k);
+    return value;
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexHashIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexHashIndexer.ts
index 5cd3a9dd7fa09f1ce31b1d228b490fdf42bdd337..002e9b4bc02a1847a5715dbd9301505846dfad1f 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexHashIndexer.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexHashIndexer.ts
@@ -1,27 +1,31 @@
-import {LevelDBDataIndex} from "../generic/LevelDBDataIndex"
-import {IindexEntry} from "../../../../indexer"
+import { LevelDBDataIndex } from "../generic/LevelDBDataIndex";
+import { IindexEntry } from "../../../../indexer";

-export type Hash = string
-export type Pubkey = string
-
-export class LevelIIndexHashIndexer extends LevelDBDataIndex<Pubkey[], IindexEntry> {
+export type Hash = string;
+export type Pubkey = string;
+export class LevelIIndexHashIndexer extends LevelDBDataIndex<
+  Pubkey[],
+  IindexEntry
+> {
   async onInsert(records: IindexEntry[]): Promise<void> {
-    await Promise.all(records
-      .filter(e => e.op === 'CREATE' && e.hash)
-      .map(async e => this.put(e.hash as string, [e.pub]))
-    )
+    await Promise.all(
+      records
+        .filter((e) => e.op === "CREATE" && e.hash)
+        .map(async (e) => this.put(e.hash as string, [e.pub]))
+    );
   }

   async onRemove(records: IindexEntry[]): Promise<void> {
-    await Promise.all(records
-      .filter(e => e.op === 'CREATE' && e.hash)
-      .map(async e => this.del(e.hash as string))
-    )
+    await Promise.all(
+      records
+        .filter((e) => e.op === "CREATE" && e.hash)
+        .map(async (e) => this.del(e.hash as string))
+    );
  }

-  async getByHash(hash: Hash): Promise<Pubkey|null> {
-    const res = await this.getOrNull(hash)
-    return res && res[0]
+  async getByHash(hash: Hash): Promise<Pubkey | null> {
+    const res = await this.getOrNull(hash);
+    return res && res[0];
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexKickIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexKickIndexer.ts
index 127e2386b1a98a2554de91b6d7cd99c1f97fc4f0..f1efc84583181c55edda08d58b95b8afa8a44546 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexKickIndexer.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexKickIndexer.ts
@@ -1,100 +1,117 @@
-import {LevelDBDataIndex} from "../generic/LevelDBDataIndex"
-import {IindexEntry} from "../../../../indexer"
-import {DataErrors} from "../../../../common-libs/errors"
+import { LevelDBDataIndex } from "../generic/LevelDBDataIndex";
+import { IindexEntry } from "../../../../indexer";
+import { DataErrors } from "../../../../common-libs/errors";

-export type Pubkey = string
+export type Pubkey = string;

 export interface KickEntry {
-  on: number|undefined // The next time that the identity must be kicked
-  done: number[] // The revertion history
+  on: number | undefined; // The next time that the identity must be kicked
+  done: number[]; // The reversion history
 }

-export class LevelIIndexKickIndexer extends LevelDBDataIndex<KickEntry, IindexEntry> {
-
+export class LevelIIndexKickIndexer extends LevelDBDataIndex<
+  KickEntry,
+  IindexEntry
+> {
   async onInsert(records: IindexEntry[]): Promise<void> {
     // Case 1: to be kicked
-    await Promise.all(records
-      .filter(e => e.kick)
-      .map(async e => {
-      let entry = await this.getOrNull(e.pub)
-      if (!entry) {
-        entry = {
-          on: e.writtenOn,
-          done: []
+    await Promise.all(
+      records
+        .filter((e) => e.kick)
+        .map(async (e) => {
+          let entry = await this.getOrNull(e.pub);
+          if (!entry) {
+            entry = {
+              on: e.writtenOn,
+              done: [],
+            };
           }
-        }
-        entry.on = e.writtenOn
-        await this.put(e.pub, entry)
-      })
-    )
+          entry.on = e.writtenOn;
+          await this.put(e.pub, entry);
+        })
+    );
     // Case 2: just kicked
-    await Promise.all(records
-      .filter(e => e.member === false)
-      .map(async e => {
-        const entry = await this.getOrNull(e.pub)
-        if (entry && entry.on === e.writtenOn - 1) { // Members are excluded at B# +1
-          entry.done.push(entry.on)
-          entry.on = undefined
-          await this.put(e.pub, entry)
-        }
-        // Otherwise it is not a kicking
-      })
-    )
+    await Promise.all(
+      records
+        .filter((e) => e.member === false)
+        .map(async (e) => {
+          const entry = await this.getOrNull(e.pub);
+          if (entry && entry.on === e.writtenOn - 1) {
+            // Members are excluded at B# +1
+            entry.done.push(entry.on);
+            entry.on = undefined;
+            await this.put(e.pub, entry);
+          }
+          // Otherwise it is not a kick
+        })
+    );
   }

   async onRemove(records: IindexEntry[]): Promise<void> {
     // Case 1: to be kicked => unkicked
-    await Promise.all(records
-      .filter(e => e.kick)
-      .map(async e => {
-        const entry = await this.get(e.pub)
-        if (entry.on === e.writtenOn) {
-          entry.on = entry.done.pop()
-          if (entry.on === undefined) {
-            // No more kicking left
-            await this.del(e.pub)
+    await Promise.all(
+      records
+        .filter((e) => e.kick)
+        .map(async (e) => {
+          const entry = await this.get(e.pub);
+          if (entry.on === e.writtenOn) {
+            entry.on = entry.done.pop();
+            if (entry.on === undefined) {
+              // No more kicking left
+              await this.del(e.pub);
+            } else {
+              // Some kicks left
+              await this.put(e.pub, entry); // TODO: test this, can occur, probably not covered
+            }
+          } else {
+            throw Error(
+              DataErrors[DataErrors.INVALID_LEVELDB_IINDEX_DATA_TO_BE_KICKED]
+            );
           }
-          // Some kicks left
-          await this.put(e.pub, entry) // TODO: test this, can occur, probably not covered
-        } else {
-          throw Error(DataErrors[DataErrors.INVALID_LEVELDB_IINDEX_DATA_TO_BE_KICKED])
-        }
-      })
-    )
+        })
+    );
     // Case 2: just kicked => to be kicked
-    await Promise.all(records
-      .filter(e => e.member === false)
-      .map(async e => {
-        const entry = await this.getOrNull(e.pub)
-        if (entry && entry.done.includes(e.writtenOn - 1)) {
-          // It was a kicking
-          entry.on = entry.done.pop()
-          if (!entry.on) {
-            throw Error(DataErrors[DataErrors.INVALID_LEVELDB_IINDEX_DATA_WAS_KICKED])
+    await Promise.all(
+      records
+        .filter((e) => e.member === false)
+        .map(async (e) => {
+          const entry = await this.getOrNull(e.pub);
+          if (entry && entry.done.includes(e.writtenOn - 1)) {
+            // It was a kick
+            entry.on = entry.done.pop();
+            if (!entry.on) {
+              throw Error(
+                DataErrors[DataErrors.INVALID_LEVELDB_IINDEX_DATA_WAS_KICKED]
+              );
+            }
+            await this.put(e.pub, entry);
          }
-          await this.put(e.pub, entry)
-        }
-      })
-    )
+        })
+    );
   }

   async onTrimming(belowNumber: number): Promise<void> {
-    await this.applyAllKeyValue(async kv => {
-      const initialLength = kv.value.done.length
-      kv.value.done = kv.value.done.filter(e => e >= belowNumber)
+    await this.applyAllKeyValue(async (kv) => {
+      const initialLength = kv.value.done.length;
+      kv.value.done = kv.value.done.filter((e) => e >= belowNumber);
       if (kv.value.done.length !== initialLength && kv.value.done.length > 0) {
         // We simply update the entry which was pruned
-        await this.put(kv.key, kv.value)
+        await this.put(kv.key, kv.value);
-      }
-      else if (kv.value.done.length !== initialLength && kv.value.done.length === 0 && !kv.value.on) {
+      } else if (
+        kv.value.done.length !== initialLength &&
+        kv.value.done.length === 0 &&
+        !kv.value.on
+      ) {
         // We remove the entry, it is no longer necessary
-        await this.del(kv.key)
+        await this.del(kv.key);
       }
-    })
+    });
   }

   async getAll(): Promise<Pubkey[]> {
-    return this.findWhereTransform(t => !!t.on, kv => kv.key)
+    return this.findWhereTransform(
+      (t) => !!t.on,
+      (kv) => kv.key
+    );
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexUidIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexUidIndexer.ts
index 69b4756f73e6f4776ac4c16e1898d5bad9331e67..3047117d021abc3b25a16b6a0b8c0d7e5b3c958f 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexUidIndexer.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelIIndexUidIndexer.ts
@@ -1,27 +1,31 @@
-import {LevelDBDataIndex} from "../generic/LevelDBDataIndex"
-import {IindexEntry} from "../../../../indexer"
+import { LevelDBDataIndex } from "../generic/LevelDBDataIndex";
+import { IindexEntry } from "../../../../indexer";

-export type Uid = string
-export type Pubkey = string
-
-export class LevelIIndexUidIndexer extends LevelDBDataIndex<Pubkey[], IindexEntry> {
+export type Uid = string;
+export type Pubkey = string;
+export class LevelIIndexUidIndexer extends LevelDBDataIndex<
+  Pubkey[],
+  IindexEntry
+> {
   async onInsert(records: IindexEntry[]): Promise<void> {
-    await Promise.all(records
-      .filter(e => e.op === 'CREATE' && e.uid)
-      .map(async e => this.put(e.uid as string, [e.pub]))
-    )
+    await Promise.all(
+      records
+        .filter((e) => e.op === "CREATE" && e.uid)
+        .map(async (e) => this.put(e.uid as string, [e.pub]))
+    );
   }

   async onRemove(records: IindexEntry[]): Promise<void> {
-    await Promise.all(records
-      .filter(e => e.op === 'CREATE' && e.uid)
-      .map(async e => this.del(e.uid as string))
-    )
+    await Promise.all(
+      records
+        .filter((e) => e.op === "CREATE" && e.uid)
+        .map(async (e) => this.del(e.uid as string))
+    );
   }

-  async getPubByUid(uid: Uid): Promise<Pubkey|null> {
-    const res = await this.getOrNull(uid)
-    return res && res[0]
+  async getPubByUid(uid: Uid): Promise<Pubkey | null> {
+    const res = await this.getOrNull(uid);
+    return res && res[0];
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts
index 483a8595155c0de2aa35fde3288e4e952f4ccd7e..2548c87d19351856647be0240fec4d1c9978036a 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts
@@ -1,108 +1,152 @@
-import {LevelDBDataIndex} from "../generic/LevelDBDataIndex"
-import {MindexEntry, reduce} from "../../../../indexer"
-import {reduceConcat, reduceGroupBy} from "../../../../common-libs/reduce"
-import {pint} from "../../../../common-libs/pint"
-import {Underscore} from "../../../../common-libs/underscore"
+import { LevelDBDataIndex } from "../generic/LevelDBDataIndex";
+import { MindexEntry, reduce } from "../../../../indexer";
+import { reduceConcat, reduceGroupBy } from "../../../../common-libs/reduce";
+import { pint } from "../../../../common-libs/pint";
+import { Underscore } from "../../../../common-libs/underscore";

-export type Pubkey = string
+export type Pubkey = string;

-export class LevelMIndexExpiresOnIndexer extends LevelDBDataIndex<Pubkey[], MindexEntry> {
-
-  async onInsert(records: MindexEntry[], prevState: MindexEntry[]): Promise<void> {
-
-    const prevStateByPub = reduceGroupBy(prevState, 'pub')
+export class LevelMIndexExpiresOnIndexer extends LevelDBDataIndex<
+  Pubkey[],
+  MindexEntry
+> {
+  async onInsert(
+    records: MindexEntry[],
+    prevState: MindexEntry[]
+  ): Promise<void> {
+    const prevStateByPub = reduceGroupBy(prevState, "pub");
     // Case 1: expires_on change (when MS JOIN|RENEW)
-    const byExpiresOn = reduceGroupBy(records.filter(e => e.expires_on), "expires_on")
-    await Promise.all(Underscore.keys(byExpiresOn)
-      .map(async expiresOn => {
-        const pubkeys = byExpiresOn[expiresOn].map(e => e.pub)
+    const byExpiresOn = reduceGroupBy(
+      records.filter((e) => e.expires_on),
+      "expires_on"
+    );
+    await Promise.all(
+      Underscore.keys(byExpiresOn).map(async (expiresOn) => {
+        const pubkeys = byExpiresOn[expiresOn].map((e) => e.pub);
         // 1. If the key had a previous expires_on, we remove it
-        const reducedWhosExpiresOnChanges = pubkeys.filter(p => prevStateByPub[p])
-          .map(p => reduce(prevStateByPub[p]))
-          .filter(r => r.expires_on && !r.expired_on)
+        const reducedWhosExpiresOnChanges = pubkeys
+          .filter((p) => prevStateByPub[p])
+          .map((p) => reduce(prevStateByPub[p]))
+          .filter((r) => r.expires_on && !r.expired_on);
         for (const reduced of reducedWhosExpiresOnChanges) {
-          await this.removeAllKeysFromExpiresOn(reduced.expires_on as number, [reduced.pub])
+          await this.removeAllKeysFromExpiresOn(reduced.expires_on as number, [
+            reduced.pub,
+          ]);
         }
         // 2. We put the new value
-        await this.addAllKeysToExpiresOn(pint(expiresOn), byExpiresOn[expiresOn].map(e => e.pub))
+        await this.addAllKeysToExpiresOn(
+          pint(expiresOn),
+          byExpiresOn[expiresOn].map((e) => e.pub)
+        );
       })
-    )
+    );
     // Case 2: expiration occurs
-    const pubkeysToexpire = Underscore.uniq(records.filter(e => e.expired_on).map(r => r.pub))
-    const prevStateFM = Underscore.values(prevStateByPub).map(reduce)
-    const byExpiresOnPrevState = reduceGroupBy(prevStateFM.filter(r => pubkeysToexpire.includes(r.pub)), 'expires_on')
-    await Promise.all(Underscore.keys(byExpiresOnPrevState)
-      .map(async expiresOn => this.removeAllKeysFromExpiresOn(pint(expiresOn), byExpiresOnPrevState[expiresOn].map(e => e.pub)))
-    )
+    const pubkeysToexpire = Underscore.uniq(
+      records.filter((e) => e.expired_on).map((r) => r.pub)
+    );
+    const prevStateFM = Underscore.values(prevStateByPub).map(reduce);
+    const byExpiresOnPrevState = reduceGroupBy(
+      prevStateFM.filter((r) => pubkeysToexpire.includes(r.pub)),
+      "expires_on"
+    );
+    await Promise.all(
+      Underscore.keys(byExpiresOnPrevState).map(async (expiresOn) =>
+        this.removeAllKeysFromExpiresOn(
+          pint(expiresOn),
+          byExpiresOnPrevState[expiresOn].map((e) => e.pub)
+        )
+      )
+    );
   }

-  async onRemove(records: MindexEntry[], newState: MindexEntry[]): Promise<void> {
-
-    const newStateByPub = reduceGroupBy(newState, 'pub')
+  async onRemove(
+    records: MindexEntry[],
+    newState: MindexEntry[]
+  ): Promise<void> {
+    const newStateByPub = reduceGroupBy(newState, "pub");
     // Case 1: expires_on change REVERT
-    const byExpiresOn = reduceGroupBy(records.filter(e => e.expires_on), "expires_on")
-    await Promise.all(Underscore.keys(byExpiresOn)
-      .map(async expiresOn => {
-        const pubkeys = byExpiresOn[expiresOn].map(e => e.pub)
+    const byExpiresOn = reduceGroupBy(
+      records.filter((e) => e.expires_on),
+      "expires_on"
+    );
+    await Promise.all(
+      Underscore.keys(byExpiresOn).map(async (expiresOn) => {
+        const pubkeys = byExpiresOn[expiresOn].map((e) => e.pub);
        // 1.
Remove the existing value - await this.removeAllKeysFromExpiresOn(pint(expiresOn), pubkeys) + await this.removeAllKeysFromExpiresOn(pint(expiresOn), pubkeys); // 2. Put back the old one if it exists const reduced = pubkeys - .filter(p => newStateByPub[p]) - .map(p => newStateByPub[p]) + .filter((p) => newStateByPub[p]) + .map((p) => newStateByPub[p]) .map(reduce) - .filter(r => r.expires_on) + .filter((r) => r.expires_on); for (const r of reduced) { - await this.addAllKeysToExpiresOn(r.expires_on as number, [r.pub]) - + await this.addAllKeysToExpiresOn(r.expires_on as number, [r.pub]); } }) - ) + ); // Case 2: expiration REVERT - const values: MindexEntry[] = Underscore.values(newStateByPub).map(entries => reduce(entries)) - const byExpiredOn = reduceGroupBy(values, "expired_on") - await Promise.all(Underscore.keys(byExpiredOn) - .map(async expiresOn => this.addAllKeysToExpiresOn(pint(expiresOn), byExpiredOn[expiresOn].map(e => e.pub))) - ) + const values: MindexEntry[] = Underscore.values( + newStateByPub + ).map((entries) => reduce(entries)); + const byExpiredOn = reduceGroupBy(values, "expired_on"); + await Promise.all( + Underscore.keys(byExpiredOn).map(async (expiresOn) => + this.addAllKeysToExpiresOn( + pint(expiresOn), + byExpiredOn[expiresOn].map((e) => e.pub) + ) + ) + ); } - async addAllKeysToExpiresOn(expiresOn: number, pubkeys: Pubkey[]): Promise<void> { - const key = LevelMIndexExpiresOnIndexer.trimKey(expiresOn) - let entry = await this.getOrNull(key) + async addAllKeysToExpiresOn( + expiresOn: number, + pubkeys: Pubkey[] + ): Promise<void> { + const key = LevelMIndexExpiresOnIndexer.trimKey(expiresOn); + let entry = await this.getOrNull(key); if (!entry) { - entry = [] + entry = []; } for (const pub of pubkeys) { - entry.push(pub) + entry.push(pub); } - await this.put(key, entry) + await this.put(key, entry); } - async removeAllKeysFromExpiresOn(expiresOn: number, pubkeys: Pubkey[]): Promise<void> { + async removeAllKeysFromExpiresOn( + expiresOn: number, + pubkeys: Pubkey[] + ): Promise<void> { // We remove the "expires_on" indexed values - const key = LevelMIndexExpiresOnIndexer.trimKey(expiresOn) - const entry = await this.get(key) + const key = LevelMIndexExpiresOnIndexer.trimKey(expiresOn); + const entry = await this.get(key); for (const pub of pubkeys) { if (entry.includes(pub)) { - entry.splice(entry.indexOf(pub), 1) + entry.splice(entry.indexOf(pub), 1); } } if (entry.length) { // Some expirations left - await this.put(key, entry) // TODO: test this, can occur, probably not covered + await this.put(key, entry); // TODO: test this, can occur, probably not covered } else { // No more expirations left - await this.del(key) + await this.del(key); } } async findExpiresOnLte(medianTime: number) { - return (await this.findAllValues({ lte: LevelMIndexExpiresOnIndexer.trimKey(medianTime) })).reduce(reduceConcat, []) + return ( + await this.findAllValues({ + lte: LevelMIndexExpiresOnIndexer.trimKey(medianTime), + }) + ).reduce(reduceConcat, []); } private static trimKey(expiresOn: number) { - return String(expiresOn).padStart(10, '0') + return String(expiresOn).padStart(10, "0"); } } diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexRevokesOnIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexRevokesOnIndexer.ts index 5a62f742eeb485f71a8635c78a73bf4faa1d80d2..cc34691e8c23daf28622e83779ad75d0d78bcdb6 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexRevokesOnIndexer.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexRevokesOnIndexer.ts 
@@ -1,108 +1,152 @@ -import {LevelDBDataIndex} from "../generic/LevelDBDataIndex" -import {MindexEntry, reduce} from "../../../../indexer" -import {reduceConcat, reduceGroupBy} from "../../../../common-libs/reduce" -import {Underscore} from "../../../../common-libs/underscore" -import {pint} from "../../../../common-libs/pint" +import { LevelDBDataIndex } from "../generic/LevelDBDataIndex"; +import { MindexEntry, reduce } from "../../../../indexer"; +import { reduceConcat, reduceGroupBy } from "../../../../common-libs/reduce"; +import { Underscore } from "../../../../common-libs/underscore"; +import { pint } from "../../../../common-libs/pint"; -export type Pubkey = string +export type Pubkey = string; -export class LevelMIndexRevokesOnIndexer extends LevelDBDataIndex<Pubkey[], MindexEntry> { - - async onInsert(records: MindexEntry[], prevState: MindexEntry[]): Promise<void> { - - const prevStateByPub = reduceGroupBy(prevState, 'pub') +export class LevelMIndexRevokesOnIndexer extends LevelDBDataIndex< + Pubkey[], + MindexEntry +> { + async onInsert( + records: MindexEntry[], + prevState: MindexEntry[] + ): Promise<void> { + const prevStateByPub = reduceGroupBy(prevState, "pub"); // Case 1: revokes_on change (when MS JOIN|RENEW) - const byRevokesOn = reduceGroupBy(records.filter(e => e.revokes_on), "revokes_on") - await Promise.all(Underscore.keys(byRevokesOn) - .map(async revokesOn => { - const pubkeys = byRevokesOn[revokesOn].map(e => e.pub) + const byRevokesOn = reduceGroupBy( + records.filter((e) => e.revokes_on), + "revokes_on" + ); + await Promise.all( + Underscore.keys(byRevokesOn).map(async (revokesOn) => { + const pubkeys = byRevokesOn[revokesOn].map((e) => e.pub); // 1. If the key had a previous revokes_on, we remove it - const reducedWhosRevokesOnChanges = pubkeys.filter(p => prevStateByPub[p]) - .map(p => reduce(prevStateByPub[p])) - .filter(r => r.revokes_on && !r.revoked_on) + const reducedWhosRevokesOnChanges = pubkeys + .filter((p) => prevStateByPub[p]) + .map((p) => reduce(prevStateByPub[p])) + .filter((r) => r.revokes_on && !r.revoked_on); for (const reduced of reducedWhosRevokesOnChanges) { - await this.removeAllKeysFromRevokesOn(reduced.revokes_on as number, [reduced.pub]) + await this.removeAllKeysFromRevokesOn(reduced.revokes_on as number, [ + reduced.pub, + ]); } // 2. 
We put the new value - await this.addAllKeysToRevokesOn(pint(revokesOn), byRevokesOn[revokesOn].map(e => e.pub)) + await this.addAllKeysToRevokesOn( + pint(revokesOn), + byRevokesOn[revokesOn].map((e) => e.pub) + ); }) - ) + ); // Case 2: revocation occurs - const pubkeysToRevoke = Underscore.uniq(records.filter(e => e.revoked_on).map(r => r.pub)) - const prevStateFM = Underscore.values(prevStateByPub).map(reduce) - const byRevokesOnPrevState = reduceGroupBy(prevStateFM.filter(r => pubkeysToRevoke.includes(r.pub)), 'revokes_on') - await Promise.all(Underscore.keys(byRevokesOnPrevState) - .map(async revokesOn => this.removeAllKeysFromRevokesOn(pint(revokesOn), byRevokesOnPrevState[revokesOn].map(e => e.pub))) - ) + const pubkeysToRevoke = Underscore.uniq( + records.filter((e) => e.revoked_on).map((r) => r.pub) + ); + const prevStateFM = Underscore.values(prevStateByPub).map(reduce); + const byRevokesOnPrevState = reduceGroupBy( + prevStateFM.filter((r) => pubkeysToRevoke.includes(r.pub)), + "revokes_on" + ); + await Promise.all( + Underscore.keys(byRevokesOnPrevState).map(async (revokesOn) => + this.removeAllKeysFromRevokesOn( + pint(revokesOn), + byRevokesOnPrevState[revokesOn].map((e) => e.pub) + ) + ) + ); } - async onRemove(records: MindexEntry[], newState: MindexEntry[]): Promise<void> { - - const newStateByPub = reduceGroupBy(newState, 'pub') + async onRemove( + records: MindexEntry[], + newState: MindexEntry[] + ): Promise<void> { + const newStateByPub = reduceGroupBy(newState, "pub"); // Case 1: revokes_on change REVERT - const byRevokesOn = reduceGroupBy(records.filter(e => e.revokes_on), "revokes_on") - await Promise.all(Underscore.keys(byRevokesOn) - .map(async revokesOn => { - const pubkeys = byRevokesOn[revokesOn].map(e => e.pub) + const byRevokesOn = reduceGroupBy( + records.filter((e) => e.revokes_on), + "revokes_on" + ); + await Promise.all( + Underscore.keys(byRevokesOn).map(async (revokesOn) => { + const pubkeys = byRevokesOn[revokesOn].map((e) => e.pub); // 1. Remove the existing value - await this.removeAllKeysFromRevokesOn(pint(revokesOn), pubkeys) + await this.removeAllKeysFromRevokesOn(pint(revokesOn), pubkeys); // 2. 
Put back the old one if it exists const reduced = pubkeys - .filter(p => newStateByPub[p]) - .map(p => newStateByPub[p]) + .filter((p) => newStateByPub[p]) + .map((p) => newStateByPub[p]) .map(reduce) - .filter(r => r.revokes_on) + .filter((r) => r.revokes_on); for (const r of reduced) { - await this.addAllKeysToRevokesOn(r.revokes_on as number, [r.pub]) - + await this.addAllKeysToRevokesOn(r.revokes_on as number, [r.pub]); } }) - ) + ); // Case 2: revocation REVERT - const values: MindexEntry[] = Underscore.values(newStateByPub).map(entries => reduce(entries)) - const byExpiredOn = reduceGroupBy(values, "revoked_on") - await Promise.all(Underscore.keys(byExpiredOn) - .map(async revokesOn => this.addAllKeysToRevokesOn(pint(revokesOn), byExpiredOn[revokesOn].map(e => e.pub))) - ) + const values: MindexEntry[] = Underscore.values( + newStateByPub + ).map((entries) => reduce(entries)); + const byExpiredOn = reduceGroupBy(values, "revoked_on"); + await Promise.all( + Underscore.keys(byExpiredOn).map(async (revokesOn) => + this.addAllKeysToRevokesOn( + pint(revokesOn), + byExpiredOn[revokesOn].map((e) => e.pub) + ) + ) + ); } - async addAllKeysToRevokesOn(revokesOn: number, pubkeys: Pubkey[]): Promise<void> { - const key = LevelMIndexRevokesOnIndexer.trimKey(revokesOn) - let entry = await this.getOrNull(key) + async addAllKeysToRevokesOn( + revokesOn: number, + pubkeys: Pubkey[] + ): Promise<void> { + const key = LevelMIndexRevokesOnIndexer.trimKey(revokesOn); + let entry = await this.getOrNull(key); if (!entry) { - entry = [] + entry = []; } for (const pub of pubkeys) { - entry.push(pub) + entry.push(pub); } - await this.put(key, entry) + await this.put(key, entry); } - async removeAllKeysFromRevokesOn(revokesOn: number, pubkeys: Pubkey[]): Promise<void> { + async removeAllKeysFromRevokesOn( + revokesOn: number, + pubkeys: Pubkey[] + ): Promise<void> { // We remove the "revokes_on" indexed values - const key = LevelMIndexRevokesOnIndexer.trimKey(revokesOn) - const entry = await this.get(key) + const key = LevelMIndexRevokesOnIndexer.trimKey(revokesOn); + const entry = await this.get(key); for (const pub of pubkeys) { if (entry.includes(pub)) { - entry.splice(entry.indexOf(pub), 1) + entry.splice(entry.indexOf(pub), 1); } } if (entry.length) { // Some revocations left - await this.put(key, entry) // TODO: test this, can occur, probably not covered + await this.put(key, entry); // TODO: test this, can occur, probably not covered } else { // No more revocations left - await this.del(key) + await this.del(key); } } async findRevokesOnLte(revokesOn: number): Promise<Pubkey[]> { - return (await this.findAllValues({ lte: LevelMIndexRevokesOnIndexer.trimKey(revokesOn) })).reduce(reduceConcat, []) + return ( + await this.findAllValues({ + lte: LevelMIndexRevokesOnIndexer.trimKey(revokesOn), + }) + ).reduce(reduceConcat, []); } private static trimKey(revokesOn: number) { - return String(revokesOn).padStart(10, '0') + return String(revokesOn).padStart(10, "0"); } } diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlock.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlock.ts index 2fbaad6deeed113a3b5e2f6b520e4bcaeae52898..21e88452799f67bdddfb114b3d6b6461a50463a5 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlock.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlock.ts @@ -1,70 +1,64 @@ -import {LevelDBDataIndex} from "../../generic/LevelDBDataIndex" -import {DBBlock} from "../../../../../db/DBBlock" -import {DataErrors} from 
"../../../../../common-libs/errors" +import { LevelDBDataIndex } from "../../generic/LevelDBDataIndex"; +import { DBBlock } from "../../../../../db/DBBlock"; +import { DataErrors } from "../../../../../common-libs/errors"; -export abstract class LevelIndexBlock extends LevelDBDataIndex<number[], DBBlock> { +export abstract class LevelIndexBlock extends LevelDBDataIndex< + number[], + DBBlock +> { + abstract matches(b: DBBlock): boolean; - abstract matches(b: DBBlock): boolean - - abstract keys(b: DBBlock): string[] + abstract keys(b: DBBlock): string[]; async onInsert(records: DBBlock[]): Promise<void> { - const recordsByBlock = records - .filter(this.matches) - .map(b => ({ - keys: this.keys(b), - b - })) - const map: { [k: string]: number[] } = {} - recordsByBlock.forEach(m => { - m.keys.forEach(k => { + const recordsByBlock = records.filter(this.matches).map((b) => ({ + keys: this.keys(b), + b, + })); + const map: { [k: string]: number[] } = {}; + recordsByBlock.forEach((m) => { + m.keys.forEach((k) => { if (!map[k]) { - map[k] = [] + map[k] = []; } - map[k].push(m.b.number) - }) - }) - await Promise.all( - Object.keys(map) - .map(k => this.indexIt(k, map[k])) - ) + map[k].push(m.b.number); + }); + }); + await Promise.all(Object.keys(map).map((k) => this.indexIt(k, map[k]))); } async onRemove(records: DBBlock[]): Promise<void> { - await Promise.all(records - .filter(this.matches) - .map(async b => - Promise.all( - this - .keys(b) - .map(i => this.unindexIt(i, b.number)) + await Promise.all( + records + .filter(this.matches) + .map(async (b) => + Promise.all(this.keys(b).map((i) => this.unindexIt(i, b.number))) ) - ) - ) + ); } private async indexIt(pub: string, newNumbers: number[]) { - const blockNumbers = (await this.getOrNull(pub)) || [] - newNumbers.forEach(n => blockNumbers.push(n)) - await this.put(pub, blockNumbers) + const blockNumbers = (await this.getOrNull(pub)) || []; + newNumbers.forEach((n) => blockNumbers.push(n)); + await this.put(pub, blockNumbers); } private async unindexIt(pub: string, blockNumber: number) { - const blockNumbers = (await this.getOrNull(pub)) + const blockNumbers = await this.getOrNull(pub); if (!blockNumbers) { - throw DataErrors[DataErrors.DB_INCORRECT_INDEX] + throw DataErrors[DataErrors.DB_INCORRECT_INDEX]; } - const index = blockNumbers.indexOf(blockNumber) + const index = blockNumbers.indexOf(blockNumber); if (index === -1) { - throw DataErrors[DataErrors.DB_INDEXED_BLOCK_NOT_FOUND] + throw DataErrors[DataErrors.DB_INDEXED_BLOCK_NOT_FOUND]; } - blockNumbers.splice(index, 1) + blockNumbers.splice(index, 1); if (blockNumbers.length) { - await this.put(pub, blockNumbers) + await this.put(pub, blockNumbers); } else { - await this.del(pub) + await this.del(pub); } } } -export const LDBIndex_ALL = 'ALL' +export const LDBIndex_ALL = "ALL"; diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockActives.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockActives.ts index 42ce81a704113b8313b59d91753403c0c4e71480..43c5f8c94ac1af46d5efa4f00985e2a5f511445e 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockActives.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockActives.ts @@ -1,17 +1,16 @@ -import {DBBlock} from "../../../../../db/DBBlock" -import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock" -import {MembershipDTO} from "../../../../../dto/MembershipDTO" +import { DBBlock } from "../../../../../db/DBBlock"; +import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock"; 
+import { MembershipDTO } from "../../../../../dto/MembershipDTO"; export class LevelIndexBlockActives extends LevelIndexBlock { - matches(b: DBBlock): boolean { - return b.actives.length > 0 + return b.actives.length > 0; } keys(b: DBBlock): string[] { return b.actives - .map(m => MembershipDTO.fromInline(m)) - .map(m => m.issuer) - .concat([LDBIndex_ALL]) + .map((m) => MembershipDTO.fromInline(m)) + .map((m) => m.issuer) + .concat([LDBIndex_ALL]); } } diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockCertifications.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockCertifications.ts index e39c2e782b80adc20ae834cf5fc1c0637de05db7..b2b47afc0aa1d9998a47964025c03a4079b2b80c 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockCertifications.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockCertifications.ts @@ -1,18 +1,17 @@ -import {DBBlock} from "../../../../../db/DBBlock" -import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock" -import {CertificationDTO} from "../../../../../dto/CertificationDTO" +import { DBBlock } from "../../../../../db/DBBlock"; +import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock"; +import { CertificationDTO } from "../../../../../dto/CertificationDTO"; export class LevelIndexBlockCertifications extends LevelIndexBlock { - matches(b: DBBlock): boolean { - return b.certifications.length > 0 + return b.certifications.length > 0; } keys(b: DBBlock): string[] { return b.certifications - .map(c => CertificationDTO.fromInline(c)) - .map(c => [c.from, c.to]) + .map((c) => CertificationDTO.fromInline(c)) + .map((c) => [c.from, c.to]) .reduce((all, some) => all.concat(some), []) - .concat([LDBIndex_ALL]) + .concat([LDBIndex_ALL]); } } diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockExcluded.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockExcluded.ts index ef344e14f2ed9960ec1abce990e458b0c44d1cd1..276675a7496487402ee123c3376d611450ddd924 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockExcluded.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockExcluded.ts @@ -1,13 +1,12 @@ -import {DBBlock} from "../../../../../db/DBBlock" -import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock" +import { DBBlock } from "../../../../../db/DBBlock"; +import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock"; export class LevelIndexBlockExcluded extends LevelIndexBlock { - matches(b: DBBlock): boolean { - return b.excluded.length > 0 + return b.excluded.length > 0; } keys(b: DBBlock): string[] { - return b.excluded.concat([LDBIndex_ALL]) + return b.excluded.concat([LDBIndex_ALL]); } } diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts index 771646e2450869d74491beb930f4756411c9590a..e153ca1ff3500037522f693f1aceda5642f532b9 100644 --- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts +++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts @@ -1,17 +1,16 @@ -import {DBBlock} from "../../../../../db/DBBlock" -import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock" -import {IdentityDTO} from "../../../../../dto/IdentityDTO" +import { DBBlock } from "../../../../../db/DBBlock"; +import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock"; +import { IdentityDTO } from "../../../../../dto/IdentityDTO"; export class LevelIndexBlockIdentities extends 
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts
index 771646e2450869d74491beb930f4756411c9590a..e153ca1ff3500037522f693f1aceda5642f532b9 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockIdentities.ts
@@ -1,17 +1,16 @@
-import {DBBlock} from "../../../../../db/DBBlock"
-import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock"
-import {IdentityDTO} from "../../../../../dto/IdentityDTO"
+import { DBBlock } from "../../../../../db/DBBlock";
+import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock";
+import { IdentityDTO } from "../../../../../dto/IdentityDTO";
 
 export class LevelIndexBlockIdentities extends LevelIndexBlock {
-
   matches(b: DBBlock): boolean {
-    return b.identities.length > 0
+    return b.identities.length > 0;
   }
 
   keys(b: DBBlock): string[] {
     return b.identities
-      .map(i => IdentityDTO.fromInline(i))
-      .map(i => i.pubkey)
-      .concat([LDBIndex_ALL])
+      .map((i) => IdentityDTO.fromInline(i))
+      .map((i) => i.pubkey)
+      .concat([LDBIndex_ALL]);
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockJoiners.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockJoiners.ts
index 92329c4248a13214dda78a2a740915e3c451e71d..d3049fe5ac60b4b3cc6fc9f096c031fd4fd4a552 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockJoiners.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockJoiners.ts
@@ -1,17 +1,16 @@
-import {DBBlock} from "../../../../../db/DBBlock"
-import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock"
-import {MembershipDTO} from "../../../../../dto/MembershipDTO"
+import { DBBlock } from "../../../../../db/DBBlock";
+import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock";
+import { MembershipDTO } from "../../../../../dto/MembershipDTO";
 
 export class LevelIndexBlockJoiners extends LevelIndexBlock {
-
   matches(b: DBBlock): boolean {
-    return b.joiners.length > 0
+    return b.joiners.length > 0;
   }
 
   keys(b: DBBlock): string[] {
     return b.joiners
-      .map(m => MembershipDTO.fromInline(m))
-      .map(m => m.issuer)
-      .concat([LDBIndex_ALL])
+      .map((m) => MembershipDTO.fromInline(m))
+      .map((m) => m.issuer)
+      .concat([LDBIndex_ALL]);
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockLeavers.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockLeavers.ts
index 8714d1dc565bf5a8a476c2493dd92e117777a37f..0f239a9c2b14b55995fce3a55871b4d0e2a2af54 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockLeavers.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockLeavers.ts
@@ -1,17 +1,16 @@
-import {DBBlock} from "../../../../../db/DBBlock"
-import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock"
-import {MembershipDTO} from "../../../../../dto/MembershipDTO"
+import { DBBlock } from "../../../../../db/DBBlock";
+import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock";
+import { MembershipDTO } from "../../../../../dto/MembershipDTO";
 
 export class LevelIndexBlockLeavers extends LevelIndexBlock {
-
   matches(b: DBBlock): boolean {
-    return b.leavers.length > 0
+    return b.leavers.length > 0;
   }
 
   keys(b: DBBlock): string[] {
     return b.leavers
-      .map(m => MembershipDTO.fromInline(m))
-      .map(m => m.issuer)
-      .concat([LDBIndex_ALL])
+      .map((m) => MembershipDTO.fromInline(m))
+      .map((m) => m.issuer)
+      .concat([LDBIndex_ALL]);
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockRevoked.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockRevoked.ts
index 02648341b94f28f1551880ace0ad0db6807518d9..338ba631df8961a2fd7c6effb140ad7f31b9b385 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockRevoked.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockRevoked.ts
@@ -1,13 +1,12 @@
-import {DBBlock} from "../../../../../db/DBBlock"
-import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock"
+import { DBBlock } from "../../../../../db/DBBlock";
+import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock";
 
 export class LevelIndexBlockRevoked extends LevelIndexBlock {
-
   matches(b: DBBlock): boolean {
-    return b.revoked.length > 0
+    return b.revoked.length > 0;
   }
 
   keys(b: DBBlock): string[] {
-    return b.revoked.concat([LDBIndex_ALL])
+    return b.revoked.concat([LDBIndex_ALL]);
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockTX.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockTX.ts
index 1cb918363827a31df811dc1734accc91175d0276..8ccfa614e2162efca39138efadc2411fc4b8aee4 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockTX.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockTX.ts
@@ -1,13 +1,12 @@
-import {DBBlock} from "../../../../../db/DBBlock"
-import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock"
+import { DBBlock } from "../../../../../db/DBBlock";
+import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock";
 
 export class LevelIndexBlockTX extends LevelIndexBlock {
-
   matches(b: DBBlock): boolean {
-    return b.transactions.length > 0
+    return b.transactions.length > 0;
   }
 
   keys(b: DBBlock): string[] {
-    return [LDBIndex_ALL]
+    return [LDBIndex_ALL];
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockUD.ts b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockUD.ts
index 7a9c31949d4f23d812758d410f10199f6eca04cb..9b2703047f925c4ba588fe08b18a369a4ce9969a 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockUD.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/block/LevelIndexBlockUD.ts
@@ -1,13 +1,12 @@
-import {DBBlock} from "../../../../../db/DBBlock"
-import {LDBIndex_ALL, LevelIndexBlock} from "./LevelIndexBlock"
+import { DBBlock } from "../../../../../db/DBBlock";
+import { LDBIndex_ALL, LevelIndexBlock } from "./LevelIndexBlock";
 
 export class LevelIndexBlockUD extends LevelIndexBlock {
-
   matches(b: DBBlock): boolean {
-    return !!b.dividend
+    return !!b.dividend;
  }
 
   keys(b: DBBlock): string[] {
-    return [LDBIndex_ALL]
+    return [LDBIndex_ALL];
   }
 }
diff --git a/app/lib/dal/indexDAL/sqlite/SqlFieldDefinition.ts b/app/lib/dal/indexDAL/sqlite/SqlFieldDefinition.ts
index 47f7d7d3ce91c457cc37a34e27113f5f9b308a95..9211723179e35f30b2c25d9deebd6a9486b0ed6b 100644
--- a/app/lib/dal/indexDAL/sqlite/SqlFieldDefinition.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqlFieldDefinition.ts
@@ -1,31 +1,36 @@
-export type SqlFieldType = 'BOOLEAN' | 'VARCHAR' | 'TEXT' | 'JSON' | 'CHAR' | 'INT'
+export type SqlFieldType =
+  | "BOOLEAN"
+  | "VARCHAR"
+  | "TEXT"
+  | "JSON"
+  | "CHAR"
+  | "INT";
 
 export class SqlFieldDefinition {
-
   constructor(
     public readonly type: SqlFieldType,
     public readonly indexed = false,
     public readonly nullable = false,
-    public readonly length = 0) {
-  }
+    public readonly length = 0
+  ) {}
 }
 
 export class SqlNotNullableFieldDefinition extends SqlFieldDefinition {
-
   constructor(
     public readonly type: SqlFieldType,
     public readonly indexed = false,
-    public readonly length = 0) {
-    super(type, indexed, false, length)
+    public readonly length = 0
+  ) {
+    super(type, indexed, false, length);
   }
 }
 
 export class SqlNullableFieldDefinition extends SqlFieldDefinition {
-
   constructor(
     public readonly type: SqlFieldType,
    public readonly indexed = false,
-    public readonly length = 0) {
-    super(type, indexed, true, length)
+    public readonly length = 0
+  ) {
+    super(type, indexed, true, length);
   }
 }
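SqlFieldDefinition and its two subclasses carry everything SqliteTable needs to emit a CREATE TABLE statement: the SQL type, whether the column gets an index, nullability (fixed by the subclass), and an optional length. A hypothetical schema, only to show how the pieces combine (the column names here are made up):

import {
  SqlNotNullableFieldDefinition,
  SqlNullableFieldDefinition,
} from "./SqlFieldDefinition";

// (type, indexed, length) - nullability comes from the class itself,
// and length 0 means "driver default".
const exampleFields = {
  pub: new SqlNotNullableFieldDefinition("VARCHAR", true, 50),
  amount: new SqlNotNullableFieldDefinition("INT", false),
  comment: new SqlNullableFieldDefinition("TEXT", false),
};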
"../../drivers/SQLiteDriver" -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {SqliteTable} from "./SqliteTable" -import {SqlNotNullableFieldDefinition} from "./SqlFieldDefinition" -import {DividendDAO, DividendEntry, UDSource} from "../abstract/DividendDAO" -import {IindexEntry, SimpleTxInput, SimpleUdEntryForWallet, SindexEntry} from "../../../indexer" -import {DividendDaoHandler} from "../common/DividendDaoHandler" -import {DataErrors} from "../../../common-libs/errors" - -export class SqliteDividend extends SqliteTable<DividendEntry> implements DividendDAO { - - constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) { +import { SQLiteDriver } from "../../drivers/SQLiteDriver"; +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { SqliteTable } from "./SqliteTable"; +import { SqlNotNullableFieldDefinition } from "./SqlFieldDefinition"; +import { DividendDAO, DividendEntry, UDSource } from "../abstract/DividendDAO"; +import { + IindexEntry, + SimpleTxInput, + SimpleUdEntryForWallet, + SindexEntry, +} from "../../../indexer"; +import { DividendDaoHandler } from "../common/DividendDaoHandler"; +import { DataErrors } from "../../../common-libs/errors"; + +export class SqliteDividend extends SqliteTable<DividendEntry> + implements DividendDAO { + constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) { super( - 'dividend', + "dividend", { - 'pub': new SqlNotNullableFieldDefinition('VARCHAR', true, 50), - 'member': new SqlNotNullableFieldDefinition('BOOLEAN', true), - 'availables': new SqlNotNullableFieldDefinition('JSON', false), - 'consumed': new SqlNotNullableFieldDefinition('JSON', false), - 'consumedUDs': new SqlNotNullableFieldDefinition('JSON', false), - 'dividends': new SqlNotNullableFieldDefinition('JSON', false), + pub: new SqlNotNullableFieldDefinition("VARCHAR", true, 50), + member: new SqlNotNullableFieldDefinition("BOOLEAN", true), + availables: new SqlNotNullableFieldDefinition("JSON", false), + consumed: new SqlNotNullableFieldDefinition("JSON", false), + consumedUDs: new SqlNotNullableFieldDefinition("JSON", false), + dividends: new SqlNotNullableFieldDefinition("JSON", false), }, getSqliteDB - ) + ); } /** * TECHNICAL */ - cleanCache(): void { - } + cleanCache(): void {} - triggerInit(): void { - } + triggerInit(): void {} /** * INSERT @@ -40,144 +43,224 @@ export class SqliteDividend extends SqliteTable<DividendEntry> implements Divide @MonitorExecutionTime() async insert(record: DividendEntry): Promise<void> { - await this.insertInTable(this.driver, record) + await this.insertInTable(this.driver, record); } @MonitorExecutionTime() async insertBatch(records: DividendEntry[]): Promise<void> { if (records.length) { - return this.insertBatchInTable(this.driver, records) + return this.insertBatchInTable(this.driver, records); } } private async find(sql: string, params: any[]): Promise<DividendEntry[]> { - return (await this.driver.sqlRead(sql, params)).map(r => { + return (await this.driver.sqlRead(sql, params)).map((r) => { return { pub: r.pub, member: r.member, - availables: r.availables == null ? null : JSON.parse(r.availables as any), - consumed: r.consumed == null ? null : JSON.parse(r.consumed as any), - consumedUDs: r.consumedUDs == null ? null : JSON.parse(r.consumedUDs as any), - dividends: r.dividends == null ? null : JSON.parse(r.dividends as any), - } - }) + availables: + r.availables == null ? null : JSON.parse(r.availables as any), + consumed: r.consumed == null ? 
   async consume(filter: SindexEntry[]): Promise<void> {
     for (const dividendToConsume of filter) {
-      const row = (await this.find('SELECT * FROM dividend WHERE pub = ?', [dividendToConsume.identifier]))[0]
-      DividendDaoHandler.consume(row, dividendToConsume)
-      await this.update(this.driver, row, ['consumed', 'consumedUDs', 'availables', 'dividends'], ['pub'])
+      const row = (
+        await this.find("SELECT * FROM dividend WHERE pub = ?", [
+          dividendToConsume.identifier,
+        ])
+      )[0];
+      DividendDaoHandler.consume(row, dividendToConsume);
+      await this.update(
+        this.driver,
+        row,
+        ["consumed", "consumedUDs", "availables", "dividends"],
+        ["pub"]
+      );
     }
   }
 
   async createMember(pub: string): Promise<void> {
-    const existing = (await this.find('SELECT * FROM dividend WHERE pub = ?', [pub]))[0]
+    const existing = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [pub])
+    )[0];
     if (!existing) {
-      await this.insert(DividendDaoHandler.getNewDividendEntry(pub))
+      await this.insert(DividendDaoHandler.getNewDividendEntry(pub));
     } else {
-      await this.setMember(true, pub)
+      await this.setMember(true, pub);
     }
   }
 
   deleteMember(pub: string): Promise<void> {
-    return this.driver.sqlWrite('DELETE FROM dividend WHERE pub = ?', [pub])
+    return this.driver.sqlWrite("DELETE FROM dividend WHERE pub = ?", [pub]);
   }
 
   async findForDump(criterion: any): Promise<SindexEntry[]> {
-    return DividendDaoHandler.toDump(await this.find('SELECT * FROM dividend', []))
+    return DividendDaoHandler.toDump(
+      await this.find("SELECT * FROM dividend", [])
+    );
   }
 
-  findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<DividendEntry[]> {
-    let sql = `SELECT * FROM dividend ${criterion.pub ? 'WHERE pub = ?' : ''}`
+  findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<DividendEntry[]> {
+    let sql = `SELECT * FROM dividend ${criterion.pub ? "WHERE pub = ?" : ""}`;
     if (sort.length) {
-      sql += ` ORDER BY ${sort.map(s => `${s[0]} ${s[1] ? 'DESC' : 'ASC'}`).join(', ')}`
+      sql += ` ORDER BY ${sort
+        .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
+        .join(", ")}`;
     }
-    return this.find(sql, criterion.pub ? [criterion.pub] : [])
+    return this.find(sql, criterion.pub ? [criterion.pub] : []);
   }
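findRawWithOrder() assembles its ORDER BY clause from sort entries shaped like [column, descending?] pairs. The same expression, run on a made-up sort argument:

const sort: (string | boolean)[][] = [
  ["writtenOn", true],
  ["pub", false],
];
const orderBy = sort
  .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
  .join(", ");
console.log(orderBy); // -> "writtenOn DESC, pub ASC"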
   async findUdSourceByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number): Promise<SimpleTxInput[]> {
-    const member = (await this.find('SELECT * FROM dividend WHERE pub = ?', [identifier]))[0]
-    return DividendDaoHandler.getUDSourceByIdPosAmountBase(member, identifier, pos, amount, base)
+  async findUdSourceByIdentifierPosAmountBase(
+    identifier: string,
+    pos: number,
+    amount: number,
+    base: number
+  ): Promise<SimpleTxInput[]> {
+    const member = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [identifier])
+    )[0];
+    return DividendDaoHandler.getUDSourceByIdPosAmountBase(
+      member,
+      identifier,
+      pos,
+      amount,
+      base
+    );
   }
 
-  async getUDSource(identifier: string, pos: number): Promise<SimpleTxInput | null> {
-    const member = (await this.find('SELECT * FROM dividend WHERE pub = ?', [identifier]))[0]
-    return DividendDaoHandler.getUDSource(member, identifier, pos)
+  async getUDSource(
+    identifier: string,
+    pos: number
+  ): Promise<SimpleTxInput | null> {
+    const member = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [identifier])
+    )[0];
+    return DividendDaoHandler.getUDSource(member, identifier, pos);
   }
 
   async getUDSources(pub: string): Promise<UDSource[]> {
-    const member = (await this.find('SELECT * FROM dividend WHERE pub = ?', [pub]))[0]
+    const member = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [pub])
+    )[0];
     if (!member) {
-      return []
+      return [];
     }
-    return DividendDaoHandler.udSources(member)
+    return DividendDaoHandler.udSources(member);
   }
 
   getWrittenOn(blockstamp: string): Promise<DividendEntry[]> {
-    throw Error(DataErrors[DataErrors.DIVIDEND_GET_WRITTEN_ON_SHOULD_NOT_BE_USED_DIVIDEND_DAO])
+    throw Error(
+      DataErrors[
+        DataErrors.DIVIDEND_GET_WRITTEN_ON_SHOULD_NOT_BE_USED_DIVIDEND_DAO
+      ]
+    );
   }
 
   async getWrittenOnUDs(number: number): Promise<SimpleUdEntryForWallet[]> {
-    const res: SimpleUdEntryForWallet[] = []
-    const rows = await this.find('SELECT * FROM dividend WHERE member', [])
+    const res: SimpleUdEntryForWallet[] = [];
+    const rows = await this.find("SELECT * FROM dividend WHERE member", []);
     for (const row of rows) {
-      DividendDaoHandler.getWrittenOnUDs(row, number, res)
+      DividendDaoHandler.getWrittenOnUDs(row, number, res);
     }
-    return res
+    return res;
   }
 
-  async produceDividend(blockNumber: number, dividend: number, unitbase: number, local_iindex: IindexEntry[]): Promise<SimpleUdEntryForWallet[]> {
-    const dividends: SimpleUdEntryForWallet[] = []
-    const rows = await this.find('SELECT * FROM dividend WHERE member', [])
+  async produceDividend(
+    blockNumber: number,
+    dividend: number,
+    unitbase: number,
+    local_iindex: IindexEntry[]
+  ): Promise<SimpleUdEntryForWallet[]> {
+    const dividends: SimpleUdEntryForWallet[] = [];
+    const rows = await this.find("SELECT * FROM dividend WHERE member", []);
     for (const row of rows) {
-      DividendDaoHandler.produceDividend(row, blockNumber, dividend, unitbase, dividends)
-      await this.update(this.driver, row, ['availables', 'dividends'], ['pub'])
+      DividendDaoHandler.produceDividend(
+        row,
+        blockNumber,
+        dividend,
+        unitbase,
+        dividends
+      );
+      await this.update(this.driver, row, ["availables", "dividends"], ["pub"]);
     }
-    return dividends
+    return dividends;
   }
 
   removeBlock(blockstamp: string): Promise<void> {
-    throw Error(DataErrors[DataErrors.DIVIDEND_REMOVE_BLOCK_SHOULD_NOT_BE_USED_BY_DIVIDEND_DAO])
+    throw Error(
+      DataErrors[
+        DataErrors.DIVIDEND_REMOVE_BLOCK_SHOULD_NOT_BE_USED_BY_DIVIDEND_DAO
+      ]
+    );
   }
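revertUDs(), just below, first narrows its candidates with a coarse LIKE '%<number>%' match against the serialized JSON columns, then re-checks exactly in JS with row.availables.includes(number): the SQL pattern alone would also match block 13 or 31 when reverting block 3. The two-stage filter in miniature (data invented for the example):

// Stage 1 (SQL): a '%3%' pattern would keep both rows below.
const candidates = [
  { pub: "A", availables: [3, 7] },
  { pub: "B", availables: [13] },
];
// Stage 2 (JS): exact membership on the parsed arrays.
const toRevert = candidates.filter((row) => row.availables.includes(3));
console.log(toRevert.map((r) => r.pub)); // -> [ "A" ]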
-  async revertUDs(number: number): Promise<{
-    createdUDsDestroyedByRevert: SimpleUdEntryForWallet[]
-    consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[]
+  async revertUDs(
+    number: number
+  ): Promise<{
+    createdUDsDestroyedByRevert: SimpleUdEntryForWallet[];
+    consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[];
   }> {
-    const createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] = []
-    const consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] = []
+    const createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] = [];
+    const consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] = [];
     // Remove produced dividends at this block
-    const rows = await this.find('SELECT * FROM dividend WHERE availables like ? or dividends like ?', ['%' + number + '%', '%' + number + '%'])
-    for (const row of rows.filter(row => row.availables.includes(number))) {
-      DividendDaoHandler.removeDividendsProduced(row, number, createdUDsDestroyedByRevert)
-      await this.update(this.driver, row, ['availables', 'dividends'], ['pub'])
+    const rows = await this.find(
+      "SELECT * FROM dividend WHERE availables like ? or dividends like ?",
+      ["%" + number + "%", "%" + number + "%"]
+    );
+    for (const row of rows.filter((row) => row.availables.includes(number))) {
+      DividendDaoHandler.removeDividendsProduced(
+        row,
+        number,
+        createdUDsDestroyedByRevert
+      );
+      await this.update(this.driver, row, ["availables", "dividends"], ["pub"]);
     }
     // Unconsumed dividends consumed at this block
-    for (const row of rows.filter(row => row.consumed.includes(number))) {
-      DividendDaoHandler.unconsumeDividends(row, number, consumedUDsRecoveredByRevert)
-      await this.update(this.driver, row, ['availables', 'dividends'], ['pub'])
+    for (const row of rows.filter((row) => row.consumed.includes(number))) {
+      DividendDaoHandler.unconsumeDividends(
+        row,
+        number,
+        consumedUDsRecoveredByRevert
+      );
+      await this.update(this.driver, row, ["availables", "dividends"], ["pub"]);
     }
     return {
       createdUDsDestroyedByRevert,
       consumedUDsRecoveredByRevert,
-    }
+    };
   }
 
   async setMember(member: boolean, pub: string): Promise<void> {
-    await this.driver.sqlWrite('UPDATE dividend SET member = ? WHERE pub = ?', [true, pub])
+    await this.driver.sqlWrite("UPDATE dividend SET member = ? WHERE pub = ?", [
+      member,
+      pub,
+    ]);
   }
 
   async trimConsumedUDs(belowNumber: number): Promise<void> {
-    const rows = await this.find('SELECT * FROM dividend', [])
+    const rows = await this.find("SELECT * FROM dividend", []);
     for (const row of rows) {
       if (DividendDaoHandler.trimConsumed(row, belowNumber)) {
-        await this.update(this.driver, row, ['consumed', 'consumedUDs'], ['pub'])
+        await this.update(
+          this.driver,
+          row,
+          ["consumed", "consumedUDs"],
+          ["pub"]
+        );
       }
     }
   }
 
   listAll(): Promise<DividendEntry[]> {
-    return this.find('SELECT * FROM dividend', [])
+    return this.find("SELECT * FROM dividend", []);
   }
 }
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts
index 9b9fe405d8a1f8f2358762b1508f3ff2fe45e7d4..fdbadbdaf23fdb168c1bfa76fee797d7e226e7b4 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts
@@ -1,44 +1,45 @@
-import {FullIindexEntry, IindexEntry, Indexer} from "../../../indexer"
-import {SQLiteDriver} from "../../drivers/SQLiteDriver"
-import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime"
-import {IIndexDAO} from "../abstract/IIndexDAO"
-import {OldIindexEntry} from "../../../db/OldIindexEntry"
-import {OldTransformers} from "../common/OldTransformer"
-import {SqliteTable} from "./SqliteTable"
-import {SqlNotNullableFieldDefinition, SqlNullableFieldDefinition} from "./SqlFieldDefinition"
-
-export class SqliteIIndex extends SqliteTable<IindexEntry> implements IIndexDAO {
-
-  constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) {
+import { FullIindexEntry, IindexEntry, Indexer } from "../../../indexer";
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { IIndexDAO } from "../abstract/IIndexDAO";
+import { OldIindexEntry } from "../../../db/OldIindexEntry";
+import { OldTransformers } from "../common/OldTransformer";
+import { SqliteTable } from "./SqliteTable";
+import {
+  SqlNotNullableFieldDefinition,
+  SqlNullableFieldDefinition,
+} from "./SqlFieldDefinition";
+
+export class SqliteIIndex extends SqliteTable<IindexEntry>
+  implements IIndexDAO {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
     super(
-      'iindex',
+      "iindex",
       {
-        'op': new SqlNotNullableFieldDefinition('CHAR', false, 6),
-        'pub': new SqlNotNullableFieldDefinition('VARCHAR', true, 50),
-        'written_on': new SqlNotNullableFieldDefinition('VARCHAR', false, 80),
-        'writtenOn': new SqlNotNullableFieldDefinition('INT', true),
-        'created_on': new SqlNullableFieldDefinition('VARCHAR', false, 80),
-        'uid': new SqlNullableFieldDefinition('VARCHAR', true, 100),
-        'hash': new SqlNullableFieldDefinition('VARCHAR', false, 70),
-        'sig': new SqlNullableFieldDefinition('VARCHAR', false, 100),
-        'member': new SqlNullableFieldDefinition('BOOLEAN', true),
-        'wasMember': new SqlNullableFieldDefinition('BOOLEAN', true),
-        'kick': new SqlNullableFieldDefinition('BOOLEAN', true),
-        'wotb_id': new SqlNullableFieldDefinition('INT', true),
+        op: new SqlNotNullableFieldDefinition("CHAR", false, 6),
+        pub: new SqlNotNullableFieldDefinition("VARCHAR", true, 50),
+        written_on: new SqlNotNullableFieldDefinition("VARCHAR", false, 80),
+        writtenOn: new SqlNotNullableFieldDefinition("INT", true),
+        created_on: new SqlNullableFieldDefinition("VARCHAR", false, 80),
+        uid: new SqlNullableFieldDefinition("VARCHAR", true, 100),
+        hash: new SqlNullableFieldDefinition("VARCHAR", false, 70),
+        sig: new SqlNullableFieldDefinition("VARCHAR", false, 100),
+        member: new SqlNullableFieldDefinition("BOOLEAN", true),
+        wasMember: new SqlNullableFieldDefinition("BOOLEAN", true),
+        kick: new SqlNullableFieldDefinition("BOOLEAN", true),
+        wotb_id: new SqlNullableFieldDefinition("INT", true),
       },
       getSqliteDB
-    )
+    );
   }
 
   /**
    * TECHNICAL
    */
 
-  cleanCache(): void {
-  }
+  cleanCache(): void {}
 
-  triggerInit(): void {
-  }
+  triggerInit(): void {}
 
   /**
    * INSERT
@@ -46,13 +47,13 @@ export class SqliteIIndex extends SqliteTable<IindexEntry> implements IIndexDAO
 
   @MonitorExecutionTime()
   async insert(record: IindexEntry): Promise<void> {
-    await this.insertInTable(this.driver, record)
+    await this.insertInTable(this.driver, record);
   }
 
   @MonitorExecutionTime()
   async insertBatch(records: IindexEntry[]): Promise<void> {
     if (records.length) {
-      return this.insertBatchInTable(this.driver, records)
+      return this.insertBatchInTable(this.driver, records);
     }
   }
 
@@ -62,26 +63,37 @@ export class SqliteIIndex extends SqliteTable<IindexEntry> implements IIndexDAO
 
   @MonitorExecutionTime()
   async removeBlock(blockstamp: string): Promise<void> {
-    await this.driver.sqlWrite(`DELETE FROM iindex WHERE written_on = ?`, [blockstamp])
+    await this.driver.sqlWrite(`DELETE FROM iindex WHERE written_on = ?`, [
+      blockstamp,
+    ]);
   }
 
   @MonitorExecutionTime()
   async trimRecords(belowNumber: number): Promise<void> {
-    const belowRecords:IindexEntry[] = await this.driver.sqlRead('SELECT COUNT(*) as nbRecords, pub FROM iindex ' +
-      'WHERE writtenOn < ? ' +
-      'GROUP BY pub ' +
-      'HAVING nbRecords > 1', [belowNumber])
-    const reducedByPub = Indexer.DUP_HELPERS.reduceBy(belowRecords, ['pub']);
+    const belowRecords: IindexEntry[] = await this.driver.sqlRead(
+      "SELECT COUNT(*) as nbRecords, pub FROM iindex " +
+        "WHERE writtenOn < ? " +
+        "GROUP BY pub " +
+        "HAVING nbRecords > 1",
+      [belowNumber]
+    );
+    const reducedByPub = Indexer.DUP_HELPERS.reduceBy(belowRecords, ["pub"]);
     for (const record of reducedByPub) {
-      const recordsOfPub = await this.reducable(record.pub)
-      const toReduce = recordsOfPub.filter(rec => parseInt(rec.written_on) < belowNumber)
+      const recordsOfPub = await this.reducable(record.pub);
+      const toReduce = recordsOfPub.filter(
+        (rec) => parseInt(rec.written_on) < belowNumber
+      );
       if (toReduce.length && recordsOfPub.length > 1) {
         // Clean the records in the DB
-        await this.driver.sqlExec('DELETE FROM iindex WHERE pub = \'' + record.pub + '\'')
-        const nonReduced = recordsOfPub.filter(rec => parseInt(rec.written_on) >= belowNumber)
-        const reduced = Indexer.DUP_HELPERS.reduce(toReduce)
+        await this.driver.sqlExec(
+          "DELETE FROM iindex WHERE pub = '" + record.pub + "'"
+        );
+        const nonReduced = recordsOfPub.filter(
+          (rec) => parseInt(rec.written_on) >= belowNumber
+        );
+        const reduced = Indexer.DUP_HELPERS.reduce(toReduce);
         // Persist
-        await this.insertBatch([reduced].concat(nonReduced))
+        await this.insertBatch([reduced].concat(nonReduced));
       }
     }
   }
@@ -92,20 +104,25 @@ export class SqliteIIndex extends SqliteTable<IindexEntry> implements IIndexDAO
 
   @MonitorExecutionTime()
   async getWrittenOn(blockstamp: string): Promise<IindexEntry[]> {
-    return this.find('SELECT * FROM iindex WHERE written_on = ?', [blockstamp])
+    return this.find("SELECT * FROM iindex WHERE written_on = ?", [blockstamp]);
   }
 
   @MonitorExecutionTime()
-  async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<IindexEntry[]> {
-    let sql = `SELECT * FROM iindex ${criterion.pub ? 'WHERE pub = ?' : ''}`
+  async findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<IindexEntry[]> {
+    let sql = `SELECT * FROM iindex ${criterion.pub ? "WHERE pub = ?" : ""}`;
     if (sort.length) {
-      sql += ` ORDER BY ${sort.map(s => `${s[0]} ${s[1] ? 'DESC' : 'ASC'}`).join(', ')}`
+      sql += ` ORDER BY ${sort
+        .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
+        .join(", ")}`;
     }
-    return this.find(sql, criterion.pub ? [criterion.pub] : [])
+    return this.find(sql, criterion.pub ? [criterion.pub] : []);
   }
 
   private async find(sql: string, params: any[]): Promise<IindexEntry[]> {
-    return this.findEntities(sql, params)
+    return this.findEntities(sql, params);
   }
 
   /**
@@ -114,96 +131,136 @@ export class SqliteIIndex extends SqliteTable<IindexEntry> implements IIndexDAO
 
   @MonitorExecutionTime()
   async reducable(pub: string): Promise<IindexEntry[]> {
-    return this.find('SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC', [pub])
+    return this.find(
+      "SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC",
+      [pub]
+    );
   }
 
   //-----------------
 
   @MonitorExecutionTime()
   async findByPub(pub: string): Promise<IindexEntry[]> {
-    return this.find('SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC', [pub])
+    return this.find(
+      "SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC",
+      [pub]
+    );
   }
 
   @MonitorExecutionTime()
   async findByUid(uid: string): Promise<IindexEntry[]> {
-    return this.find('SELECT * FROM iindex WHERE uid = ? order by writtenOn ASC', [uid])
+    return this.find(
+      "SELECT * FROM iindex WHERE uid = ? order by writtenOn ASC",
+      [uid]
+    );
   }
 
   @MonitorExecutionTime()
   async getFromPubkey(pub: string): Promise<FullIindexEntry | null> {
-    const entries = await this.find('SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC', [pub])
+    const entries = await this.find(
+      "SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC",
+      [pub]
+    );
     if (!entries.length) {
-      return null
+      return null;
     }
-    return OldTransformers.iindexEntityOrNull(entries) as any
+    return OldTransformers.iindexEntityOrNull(entries) as any;
   }
 
   // Non-protocol
   @MonitorExecutionTime()
   async getFromPubkeyOrUid(search: string): Promise<FullIindexEntry | null> {
-    return Indexer.DUP_HELPERS.reduceOrNull((await this.find('SELECT * FROM iindex WHERE pub = ? OR uid = ?', [search, search])) as FullIindexEntry[])
+    return Indexer.DUP_HELPERS.reduceOrNull(
+      (await this.find("SELECT * FROM iindex WHERE pub = ? OR uid = ?", [
+        search,
+        search,
+      ])) as FullIindexEntry[]
+    );
   }
 
   @MonitorExecutionTime()
   async getFromUID(uid: string): Promise<FullIindexEntry | null> {
-    const entries = await this.find('SELECT * FROM iindex WHERE uid = ? order by writtenOn ASC', [uid])
+    const entries = await this.find(
+      "SELECT * FROM iindex WHERE uid = ? order by writtenOn ASC",
+      [uid]
+    );
     if (!entries.length) {
-      return null
+      return null;
     }
-    return this.getFromPubkey(entries[0].pub) as any
+    return this.getFromPubkey(entries[0].pub) as any;
   }
 
   @MonitorExecutionTime()
   async getFullFromHash(hash: string): Promise<FullIindexEntry> {
-    const entries = await this.find('SELECT * FROM iindex WHERE hash = ? order by writtenOn ASC', [hash])
+    const entries = await this.find(
+      "SELECT * FROM iindex WHERE hash = ? order by writtenOn ASC",
+      [hash]
+    );
     if (!entries.length) {
-      return null as any
+      return null as any;
     }
-    return this.getFromPubkey(entries[0].pub) as any
+    return this.getFromPubkey(entries[0].pub) as any;
   }
 
   @MonitorExecutionTime()
   async getFullFromPubkey(pub: string): Promise<FullIindexEntry> {
-    return (await this.getFromPubkey(pub)) as FullIindexEntry
+    return (await this.getFromPubkey(pub)) as FullIindexEntry;
   }
 
   @MonitorExecutionTime()
   async getFullFromUID(uid: string): Promise<FullIindexEntry> {
-    return (await this.getFromUID(uid)) as FullIindexEntry
+    return (await this.getFromUID(uid)) as FullIindexEntry;
  }
 
   @MonitorExecutionTime()
   async getMembers(): Promise<{ pubkey: string; uid: string | null }[]> {
-    const members = await this.find('SELECT * FROM iindex i1 ' +
-      'WHERE member AND NOT EXISTS (' +
-      ' SELECT * FROM iindex i2 ' +
-      ' WHERE i2.pub = i1.pub' +
-      ' AND i2.writtenOn > i1.writtenOn' +
-      ' AND NOT i2.member)', [])
-    await Promise.all(members.map(async m => {
-      if (!m.uid) {
-        const withUID = await this.find('SELECT * FROM iindex WHERE pub = ? AND uid IS NOT NULL', [m.pub])
-        m.uid = withUID[0].uid
-      }
-    }))
-    return members.map(m => ({
+    const members = await this.find(
+      "SELECT * FROM iindex i1 " +
+        "WHERE member AND NOT EXISTS (" +
+        " SELECT * FROM iindex i2 " +
+        " WHERE i2.pub = i1.pub" +
+        " AND i2.writtenOn > i1.writtenOn" +
+        " AND NOT i2.member)",
+      []
+    );
+    await Promise.all(
+      members.map(async (m) => {
+        if (!m.uid) {
+          const withUID = await this.find(
+            "SELECT * FROM iindex WHERE pub = ? AND uid IS NOT NULL",
+            [m.pub]
+          );
+          m.uid = withUID[0].uid;
+        }
+      })
+    );
+    return members.map((m) => ({
       pubkey: m.pub,
-      uid: m.uid
-    }))
+      uid: m.uid,
+    }));
   }
 
   @MonitorExecutionTime()
   async getToBeKickedPubkeys(): Promise<string[]> {
-    return (await this.find('SELECT * FROM iindex i1 ' +
-      'WHERE kick AND NOT EXISTS (' +
-      ' SELECT * FROM iindex i2 ' +
-      ' WHERE i2.pub = i1.pub' +
-      ' AND i2.writtenOn > i1.writtenOn)', [])).map(r => r.pub)
+    return (
+      await this.find(
+        "SELECT * FROM iindex i1 " +
+          "WHERE kick AND NOT EXISTS (" +
+          " SELECT * FROM iindex i2 " +
+          " WHERE i2.pub = i1.pub" +
+          " AND i2.writtenOn > i1.writtenOn)",
+        []
+      )
+    ).map((r) => r.pub);
   }
 
   @MonitorExecutionTime()
   async searchThoseMatching(search: string): Promise<OldIindexEntry[]> {
-    return (await this.find('SELECT * FROM iindex WHERE pub = ? OR uid = ?', [search, search]))
-      .map(OldTransformers.toOldIindexEntry)
+    return (
+      await this.find("SELECT * FROM iindex WHERE pub = ? OR uid = ?", [
+        search,
+        search,
+      ])
+    ).map(OldTransformers.toOldIindexEntry);
   }
 }
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts
index 3b2c5fa9de265defcca64108ab2b6616df3aae67..6fe3b6230a545088d50a2cdcb03099cfdb1f80bb 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts
@@ -1,56 +1,58 @@
-import {FullMindexEntry, Indexer, MindexEntry} from "../../../indexer"
-import {SQLiteDriver} from "../../drivers/SQLiteDriver"
-import {MIndexDAO} from "../abstract/MIndexDAO"
-import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime"
-import {SqliteNodeIOManager} from "./SqliteNodeIOManager"
-import {CommonConstants} from "../../../common-libs/constants"
-import {SqliteTable} from "./SqliteTable"
-import {SqlNotNullableFieldDefinition, SqlNullableFieldDefinition} from "./SqlFieldDefinition"
+import { FullMindexEntry, Indexer, MindexEntry } from "../../../indexer";
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MIndexDAO } from "../abstract/MIndexDAO";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { SqliteNodeIOManager } from "./SqliteNodeIOManager";
+import { CommonConstants } from "../../../common-libs/constants";
+import { SqliteTable } from "./SqliteTable";
+import {
+  SqlNotNullableFieldDefinition,
+  SqlNullableFieldDefinition,
+} from "./SqlFieldDefinition";
 
-export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO {
-
-  private readonly p2: Promise<SQLiteDriver>
+export class SqliteMIndex extends SqliteTable<MindexEntry>
+  implements MIndexDAO {
+  private readonly p2: Promise<SQLiteDriver>;
   private d2: SqliteNodeIOManager<{
-    pub: string,
-    created_on: string,
-    expires_on: number | null,
-    expired_on: number | null,
-    revokes_on: number | null,
-    writtenOn: number,
-  }>
+    pub: string;
+    created_on: string;
+    expires_on: number | null;
+    expired_on: number | null;
+    revokes_on: number | null;
+    writtenOn: number;
+  }>;
 
-  constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
     super(
-      'mindex',
+      "mindex",
       {
-        'op': new SqlNotNullableFieldDefinition('CHAR', false, 6),
-        'pub': new SqlNotNullableFieldDefinition('VARCHAR', true, 50),
-        'written_on': new SqlNotNullableFieldDefinition('VARCHAR', true, 80),
-        'writtenOn': new SqlNotNullableFieldDefinition('INT', true),
-        'created_on': new SqlNotNullableFieldDefinition('VARCHAR', true, 80),
-        'expires_on': new SqlNullableFieldDefinition('INT', true),
-        'expired_on': new SqlNullableFieldDefinition('INT', false),
-        'revocation': new SqlNullableFieldDefinition('VARCHAR', false, 100),
-        'revokes_on': new SqlNullableFieldDefinition('INT', true),
-        'chainable_on': new SqlNullableFieldDefinition('INT', true),
-        'revoked_on': new SqlNullableFieldDefinition('VARCHAR', true, 80),
-        'leaving': new SqlNullableFieldDefinition('BOOLEAN', false),
+        op: new SqlNotNullableFieldDefinition("CHAR", false, 6),
+        pub: new SqlNotNullableFieldDefinition("VARCHAR", true, 50),
+        written_on: new SqlNotNullableFieldDefinition("VARCHAR", true, 80),
+        writtenOn: new SqlNotNullableFieldDefinition("INT", true),
+        created_on: new SqlNotNullableFieldDefinition("VARCHAR", true, 80),
+        expires_on: new SqlNullableFieldDefinition("INT", true),
+        expired_on: new SqlNullableFieldDefinition("INT", false),
+        revocation: new SqlNullableFieldDefinition("VARCHAR", false, 100),
+        revokes_on: new SqlNullableFieldDefinition("INT", true),
+        chainable_on: new SqlNullableFieldDefinition("INT", true),
+        revoked_on: new SqlNullableFieldDefinition("VARCHAR", true, 80),
+        leaving: new SqlNullableFieldDefinition("BOOLEAN", false),
       },
       getSqliteDB
-    )
-    this.p2 = getSqliteDB('c_mindex.db')
+    );
+    this.p2 = getSqliteDB("c_mindex.db");
   }
 
   /**
    * TECHNICAL
    */
 
-  cleanCache(): void {
-  }
+  cleanCache(): void {}
 
   async init(): Promise<void> {
-    await super.init()
-    this.d2 = new SqliteNodeIOManager(await this.p2, 'c_mindex')
+    await super.init();
+    this.d2 = new SqliteNodeIOManager(await this.p2, "c_mindex");
     // COMPUTED
     await this.d2.sqlExec(`
       BEGIN;
@@ -69,11 +71,10 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
       CREATE INDEX IF NOT EXISTS idx_c_mindex_revokes_on ON c_mindex (revokes_on);
       CREATE INDEX IF NOT EXISTS idx_c_mindex_writtenOn ON c_mindex (writtenOn);
       COMMIT;
-    `)
+    `);
   }
 
-  triggerInit(): void {
-  }
+  triggerInit(): void {}
 
   /**
    * INSERT
@@ -81,32 +82,37 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
 
   @MonitorExecutionTime()
   async insert(record: MindexEntry): Promise<void> {
-    await this.insertInTable(this.driver, record)
+    await this.insertInTable(this.driver, record);
   }
 
   @MonitorExecutionTime()
   async insertBatch(records: MindexEntry[]): Promise<void> {
     if (records.length) {
-      await this.insertBatchInTable(this.driver, records)
+      await this.insertBatchInTable(this.driver, records);
       // Computed
-      const cCreates = records.filter(r => r.op === CommonConstants.IDX_CREATE).map(r => `(
+      const cCreates = records
+        .filter((r) => r.op === CommonConstants.IDX_CREATE)
+        .map(
+          (r) => `(
         '${r.pub}',
         '${r.created_on}',
         ${r.expires_on || null},
        ${r.expired_on},
        ${r.revokes_on || null},
        ${r.writtenOn}
-      )`).join(',')
+      )`
+        )
+        .join(",");
       if (cCreates) {
-        await this.insertD2(cCreates)
+        await this.insertD2(cCreates);
      }
       records
-        .filter(r => r.op === CommonConstants.IDX_UPDATE)
+        .filter((r) => r.op === CommonConstants.IDX_UPDATE)
         .forEach(async (r) => {
-          if (r.expires_on || r.expired_on || r.revokes_on) {
-            await this.updateD2(r)
+          if (r.expires_on || r.expired_on || r.revokes_on) {
+            await this.updateD2(r);
           }
-        })
+        });
     }
   }
@@ -119,20 +125,20 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
       expired_on,
       revokes_on,
       writtenOn
-    ) VALUES ${cCreates}`
-    await this.d2.sqlWrite(req, [])
+    ) VALUES ${cCreates}`;
+    await this.d2.sqlWrite(req, []);
   }
 
   @MonitorExecutionTime()
   async updateD2(r: MindexEntry) {
     const req = `UPDATE c_mindex SET
-    ${r.created_on ? `created_on = '${r.created_on}',` : ''}
-    ${r.expires_on ? `expires_on = ${r.expires_on},` : ''}
-    ${r.expired_on ? `expired_on = ${r.expired_on},` : ''}
-    ${r.revokes_on ? `revokes_on = ${r.revokes_on},` : ''}
+    ${r.created_on ? `created_on = '${r.created_on}',` : ""}
+    ${r.expires_on ? `expires_on = ${r.expires_on},` : ""}
+    ${r.expired_on ? `expired_on = ${r.expired_on},` : ""}
+    ${r.revokes_on ? `revokes_on = ${r.revokes_on},` : ""}
     writtenOn = ${r.writtenOn}
-    WHERE pub = ?`
-    await this.d2.sqlWrite(req, [r.pub])
+    WHERE pub = ?`;
+    await this.d2.sqlWrite(req, [r.pub]);
   }
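Unlike the ?-bound writes elsewhere in these DAOs, updateD2() splices its values straight into the SQL template; only pub travels as a binding. The fields involved are numeric or locally generated blockstamps, so this is workable, but a condensed version of the template makes the shape of the generated statement visible (values invented for the example):

const r = { created_on: "12-AB", expires_on: 1000, writtenOn: 42 } as any;
const req = `UPDATE c_mindex SET
  ${r.created_on ? `created_on = '${r.created_on}',` : ""}
  ${r.expires_on ? `expires_on = ${r.expires_on},` : ""}
  writtenOn = ${r.writtenOn}
  WHERE pub = ?`;
// -> UPDATE c_mindex SET created_on = '12-AB', expires_on = 1000, writtenOn = 42 WHERE pub = ?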
 
   /**
@@ -141,26 +147,37 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
 
   @MonitorExecutionTime()
   async removeBlock(blockstamp: string): Promise<void> {
-    await this.driver.sqlWrite(`DELETE FROM mindex WHERE written_on = ?`, [blockstamp])
+    await this.driver.sqlWrite(`DELETE FROM mindex WHERE written_on = ?`, [
+      blockstamp,
+    ]);
   }
 
   @MonitorExecutionTime()
   async trimRecords(belowNumber: number): Promise<void> {
-    const belowRecords:MindexEntry[] = await this.driver.sqlRead('SELECT COUNT(*) as nbRecords, pub FROM mindex ' +
-      'WHERE writtenOn < ? ' +
-      'GROUP BY pub ' +
-      'HAVING nbRecords > 1', [belowNumber])
-    const reducedByPub = Indexer.DUP_HELPERS.reduceBy(belowRecords, ['pub']);
+    const belowRecords: MindexEntry[] = await this.driver.sqlRead(
+      "SELECT COUNT(*) as nbRecords, pub FROM mindex " +
+        "WHERE writtenOn < ? " +
+        "GROUP BY pub " +
+        "HAVING nbRecords > 1",
+      [belowNumber]
+    );
+    const reducedByPub = Indexer.DUP_HELPERS.reduceBy(belowRecords, ["pub"]);
     for (const record of reducedByPub) {
-      const recordsOfPub = await this.reducable(record.pub)
-      const toReduce = recordsOfPub.filter(rec => parseInt(rec.written_on) < belowNumber)
+      const recordsOfPub = await this.reducable(record.pub);
+      const toReduce = recordsOfPub.filter(
+        (rec) => parseInt(rec.written_on) < belowNumber
+      );
       if (toReduce.length && recordsOfPub.length > 1) {
         // Clean the records in the DB
-        await this.driver.sqlExec('DELETE FROM mindex WHERE pub = \'' + record.pub + '\'')
-        const nonReduced = recordsOfPub.filter(rec => parseInt(rec.written_on) >= belowNumber)
-        const reduced = Indexer.DUP_HELPERS.reduce(toReduce)
+        await this.driver.sqlExec(
+          "DELETE FROM mindex WHERE pub = '" + record.pub + "'"
+        );
+        const nonReduced = recordsOfPub.filter(
+          (rec) => parseInt(rec.written_on) >= belowNumber
+        );
+        const reduced = Indexer.DUP_HELPERS.reduce(toReduce);
         // Persist
-        await this.insertBatch([reduced].concat(nonReduced))
+        await this.insertBatch([reduced].concat(nonReduced));
       }
     }
   }
@@ -170,64 +187,86 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
    */
 
   @MonitorExecutionTime()
-  async findByPubAndChainableOnGt(pub: string, medianTime: number): Promise<MindexEntry[]> {
-    return this.find('SELECT * FROM mindex WHERE pub = ? AND chainable_on > ?', [pub, medianTime])
+  async findByPubAndChainableOnGt(
+    pub: string,
+    medianTime: number
+  ): Promise<MindexEntry[]> {
+    return this.find(
+      "SELECT * FROM mindex WHERE pub = ? AND chainable_on > ?",
+      [pub, medianTime]
+    );
   }
 
   @MonitorExecutionTime()
-  async findPubkeysThatShouldExpire(medianTime: number): Promise<{ pub: string, created_on: string }[]> {
-    return this.find('SELECT *, (' +
-      // Le dernier renouvellement
-      ' SELECT m2.expires_on ' +
-      ' FROM mindex m2 ' +
-      ' WHERE m2.pub = m1.pub ' +
-      ' AND m2.writtenOn = (' +
-      '   SELECT MAX(m4.writtenOn)' +
-      '   FROM mindex m4' +
-      '   WHERE pub = m2.pub' +
-      ' )' +
-      ') as renewal, (' +
-      // La dernière expiration
-      ' SELECT m2.expired_on ' +
-      ' FROM mindex m2 ' +
-      ' WHERE m2.pub = m1.pub ' +
-      ' AND m2.writtenOn = (' +
-      '   SELECT MAX(m4.writtenOn)' +
-      '   FROM mindex m4' +
-      '   WHERE pub = m2.pub' +
-      ' )' +
-      ') as expiry ' +
-      'FROM mindex m1 ' +
-      'WHERE m1.expires_on <= ? ' +
-      'AND m1.revokes_on > ? ' +
-      'AND (renewal IS NULL OR renewal <= ?) ' +
-      'AND (expiry IS NULL)', [medianTime, medianTime, medianTime])
+  async findPubkeysThatShouldExpire(
+    medianTime: number
+  ): Promise<{ pub: string; created_on: string }[]> {
+    return this.find(
+      "SELECT *, (" +
+        // The latest renewal
+        " SELECT m2.expires_on " +
+        " FROM mindex m2 " +
+        " WHERE m2.pub = m1.pub " +
+        " AND m2.writtenOn = (" +
+        "   SELECT MAX(m4.writtenOn)" +
+        "   FROM mindex m4" +
+        "   WHERE pub = m2.pub" +
+        " )" +
+        ") as renewal, (" +
+        // The latest expiry
+        " SELECT m2.expired_on " +
+        " FROM mindex m2 " +
+        " WHERE m2.pub = m1.pub " +
+        " AND m2.writtenOn = (" +
+        "   SELECT MAX(m4.writtenOn)" +
+        "   FROM mindex m4" +
+        "   WHERE pub = m2.pub" +
+        " )" +
+        ") as expiry " +
+        "FROM mindex m1 " +
+        "WHERE m1.expires_on <= ? " +
+        "AND m1.revokes_on > ? " +
+        "AND (renewal IS NULL OR renewal <= ?) " +
+        "AND (expiry IS NULL)",
+      [medianTime, medianTime, medianTime]
+    );
   }
 
   @MonitorExecutionTime()
-  async findRevokesOnLteAndRevokedOnIsNull(medianTime: number): Promise<string[]> {
-    return (await this.find('SELECT * FROM mindex WHERE revokes_on <= ? AND revoked_on IS NULL', [medianTime]))
-      .map(e => e.pub)
+  async findRevokesOnLteAndRevokedOnIsNull(
+    medianTime: number
+  ): Promise<string[]> {
+    return (
+      await this.find(
+        "SELECT * FROM mindex WHERE revokes_on <= ? AND revoked_on IS NULL",
+        [medianTime]
+      )
+    ).map((e) => e.pub);
   }
 
   @MonitorExecutionTime()
   async getWrittenOn(blockstamp: string): Promise<MindexEntry[]> {
-    return this.find('SELECT * FROM mindex WHERE written_on = ?', [blockstamp])
+    return this.find("SELECT * FROM mindex WHERE written_on = ?", [blockstamp]);
   }
 
   @MonitorExecutionTime()
-  async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<MindexEntry[]> {
-    let sql = `SELECT * FROM mindex ${criterion.pub ? 'WHERE pub = ?' : ''}`
+  async findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<MindexEntry[]> {
+    let sql = `SELECT * FROM mindex ${criterion.pub ? "WHERE pub = ?" : ""}`;
     if (sort.length) {
-      sql += ` ORDER BY ${sort.map(s => `${s[0]} ${s[1] ? 'DESC' : 'ASC'}`).join(', ')}`
+      sql += ` ORDER BY ${sort
+        .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
+        .join(", ")}`;
     }
-    return this.find(sql, criterion.pub ? [criterion.pub] : [])
+    return this.find(sql, criterion.pub ? [criterion.pub] : []);
   }
 
   private async find(sql: string, params: any[]): Promise<MindexEntry[]> {
-    return (await this.driver.sqlRead(sql, params)).map(r => {
+    return (await this.driver.sqlRead(sql, params)).map((r) => {
       return {
-        index: 'MINDEX',
+        index: "MINDEX",
         op: r.op,
         pub: r.pub,
         written_on: r.written_on,
@@ -243,23 +282,36 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
         leaving: r.leaving !== null ? r.leaving : null,
         age: 0,
         unchainables: 0,
-      }
-    })
+      };
+    });
   }
 
   @MonitorExecutionTime()
-  async getReducedMSForImplicitRevocation(pub: string): Promise<FullMindexEntry | null> {
-    return Indexer.DUP_HELPERS.reduceOrNull((await this.reducable(pub)) as FullMindexEntry[])
+  async getReducedMSForImplicitRevocation(
+    pub: string
+  ): Promise<FullMindexEntry | null> {
+    return Indexer.DUP_HELPERS.reduceOrNull(
+      (await this.reducable(pub)) as FullMindexEntry[]
+    );
   }
 
   @MonitorExecutionTime()
-  async getReducedMSForMembershipExpiry(pub: string): Promise<FullMindexEntry | null> {
-    return Indexer.DUP_HELPERS.reduceOrNull((await this.reducable(pub)) as FullMindexEntry[])
+  async getReducedMSForMembershipExpiry(
+    pub: string
+  ): Promise<FullMindexEntry | null> {
+    return Indexer.DUP_HELPERS.reduceOrNull(
+      (await this.reducable(pub)) as FullMindexEntry[]
+    );
   }
 
   @MonitorExecutionTime()
   async getRevokedPubkeys(): Promise<string[]> {
-    return (await this.driver.sqlRead('SELECT DISTINCT(pub) FROM mindex WHERE revoked_on IS NOT NULL', [])).map(r => r.pub)
+    return (
+      await this.driver.sqlRead(
+        "SELECT DISTINCT(pub) FROM mindex WHERE revoked_on IS NOT NULL",
+        []
+      )
+    ).map((r) => r.pub);
   }
 
   /**
@@ -269,15 +321,17 @@ export class SqliteMIndex extends SqliteTable<MindexEntry> implements MIndexDAO
 
   @MonitorExecutionTime()
   async reducable(pub: string): Promise<MindexEntry[]> {
     // await this.dump()
-    return this.findEntities('SELECT * FROM mindex WHERE pub = ? order by writtenOn ASC', [pub])
+    return this.findEntities(
+      "SELECT * FROM mindex WHERE pub = ? order by writtenOn ASC",
+      [pub]
+    );
   }
 
   async findExpiresOnLteAndRevokesOnGt(medianTime: number): Promise<string[]> {
-    return []
+    return [];
   }
 
   async getReducedMS(pub: string): Promise<FullMindexEntry | null> {
-    return null
+    return null;
   }
-
 }
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts b/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts
index 2ddd55b633e644003aeead0624cf2d2ba3e76f7f..be65c3308f6d23dc9121f2016eb30bd2b4191a32 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts
@@ -1,50 +1,47 @@
-import {SQLiteDriver} from "../../drivers/SQLiteDriver"
-import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime"
-import {NewLogger} from "../../../logger"
-import {ExitCodes} from "../../../common-libs/exit-codes"
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { NewLogger } from "../../../logger";
+import { ExitCodes } from "../../../common-libs/exit-codes";
 
 export class SqliteNodeIOManager<T> {
+  private writePromise: Promise<any> | null = null;
 
-  private writePromise: Promise<any>|null = null
+  constructor(private driver: SQLiteDriver, private id: string) {}
 
-  constructor(private driver: SQLiteDriver, private id: string) {
-
-  }
-
-  @MonitorExecutionTime('id')
+  @MonitorExecutionTime("id")
   private async wait4writing() {
-    await this.writePromise
+    await this.writePromise;
     // We no more need to wait
-    this.writePromise = null
+    this.writePromise = null;
   }
 
   public async sqlWrite(sql: string, params: any[]) {
     // // Just promise that the writing will be done
     this.writePromise = (this.writePromise || Promise.resolve())
       .then(() => this.driver.executeAll(sql, params))
-      .catch(e => {
-        NewLogger().error(e)
-        process.exit(ExitCodes.MINDEX_WRITING_ERROR)
-      })
+      .catch((e) => {
+        NewLogger().error(e);
+        process.exit(ExitCodes.MINDEX_WRITING_ERROR);
+      });
   }
 
   public async sqlExec(sql: string) {
     if (this.writePromise) {
       // Wait for current writings to be done
-      await this.wait4writing()
+      await this.wait4writing();
     }
-    return this.driver.executeSql(sql)
+    return this.driver.executeSql(sql);
   }
 
   public async sqlRead(sql: string, params: any[]): Promise<T[]> {
     if (this.writePromise) {
       // Wait for current writings to be done
-      await this.wait4writing()
+      await this.wait4writing();
     }
-    return this.driver.executeAll(sql, params)
+    return this.driver.executeAll(sql, params);
   }
 
   async close() {
-    await this.driver.closeConnection()
+    await this.driver.closeConnection();
   }
 }
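SqliteNodeIOManager is the small synchronisation layer behind the c_mindex side table: sqlWrite() never awaits the driver directly but chains each write onto one pending promise, while sqlRead() and sqlExec() drain that chain first, so reads always observe the last queued write. Usage, sketched with a stand-in driver variable:

declare const driver: any; // stands in for a real SQLiteDriver instance
const io = new SqliteNodeIOManager<{ pub: string }>(driver, "c_mindex");

// Queued onto the write chain, deliberately not awaited:
io.sqlWrite("INSERT INTO c_mindex (pub) VALUES (?)", ["ABC"]);
// Waits for the queue to flush before reading:
const rows = await io.sqlRead("SELECT * FROM c_mindex", []);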
SqlNullableFieldDefinition("BOOLEAN", false), }, getSqliteDB - ) + ); } /** @@ -36,57 +35,69 @@ export class SqlitePeers extends SqliteTable<DBPeer> implements PeerDAO { @MonitorExecutionTime() async insert(record: DBPeer): Promise<void> { - await this.insertInTable(this.driver, record) + await this.insertInTable(this.driver, record); } @MonitorExecutionTime() async insertBatch(records: DBPeer[]): Promise<void> { if (records.length) { - return this.insertBatchInTable(this.driver, records) + return this.insertBatchInTable(this.driver, records); } } - cleanCache(): void { - } + cleanCache(): void {} async countNonWoTPeers(): Promise<number> { - return ((await this.driver.sqlRead('SELECT COUNT(*) as _count FROM peers WHERE nonWoT', []))[0] as any)['_count'] + return (( + await this.driver.sqlRead( + "SELECT COUNT(*) as _count FROM peers WHERE nonWoT", + [] + ) + )[0] as any)["_count"]; } deleteNonWotPeersWhoseLastContactIsAbove(threshold: number): Promise<void> { - return this.driver.sqlWrite('DELETE FROM peers WHERE (nonWoT OR nonWoT IS NULL) AND lastContact <= ?', [threshold]) + return this.driver.sqlWrite( + "DELETE FROM peers WHERE (nonWoT OR nonWoT IS NULL) AND lastContact <= ?", + [threshold] + ); } async getPeer(pubkey: string): Promise<DBPeer> { - return (await this.findEntities('SELECT * FROM peers WHERE pubkey = ?', [pubkey]))[0] + return ( + await this.findEntities("SELECT * FROM peers WHERE pubkey = ?", [pubkey]) + )[0]; } getPeersWithEndpointsLike(ep: string): Promise<DBPeer[]> { - return this.findEntities('SELECT * FROM peers WHERE endpoints LIKE ?', [`%${ep}%`]) + return this.findEntities("SELECT * FROM peers WHERE endpoints LIKE ?", [ + `%${ep}%`, + ]); } listAll(): Promise<DBPeer[]> { - return this.findEntities('SELECT * FROM peers', []) + return this.findEntities("SELECT * FROM peers", []); } removeAll(): Promise<void> { - return this.driver.sqlWrite('DELETE FROM peers', []) + return this.driver.sqlWrite("DELETE FROM peers", []); } removePeerByPubkey(pubkey: string): Promise<void> { - return this.driver.sqlWrite('DELETE FROM peers WHERE pubkey = ?', [pubkey]) + return this.driver.sqlWrite("DELETE FROM peers WHERE pubkey = ?", [pubkey]); } async savePeer(peer: DBPeer): Promise<DBPeer> { - await this.driver.sqlWrite('DELETE FROM peers WHERE pubkey = ?', [peer.pubkey]) - await this.insert(peer) - return peer + await this.driver.sqlWrite("DELETE FROM peers WHERE pubkey = ?", [ + peer.pubkey, + ]); + await this.insert(peer); + return peer; } - triggerInit(): void { - } + triggerInit(): void {} withUPStatus(): Promise<DBPeer[]> { - return this.findEntities('SELECT * FROM peers WHERE status = ?', ['UP']) + return this.findEntities("SELECT * FROM peers WHERE status = ?", ["UP"]); } } diff --git a/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts index afadabc9b8034516c70d00149ac1b02f8e016795..c208fa380f3395ae3980d39f7e977582e019bd3f 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts @@ -1,45 +1,52 @@ -import {FullSindexEntry, Indexer, SimpleTxEntryForWallet, SimpleTxInput, SindexEntry} from "../../../indexer" -import {SQLiteDriver} from "../../drivers/SQLiteDriver" -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {SqliteTable} from "./SqliteTable" -import {SqlNotNullableFieldDefinition, SqlNullableFieldDefinition} from "./SqlFieldDefinition" -import {SIndexDAO} from "../abstract/SIndexDAO" - -export class SqliteSIndex extends SqliteTable<SindexEntry> 
implements SIndexDAO { - - constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) { +import { + FullSindexEntry, + Indexer, + SimpleTxEntryForWallet, + SimpleTxInput, + SindexEntry, +} from "../../../indexer"; +import { SQLiteDriver } from "../../drivers/SQLiteDriver"; +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { SqliteTable } from "./SqliteTable"; +import { + SqlNotNullableFieldDefinition, + SqlNullableFieldDefinition, +} from "./SqlFieldDefinition"; +import { SIndexDAO } from "../abstract/SIndexDAO"; + +export class SqliteSIndex extends SqliteTable<SindexEntry> + implements SIndexDAO { + constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) { super( - 'sindex', + "sindex", { - 'op': new SqlNotNullableFieldDefinition('CHAR', false, 6), - 'written_on': new SqlNotNullableFieldDefinition('VARCHAR', false, 80), - 'writtenOn': new SqlNotNullableFieldDefinition('INT', true), - 'srcType': new SqlNotNullableFieldDefinition('CHAR', true, 1), - 'tx': new SqlNullableFieldDefinition('VARCHAR', true, 70), - 'identifier': new SqlNotNullableFieldDefinition('VARCHAR', true, 70), - 'pos': new SqlNotNullableFieldDefinition('INT', true), - 'created_on': new SqlNullableFieldDefinition('VARCHAR', false, 100), - 'written_time': new SqlNotNullableFieldDefinition('INT', true), - 'locktime': new SqlNullableFieldDefinition('INT', false), - 'unlock': new SqlNullableFieldDefinition('VARCHAR', false, 255), - 'amount': new SqlNotNullableFieldDefinition('INT', false), - 'base': new SqlNotNullableFieldDefinition('INT', false), - 'conditions': new SqlNotNullableFieldDefinition('VARCHAR', true, 1000), - 'consumed': new SqlNullableFieldDefinition('BOOLEAN', true), + op: new SqlNotNullableFieldDefinition("CHAR", false, 6), + written_on: new SqlNotNullableFieldDefinition("VARCHAR", false, 80), + writtenOn: new SqlNotNullableFieldDefinition("INT", true), + srcType: new SqlNotNullableFieldDefinition("CHAR", true, 1), + tx: new SqlNullableFieldDefinition("VARCHAR", true, 70), + identifier: new SqlNotNullableFieldDefinition("VARCHAR", true, 70), + pos: new SqlNotNullableFieldDefinition("INT", true), + created_on: new SqlNullableFieldDefinition("VARCHAR", false, 100), + written_time: new SqlNotNullableFieldDefinition("INT", true), + locktime: new SqlNullableFieldDefinition("INT", false), + unlock: new SqlNullableFieldDefinition("VARCHAR", false, 255), + amount: new SqlNotNullableFieldDefinition("INT", false), + base: new SqlNotNullableFieldDefinition("INT", false), + conditions: new SqlNotNullableFieldDefinition("VARCHAR", true, 1000), + consumed: new SqlNullableFieldDefinition("BOOLEAN", true), }, getSqliteDB - ) + ); } /** * TECHNICAL */ - cleanCache(): void { - } + cleanCache(): void {} - triggerInit(): void { - } + triggerInit(): void {} /** * INSERT @@ -47,13 +54,13 @@ export class SqliteSIndex extends SqliteTable<SindexEntry> implements SIndexDAO @MonitorExecutionTime() async insert(record: SindexEntry): Promise<void> { - await this.insertInTable(this.driver, record) + await this.insertInTable(this.driver, record); } @MonitorExecutionTime() async insertBatch(records: SindexEntry[]): Promise<void> { if (records.length) { - return this.insertBatchInTable(this.driver, records) + return this.insertBatchInTable(this.driver, records); } } @@ -63,12 +70,14 @@ export class SqliteSIndex extends SqliteTable<SindexEntry> implements SIndexDAO @MonitorExecutionTime() async removeBlock(blockstamp: string): Promise<void> { - await this.driver.sqlWrite(`DELETE FROM sindex 
WHERE written_on = ?`, [blockstamp])
+    await this.driver.sqlWrite(`DELETE FROM sindex WHERE written_on = ?`, [
+      blockstamp,
+    ]);
   }
 
   @MonitorExecutionTime()
   async trimRecords(belowNumber: number): Promise<void> {
-    await this.trimConsumedSource(belowNumber)
+    await this.trimConsumedSource(belowNumber);
   }
 
   /**
@@ -77,22 +86,27 @@ export class SqliteSIndex extends SqliteTable<SindexEntry> implements SIndexDAO
 
   @MonitorExecutionTime()
   async getWrittenOn(blockstamp: string): Promise<SindexEntry[]> {
-    return this.find('SELECT * FROM sindex WHERE written_on = ?', [blockstamp])
+    return this.find("SELECT * FROM sindex WHERE written_on = ?", [blockstamp]);
   }
 
   @MonitorExecutionTime()
-  async findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<SindexEntry[]> {
-    let sql = `SELECT * FROM sindex ${criterion.pub ? 'WHERE pub = ?' : ''}`
+  async findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<SindexEntry[]> {
+    let sql = `SELECT * FROM sindex ${criterion.pub ? "WHERE pub = ?" : ""}`;
     if (sort.length) {
-      sql += ` ORDER BY ${sort.map(s => `${s[0]} ${s[1] ? 'DESC' : 'ASC'}`).join(', ')}`
+      sql += ` ORDER BY ${sort
+        .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
+        .join(", ")}`;
     }
-    return this.find(sql, criterion.pub ? [criterion.pub] : [])
+    return this.find(sql, criterion.pub ? [criterion.pub] : []);
   }
 
   private async find(sql: string, params: any[]): Promise<SindexEntry[]> {
-    return (await this.driver.sqlRead(sql, params)).map(r => {
+    return (await this.driver.sqlRead(sql, params)).map((r) => {
       return {
-        index: 'CINDEX',
+        index: "SINDEX", // rows served by this DAO are SINDEX (source index) entries
         op: r.op,
         written_on: r.written_on,
         writtenOn: r.writtenOn,
@@ -110,8 +124,8 @@ export class SqliteSIndex extends SqliteTable<SindexEntry> implements SIndexDAO
         consumed: r.consumed,
         txObj: null as any,
         age: 0,
-      }
-    })
+      };
+    });
   }
 
   /**
@@ -119,67 +133,101 @@
   */
 
   findByIdentifier(identifier: string): Promise<SindexEntry[]> {
-    return this.find('SELECT * FROM sindex WHERE identifier = ?', [identifier])
+    return this.find("SELECT * FROM sindex WHERE identifier = ?", [identifier]);
   }
 
   findByPos(pos: number): Promise<SindexEntry[]> {
-    return this.find('SELECT * FROM sindex WHERE pos = ?', [pos])
+    return this.find("SELECT * FROM sindex WHERE pos = ?", [pos]);
   }
 
-  findTxSourceByIdentifierPosAmountBase(identifier: string, pos: number, amount: number, base: number): Promise<SimpleTxInput[]> {
-    return this.find('SELECT * FROM sindex ' +
-      'WHERE identifier = ? ' +
-      'AND pos = ? ' +
-      'AND amount = ? ' +
-      'AND base = ?', [identifier, pos, amount, base])
+  findTxSourceByIdentifierPosAmountBase(
+    identifier: string,
+    pos: number,
+    amount: number,
+    base: number
+  ): Promise<SimpleTxInput[]> {
+    return this.find(
+      "SELECT * FROM sindex " +
+        "WHERE identifier = ? " +
+        "AND pos = ? " +
+        "AND amount = ? " +
+        "AND base = ?",
+      [identifier, pos, amount, base]
+    );
   }
 
   getAvailableForConditions(conditionsStr: string): Promise<SindexEntry[]> {
-    return this.find('SELECT * FROM sindex s1 ' +
-      'WHERE s1.conditions LIKE ? 
' + - 'AND NOT s1.consumed ' + - 'AND NOT EXISTS (' + - ' SELECT * FROM sindex s2' + - ' WHERE s1.identifier = s2.identifier' + - ' AND s1.pos = s2.pos' + - ' AND s2.consumed' + - ')', [conditionsStr]) - } - - async getAvailableForPubkey(pubkey: string): Promise<{ amount: number; base: number; conditions: string; identifier: string; pos: number }[]> { - return this.getAvailableForConditions(`SIG(${pubkey})`) // TODO: maybe %SIG(...)% - } - - async getTxSource(identifier: string, pos: number): Promise<FullSindexEntry | null> { - const entries = await this.find('SELECT * FROM sindex WHERE identifier = ? AND pos = ? ORDER BY writtenOn', [identifier, pos]) - return Indexer.DUP_HELPERS.reduceOrNull(entries) + return this.find( + "SELECT * FROM sindex s1 " + + "WHERE s1.conditions LIKE ? " + + "AND NOT s1.consumed " + + "AND NOT EXISTS (" + + " SELECT * FROM sindex s2" + + " WHERE s1.identifier = s2.identifier" + + " AND s1.pos = s2.pos" + + " AND s2.consumed" + + ")", + [conditionsStr] + ); + } + + async getAvailableForPubkey( + pubkey: string + ): Promise< + { + amount: number; + base: number; + conditions: string; + identifier: string; + pos: number; + }[] + > { + return this.getAvailableForConditions(`SIG(${pubkey})`); // TODO: maybe %SIG(...)% + } + + async getTxSource( + identifier: string, + pos: number + ): Promise<FullSindexEntry | null> { + const entries = await this.find( + "SELECT * FROM sindex WHERE identifier = ? AND pos = ? ORDER BY writtenOn", + [identifier, pos] + ); + return Indexer.DUP_HELPERS.reduceOrNull(entries); } async getWrittenOnTxs(blockstamp: string): Promise<SimpleTxEntryForWallet[]> { - const entries = await this.find('SELECT * FROM sindex WHERE written_on = ?', [blockstamp]) - const res: SimpleTxEntryForWallet[] = [] - entries.forEach(s => { + const entries = await this.find( + "SELECT * FROM sindex WHERE written_on = ?", + [blockstamp] + ); + const res: SimpleTxEntryForWallet[] = []; + entries.forEach((s) => { res.push({ - srcType: 'T', + srcType: "T", op: s.op, conditions: s.conditions, amount: s.amount, base: s.base, identifier: s.identifier, - pos: s.pos - }) - }) - return res + pos: s.pos, + }); + }); + return res; } async trimConsumedSource(belowNumber: number): Promise<void> { - const sources = await this.find('SELECT * FROM sindex WHERE consumed AND writtenOn < ?', [belowNumber]) - await Promise.all(sources.map(async s => this.driver.sqlWrite('DELETE FROM sindex ' + - 'WHERE identifier = ? ' + - 'AND pos = ?', [ - s.identifier, - s.pos, - ]) - )) + const sources = await this.find( + "SELECT * FROM sindex WHERE consumed AND writtenOn < ?", + [belowNumber] + ); + await Promise.all( + sources.map(async (s) => + this.driver.sqlWrite( + "DELETE FROM sindex " + "WHERE identifier = ? 
" + "AND pos = ?", + [s.identifier, s.pos] + ) + ) + ); } } diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts index c55e735af6c3ff660586d5d66f83a4567f6f39a2..dcd909b299658e2eae8442ad462b7d1099d4ac2a 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts @@ -1,90 +1,126 @@ -import {SqlFieldDefinition} from "./SqlFieldDefinition" -import {Underscore} from "../../../common-libs/underscore" -import {SqliteNodeIOManager} from "./SqliteNodeIOManager" -import {SQLiteDriver} from "../../drivers/SQLiteDriver" +import { SqlFieldDefinition } from "./SqlFieldDefinition"; +import { Underscore } from "../../../common-libs/underscore"; +import { SqliteNodeIOManager } from "./SqliteNodeIOManager"; +import { SQLiteDriver } from "../../drivers/SQLiteDriver"; export class SqliteTable<T> { - - private readonly pdriver: Promise<SQLiteDriver> - protected driver: SqliteNodeIOManager<T> + private readonly pdriver: Promise<SQLiteDriver>; + protected driver: SqliteNodeIOManager<T>; protected constructor( protected name: string, protected fields: { - [k in keyof T]?: SqlFieldDefinition + [k in keyof T]?: SqlFieldDefinition; }, - getSqliteDB: (dbName: string)=> Promise<SQLiteDriver> - ) { - this.pdriver = getSqliteDB(`${name}.db`) + getSqliteDB: (dbName: string) => Promise<SQLiteDriver> + ) { + this.pdriver = getSqliteDB(`${name}.db`); } async init(): Promise<void> { - this.driver = new SqliteNodeIOManager(await this.pdriver, 'sindex') + this.driver = new SqliteNodeIOManager(await this.pdriver, "sindex"); await this.driver.sqlExec(` BEGIN; ${this.generateCreateTable()}; ${this.generateCreateIndexes()}; COMMIT; - `) + `); } async close(): Promise<void> { - await this.driver.close() + await this.driver.close(); } generateCreateTable() { - let sql = `CREATE TABLE IF NOT EXISTS ${this.name} (` - const fields = this.keys().map(fieldName => { - const f = this.fields[fieldName] as SqlFieldDefinition - switch (f.type) { - case 'BOOLEAN': return `\n${fieldName} BOOLEAN${f.nullable ? ' NULL' : ''}` - case 'CHAR': return `\n${fieldName} CHAR(${f.length})${f.nullable ? ' NULL' : ''}` - case 'VARCHAR': return `\n${fieldName} VARCHAR(${f.length})${f.nullable ? ' NULL' : ''}` - case 'TEXT': return `\n${fieldName} TEXT${f.nullable ? ' NULL' : ''}` - case 'JSON': return `\n${fieldName} TEXT${f.nullable ? ' NULL' : ''}` - case 'INT': return `\n${fieldName} INT${f.nullable ? ' NULL' : ''}` - } - }).join(', ') - sql += `${fields});` - return sql + let sql = `CREATE TABLE IF NOT EXISTS ${this.name} (`; + const fields = this.keys() + .map((fieldName) => { + const f = this.fields[fieldName] as SqlFieldDefinition; + switch (f.type) { + case "BOOLEAN": + return `\n${fieldName} BOOLEAN${f.nullable ? " NULL" : ""}`; + case "CHAR": + return `\n${fieldName} CHAR(${f.length})${ + f.nullable ? " NULL" : "" + }`; + case "VARCHAR": + return `\n${fieldName} VARCHAR(${f.length})${ + f.nullable ? " NULL" : "" + }`; + case "TEXT": + return `\n${fieldName} TEXT${f.nullable ? " NULL" : ""}`; + case "JSON": + return `\n${fieldName} TEXT${f.nullable ? " NULL" : ""}`; + case "INT": + return `\n${fieldName} INT${f.nullable ? 
" NULL" : ""}`; + } + }) + .join(", "); + sql += `${fields});`; + return sql; } generateCreateIndexes() { - return this.keys().map(fieldName => { - return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${fieldName} ON ${this.name} (${fieldName});\n` - }).join('') + return this.keys() + .map((fieldName) => { + return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${fieldName} ON ${this.name} (${fieldName});\n`; + }) + .join(""); } keys(): (keyof T)[] { - return Underscore.keys(this.fields) + return Underscore.keys(this.fields); } async insertInTable(driver: SqliteNodeIOManager<T>, record: T) { - return this.insertBatchInTable(driver, [record]) + return this.insertBatchInTable(driver, [record]); } - async update<K extends keyof T>(driver: SqliteNodeIOManager<T>, record: T, fieldsToUpdate: K[], whereFields: K[]) { - const valuesOfRecord = fieldsToUpdate.map(fieldName => `${fieldName} = ${this.getFieldValue(fieldName, record)}`).join(',') - const conditionsOfRecord = whereFields.map(fieldName => `${fieldName} = ${this.getFieldValue(fieldName, record)}`).join(',') - await driver.sqlWrite(`UPDATE ${this.name} SET ${valuesOfRecord} WHERE ${conditionsOfRecord};`, []) + async update<K extends keyof T>( + driver: SqliteNodeIOManager<T>, + record: T, + fieldsToUpdate: K[], + whereFields: K[] + ) { + const valuesOfRecord = fieldsToUpdate + .map( + (fieldName) => `${fieldName} = ${this.getFieldValue(fieldName, record)}` + ) + .join(","); + const conditionsOfRecord = whereFields + .map( + (fieldName) => `${fieldName} = ${this.getFieldValue(fieldName, record)}` + ) + .join(","); + await driver.sqlWrite( + `UPDATE ${this.name} SET ${valuesOfRecord} WHERE ${conditionsOfRecord};`, + [] + ); } async insertBatchInTable(driver: SqliteNodeIOManager<T>, records: T[]) { - const keys = this.keys() - const values = records.map(r => '(' + keys.map(fieldName => this.getFieldValue(fieldName, r)).join(',') + ')').join(',') + const keys = this.keys(); + const values = records + .map( + (r) => + "(" + + keys.map((fieldName) => this.getFieldValue(fieldName, r)).join(",") + + ")" + ) + .join(","); let sql = `INSERT INTO ${this.name} ( - ${keys.join(',')} - ) VALUES ${values};` - await driver.sqlWrite(sql, []) + ${keys.join(",")} + ) VALUES ${values};`; + await driver.sqlWrite(sql, []); } async findEntities(sql: string, params: any[]): Promise<T[]> { - const keys = this.keys() - return (await this.driver.sqlRead(sql, params)).map(r => { - const newValue: any = {} - keys.forEach(k => newValue[k] = this.sqlValue2Object(k, r)) - return newValue - }) + const keys = this.keys(); + return (await this.driver.sqlRead(sql, params)).map((r) => { + const newValue: any = {}; + keys.forEach((k) => (newValue[k] = this.sqlValue2Object(k, r))); + return newValue; + }); } /** @@ -94,74 +130,68 @@ export class SqliteTable<T> { * @returns {any} The translated value. */ protected sqlValue2Object<K extends keyof T>(fieldName: K, record: T): any { - const def = this.fields[fieldName] as SqlFieldDefinition - const value = record[fieldName] as any + const def = this.fields[fieldName] as SqlFieldDefinition; + const value = record[fieldName] as any; switch (def.type) { case "CHAR": case "VARCHAR": case "TEXT": - return value + return value; case "JSON": - return value === null ? value : JSON.parse(value) + return value === null ? value : JSON.parse(value); case "BOOLEAN": - return value === null ? null : (!!value) + return value === null ? null : !!value; case "INT": - return value === null ? null : value + return value === null ? 
null : value; } } private getFieldValue(fieldName: keyof T, record: T) { - const def = this.fields[fieldName] as SqlFieldDefinition - const value = record[fieldName] + const def = this.fields[fieldName] as SqlFieldDefinition; + const value = record[fieldName]; switch (def.type) { case "CHAR": case "VARCHAR": case "TEXT": if (!def.nullable) { - return `'${value}'` - } - else { - return value !== null && value !== undefined ? - `'${value}'` : - 'null' + return `'${value}'`; + } else { + return value !== null && value !== undefined ? `'${value}'` : "null"; } case "JSON": if (!def.nullable) { - return `'${JSON.stringify(value)}'` - } - else { - return value !== null && value !== undefined ? - `'${JSON.stringify(value)}'` : - 'null' + return `'${JSON.stringify(value)}'`; + } else { + return value !== null && value !== undefined + ? `'${JSON.stringify(value)}'` + : "null"; } case "BOOLEAN": if (!def.nullable) { - return `${value ? 1 : 0}` - } - else { - return value !== null && value !== undefined ? - `${value ? 1 : 0}` : - 'null' + return `${value ? 1 : 0}`; + } else { + return value !== null && value !== undefined + ? `${value ? 1 : 0}` + : "null"; } case "INT": if (!def.nullable) { - return `${value || 0}` - } - else { - return value !== null && value !== undefined ? - `${value}` : - 'null' + return `${value || 0}`; + } else { + return value !== null && value !== undefined ? `${value}` : "null"; } } } async dump() { - const ts: T[] = await this.findEntities(`SELECT * FROM ${this.name}`, []) - ts.forEach(t => console.log(t)) + const ts: T[] = await this.findEntities(`SELECT * FROM ${this.name}`, []); + ts.forEach((t) => console.log(t)); } async count() { - return ((await this.driver.sqlRead(`SELECT COUNT(*) as max FROM ${this.name}`, []))[0] as any).max + return (( + await this.driver.sqlRead(`SELECT COUNT(*) as max FROM ${this.name}`, []) + )[0] as any).max; } /** @@ -170,8 +200,15 @@ export class SqliteTable<T> { * @returns {Promise<void>} Promise of done. 
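   * A hypothetical usage sketch (the `walletDAO` instance and the target path
   * are made-up names, not part of this changeset): dumping a whole table into
   * a standalone SQLite file for offline inspection.
   *
   *     await walletDAO.copy2file("/tmp/wallet-copy.db");
   *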
*/
   async copy2file(path: string) {
-    const copy = new SqliteTable<T>(this.name, this.fields, async () => new SQLiteDriver(path))
-    await copy.init()
-    await copy.insertBatchInTable(this.driver, await this.driver.sqlRead(`SELECT * FROM ${this.name}`, []))
+    const copy = new SqliteTable<T>(
+      this.name,
+      this.fields,
+      async () => new SQLiteDriver(path)
+    );
+    await copy.init();
+    await copy.insertBatchInTable(
+      // Write through the copy's own driver: `this.driver` would re-insert the
+      // rows into the source table instead of the file copy.
+      copy.driver,
+      await this.driver.sqlRead(`SELECT * FROM ${this.name}`, [])
+    );
   }
 }
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
index 76ef91395337ece73f477f0ff4af0804d84eeeb1..f2a6e104a085b31d7fa7f4c277b1a81aee853b83 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
@@ -1,67 +1,75 @@
-import {SQLiteDriver} from "../../drivers/SQLiteDriver"
-import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime"
-import {SqliteTable} from "./SqliteTable"
-import {SqlNotNullableFieldDefinition, SqlNullableFieldDefinition} from "./SqlFieldDefinition"
-import {DBTx} from "../../../db/DBTx"
-import {TxsDAO} from "../abstract/TxsDAO"
-import {SandBox} from "../../sqliteDAL/SandBox"
-import {TransactionDTO} from "../../../dto/TransactionDTO"
-
-const constants = require('../../../constants')
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { SqliteTable } from "./SqliteTable";
+import {
+  SqlNotNullableFieldDefinition,
+  SqlNullableFieldDefinition,
+} from "./SqlFieldDefinition";
+import { DBTx } from "../../../db/DBTx";
+import { TxsDAO } from "../abstract/TxsDAO";
+import { SandBox } from "../../sqliteDAL/SandBox";
+import { TransactionDTO } from "../../../dto/TransactionDTO";
+
+const constants = require("../../../constants");
 
 export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
-
-  constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
     super(
-      'txs',
+      "txs",
       {
-        'hash': new SqlNotNullableFieldDefinition('VARCHAR', true, 70),
-        'block_number': new SqlNullableFieldDefinition('INT', true),
-        'locktime': new SqlNullableFieldDefinition('INT', false),
-        'version': new SqlNullableFieldDefinition('INT', false),
-        'currency': new SqlNullableFieldDefinition('VARCHAR', false, 10),
-        'comment': new SqlNullableFieldDefinition('TEXT', false),
-        'blockstamp': new SqlNullableFieldDefinition('VARCHAR', false, 100),
-        'blockstampTime': new SqlNullableFieldDefinition('INT', false),
-        'time': new SqlNullableFieldDefinition('INT', false),
-        'inputs': new SqlNullableFieldDefinition('JSON', false),
-        'unlocks': new SqlNullableFieldDefinition('JSON', false),
-        'outputs': new SqlNullableFieldDefinition('JSON', false),
-        'issuers': new SqlNullableFieldDefinition('JSON', false),
-        'signatures': new SqlNullableFieldDefinition('JSON', false),
-        'recipients': new SqlNullableFieldDefinition('JSON', false),
-        'written': new SqlNotNullableFieldDefinition('BOOLEAN', true),
-        'removed': new SqlNotNullableFieldDefinition('BOOLEAN', true),
-        'received': new SqlNullableFieldDefinition('BOOLEAN', false),
-        'output_base': new SqlNullableFieldDefinition('INT', false),
-        'output_amount': new SqlNullableFieldDefinition('INT', false),
-        'written_on': new SqlNullableFieldDefinition('VARCHAR', false, 100),
-        'writtenOn': new SqlNullableFieldDefinition('INT', false),
+        hash: new 
SqlNotNullableFieldDefinition("VARCHAR", true, 70), + block_number: new SqlNullableFieldDefinition("INT", true), + locktime: new SqlNullableFieldDefinition("INT", false), + version: new SqlNullableFieldDefinition("INT", false), + currency: new SqlNullableFieldDefinition("VARCHAR", false, 10), + comment: new SqlNullableFieldDefinition("TEXT", false), + blockstamp: new SqlNullableFieldDefinition("VARCHAR", false, 100), + blockstampTime: new SqlNullableFieldDefinition("INT", false), + time: new SqlNullableFieldDefinition("INT", false), + inputs: new SqlNullableFieldDefinition("JSON", false), + unlocks: new SqlNullableFieldDefinition("JSON", false), + outputs: new SqlNullableFieldDefinition("JSON", false), + issuers: new SqlNullableFieldDefinition("JSON", false), + signatures: new SqlNullableFieldDefinition("JSON", false), + recipients: new SqlNullableFieldDefinition("JSON", false), + written: new SqlNotNullableFieldDefinition("BOOLEAN", true), + removed: new SqlNotNullableFieldDefinition("BOOLEAN", true), + received: new SqlNullableFieldDefinition("BOOLEAN", false), + output_base: new SqlNullableFieldDefinition("INT", false), + output_amount: new SqlNullableFieldDefinition("INT", false), + written_on: new SqlNullableFieldDefinition("VARCHAR", false, 100), + writtenOn: new SqlNullableFieldDefinition("INT", false), }, getSqliteDB - ) + ); this.sandbox = new SandBox( constants.SANDBOX_SIZE_TRANSACTIONS, () => this.getSandboxTxs(), - (compared: { issuers: string[], output_base: number, output_amount: number }, - reference: { issuers: string[], output_base: number, output_amount: number } + ( + compared: { + issuers: string[]; + output_base: number; + output_amount: number; + }, + reference: { + issuers: string[]; + output_base: number; + output_amount: number; + } ) => { if (compared.output_base < reference.output_base) { return -1; - } - else if (compared.output_base > reference.output_base) { + } else if (compared.output_base > reference.output_base) { return 1; - } - else if (compared.output_amount > reference.output_amount) { + } else if (compared.output_amount > reference.output_amount) { return -1; - } - else if (compared.output_amount < reference.output_amount) { + } else if (compared.output_amount < reference.output_amount) { return 1; - } - else { + } else { return 0; } - }) + } + ); } /** @@ -70,95 +78,136 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { @MonitorExecutionTime() async insert(record: DBTx): Promise<void> { - await this.insertInTable(this.driver, record) + await this.insertInTable(this.driver, record); } @MonitorExecutionTime() async insertBatch(records: DBTx[]): Promise<void> { if (records.length) { - return this.insertBatchInTable(this.driver, records) + return this.insertBatchInTable(this.driver, records); } } - sandbox: SandBox<{ issuers: string[]; output_base: number; output_amount: number }> - - async addLinked(tx: TransactionDTO, block_number: number, time: number): Promise<DBTx> { - const dbTx = await this.getTX(tx.hash) - const theDBTx = DBTx.fromTransactionDTO(tx) - theDBTx.written = true - theDBTx.block_number = block_number - theDBTx.time = time + sandbox: SandBox<{ + issuers: string[]; + output_base: number; + output_amount: number; + }>; + + async addLinked( + tx: TransactionDTO, + block_number: number, + time: number + ): Promise<DBTx> { + const dbTx = await this.getTX(tx.hash); + const theDBTx = DBTx.fromTransactionDTO(tx); + theDBTx.written = true; + theDBTx.block_number = block_number; + theDBTx.time = time; if (!dbTx) { - 
await this.insert(theDBTx) - } - else { - await this.update(this.driver, theDBTx, ['block_number', 'time', 'received', 'written', 'removed', 'hash'], ['hash']) + await this.insert(theDBTx); + } else { + await this.update( + this.driver, + theDBTx, + ["block_number", "time", "received", "written", "removed", "hash"], + ["hash"] + ); } - return dbTx + return dbTx; } async addPending(dbTx: DBTx): Promise<DBTx> { - const existing = (await this.findEntities('SELECT * FROM txs WHERE hash = ?', [dbTx.hash]))[0] + const existing = ( + await this.findEntities("SELECT * FROM txs WHERE hash = ?", [dbTx.hash]) + )[0]; if (existing) { - await this.driver.sqlWrite('UPDATE txs SET written = ? WHERE hash = ?', [false, dbTx.hash]) - return existing + await this.driver.sqlWrite("UPDATE txs SET written = ? WHERE hash = ?", [ + false, + dbTx.hash, + ]); + return existing; } - await this.insert(dbTx) - return dbTx + await this.insert(dbTx); + return dbTx; } - cleanCache(): void { - } + cleanCache(): void {} - findRawWithOrder(criterion: { pub?: string }, sort: (string | (string | boolean)[])[]): Promise<DBTx[]> { - throw Error('Should not be used method findRawWithOrder() on SqliteTransactions') + findRawWithOrder( + criterion: { pub?: string }, + sort: (string | (string | boolean)[])[] + ): Promise<DBTx[]> { + throw Error( + "Should not be used method findRawWithOrder() on SqliteTransactions" + ); } getAllPending(versionMin: number): Promise<DBTx[]> { - return this.findEntities('SELECT * FROM txs WHERE NOT written', []) + return this.findEntities("SELECT * FROM txs WHERE NOT written", []); } getLinkedWithIssuer(pubkey: string): Promise<DBTx[]> { - return this.findEntities('SELECT * FROM txs WHERE written AND issuers LIKE ?', [`%${pubkey}%`]) + return this.findEntities( + "SELECT * FROM txs WHERE written AND issuers LIKE ?", + [`%${pubkey}%`] + ); } getLinkedWithRecipient(pubkey: string): Promise<DBTx[]> { - return this.findEntities('SELECT * FROM txs WHERE written AND recipients LIKE ?', [`%${pubkey}%`]) + return this.findEntities( + "SELECT * FROM txs WHERE written AND recipients LIKE ?", + [`%${pubkey}%`] + ); } getPendingWithIssuer(pubkey: string): Promise<DBTx[]> { - return this.findEntities('SELECT * FROM txs WHERE NOT written AND issuers LIKE ?', [`%${pubkey}%`]) + return this.findEntities( + "SELECT * FROM txs WHERE NOT written AND issuers LIKE ?", + [`%${pubkey}%`] + ); } getPendingWithRecipient(pubkey: string): Promise<DBTx[]> { - return this.findEntities('SELECT * FROM txs WHERE NOT written AND recipients LIKE ?', [`%${pubkey}%`]) + return this.findEntities( + "SELECT * FROM txs WHERE NOT written AND recipients LIKE ?", + [`%${pubkey}%`] + ); } async getTX(hash: string): Promise<DBTx> { - return (await this.findEntities('SELECT * FROM txs WHERE hash = ?', [hash]))[0] + return ( + await this.findEntities("SELECT * FROM txs WHERE hash = ?", [hash]) + )[0]; } getWrittenOn(blockstamp: string): Promise<DBTx[]> { - return this.findEntities('SELECT * FROM txs WHERE blockstamp = ?', [blockstamp]) + return this.findEntities("SELECT * FROM txs WHERE blockstamp = ?", [ + blockstamp, + ]); } async removeAll(): Promise<void> { - await this.driver.sqlWrite('DELETE FROM txs', []) + await this.driver.sqlWrite("DELETE FROM txs", []); } removeBlock(blockstamp: string): Promise<void> { - throw Error('Should not be used method removeBlock() on SqliteTransactions') + throw Error( + "Should not be used method removeBlock() on SqliteTransactions" + ); } removeTX(hash: string): Promise<void> { - return 
this.driver.sqlWrite('DELETE FROM txs WHERE hash = ?', [hash]) + return this.driver.sqlWrite("DELETE FROM txs WHERE hash = ?", [hash]); } - triggerInit(): void { - } + triggerInit(): void {} trimExpiredNonWrittenTxs(limitTime: number): Promise<void> { - return this.driver.sqlWrite('DELETE FROM txs WHERE NOT written AND blockstampTime <= ?', [limitTime]) + return this.driver.sqlWrite( + "DELETE FROM txs WHERE NOT written AND blockstampTime <= ?", + [limitTime] + ); } /************************** @@ -167,14 +216,17 @@ export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO { @MonitorExecutionTime() async getSandboxTxs() { - return this.findEntities('SELECT * FROM txs WHERE NOT written AND NOT removed ORDER BY output_base DESC, output_amount DESC', []) + return this.findEntities( + "SELECT * FROM txs WHERE NOT written AND NOT removed ORDER BY output_base DESC, output_amount DESC", + [] + ); } getSandboxRoom() { - return this.sandbox.getSandboxRoom() + return this.sandbox.getSandboxRoom(); } setSandboxSize(maxSize: number) { - this.sandbox.maxSize = maxSize + this.sandbox.maxSize = maxSize; } } diff --git a/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts b/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts index baf9db0650986704c8ff9dc5d152b8bbbc496524..3b70811fe3ee3a6106ee3898fec4f5a6d6413536 100644 --- a/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts +++ b/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts @@ -1,32 +1,29 @@ -import {SQLiteDriver} from "../../drivers/SQLiteDriver" -import {MonitorExecutionTime} from "../../../debug/MonitorExecutionTime" -import {SqliteTable} from "./SqliteTable" -import {SqlNotNullableFieldDefinition} from "./SqlFieldDefinition" -import {WalletDAO} from "../abstract/WalletDAO" -import {DBWallet} from "../../../db/DBWallet" +import { SQLiteDriver } from "../../drivers/SQLiteDriver"; +import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime"; +import { SqliteTable } from "./SqliteTable"; +import { SqlNotNullableFieldDefinition } from "./SqlFieldDefinition"; +import { WalletDAO } from "../abstract/WalletDAO"; +import { DBWallet } from "../../../db/DBWallet"; export class SqliteWallet extends SqliteTable<DBWallet> implements WalletDAO { - - constructor(getSqliteDB: (dbName: string)=> Promise<SQLiteDriver>) { + constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) { super( - 'wallet', + "wallet", { - 'conditions': new SqlNotNullableFieldDefinition('VARCHAR', true, 1000), - 'balance': new SqlNotNullableFieldDefinition('INT', true), + conditions: new SqlNotNullableFieldDefinition("VARCHAR", true, 1000), + balance: new SqlNotNullableFieldDefinition("INT", true), }, getSqliteDB - ) + ); } /** * TECHNICAL */ - cleanCache(): void { - } + cleanCache(): void {} - triggerInit(): void { - } + triggerInit(): void {} /** * INSERT @@ -34,35 +31,37 @@ export class SqliteWallet extends SqliteTable<DBWallet> implements WalletDAO { @MonitorExecutionTime() async insert(record: DBWallet): Promise<void> { - await this.insertInTable(this.driver, record) + await this.insertInTable(this.driver, record); } @MonitorExecutionTime() async insertBatch(records: DBWallet[]): Promise<void> { if (records.length) { - return this.insertBatchInTable(this.driver, records) + return this.insertBatchInTable(this.driver, records); } } private async find(sql: string, params: any[]): Promise<DBWallet[]> { - return (await this.driver.sqlRead(sql, params)).map(r => { + return (await this.driver.sqlRead(sql, params)).map((r) => { return { conditions: r.conditions, balance: 
r.balance, - } - }) + }; + }); } async getWallet(conditions: string): Promise<DBWallet> { - return (await this.find('SELECT * FROM wallet WHERE conditions = ?', [conditions]))[0] + return ( + await this.find("SELECT * FROM wallet WHERE conditions = ?", [conditions]) + )[0]; } async saveWallet(wallet: DBWallet): Promise<DBWallet> { - await this.insert(wallet) - return wallet + await this.insert(wallet); + return wallet; } listAll(): Promise<DBWallet[]> { - return this.find('SELECT * FROM wallet', []) + return this.find("SELECT * FROM wallet", []); } } diff --git a/app/lib/dal/server-dao.ts b/app/lib/dal/server-dao.ts index 456c9604edca8ef562f47d08c8c9a64dd605f63a..2d6a2453b4a34d24b11ab6197a0f280a7c835edb 100644 --- a/app/lib/dal/server-dao.ts +++ b/app/lib/dal/server-dao.ts @@ -1,7 +1,6 @@ -import {DBBlock} from "../db/DBBlock" +import { DBBlock } from "../db/DBBlock"; export interface ServerDAO { - // TODO: check that a module is actually using this method - lastBlockOfIssuer(issuer:string): Promise<DBBlock | null> -} \ No newline at end of file + lastBlockOfIssuer(issuer: string): Promise<DBBlock | null>; +} diff --git a/app/lib/dal/sqliteDAL/AbstractSQLite.ts b/app/lib/dal/sqliteDAL/AbstractSQLite.ts index 39dce4618e60aab97351562191ba398a9e0b3fdc..bc7875a9691fef1a3b4280cd73410d87e7d36efe 100644 --- a/app/lib/dal/sqliteDAL/AbstractSQLite.ts +++ b/app/lib/dal/sqliteDAL/AbstractSQLite.ts @@ -11,22 +11,21 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {SQLiteDriver} from "../drivers/SQLiteDriver" -import {Initiable} from "./Initiable" -import {Underscore} from "../../common-libs/underscore" -import {NewLogger} from "../../logger" -import {MonitorSQLExecutionTime} from "../../debug/MonitorSQLExecutionTime" +import { SQLiteDriver } from "../drivers/SQLiteDriver"; +import { Initiable } from "./Initiable"; +import { Underscore } from "../../common-libs/underscore"; +import { NewLogger } from "../../logger"; +import { MonitorSQLExecutionTime } from "../../debug/MonitorSQLExecutionTime"; -const logger = NewLogger('sqlite') +const logger = NewLogger("sqlite"); export interface BeforeSaveHook<T> { - (t:T): void + (t: T): void; } export abstract class AbstractSQLite<T> extends Initiable { - constructor( - private driver:SQLiteDriver, + private driver: SQLiteDriver, public readonly table: string, private pkFields: string[] = [], protected fields: string[] = [], @@ -36,188 +35,256 @@ export abstract class AbstractSQLite<T> extends Initiable { private transientFields: string[] = [], private beforeSaveHook: BeforeSaveHook<T> | null = null ) { - super() + super(); } @MonitorSQLExecutionTime() - async query(sql:string, params: any[] = []): Promise<T[]> { + async query(sql: string, params: any[] = []): Promise<T[]> { try { const res = await this.driver.executeAll(sql, params || []); - return res.map((t:T) => this.toEntity(t)) + return res.map((t: T) => this.toEntity(t)); } catch (e) { - logger.error('ERROR >> %s', sql, JSON.stringify(params || []), e.stack || e.message || e); + logger.error( + "ERROR >> %s", + sql, + JSON.stringify(params || []), + e.stack || e.message || e + ); throw e; } } cleanData(): Promise<void> { - return this.exec("DELETE FROM " + this.table) + return this.exec("DELETE FROM " + this.table); } sqlListAll(): Promise<T[]> { - return this.query("SELECT * FROM " + this.table) + return this.query("SELECT * FROM " + this.table); } sqlDeleteAll() { - return this.cleanData() + return this.cleanData(); } - 
sqlFind(obj:any, sortObj:any = {}): Promise<T[]> { - const conditions = this.toConditionsArray(obj).join(' and '); + sqlFind(obj: any, sortObj: any = {}): Promise<T[]> { + const conditions = this.toConditionsArray(obj).join(" and "); const values = this.toParams(obj); - const sortKeys: string[] = Underscore.keys(sortObj).map(String) - const sort = sortKeys.length ? ' ORDER BY ' + sortKeys.map((k) => "`" + k + "` " + (sortObj[k] ? 'DESC' : 'ASC')).join(',') : ''; - return this.query('SELECT * FROM ' + this.table + ' WHERE ' + conditions + sort, values); + const sortKeys: string[] = Underscore.keys(sortObj).map(String); + const sort = sortKeys.length + ? " ORDER BY " + + sortKeys + .map((k) => "`" + k + "` " + (sortObj[k] ? "DESC" : "ASC")) + .join(",") + : ""; + return this.query( + "SELECT * FROM " + this.table + " WHERE " + conditions + sort, + values + ); } - async sqlFindOne(obj:any, sortObj:any = null): Promise<T> { - const res = await this.sqlFind(obj, sortObj) - return res[0] + async sqlFindOne(obj: any, sortObj: any = null): Promise<T> { + const res = await this.sqlFind(obj, sortObj); + return res[0]; } - sqlFindLikeAny(obj:any): Promise<T[]> { - const keys:string[] = Underscore.keys(obj).map(String) - return this.query('SELECT * FROM ' + this.table + ' WHERE ' + keys.map((k) => 'UPPER(`' + k + '`) like ?').join(' or '), keys.map((k) => obj[k].toUpperCase())) + sqlFindLikeAny(obj: any): Promise<T[]> { + const keys: string[] = Underscore.keys(obj).map(String); + return this.query( + "SELECT * FROM " + + this.table + + " WHERE " + + keys.map((k) => "UPPER(`" + k + "`) like ?").join(" or "), + keys.map((k) => obj[k].toUpperCase()) + ); } - async sqlRemoveWhere(obj:any): Promise<void> { - const keys:string[] = Underscore.keys(obj).map(String) - await this.query('DELETE FROM ' + this.table + ' WHERE ' + keys.map((k) => '`' + k + '` = ?').join(' and '), keys.map((k) => obj[k])) + async sqlRemoveWhere(obj: any): Promise<void> { + const keys: string[] = Underscore.keys(obj).map(String); + await this.query( + "DELETE FROM " + + this.table + + " WHERE " + + keys.map((k) => "`" + k + "` = ?").join(" and "), + keys.map((k) => obj[k]) + ); } - sqlExisting(entity:T): Promise<T> { - return this.getEntity(entity) + sqlExisting(entity: T): Promise<T> { + return this.getEntity(entity); } - async saveEntity(entity:any): Promise<void> { - let toSave:any = entity; + async saveEntity(entity: any): Promise<void> { + let toSave: any = entity; if (this.beforeSaveHook) { this.beforeSaveHook(toSave); } const existing = await this.getEntity(toSave); if (existing) { toSave = this.toRow(toSave); - const valorizations = this.fields.map((field) => '`' + field + '` = ?').join(', '); - const conditions = this.getPKFields().map((field) => '`' + field + '` = ?').join(' and '); + const valorizations = this.fields + .map((field) => "`" + field + "` = ?") + .join(", "); + const conditions = this.getPKFields() + .map((field) => "`" + field + "` = ?") + .join(" and "); const setValues = this.fields.map((field) => toSave[field]); const condValues = this.getPKFields().map((k) => toSave[k]); - await this.query('UPDATE ' + this.table + ' SET ' + valorizations + ' WHERE ' + conditions, setValues.concat(condValues)); - return + await this.query( + "UPDATE " + + this.table + + " SET " + + valorizations + + " WHERE " + + conditions, + setValues.concat(condValues) + ); + return; } await this.insert(toSave); } - async insert(entity:T): Promise<void> { + async insert(entity: T): Promise<void> { const row = this.toRow(entity); const 
values = this.fields.map((f) => row[f]); - await this.query(this.getInsertQuery(), values) + await this.query(this.getInsertQuery(), values); } - async getEntity(entity:any): Promise<T> { - const conditions = this.getPKFields().map((field) => '`' + field + '` = ?').join(' and '); + async getEntity(entity: any): Promise<T> { + const conditions = this.getPKFields() + .map((field) => "`" + field + "` = ?") + .join(" and "); const params = this.toParams(entity, this.getPKFields()); - return (await this.query('SELECT * FROM ' + this.table + ' WHERE ' + conditions, params))[0]; + return ( + await this.query( + "SELECT * FROM " + this.table + " WHERE " + conditions, + params + ) + )[0]; } - async deleteEntity(entity:any): Promise<void> { + async deleteEntity(entity: any): Promise<void> { const toSave = this.toRow(entity); if (this.beforeSaveHook) { this.beforeSaveHook(toSave); } - const conditions = this.getPKFields().map((field) => '`' + field + '` = ?').join(' and '); + const conditions = this.getPKFields() + .map((field) => "`" + field + "` = ?") + .join(" and "); const condValues = this.getPKFields().map((k) => toSave[k]); - await this.query('DELETE FROM ' + this.table + ' WHERE ' + conditions, condValues) + await this.query( + "DELETE FROM " + this.table + " WHERE " + conditions, + condValues + ); } @MonitorSQLExecutionTime() - async exec(sql:string) { - await this.driver.executeSql(sql) + async exec(sql: string) { + await this.driver.executeSql(sql); } getInsertQuery(): string { - return "INSERT INTO " + this.table + " (" + this.fields.map(f => '`' + f + '`').join(',') + ") VALUES (" + "?,".repeat(this.fields.length - 1) + "?);" + return ( + "INSERT INTO " + + this.table + + " (" + + this.fields.map((f) => "`" + f + "`").join(",") + + ") VALUES (" + + "?,".repeat(this.fields.length - 1) + + "?);" + ); } getInsertHead(): string { - const valuesKeys = this.fields - return 'INSERT INTO ' + this.table + " (" + valuesKeys.map(f => '`' + f + '`').join(',') + ") VALUES "; + const valuesKeys = this.fields; + return ( + "INSERT INTO " + + this.table + + " (" + + valuesKeys.map((f) => "`" + f + "`").join(",") + + ") VALUES " + ); } - getInsertValue(toSave:T): string { + getInsertValue(toSave: T): string { if (this.beforeSaveHook) { this.beforeSaveHook(toSave); } const row = this.toRow(toSave); - const valuesKeys = this.fields + const valuesKeys = this.fields; const values = valuesKeys.map((field) => this.escapeToSQLite(row[field])); - return "(" + values.join(',') + ")"; + return "(" + values.join(",") + ")"; } - toInsertValues(entity:T): string { + toInsertValues(entity: T): string { const row = this.toRow(entity); const values = this.fields.map((f) => row[f]); - const formatted = values.map((s:string) => this.escapeToSQLite(s)) - return "(" + formatted.join(',') + ")"; + const formatted = values.map((s: string) => this.escapeToSQLite(s)); + return "(" + formatted.join(",") + ")"; } /** * Make a batch insert. * @param records The records to insert as a batch. */ - async insertBatch(records:T[]): Promise<void> { + async insertBatch(records: T[]): Promise<void> { const queries = []; if (records.length) { const insert = this.getInsertHead(); const values = records.map((src) => this.getInsertValue(src)); - queries.push(insert + '\n' + values.join(',\n') + ';'); + queries.push(insert + "\n" + values.join(",\n") + ";"); } if (queries.length) { - await this.exec(queries.join('\n')) + await this.exec(queries.join("\n")); } } /** * To redefine if necessary in subclasses. 
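   * For example, BlockDAL (further down in this diff) redefines it to drop its
   * memoized current block:
   *
   *     cleanCache() {
   *       this.current = null;
   *     }
   *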
*/
-  cleanCache() {
-  }
+  cleanCache() {}
 
   async close(): Promise<void> {
     // Does nothing: the SqliteDriver is shared among all instances, we close it in a single time in fileDAL.close()
   }
 
-  private toConditionsArray(obj:any): string[] {
-    return Underscore.keys(obj).map((k:string) => {
+  private toConditionsArray(obj: any): string[] {
+    return Underscore.keys(obj).map((k: string) => {
       if (obj[k].$lte !== undefined) {
-        return '`' + k + '` <= ?';
+        return "`" + k + "` <= ?";
       } else if (obj[k].$gte !== undefined) {
-        return '`' + k + '` >= ?';
+        return "`" + k + "` >= ?";
       } else if (obj[k].$gt !== undefined) {
-        return '`' + k + '` > ?';
-      } else if (obj[k].$lt !== undefined) {
-        return '`' + k + '` < ?';
-      } else if (obj[k].$null !== undefined) {
-        return '`' + k + '` IS ' + (!obj[k].$null ? 'NOT' : '') + ' NULL';
-      } else if (obj[k].$contains !== undefined) {
-        return '`' + k + '` LIKE ?';
+        return "`" + k + "` > ?";
+      } else if (obj[k].$lt !== undefined) {
+        return "`" + k + "` < ?";
+      } else if (obj[k].$null !== undefined) {
+        return "`" + k + "` IS " + (!obj[k].$null ? "NOT" : "") + " NULL";
+      } else if (obj[k].$contains !== undefined) {
+        return "`" + k + "` LIKE ?";
       } else {
-        return '`' + k + '` = ?';
+        return "`" + k + "` = ?";
       }
     });
   }
 
-  private toParams(obj:any, fields:string[] | null = null): any[] {
-    let params:any[] = [];
-    (fields || Underscore.keys(obj)).forEach((f:string) => {
+  private toParams(obj: any, fields: string[] | null = null): any[] {
+    let params: any[] = [];
+    (fields || Underscore.keys(obj)).forEach((f: string) => {
       if (obj[f].$null === undefined) {
         let pValue;
-        if (obj[f].$lte !== undefined) { pValue = obj[f].$lte; }
-        else if (obj[f].$gte !== undefined) { pValue = obj[f].$gte; }
-        else if (obj[f].$gt !== undefined) { pValue = obj[f].$gt; }
-        else if (obj[f].$lt !== undefined) { pValue = obj[f].$lt; }
-        else if (obj[f].$null !== undefined) { pValue = obj[f].$null; }
-        else if (obj[f].$contains !== undefined) { pValue = "%" + obj[f].$contains + "%"; }
-        else if (~this.bigintegers.indexOf(f) && typeof obj[f] !== "string") {
+        if (obj[f].$lte !== undefined) {
+          pValue = obj[f].$lte;
+        } else if (obj[f].$gte !== undefined) {
+          pValue = obj[f].$gte;
+        } else if (obj[f].$gt !== undefined) {
+          pValue = obj[f].$gt;
+        } else if (obj[f].$lt !== undefined) {
+          pValue = obj[f].$lt;
+        } else if (obj[f].$null !== undefined) {
+          pValue = obj[f].$null;
+        } else if (obj[f].$contains !== undefined) {
+          pValue = "%" + obj[f].$contains + "%";
+        } else if (~this.bigintegers.indexOf(f) && typeof obj[f] !== "string") {
           pValue = String(obj[f]);
         } else {
           pValue = obj[f];
@@ -228,7 +295,7 @@ export abstract class AbstractSQLite<T> extends Initiable {
     return params;
   }
 
-  private escapeToSQLite(val:string): any {
+  private escapeToSQLite(val: string): any {
     if (typeof val == "boolean") {
       // SQLite specific: true => 1, false => 0
       if (val !== null && val !== undefined) {
@@ -236,11 +303,9 @@ export abstract class AbstractSQLite<T> extends Initiable {
       } else {
         return null;
       }
-    }
-    else if (typeof val == "string") {
-      return "'" + val.replace(/'/g, "\\'") + "'";
-    }
-    else if (val === undefined) {
+    } else if (typeof val == "string") {
+      // SQLite escapes an embedded single quote by doubling it; a backslash
+      // escape is not valid SQLite and breaks on any value containing "'".
+      return "'" + val.replace(/'/g, "''") + "'";
+    } else if (val === undefined) {
       return "null";
     } else {
       return JSON.stringify(val);
@@ -248,10 +313,10 @@ export abstract class AbstractSQLite<T> extends Initiable {
   }
 
   private getPKFields(): string[] {
-    return this.pkFields
+    return this.pkFields;
   }
 
-  private toEntity(row:any): T {
+  private toEntity(row: any): T {
     for (const arr of this.arrays) {
       row[arr] = 
row[arr] ? JSON.parse(row[arr]) : []; } @@ -266,16 +331,16 @@ export abstract class AbstractSQLite<T> extends Initiable { row[f] = row[f] !== null ? Boolean(row[f]) : null; } // Transient - for (const f of (this.transientFields || [])) { + for (const f of this.transientFields || []) { row[f] = row[f]; } return row; } - private toRow(entity:any): any { - let row:any = {}; + private toRow(entity: any): any { + let row: any = {}; for (const f of this.fields) { - row[f] = entity[f] + row[f] = entity[f]; } for (const arr of this.arrays) { row[arr] = JSON.stringify(row[arr] || []); diff --git a/app/lib/dal/sqliteDAL/BlockDAL.ts b/app/lib/dal/sqliteDAL/BlockDAL.ts index 5dc123d4bc291a169ca486bb2d7fe40ed6fdf7b4..3f82cfe5c5c0f99c5217f2da2c296fa42b84933f 100644 --- a/app/lib/dal/sqliteDAL/BlockDAL.ts +++ b/app/lib/dal/sqliteDAL/BlockDAL.ts @@ -11,36 +11,78 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractSQLite} from "./AbstractSQLite" -import {SQLiteDriver} from "../drivers/SQLiteDriver" -import {DBBlock} from "../../db/DBBlock" +import { AbstractSQLite } from "./AbstractSQLite"; +import { SQLiteDriver } from "../drivers/SQLiteDriver"; +import { DBBlock } from "../../db/DBBlock"; -const constants = require('../../constants'); +const constants = require("../../constants"); const IS_FORK = true; const IS_NOT_FORK = false; export class BlockDAL extends AbstractSQLite<DBBlock> { + private current: DBBlock | null; - private current: DBBlock|null - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'block', + "block", // PK fields - ['number','hash'], + ["number", "hash"], // Fields - ['fork', 'hash', 'inner_hash', 'signature', 'currency', 'issuer', 'issuersCount', 'issuersFrame', 'issuersFrameVar', 'parameters', 'previousHash', 'previousIssuer', 'version', 'membersCount', 'monetaryMass', 'UDTime', 'medianTime', 'dividend', 'unitbase', 'time', 'powMin', 'number', 'nonce', 'transactions', 'certifications', 'identities', 'joiners', 'actives', 'leavers', 'revoked', 'excluded', 'len', 'legacy'], + [ + "fork", + "hash", + "inner_hash", + "signature", + "currency", + "issuer", + "issuersCount", + "issuersFrame", + "issuersFrameVar", + "parameters", + "previousHash", + "previousIssuer", + "version", + "membersCount", + "monetaryMass", + "UDTime", + "medianTime", + "dividend", + "unitbase", + "time", + "powMin", + "number", + "nonce", + "transactions", + "certifications", + "identities", + "joiners", + "actives", + "leavers", + "revoked", + "excluded", + "len", + "legacy", + ], // Arrays - ['identities','certifications','actives','revoked','excluded','leavers','joiners','transactions'], + [ + "identities", + "certifications", + "actives", + "revoked", + "excluded", + "leavers", + "joiners", + "transactions", + ], // Booleans - ['wrong', 'legacy'], + ["wrong", "legacy"], // BigIntegers - ['monetaryMass'], + ["monetaryMass"], // Transient [] - ) + ); /** * Periodically cleans the current block cache. 
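     * The scheduling code itself falls outside this hunk; the idea is a simple
     * timer along these lines (a sketch only, with a hypothetical period
     * constant):
     *
     *     setInterval(() => this.cleanCache(), CURRENT_BLOCK_CACHE_PERIOD);
     *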
@@ -50,91 +92,129 @@ export class BlockDAL extends AbstractSQLite<DBBlock> { } async init() { - await this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' + - 'fork BOOLEAN NOT NULL,' + - 'legacy BOOLEAN NOT NULL,' + - 'hash VARCHAR(64) NOT NULL,' + - 'inner_hash VARCHAR(64) NOT NULL,' + - 'signature VARCHAR(100) NOT NULL,' + - 'currency VARCHAR(50) NOT NULL,' + - 'issuer VARCHAR(50) NOT NULL,' + - 'issuersFrame INTEGER NULL,' + - 'issuersFrameVar INTEGER NULL,' + - 'issuersCount INTEGER NULL,' + - 'len INTEGER NULL,' + - 'parameters VARCHAR(255),' + - 'previousHash VARCHAR(64),' + - 'previousIssuer VARCHAR(50),' + - 'version INTEGER NOT NULL,' + - 'membersCount INTEGER NOT NULL,' + - 'monetaryMass VARCHAR(100) DEFAULT \'0\',' + - 'UDTime DATETIME,' + - 'medianTime DATETIME NOT NULL,' + - 'dividend INTEGER DEFAULT \'0\',' + - 'unitbase INTEGER NULL,' + - 'time DATETIME NOT NULL,' + - 'powMin INTEGER NOT NULL,' + - 'number INTEGER NOT NULL,' + - 'nonce INTEGER NOT NULL,' + - 'transactions TEXT,' + - 'certifications TEXT,' + - 'identities TEXT,' + - 'joiners TEXT,' + - 'actives TEXT,' + - 'leavers TEXT,' + - 'revoked TEXT,' + - 'excluded TEXT,' + - 'created DATETIME DEFAULT NULL,' + - 'updated DATETIME DEFAULT NULL,' + - 'PRIMARY KEY (number,hash)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);' + - 'CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);' + - 'COMMIT;') + await this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS " + + this.table + + " (" + + "fork BOOLEAN NOT NULL," + + "legacy BOOLEAN NOT NULL," + + "hash VARCHAR(64) NOT NULL," + + "inner_hash VARCHAR(64) NOT NULL," + + "signature VARCHAR(100) NOT NULL," + + "currency VARCHAR(50) NOT NULL," + + "issuer VARCHAR(50) NOT NULL," + + "issuersFrame INTEGER NULL," + + "issuersFrameVar INTEGER NULL," + + "issuersCount INTEGER NULL," + + "len INTEGER NULL," + + "parameters VARCHAR(255)," + + "previousHash VARCHAR(64)," + + "previousIssuer VARCHAR(50)," + + "version INTEGER NOT NULL," + + "membersCount INTEGER NOT NULL," + + "monetaryMass VARCHAR(100) DEFAULT '0'," + + "UDTime DATETIME," + + "medianTime DATETIME NOT NULL," + + "dividend INTEGER DEFAULT '0'," + + "unitbase INTEGER NULL," + + "time DATETIME NOT NULL," + + "powMin INTEGER NOT NULL," + + "number INTEGER NOT NULL," + + "nonce INTEGER NOT NULL," + + "transactions TEXT," + + "certifications TEXT," + + "identities TEXT," + + "joiners TEXT," + + "actives TEXT," + + "leavers TEXT," + + "revoked TEXT," + + "excluded TEXT," + + "created DATETIME DEFAULT NULL," + + "updated DATETIME DEFAULT NULL," + + "PRIMARY KEY (number,hash)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);" + + "CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);" + + "COMMIT;" + ); } cleanCache() { - this.current = null + this.current = null; } async getCurrent() { if (!this.current) { - this.current = (await this.query('SELECT * FROM block WHERE NOT fork ORDER BY number DESC LIMIT 1'))[0]; + this.current = ( + await this.query( + "SELECT * FROM block WHERE NOT fork ORDER BY number DESC LIMIT 1" + ) + )[0]; } - return this.current - } - - async getBlock(number:string | number): Promise<DBBlock|null> { - return (await this.query('SELECT * FROM block WHERE number = ? and NOT fork', [parseInt(String(number))]))[0]; - } - - async getAbsoluteBlock(number:number, hash:string): Promise<DBBlock|null> { - return (await this.query('SELECT * FROM block WHERE number = ? 
and hash = ?', [number, hash]))[0]; - } - - getBlocks(start:number, end:number) { - return this.query('SELECT * FROM block WHERE number BETWEEN ? and ? and NOT fork ORDER BY number ASC', [start, end]); - } - - async lastBlockOfIssuer(issuer:string) { - return (await this.query('SELECT * FROM block WHERE issuer = ? and NOT fork ORDER BY number DESC LIMIT 1', [issuer]))[0] - } - - async getCountOfBlocksIssuedBy(issuer:string) { - let res: any = await this.query('SELECT COUNT(*) as quantity FROM block WHERE issuer = ? and NOT fork', [issuer]); + return this.current; + } + + async getBlock(number: string | number): Promise<DBBlock | null> { + return ( + await this.query("SELECT * FROM block WHERE number = ? and NOT fork", [ + parseInt(String(number)), + ]) + )[0]; + } + + async getAbsoluteBlock( + number: number, + hash: string + ): Promise<DBBlock | null> { + return ( + await this.query("SELECT * FROM block WHERE number = ? and hash = ?", [ + number, + hash, + ]) + )[0]; + } + + getBlocks(start: number, end: number) { + return this.query( + "SELECT * FROM block WHERE number BETWEEN ? and ? and NOT fork ORDER BY number ASC", + [start, end] + ); + } + + async lastBlockOfIssuer(issuer: string) { + return ( + await this.query( + "SELECT * FROM block WHERE issuer = ? and NOT fork ORDER BY number DESC LIMIT 1", + [issuer] + ) + )[0]; + } + + async getCountOfBlocksIssuedBy(issuer: string) { + let res: any = await this.query( + "SELECT COUNT(*) as quantity FROM block WHERE issuer = ? and NOT fork", + [issuer] + ); return res[0].quantity; } - getPotentialForkBlocks(numberStart:number, medianTimeStart:number, maxNumber:number) { - return this.query('SELECT * FROM block WHERE fork AND number >= ? AND number <= ? AND medianTime >= ? ORDER BY number DESC', [numberStart, maxNumber, medianTimeStart]); + getPotentialForkBlocks( + numberStart: number, + medianTimeStart: number, + maxNumber: number + ) { + return this.query( + "SELECT * FROM block WHERE fork AND number >= ? AND number <= ? AND medianTime >= ? ORDER BY number DESC", + [numberStart, maxNumber, medianTimeStart] + ); } getPotentialRoots() { - return this.query('SELECT * FROM block WHERE fork AND number = ?', [0]) + return this.query("SELECT * FROM block WHERE fork AND number = ?", [0]); } - async saveBlock(block:DBBlock) { + async saveBlock(block: DBBlock) { let saved = await this.saveBlockAs(block, IS_NOT_FORK); if (!this.current || this.current.number < block.number) { this.current = block; @@ -142,21 +222,27 @@ export class BlockDAL extends AbstractSQLite<DBBlock> { return saved; } - saveSideBlock(block:DBBlock) { - return this.saveBlockAs(block, IS_FORK) + saveSideBlock(block: DBBlock) { + return this.saveBlockAs(block, IS_FORK); } - private async saveBlockAs(block:DBBlock, fork:boolean) { + private async saveBlockAs(block: DBBlock, fork: boolean) { block.fork = fork; return await this.saveEntity(block); } - async setSideBlock(number:number, previousBlock:DBBlock|null) { - await this.query('UPDATE block SET fork = ? WHERE number = ?', [true, number]); + async setSideBlock(number: number, previousBlock: DBBlock | null) { + await this.query("UPDATE block SET fork = ? WHERE number = ?", [ + true, + number, + ]); this.current = previousBlock; } - getNextForkBlocks(number:number, hash:string) { - return this.query('SELECT * FROM block WHERE fork AND number = ? AND previousHash like ? ORDER BY number', [number + 1, hash]); + getNextForkBlocks(number: number, hash: string) { + return this.query( + "SELECT * FROM block WHERE fork AND number = ? 
AND previousHash like ? ORDER BY number", + [number + 1, hash] + ); } } diff --git a/app/lib/dal/sqliteDAL/CertDAL.ts b/app/lib/dal/sqliteDAL/CertDAL.ts index 7cc11df2ee72e677f0dce703f5c629ae5b4b676f..875587b6bbaa5b133a6f4585fd27fc13ac259b95 100644 --- a/app/lib/dal/sqliteDAL/CertDAL.ts +++ b/app/lib/dal/sqliteDAL/CertDAL.ts @@ -11,148 +11,163 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {SQLiteDriver} from "../drivers/SQLiteDriver" -import {AbstractSQLite} from "./AbstractSQLite" -import {SandBox} from './SandBox'; -import {DBDocument} from './DocumentDAL'; +import { SQLiteDriver } from "../drivers/SQLiteDriver"; +import { AbstractSQLite } from "./AbstractSQLite"; +import { SandBox } from "./SandBox"; +import { DBDocument } from "./DocumentDAL"; -const constants = require('../../constants'); +const constants = require("../../constants"); export interface DBCert extends DBDocument { - linked:boolean - written:boolean - written_block:number|null - written_hash:string|null - sig:string - block_number:number - block_hash:string - target:string - to:string - from:string - block:number - expired: boolean | null - expires_on: number + linked: boolean; + written: boolean; + written_block: number | null; + written_hash: string | null; + sig: string; + block_number: number; + block_hash: string; + target: string; + to: string; + from: string; + block: number; + expired: boolean | null; + expires_on: number; } export class CertDAL extends AbstractSQLite<DBCert> { - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'cert', + "cert", // PK fields - ['from','target','sig'], + ["from", "target", "sig"], // Fields [ - 'linked', - 'written', - 'written_block', - 'written_hash', - 'sig', - 'block_number', - 'block_hash', - 'target', - 'to', - 'from', - 'block', - 'expired', - 'expires_on' + "linked", + "written", + "written_block", + "written_hash", + "sig", + "block_number", + "block_hash", + "target", + "to", + "from", + "block", + "expired", + "expires_on", ], // Arrays [], // Booleans - ['linked', 'written'], + ["linked", "written"], // BigIntegers [], // Transient [], - (entity:DBCert) => { - entity.written = entity.written || !!(entity.written_hash) + (entity: DBCert) => { + entity.written = entity.written || !!entity.written_hash; } - ) + ); } async init() { - await this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' + - '`from` VARCHAR(50) NOT NULL,' + - '`to` VARCHAR(50) NOT NULL,' + - 'target CHAR(64) NOT NULL,' + - 'sig VARCHAR(100) NOT NULL,' + - 'block_number INTEGER NOT NULL,' + - 'block_hash VARCHAR(64),' + - 'block INTEGER NOT NULL,' + - 'linked BOOLEAN NOT NULL,' + - 'written BOOLEAN NOT NULL,' + - 'written_block INTEGER,' + - 'written_hash VARCHAR(64),' + - 'expires_on INTEGER NULL,' + - 'PRIMARY KEY (`from`, target, sig, written_block)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_cert_from ON cert (`from`);' + - 'CREATE INDEX IF NOT EXISTS idx_cert_target ON cert (target);' + - 'CREATE INDEX IF NOT EXISTS idx_cert_linked ON cert (linked);' + - 'COMMIT;') + await this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS " + + this.table + + " (" + + "`from` VARCHAR(50) NOT NULL," + + "`to` VARCHAR(50) NOT NULL," + + "target CHAR(64) NOT NULL," + + "sig VARCHAR(100) NOT NULL," + + "block_number INTEGER NOT NULL," + + "block_hash VARCHAR(64)," + + "block INTEGER NOT NULL," + + "linked BOOLEAN NOT NULL," + + "written BOOLEAN NOT NULL," + + 
"written_block INTEGER," + + "written_hash VARCHAR(64)," + + "expires_on INTEGER NULL," + + "PRIMARY KEY (`from`, target, sig, written_block)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_cert_from ON cert (`from`);" + + "CREATE INDEX IF NOT EXISTS idx_cert_target ON cert (target);" + + "CREATE INDEX IF NOT EXISTS idx_cert_linked ON cert (linked);" + + "COMMIT;" + ); } - getToTarget(hash:string) { + getToTarget(hash: string) { return this.sqlFind({ - target: hash - }) + target: hash, + }); } - getFromPubkeyCerts(pubkey:string) { + getFromPubkeyCerts(pubkey: string) { return this.sqlFind({ - from: pubkey - }) + from: pubkey, + }); } getNotLinked() { return this.sqlFind({ - linked: false - }) + linked: false, + }); } - getNotLinkedToTarget(hash:string) { + getNotLinkedToTarget(hash: string) { return this.sqlFind({ target: hash, - linked: false - }) + linked: false, + }); } - saveNewCertification(cert:DBCert) { - return this.saveEntity(cert) + saveNewCertification(cert: DBCert) { + return this.saveEntity(cert); } - existsGivenCert(cert:DBCert) { - return this.sqlExisting(cert) + existsGivenCert(cert: DBCert) { + return this.sqlExisting(cert); } - deleteCert(cert:{ from:string, target:string, sig:string }) { - return this.deleteEntity(cert) + deleteCert(cert: { from: string; target: string; sig: string }) { + return this.deleteEntity(cert); } - async trimExpiredCerts(medianTime:number) { - await this.exec('DELETE FROM ' + this.table + ' WHERE expires_on IS NULL OR expires_on < ' + medianTime) + async trimExpiredCerts(medianTime: number) { + await this.exec( + "DELETE FROM " + + this.table + + " WHERE expires_on IS NULL OR expires_on < " + + medianTime + ); } /************************** * SANDBOX STUFF */ - getSandboxForKey = (pub:string) => { - const getRecorded = () => this.query('SELECT * FROM cert WHERE `from` = ? ORDER BY block_number ASC LIMIT ' + constants.SANDBOX_SIZE_CERTIFICATIONS, [pub]) - const compare = (compared:DBCert, reference:DBCert) => { + getSandboxForKey = (pub: string) => { + const getRecorded = () => + this.query( + "SELECT * FROM cert WHERE `from` = ? ORDER BY block_number ASC LIMIT " + + constants.SANDBOX_SIZE_CERTIFICATIONS, + [pub] + ); + const compare = (compared: DBCert, reference: DBCert) => { if (compared.block_number < reference.block_number) { - return -1 - } - else if (compared.block_number > reference.block_number) { - return 1 + return -1; + } else if (compared.block_number > reference.block_number) { + return 1; + } else { + return 0; } - else { - return 0 - } - } - return new SandBox(constants.SANDBOX_SIZE_CERTIFICATIONS, getRecorded, compare) - } + }; + return new SandBox( + constants.SANDBOX_SIZE_CERTIFICATIONS, + getRecorded, + compare + ); + }; } diff --git a/app/lib/dal/sqliteDAL/DocumentDAL.ts b/app/lib/dal/sqliteDAL/DocumentDAL.ts index c6bbf5d68c4d4728c41fc630c1141683f78abc88..817c15042ebce42bb8fa7cc6de4544b27ea6c08e 100644 --- a/app/lib/dal/sqliteDAL/DocumentDAL.ts +++ b/app/lib/dal/sqliteDAL/DocumentDAL.ts @@ -12,5 +12,5 @@ // GNU Affero General Public License for more details. export interface DBDocument { - issuers: string[] -} \ No newline at end of file + issuers: string[]; +} diff --git a/app/lib/dal/sqliteDAL/IdentityDAL.ts b/app/lib/dal/sqliteDAL/IdentityDAL.ts index af22cf43e9588faa108ab29d184cb6eb52e7c56e..96d602a0af9da5b84031716ff0f3d18bcb54938b 100644 --- a/app/lib/dal/sqliteDAL/IdentityDAL.ts +++ b/app/lib/dal/sqliteDAL/IdentityDAL.ts @@ -11,308 +11,330 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the // GNU Affero General Public License for more details. -import {AbstractSQLite} from "./AbstractSQLite" -import {SQLiteDriver} from "../drivers/SQLiteDriver" -import {SandBox} from './SandBox'; -import {IdentityDTO} from "../../dto/IdentityDTO" -import {Cloneable} from "../../dto/Cloneable"; -import {DBDocument} from './DocumentDAL'; +import { AbstractSQLite } from "./AbstractSQLite"; +import { SQLiteDriver } from "../drivers/SQLiteDriver"; +import { SandBox } from "./SandBox"; +import { IdentityDTO } from "../../dto/IdentityDTO"; +import { Cloneable } from "../../dto/Cloneable"; +import { DBDocument } from "./DocumentDAL"; -const constants = require('../../constants'); +const constants = require("../../constants"); export abstract class DBIdentity implements Cloneable { - clone(): any { - return DBIdentity.copyFromExisting(this) + return DBIdentity.copyFromExisting(this); } - certs:any[] = [] + certs: any[] = []; signed: { idty: { - pubkey: string - uid: string - buid: string - sig: string - member: string - wasMember: string - } - block_number: number - block_hash: string - sig: string - }[] = [] - - revoked: boolean - currentMSN: null - currentINN: null - buid: string - member: boolean - kick: boolean - leaving: boolean | null - wasMember: boolean - pubkey: string - uid: string - sig: string - revocation_sig: string | null - hash: string - written: boolean - wotb_id: number | null - revoked_on: number | null - expires_on: number + pubkey: string; + uid: string; + buid: string; + sig: string; + member: string; + wasMember: string; + }; + block_number: number; + block_hash: string; + sig: string; + }[] = []; + + revoked: boolean; + currentMSN: null; + currentINN: null; + buid: string; + member: boolean; + kick: boolean; + leaving: boolean | null; + wasMember: boolean; + pubkey: string; + uid: string; + sig: string; + revocation_sig: string | null; + hash: string; + written: boolean; + wotb_id: number | null; + revoked_on: number | null; + expires_on: number; getTargetHash() { return IdentityDTO.getTargetHash({ pub: this.pubkey, created_on: this.buid, - uid: this.uid - }) + uid: this.uid, + }); } json() { - const others:any[] = []; + const others: any[] = []; this.certs.forEach((cert) => { others.push({ - "pubkey": cert.from, - "meta": { - "block_number": cert.block_number, - "block_hash": cert.block_hash + pubkey: cert.from, + meta: { + block_number: cert.block_number, + block_hash: cert.block_hash, }, - "uids": cert.uids, - "isMember": cert.isMember, - "wasMember": cert.wasMember, - "signature": cert.sig + uids: cert.uids, + isMember: cert.isMember, + wasMember: cert.wasMember, + signature: cert.sig, }); }); - const uids = [{ - "uid": this.uid, - "meta": { - "timestamp": this.buid + const uids = [ + { + uid: this.uid, + meta: { + timestamp: this.buid, + }, + revoked: this.revoked, + revoked_on: parseInt(String(this.revoked_on)), + revocation_sig: this.revocation_sig, + self: this.sig, + others: others, }, - "revoked": this.revoked, - "revoked_on": parseInt(String(this.revoked_on)), - "revocation_sig": this.revocation_sig, - "self": this.sig, - "others": others - }]; - const signed:any[] = []; + ]; + const signed: any[] = []; this.signed.forEach((cert) => { signed.push({ - "uid": cert.idty.uid, - "pubkey": cert.idty.pubkey, - "meta": { - "timestamp": cert.idty.buid + uid: cert.idty.uid, + pubkey: cert.idty.pubkey, + meta: { + timestamp: cert.idty.buid, }, - "cert_time": { - "block": cert.block_number, - "block_hash": cert.block_hash + cert_time: { + block: cert.block_number, + 
block_hash: cert.block_hash, }, - "isMember": cert.idty.member, - "wasMember": cert.idty.wasMember, - "signature": cert.sig + isMember: cert.idty.member, + wasMember: cert.idty.wasMember, + signature: cert.sig, }); }); return { - "pubkey": this.pubkey, - "uids": uids, - "signed": signed - } + pubkey: this.pubkey, + uids: uids, + signed: signed, + }; } - static copyFromExisting(idty:DBIdentity) { - return new ExistingDBIdentity(idty) + static copyFromExisting(idty: DBIdentity) { + return new ExistingDBIdentity(idty); } } export class NewDBIdentity extends DBIdentity { - - revoked = false - currentMSN = null - currentINN = null - member = false - kick = false - leaving = false - wasMember = false - revocation_sig = null - written = false - wotb_id = null - revoked_on = null - expires_on = 0 + revoked = false; + currentMSN = null; + currentINN = null; + member = false; + kick = false; + leaving = false; + wasMember = false; + revocation_sig = null; + written = false; + wotb_id = null; + revoked_on = null; + expires_on = 0; constructor( - public pubkey:string, + public pubkey: string, public sig: string, public buid: string, public uid: string, - public hash: string, + public hash: string ) { - super() + super(); } } export class ExistingDBIdentity extends DBIdentity { - - constructor(idty:DBIdentity) { - super() - this.pubkey = idty.pubkey - this.sig = idty.sig - this.buid = idty.buid - this.uid = idty.uid - this.hash = idty.hash - this.revoked = idty.revoked - this.currentMSN = idty.currentMSN - this.currentINN = idty.currentINN - this.member = idty.member - this.kick = idty.kick - this.leaving = idty.leaving - this.wasMember = idty.wasMember - this.revocation_sig = idty.revocation_sig - this.written = idty.written - this.wotb_id = idty.wotb_id - this.revoked_on = idty.revoked_on - this.expires_on = idty.expires_on - this.certs = idty.certs || [] - this.signed = idty.signed || [] + constructor(idty: DBIdentity) { + super(); + this.pubkey = idty.pubkey; + this.sig = idty.sig; + this.buid = idty.buid; + this.uid = idty.uid; + this.hash = idty.hash; + this.revoked = idty.revoked; + this.currentMSN = idty.currentMSN; + this.currentINN = idty.currentINN; + this.member = idty.member; + this.kick = idty.kick; + this.leaving = idty.leaving; + this.wasMember = idty.wasMember; + this.revocation_sig = idty.revocation_sig; + this.written = idty.written; + this.wotb_id = idty.wotb_id; + this.revoked_on = idty.revoked_on; + this.expires_on = idty.expires_on; + this.certs = idty.certs || []; + this.signed = idty.signed || []; } } export interface DBSandboxIdentity extends DBDocument { - certsCount: number - ref_block: number + certsCount: number; + ref_block: number; } export class IdentityDAL extends AbstractSQLite<DBIdentity> { - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'idty', + "idty", // PK fields - ['pubkey', 'uid', 'hash'], + ["pubkey", "uid", "hash"], // Fields [ - 'revoked', - 'revoked_on', - 'revocation_sig', - 'currentMSN', - 'currentINN', - 'buid', - 'member', - 'kick', - 'leaving', - 'wasMember', - 'pubkey', - 'uid', - 'sig', - 'hash', - 'written', - 'wotb_id', - 'expired', - 'expires_on', - 'removed' + "revoked", + "revoked_on", + "revocation_sig", + "currentMSN", + "currentINN", + "buid", + "member", + "kick", + "leaving", + "wasMember", + "pubkey", + "uid", + "sig", + "hash", + "written", + "wotb_id", + "expired", + "expires_on", + "removed", ], // Arrays [], // Booleans - ['revoked', 'member', 'kick', 'leaving', 'wasMember', 'written', 
'removed'], + [ + "revoked", + "member", + "kick", + "leaving", + "wasMember", + "written", + "removed", + ], // BigIntegers [], // Transient - ['certsCount', 'ref_block'] - ) + ["certsCount", "ref_block"] + ); } async init() { - await this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' + - 'revoked BOOLEAN NOT NULL,' + - 'currentMSN INTEGER NULL,' + - 'currentINN INTEGER NULL,' + - 'buid VARCHAR(100) NOT NULL,' + - 'member BOOLEAN NOT NULL,' + - 'kick BOOLEAN NOT NULL,' + - 'leaving BOOLEAN NULL,' + - 'wasMember BOOLEAN NOT NULL,' + - 'pubkey VARCHAR(50) NOT NULL,' + - 'uid VARCHAR(255) NOT NULL,' + - 'sig VARCHAR(100) NOT NULL,' + - 'revocation_sig VARCHAR(100) NULL,' + - 'hash VARCHAR(64) NOT NULL,' + - 'written BOOLEAN NULL,' + - 'wotb_id INTEGER NULL,' + - 'expires_on INTEGER NULL,' + - 'PRIMARY KEY (pubkey,uid,hash)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_idty_pubkey ON idty (pubkey);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_uid ON idty (uid);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_kick ON idty (kick);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_member ON idty (member);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_wasMember ON idty (wasMember);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_hash ON idty (hash);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_written ON idty (written);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_currentMSN ON idty (currentMSN);' + - 'CREATE INDEX IF NOT EXISTS idx_idty_currentINN ON idty (currentINN);' + - 'COMMIT;') + await this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS " + + this.table + + " (" + + "revoked BOOLEAN NOT NULL," + + "currentMSN INTEGER NULL," + + "currentINN INTEGER NULL," + + "buid VARCHAR(100) NOT NULL," + + "member BOOLEAN NOT NULL," + + "kick BOOLEAN NOT NULL," + + "leaving BOOLEAN NULL," + + "wasMember BOOLEAN NOT NULL," + + "pubkey VARCHAR(50) NOT NULL," + + "uid VARCHAR(255) NOT NULL," + + "sig VARCHAR(100) NOT NULL," + + "revocation_sig VARCHAR(100) NULL," + + "hash VARCHAR(64) NOT NULL," + + "written BOOLEAN NULL," + + "wotb_id INTEGER NULL," + + "expires_on INTEGER NULL," + + "PRIMARY KEY (pubkey,uid,hash)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_idty_pubkey ON idty (pubkey);" + + "CREATE INDEX IF NOT EXISTS idx_idty_uid ON idty (uid);" + + "CREATE INDEX IF NOT EXISTS idx_idty_kick ON idty (kick);" + + "CREATE INDEX IF NOT EXISTS idx_idty_member ON idty (member);" + + "CREATE INDEX IF NOT EXISTS idx_idty_wasMember ON idty (wasMember);" + + "CREATE INDEX IF NOT EXISTS idx_idty_hash ON idty (hash);" + + "CREATE INDEX IF NOT EXISTS idx_idty_written ON idty (written);" + + "CREATE INDEX IF NOT EXISTS idx_idty_currentMSN ON idty (currentMSN);" + + "CREATE INDEX IF NOT EXISTS idx_idty_currentINN ON idty (currentINN);" + + "COMMIT;" + ); } - revokeIdentity(pubkey:string) { - return this.exec('DELETE FROM ' + this.table + ' WHERE pubkey = \'' + pubkey + '\'') + revokeIdentity(pubkey: string) { + return this.exec( + "DELETE FROM " + this.table + " WHERE pubkey = '" + pubkey + "'" + ); } - removeUnWrittenWithPubkey(pubkey:string) { + removeUnWrittenWithPubkey(pubkey: string) { return this.sqlRemoveWhere({ pubkey: pubkey, - written: false - }) + written: false, + }); } - removeUnWrittenWithUID(uid:string) { + removeUnWrittenWithUID(uid: string) { return this.sqlRemoveWhere({ uid: uid, - written: false - }) + written: false, + }); } setRevoked(pubkey: string) { - return this.query('UPDATE ' + this.table + ' SET revoked = ? 
WHERE pubkey = ?', [true, pubkey]) + return this.query( + "UPDATE " + this.table + " SET revoked = ? WHERE pubkey = ?", + [true, pubkey] + ); } - getByHash(hash:string) { + getByHash(hash: string) { return this.sqlFindOne({ - hash: hash - }) + hash: hash, + }); } - saveIdentity(idty:DBIdentity) { - return this.saveEntity(idty) + saveIdentity(idty: DBIdentity) { + return this.saveEntity(idty); } - async deleteByHash(hash:string) { - await this.exec('UPDATE ' + this.table + ' SET removed = 1 where hash = \'' + hash + '\'') + async deleteByHash(hash: string) { + await this.exec( + "UPDATE " + this.table + " SET removed = 1 where hash = '" + hash + "'" + ); } getToRevoke() { return this.sqlFind({ revocation_sig: { $null: false }, revoked: false, - wasMember: true - }) + wasMember: true, + }); } getPendingIdentities() { return this.sqlFind({ revocation_sig: { $null: true }, - revoked: false - }) + revoked: false, + }); } - searchThoseMatching(search:string) { + searchThoseMatching(search: string) { return this.sqlFindLikeAny({ pubkey: "%" + search + "%", - uid: "%" + search + "%" - }) + uid: "%" + search + "%", + }); } - async trimExpiredIdentities(medianTime:number) { - await this.exec('DELETE FROM ' + this.table + ' WHERE (expires_on IS NULL AND revocation_sig IS NULL) OR expires_on < ' + medianTime) + async trimExpiredIdentities(medianTime: number) { + await this.exec( + "DELETE FROM " + + this.table + + " WHERE (expires_on IS NULL AND revocation_sig IS NULL) OR expires_on < " + + medianTime + ); } /************************** @@ -320,32 +342,35 @@ export class IdentityDAL extends AbstractSQLite<DBIdentity> { */ getSandboxIdentities() { - return this.query('SELECT * FROM sandbox_idty LIMIT ' + (this.sandbox.maxSize), []) + return this.query( + "SELECT * FROM sandbox_idty LIMIT " + this.sandbox.maxSize, + [] + ); } - sandbox = new SandBox(constants.SANDBOX_SIZE_IDENTITIES, this.getSandboxIdentities.bind(this), (compared:DBSandboxIdentity, reference:DBSandboxIdentity) => { - if (compared.certsCount < reference.certsCount) { - return -1; - } - else if (compared.certsCount > reference.certsCount) { - return 1; - } - else if (compared.ref_block < reference.ref_block) { - return -1; - } - else if (compared.ref_block > reference.ref_block) { - return 1; - } - else { - return 0; + sandbox = new SandBox( + constants.SANDBOX_SIZE_IDENTITIES, + this.getSandboxIdentities.bind(this), + (compared: DBSandboxIdentity, reference: DBSandboxIdentity) => { + if (compared.certsCount < reference.certsCount) { + return -1; + } else if (compared.certsCount > reference.certsCount) { + return 1; + } else if (compared.ref_block < reference.ref_block) { + return -1; + } else if (compared.ref_block > reference.ref_block) { + return 1; + } else { + return 0; + } } - }); + ); getSandboxRoom() { - return this.sandbox.getSandboxRoom() + return this.sandbox.getSandboxRoom(); } - setSandboxSize(maxSize:number) { - this.sandbox.maxSize = maxSize + setSandboxSize(maxSize: number) { + this.sandbox.maxSize = maxSize; } } diff --git a/app/lib/dal/sqliteDAL/Initiable.ts b/app/lib/dal/sqliteDAL/Initiable.ts index d02073acd796c839185889fbc7abdb94942c35d3..68091a235762c0d6e55ebce60df3ca6b47ff8d5b 100644 --- a/app/lib/dal/sqliteDAL/Initiable.ts +++ b/app/lib/dal/sqliteDAL/Initiable.ts @@ -1,6 +1,5 @@ - export abstract class Initiable { - abstract init(): Promise<void> - abstract close(): Promise<void> - abstract cleanCache(): void + abstract init(): Promise<void>; + abstract close(): Promise<void>; + abstract cleanCache(): void; } diff 
--git a/app/lib/dal/sqliteDAL/MembershipDAL.ts b/app/lib/dal/sqliteDAL/MembershipDAL.ts index ddadff48d9138f1bc567666eb79405fa918584b2..bbbb17d1443166ec847a7671ae31062782fddb87 100644 --- a/app/lib/dal/sqliteDAL/MembershipDAL.ts +++ b/app/lib/dal/sqliteDAL/MembershipDAL.ts @@ -11,132 +11,156 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {SQLiteDriver} from "../drivers/SQLiteDriver"; -import {AbstractSQLite} from "./AbstractSQLite"; -import {SandBox} from './SandBox'; -import {DBDocument} from './DocumentDAL'; -import {Underscore} from "../../common-libs/underscore" +import { SQLiteDriver } from "../drivers/SQLiteDriver"; +import { AbstractSQLite } from "./AbstractSQLite"; +import { SandBox } from "./SandBox"; +import { DBDocument } from "./DocumentDAL"; +import { Underscore } from "../../common-libs/underscore"; -const constants = require('../../constants'); +const constants = require("../../constants"); export interface DBMembership extends DBDocument { - membership: string - issuer: string - number: number - blockNumber: number - blockHash: string - userid: string - certts: string - block: string - fpr: string - idtyHash: string - written: boolean - written_number: number | null - expires_on: number - signature: string - expired: boolean | null, - block_number: number + membership: string; + issuer: string; + number: number; + blockNumber: number; + blockHash: string; + userid: string; + certts: string; + block: string; + fpr: string; + idtyHash: string; + written: boolean; + written_number: number | null; + expires_on: number; + signature: string; + expired: boolean | null; + block_number: number; } export class MembershipDAL extends AbstractSQLite<DBMembership> { - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'membership', + "membership", // PK fields - ['issuer','signature'], + ["issuer", "signature"], // Fields [ - 'membership', - 'issuer', - 'number', - 'blockNumber', - 'blockHash', - 'userid', - 'certts', - 'block', - 'fpr', - 'idtyHash', - 'written', - 'written_number', - 'expires_on', - 'signature', - 'expired' + "membership", + "issuer", + "number", + "blockNumber", + "blockHash", + "userid", + "certts", + "block", + "fpr", + "idtyHash", + "written", + "written_number", + "expires_on", + "signature", + "expired", ], // Arrays [], // Booleans - ['written'], + ["written"], // BigIntegers [], // Transient [] - ) + ); } async init() { - await this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS membership (' + - 'membership CHAR(2) NOT NULL,' + - 'issuer VARCHAR(50) NOT NULL,' + - 'number INTEGER NOT NULL,' + - 'blockNumber INTEGER,' + - 'blockHash VARCHAR(64) NOT NULL,' + - 'userid VARCHAR(255) NOT NULL,' + - 'certts VARCHAR(100) NOT NULL,' + - 'block INTEGER,' + - 'fpr VARCHAR(64),' + - 'idtyHash VARCHAR(64),' + - 'written BOOLEAN NOT NULL,' + - 'written_number INTEGER,' + - 'expires_on INTEGER NULL,' + - 'signature VARCHAR(50),' + - 'PRIMARY KEY (issuer,signature)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_mmembership_idtyHash ON membership (idtyHash);' + - 'CREATE INDEX IF NOT EXISTS idx_mmembership_membership ON membership (membership);' + - 'CREATE INDEX IF NOT EXISTS idx_mmembership_written ON membership (written);' + - 'COMMIT;') + await this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS membership (" + + "membership CHAR(2) NOT NULL," + + "issuer VARCHAR(50) NOT NULL," + + "number INTEGER NOT NULL," + + "blockNumber INTEGER," + + "blockHash 
VARCHAR(64) NOT NULL," + + "userid VARCHAR(255) NOT NULL," + + "certts VARCHAR(100) NOT NULL," + + "block INTEGER," + + "fpr VARCHAR(64)," + + "idtyHash VARCHAR(64)," + + "written BOOLEAN NOT NULL," + + "written_number INTEGER," + + "expires_on INTEGER NULL," + + "signature VARCHAR(50)," + + "PRIMARY KEY (issuer,signature)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_mmembership_idtyHash ON membership (idtyHash);" + + "CREATE INDEX IF NOT EXISTS idx_mmembership_membership ON membership (membership);" + + "CREATE INDEX IF NOT EXISTS idx_mmembership_written ON membership (written);" + + "COMMIT;" + ); } - getMembershipsOfIssuer(issuer:string) { + getMembershipsOfIssuer(issuer: string) { return this.sqlFind({ - issuer: issuer - }) + issuer: issuer, + }); } - getPendingINOfTarget(hash:string) { + getPendingINOfTarget(hash: string) { return this.sqlFind({ idtyHash: hash, - membership: 'IN' - }) + membership: "IN", + }); } getPendingIN() { return this.sqlFind({ - membership: 'IN' - }) + membership: "IN", + }); } getPendingOUT() { return this.sqlFind({ - membership: 'OUT' - }) + membership: "OUT", + }); } - savePendingMembership(ms:DBMembership) { + savePendingMembership(ms: DBMembership) { ms.membership = ms.membership.toUpperCase(); ms.written = false; - return this.saveEntity(Underscore.pick(ms, 'membership', 'issuer', 'number', 'blockNumber', 'blockHash', 'userid', 'certts', 'block', 'fpr', 'idtyHash', 'expires_on', 'written', 'written_number', 'signature')) + return this.saveEntity( + Underscore.pick( + ms, + "membership", + "issuer", + "number", + "blockNumber", + "blockHash", + "userid", + "certts", + "block", + "fpr", + "idtyHash", + "expires_on", + "written", + "written_number", + "signature" + ) + ); } - async deleteMS(ms:DBMembership) { - await this.deleteEntity(ms) + async deleteMS(ms: DBMembership) { + await this.deleteEntity(ms); } - async trimExpiredMemberships(medianTime:number) { - await this.exec('DELETE FROM ' + this.table + ' WHERE expires_on IS NULL OR expires_on < ' + medianTime) + async trimExpiredMemberships(medianTime: number) { + await this.exec( + "DELETE FROM " + + this.table + + " WHERE expires_on IS NULL OR expires_on < " + + medianTime + ); } /************************** @@ -144,26 +168,34 @@ export class MembershipDAL extends AbstractSQLite<DBMembership> { */ getSandboxMemberships() { - return this.query('SELECT * FROM sandbox_memberships LIMIT ' + (this.sandbox.maxSize), []) + return this.query( + "SELECT * FROM sandbox_memberships LIMIT " + this.sandbox.maxSize, + [] + ); } - sandbox = new SandBox(constants.SANDBOX_SIZE_MEMBERSHIPS, this.getSandboxMemberships.bind(this), (compared:{ block_number: number, issuers: string[] }, reference:{ block_number: number, issuers: string[] }) => { - if (compared.block_number < reference.block_number) { - return -1; - } - else if (compared.block_number > reference.block_number) { - return 1; - } - else { - return 0; + sandbox = new SandBox( + constants.SANDBOX_SIZE_MEMBERSHIPS, + this.getSandboxMemberships.bind(this), + ( + compared: { block_number: number; issuers: string[] }, + reference: { block_number: number; issuers: string[] } + ) => { + if (compared.block_number < reference.block_number) { + return -1; + } else if (compared.block_number > reference.block_number) { + return 1; + } else { + return 0; + } } - }); + ); getSandboxRoom() { - return this.sandbox.getSandboxRoom() + return this.sandbox.getSandboxRoom(); } - setSandboxSize(maxSize:number) { - this.sandbox.maxSize = maxSize + setSandboxSize(maxSize: number) { + 
this.sandbox.maxSize = maxSize; } } diff --git a/app/lib/dal/sqliteDAL/MetaDAL.ts b/app/lib/dal/sqliteDAL/MetaDAL.ts index 53b7b49086d1d7f1a21ff53d8448f7985ca1be03..5660bb53567c97a5eaf4fea2411affdf385fb506 100644 --- a/app/lib/dal/sqliteDAL/MetaDAL.ts +++ b/app/lib/dal/sqliteDAL/MetaDAL.ts @@ -11,34 +11,30 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractSQLite} from "./AbstractSQLite" -import {SQLiteDriver} from "../drivers/SQLiteDriver" -import {ConfDTO} from "../../dto/ConfDTO" -import {TransactionDTO} from "../../dto/TransactionDTO" -import {IdentityDAL} from "./IdentityDAL" +import { AbstractSQLite } from "./AbstractSQLite"; +import { SQLiteDriver } from "../drivers/SQLiteDriver"; +import { ConfDTO } from "../../dto/ConfDTO"; +import { TransactionDTO } from "../../dto/TransactionDTO"; +import { IdentityDAL } from "./IdentityDAL"; -const logger = require('../../logger').NewLogger('metaDAL'); +const logger = require("../../logger").NewLogger("metaDAL"); export interface DBMeta { - id: number, - version: number + id: number; + version: number; } export class MetaDAL extends AbstractSQLite<DBMeta> { + driverCopy: SQLiteDriver; - driverCopy:SQLiteDriver - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'meta', + "meta", // PK fields - ['version'], + ["version"], // Fields - [ - 'id', - 'version' - ], + ["id", "version"], // Arrays [], // Booleans @@ -47,92 +43,90 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { [], // Transient [] - ) - this.driverCopy = driver + ); + this.driverCopy = driver; } - private migrations:any = { - + private migrations: any = { // Test - 0: 'BEGIN;' + - - // This table was initially created by BlockDAL, but now it has been removed so we keep it here - // to keep the unit tests work - 'CREATE TABLE IF NOT EXISTS block (' + - 'fork BOOLEAN NOT NULL,' + - 'hash VARCHAR(64) NOT NULL,' + - 'inner_hash VARCHAR(64) NOT NULL,' + - 'signature VARCHAR(100) NOT NULL,' + - 'currency VARCHAR(50) NOT NULL,' + - 'issuer VARCHAR(50) NOT NULL,' + - 'parameters VARCHAR(255),' + - 'previousHash VARCHAR(64),' + - 'previousIssuer VARCHAR(50),' + - 'version INTEGER NOT NULL,' + - 'membersCount INTEGER NOT NULL,' + - 'monetaryMass VARCHAR(100) DEFAULT \'0\',' + - 'UDTime DATETIME,' + - 'medianTime DATETIME NOT NULL,' + - 'dividend INTEGER DEFAULT \'0\',' + - 'unitbase INTEGER NULL,' + - 'time DATETIME NOT NULL,' + - 'powMin INTEGER NOT NULL,' + - 'number INTEGER NOT NULL,' + - 'nonce INTEGER NOT NULL,' + - 'transactions TEXT,' + - 'certifications TEXT,' + - 'identities TEXT,' + - 'joiners TEXT,' + - 'actives TEXT,' + - 'leavers TEXT,' + - 'revoked TEXT,' + - 'excluded TEXT,' + - 'created DATETIME DEFAULT NULL,' + - 'updated DATETIME DEFAULT NULL,' + - 'PRIMARY KEY (number,hash)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);' + - 'CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);' + - - // Same, but for Transactions - 'CREATE TABLE IF NOT EXISTS txs (' + - 'hash CHAR(64) NOT NULL,' + - 'block_number INTEGER,' + - 'locktime INTEGER NOT NULL,' + - 'version INTEGER NOT NULL,' + - 'currency VARCHAR(50) NOT NULL,' + - 'comment VARCHAR(255) NOT NULL,' + - 'time DATETIME,' + - 'inputs TEXT NOT NULL,' + - 'unlocks TEXT NOT NULL,' + - 'outputs TEXT NOT NULL,' + - 'issuers TEXT NOT NULL,' + - 'signatures TEXT NOT NULL,' + - 'recipients TEXT NOT NULL,' + - 'written BOOLEAN NOT NULL,' + - 'removed BOOLEAN NOT NULL,' + - 
'PRIMARY KEY (hash)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_txs_issuers ON txs (issuers);' + - 'CREATE INDEX IF NOT EXISTS idx_txs_written ON txs (written);' + - 'CREATE INDEX IF NOT EXISTS idx_txs_removed ON txs (removed);' + - 'CREATE INDEX IF NOT EXISTS idx_txs_hash ON txs (hash);' + - - 'COMMIT;', + 0: + "BEGIN;" + + // This table was initially created by BlockDAL, but now it has been removed so we keep it here + // to keep the unit tests working + "CREATE TABLE IF NOT EXISTS block (" + + "fork BOOLEAN NOT NULL," + + "hash VARCHAR(64) NOT NULL," + + "inner_hash VARCHAR(64) NOT NULL," + + "signature VARCHAR(100) NOT NULL," + + "currency VARCHAR(50) NOT NULL," + + "issuer VARCHAR(50) NOT NULL," + + "parameters VARCHAR(255)," + + "previousHash VARCHAR(64)," + + "previousIssuer VARCHAR(50)," + + "version INTEGER NOT NULL," + + "membersCount INTEGER NOT NULL," + + "monetaryMass VARCHAR(100) DEFAULT '0'," + + "UDTime DATETIME," + + "medianTime DATETIME NOT NULL," + + "dividend INTEGER DEFAULT '0'," + + "unitbase INTEGER NULL," + + "time DATETIME NOT NULL," + + "powMin INTEGER NOT NULL," + + "number INTEGER NOT NULL," + + "nonce INTEGER NOT NULL," + + "transactions TEXT," + + "certifications TEXT," + + "identities TEXT," + + "joiners TEXT," + + "actives TEXT," + + "leavers TEXT," + + "revoked TEXT," + + "excluded TEXT," + + "created DATETIME DEFAULT NULL," + + "updated DATETIME DEFAULT NULL," + + "PRIMARY KEY (number,hash)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);" + + "CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);" + + // Same, but for Transactions + "CREATE TABLE IF NOT EXISTS txs (" + + "hash CHAR(64) NOT NULL," + + "block_number INTEGER," + + "locktime INTEGER NOT NULL," + + "version INTEGER NOT NULL," + + "currency VARCHAR(50) NOT NULL," + + "comment VARCHAR(255) NOT NULL," + + "time DATETIME," + + "inputs TEXT NOT NULL," + + "unlocks TEXT NOT NULL," + + "outputs TEXT NOT NULL," + + "issuers TEXT NOT NULL," + + "signatures TEXT NOT NULL," + + "recipients TEXT NOT NULL," + + "written BOOLEAN NOT NULL," + + "removed BOOLEAN NOT NULL," + + "PRIMARY KEY (hash)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_txs_issuers ON txs (issuers);" + + "CREATE INDEX IF NOT EXISTS idx_txs_written ON txs (written);" + + "CREATE INDEX IF NOT EXISTS idx_txs_removed ON txs (removed);" + + "CREATE INDEX IF NOT EXISTS idx_txs_hash ON txs (hash);" + + "COMMIT;", // Test - 1: 'BEGIN;' + - 'CREATE VIEW IF NOT EXISTS identities_pending AS SELECT * FROM idty WHERE NOT written;' + - 'CREATE VIEW IF NOT EXISTS certifications_pending AS SELECT * FROM cert WHERE NOT written;' + - 'CREATE VIEW IF NOT EXISTS transactions_pending AS SELECT * FROM txs WHERE NOT written;' + - 'CREATE VIEW IF NOT EXISTS transactions_desc AS SELECT * FROM txs ORDER BY time DESC;' + - 'CREATE VIEW IF NOT EXISTS forks AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE fork ORDER BY number DESC;' + - 'CREATE VIEW IF NOT EXISTS blockchain AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE NOT fork ORDER BY number DESC;' + - 'CREATE VIEW IF NOT EXISTS network AS select i.uid, (last_try - first_down) / 1000 as down_delay_in_sec, p.* from peer p LEFT JOIN idty i on i.pubkey = p.pubkey ORDER by down_delay_in_sec;' + - 'COMMIT;', + 1: + "BEGIN;" + + "CREATE VIEW IF NOT EXISTS identities_pending AS SELECT * FROM idty WHERE NOT written;" + + "CREATE VIEW IF NOT EXISTS
certifications_pending AS SELECT * FROM cert WHERE NOT written;" + + "CREATE VIEW IF NOT EXISTS transactions_pending AS SELECT * FROM txs WHERE NOT written;" + + "CREATE VIEW IF NOT EXISTS transactions_desc AS SELECT * FROM txs ORDER BY time DESC;" + + "CREATE VIEW IF NOT EXISTS forks AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE fork ORDER BY number DESC;" + + "CREATE VIEW IF NOT EXISTS blockchain AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE NOT fork ORDER BY number DESC;" + + "CREATE VIEW IF NOT EXISTS network AS select i.uid, (last_try - first_down) / 1000 as down_delay_in_sec, p.* from peer p LEFT JOIN idty i on i.pubkey = p.pubkey ORDER by down_delay_in_sec;" + + "COMMIT;", // New `receveid` column - 2: 'BEGIN; ALTER TABLE txs ADD COLUMN received INTEGER NULL; COMMIT;', + 2: "BEGIN; ALTER TABLE txs ADD COLUMN received INTEGER NULL; COMMIT;", // Update wrong recipients field (was not filled in) 3: async () => {}, @@ -143,57 +137,56 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { // Migrates wrong monetary masses 5: async () => {}, - 6: 'BEGIN; ALTER TABLE idty ADD COLUMN expired INTEGER NULL; COMMIT;', - 7: 'BEGIN; ALTER TABLE cert ADD COLUMN expired INTEGER NULL; COMMIT;', - 8: 'BEGIN; ALTER TABLE membership ADD COLUMN expired INTEGER NULL; COMMIT;', - 9: 'BEGIN;' + - 'ALTER TABLE txs ADD COLUMN output_base INTEGER NULL;' + - 'ALTER TABLE txs ADD COLUMN output_amount INTEGER NULL;' + - 'COMMIT;', - 10: 'BEGIN; ALTER TABLE txs ADD COLUMN blockstamp VARCHAR(200) NULL; COMMIT;', - 11: 'BEGIN;' + - 'ALTER TABLE block ADD COLUMN issuersFrame INTEGER NULL;' + - 'ALTER TABLE block ADD COLUMN issuersFrameVar INTEGER NULL;' + - 'ALTER TABLE block ADD COLUMN issuersCount INTEGER NULL;' + - 'COMMIT;', + 6: "BEGIN; ALTER TABLE idty ADD COLUMN expired INTEGER NULL; COMMIT;", + 7: "BEGIN; ALTER TABLE cert ADD COLUMN expired INTEGER NULL; COMMIT;", + 8: "BEGIN; ALTER TABLE membership ADD COLUMN expired INTEGER NULL; COMMIT;", + 9: + "BEGIN;" + + "ALTER TABLE txs ADD COLUMN output_base INTEGER NULL;" + + "ALTER TABLE txs ADD COLUMN output_amount INTEGER NULL;" + + "COMMIT;", + 10: "BEGIN; ALTER TABLE txs ADD COLUMN blockstamp VARCHAR(200) NULL; COMMIT;", + 11: + "BEGIN;" + + "ALTER TABLE block ADD COLUMN issuersFrame INTEGER NULL;" + + "ALTER TABLE block ADD COLUMN issuersFrameVar INTEGER NULL;" + + "ALTER TABLE block ADD COLUMN issuersCount INTEGER NULL;" + + "COMMIT;", 12: async () => { - let blockDAL = new MetaDAL(this.driverCopy) - await blockDAL.exec('ALTER TABLE block ADD COLUMN len INTEGER NULL;'); - await blockDAL.exec('ALTER TABLE txs ADD COLUMN len INTEGER NULL;'); + let blockDAL = new MetaDAL(this.driverCopy); + await blockDAL.exec("ALTER TABLE block ADD COLUMN len INTEGER NULL;"); + await blockDAL.exec("ALTER TABLE txs ADD COLUMN len INTEGER NULL;"); }, - 13: 'BEGIN; ALTER TABLE txs ADD COLUMN blockstampTime INTEGER NULL; COMMIT;', - 14: 'BEGIN; ' + - - 'CREATE VIEW IF NOT EXISTS sandbox_txs AS SELECT * FROM txs WHERE NOT written AND NOT removed ORDER BY output_base DESC, output_amount DESC;' + - - 'CREATE VIEW IF NOT EXISTS sandbox_idty AS SELECT ' + - 'I.*, ' + - 'I.hash, ' + - '(SELECT COUNT(*) FROM cert C where C.target = I.hash) AS certsCount, ' + + 13: "BEGIN; ALTER TABLE txs ADD COLUMN blockstampTime INTEGER NULL; COMMIT;", + 14: + "BEGIN; " + + "CREATE VIEW IF NOT EXISTS sandbox_txs AS SELECT * FROM txs WHERE NOT written AND NOT 
removed ORDER BY output_base DESC, output_amount DESC;" + + "CREATE VIEW IF NOT EXISTS sandbox_idty AS SELECT " + + "I.*, " + + "I.hash, " + + "(SELECT COUNT(*) FROM cert C where C.target = I.hash) AS certsCount, " + 'CAST(SUBSTR(buid, 0, INSTR(buid, "-")) as number) AS ref_block ' + - 'FROM idty as I ' + - 'WHERE NOT I.member ' + - 'AND I.expired IS NULL ' + - 'ORDER BY certsCount DESC, ref_block DESC;' + - - 'CREATE VIEW IF NOT EXISTS sandbox_memberships AS SELECT ' + - '* ' + - 'FROM membership ' + - 'WHERE expired IS NULL ' + - 'AND written_number IS NULL ' + - 'ORDER BY blockNumber DESC;' + - - 'CREATE VIEW IF NOT EXISTS sandbox_certs AS SELECT ' + - '* ' + - 'FROM cert ' + - 'WHERE expired IS NULL ' + - 'AND written_block IS NULL ' + - 'ORDER BY block_number DESC;' + - 'COMMIT;', + "FROM idty as I " + + "WHERE NOT I.member " + + "AND I.expired IS NULL " + + "ORDER BY certsCount DESC, ref_block DESC;" + + "CREATE VIEW IF NOT EXISTS sandbox_memberships AS SELECT " + + "* " + + "FROM membership " + + "WHERE expired IS NULL " + + "AND written_number IS NULL " + + "ORDER BY blockNumber DESC;" + + "CREATE VIEW IF NOT EXISTS sandbox_certs AS SELECT " + + "* " + + "FROM cert " + + "WHERE expired IS NULL " + + "AND written_block IS NULL " + + "ORDER BY block_number DESC;" + + "COMMIT;", 15: async () => { - let idtyDAL = new IdentityDAL(this.driverCopy) - await idtyDAL.exec('ALTER TABLE idty ADD COLUMN revoked_on INTEGER NULL'); + let idtyDAL = new IdentityDAL(this.driverCopy); + await idtyDAL.exec("ALTER TABLE idty ADD COLUMN revoked_on INTEGER NULL"); }, 16: async () => {}, @@ -202,124 +195,140 @@ export class MetaDAL extends AbstractSQLite<DBMeta> { // This migration is now obsolete }, - 18: 'BEGIN;' + + 18: + "BEGIN;" + // Add a `massReeval` column - // 'ALTER TABLE b_index ADD COLUMN massReeval VARCHAR(100) NOT NULL DEFAULT \'0\';' + - 'COMMIT;', + // 'ALTER TABLE b_index ADD COLUMN massReeval VARCHAR(100) NOT NULL DEFAULT \'0\';' + + "COMMIT;", - 19: 'BEGIN;' + + 19: + "BEGIN;" + // Add a `removed` column - 'ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0;' + - 'COMMIT;', + "ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0;" + + "COMMIT;", /** * Feeds the table of wallets with balances */ - 20: async () => { - }, + 20: async () => {}, - 21: async (conf:ConfDTO) => { - }, + 21: async (conf: ConfDTO) => {}, // Replay the wallet table feeding, because of a potential bug 22: () => { - return this.migrations[20]() + return this.migrations[20](); }, - 23: 'BEGIN;' + - 'COMMIT;', + 23: "BEGIN;" + "COMMIT;", /** * Feeds the m_index.chainable_on correctly */ - 24: async (conf:ConfDTO) => { - }, + 24: async (conf: ConfDTO) => {}, /** * Wrong transaction storage */ 25: async () => { - const txsDAL:any = new MetaDAL(this.driverCopy) - const wrongTXS = await txsDAL.query('SELECT * FROM txs WHERE outputs LIKE ? OR inputs LIKE ?', ['%amount%', '%amount%']) - let i = 1 + const txsDAL: any = new MetaDAL(this.driverCopy); + const wrongTXS = await txsDAL.query( + "SELECT * FROM txs WHERE outputs LIKE ? 
OR inputs LIKE ?", + ["%amount%", "%amount%"] + ); + let i = 1; for (const tx of wrongTXS) { - logger.info('Updating incorrect transaction %s/%s.', i, wrongTXS.length) - i++ - const dto = TransactionDTO.fromJSONObject(tx) - dto.outputs = dto.outputs.map(o => { - if (typeof o === 'object') { - return TransactionDTO.outputObj2Str(o) + logger.info( + "Updating incorrect transaction %s/%s.", + i, + wrongTXS.length + ); + i++; + const dto = TransactionDTO.fromJSONObject(tx); + dto.outputs = dto.outputs.map((o) => { + if (typeof o === "object") { + return TransactionDTO.outputObj2Str(o); } - return o - }) - dto.inputs = dto.inputs.map(o => { - if (typeof o === 'object') { - return TransactionDTO.inputObj2Str(o) + return o; + }); + dto.inputs = dto.inputs.map((o) => { + if (typeof o === "object") { + return TransactionDTO.inputObj2Str(o); } - return o - }) - await txsDAL.exec('UPDATE txs SET ' + - 'outputs = \'' + JSON.stringify(dto.outputs) + '\', ' + - 'inputs = \'' + JSON.stringify(dto.inputs) + '\' ' + - 'WHERE hash = \'' + tx.hash + '\'') + return o; + }); + await txsDAL.exec( + "UPDATE txs SET " + + "outputs = '" + + JSON.stringify(dto.outputs) + + "', " + + "inputs = '" + + JSON.stringify(dto.inputs) + + "' " + + "WHERE hash = '" + + tx.hash + + "'" + ); } }, }; async init() { - await this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' + - 'id INTEGER NOT NULL,' + - 'version INTEGER NOT NULL,' + - 'PRIMARY KEY (id)' + - ');' + - 'COMMIT;') + await this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS " + + this.table + + " (" + + "id INTEGER NOT NULL," + + "version INTEGER NOT NULL," + + "PRIMARY KEY (id)" + + ");" + + "COMMIT;" + ); } - private async executeMigration(migration: (string|((conf:ConfDTO)=>void)), conf:ConfDTO) { + private async executeMigration( + migration: string | ((conf: ConfDTO) => void), + conf: ConfDTO + ) { try { if (typeof migration == "string") { - // Simple SQL script to pass await this.exec(migration); - } else { - // JS function to execute await migration(conf); - } } catch (e) { - logger.warn('An error occured during DB migration, continue.', e); + logger.warn("An error occured during DB migration, continue.", e); } } - async upgradeDatabase(conf:ConfDTO) { + async upgradeDatabase(conf: ConfDTO) { let version = await this.getVersion(); - while(this.migrations[version]) { + while (this.migrations[version]) { await this.executeMigration(this.migrations[version], conf); // Automated increment - await this.exec('UPDATE meta SET version = version + 1'); + await this.exec("UPDATE meta SET version = version + 1"); version++; } } getRow() { - return this.sqlFindOne({ id: 1 }) + return this.sqlFindOne({ id: 1 }); } async getVersion() { try { - const row = await this.getRow() + const row = await this.getRow(); return row.version; - } catch(e) { - await this.exec('INSERT INTO ' + this.table + ' VALUES (1,0);') + } catch (e) { + await this.exec("INSERT INTO " + this.table + " VALUES (1,0);"); return 0; } } cleanData() { // Never clean data of this table - return Promise.resolve() + return Promise.resolve(); } } diff --git a/app/lib/dal/sqliteDAL/SandBox.ts b/app/lib/dal/sqliteDAL/SandBox.ts index 903d2b28564bf05122b33b029bcfa3a641c94929..05a49ae8959c1703582ea1e2188a556ac5921a27 100644 --- a/app/lib/dal/sqliteDAL/SandBox.ts +++ b/app/lib/dal/sqliteDAL/SandBox.ts @@ -11,36 +11,35 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {DBDocument} from './DocumentDAL'; +import { DBDocument } from "./DocumentDAL"; export class SandBox<T extends DBDocument> { + maxSize: number; - maxSize:number - constructor( - maxSize:number, - public findElements:() => Promise<T[]>, - public compareElements:(t1:T, t2:T) => number + maxSize: number, + public findElements: () => Promise<T[]>, + public compareElements: (t1: T, t2: T) => number ) { - this.maxSize = maxSize || 10 + this.maxSize = maxSize || 10; } - - async acceptNewSandBoxEntry(element:T, pubkey:string) { + + async acceptNewSandBoxEntry(element: T, pubkey: string) { // Accept any document which has the exception pubkey (= the node pubkey) if (element.issuers.indexOf(pubkey) !== -1) { return true; } - const elements = await this.findElements() + const elements = await this.findElements(); if (elements.length < this.maxSize) { return true; } - const lowestElement:T = elements[elements.length - 1]; - const comparison = this.compareElements(element, lowestElement) + const lowestElement: T = elements[elements.length - 1]; + const comparison = this.compareElements(element, lowestElement); return comparison > 0; } async getSandboxRoom() { - const elems = await this.findElements() + const elems = await this.findElements(); return this.maxSize - elems.length; } } diff --git a/app/lib/dal/sqliteDAL/index/BIndexDAL.ts b/app/lib/dal/sqliteDAL/index/BIndexDAL.ts index 2b8537825305fec4f2bf995754acb0681bb76dc8..2cdc8e428a19302b39c7b7d96c9400c10d3c1d50 100644 --- a/app/lib/dal/sqliteDAL/index/BIndexDAL.ts +++ b/app/lib/dal/sqliteDAL/index/BIndexDAL.ts @@ -11,94 +11,100 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractSQLite} from "../AbstractSQLite"; -import {DBHead} from "../../../db/DBHead"; -import {SQLiteDriver} from "../../drivers/SQLiteDriver"; +import { AbstractSQLite } from "../AbstractSQLite"; +import { DBHead } from "../../../db/DBHead"; +import { SQLiteDriver } from "../../drivers/SQLiteDriver"; export class BIndexDAL extends AbstractSQLite<DBHead> { - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'b_index', + "b_index", // PK fields - ['number'], + ["number"], // Fields [ - 'version', - 'bsize', - 'hash', - 'issuer', - 'time', - 'number', - 'membersCount', - 'issuersCount', - 'issuersFrame', - 'issuersFrameVar', - 'issuerDiff', - 'avgBlockSize', - 'medianTime', - 'dividend', - 'mass', - 'massReeval', - 'unitBase', - 'powMin', - 'udTime', - 'udReevalTime', - 'diffNumber', - 'speed' + "version", + "bsize", + "hash", + "issuer", + "time", + "number", + "membersCount", + "issuersCount", + "issuersFrame", + "issuersFrameVar", + "issuerDiff", + "avgBlockSize", + "medianTime", + "dividend", + "mass", + "massReeval", + "unitBase", + "powMin", + "udTime", + "udReevalTime", + "diffNumber", + "speed", ], // Arrays [], // Booleans - ['leaving'], + ["leaving"], // BigIntegers - ['mass', 'massReeval'], + ["mass", "massReeval"], // Transient [] - ) + ); } async init() { - await this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' + - 'version INTEGER NOT NULL,' + - 'bsize INTEGER NOT NULL,' + - 'hash VARCHAR(64) NOT NULL,' + - 'issuer VARCHAR(50) NOT NULL,' + - 'time INTEGER NOT NULL,' + - 'number INTEGER NOT NULL,' + - 'membersCount INTEGER NOT NULL,' + - 'issuersCount INTEGER NOT NULL,' + - 'issuersFrame INTEGER NOT NULL,' + - 'issuersFrameVar INTEGER NOT NULL,' + - 'issuerDiff INTEGER NULL,' + - 'avgBlockSize INTEGER NOT NULL,' + 
- 'medianTime INTEGER NOT NULL,' + - 'dividend INTEGER NOT NULL,' + - 'mass VARCHAR(100) NOT NULL,' + - 'unitBase INTEGER NOT NULL,' + - 'powMin INTEGER NOT NULL,' + - 'udTime INTEGER NOT NULL,' + - 'udReevalTime INTEGER NOT NULL,' + - 'diffNumber INTEGER NOT NULL,' + - 'speed FLOAT NOT NULL,' + - 'PRIMARY KEY (number)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_bindex_number ON b_index (number);' + - 'CREATE INDEX IF NOT EXISTS idx_bindex_issuer ON b_index (issuer);' + - 'COMMIT;') + await this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS " + + this.table + + " (" + + "version INTEGER NOT NULL," + + "bsize INTEGER NOT NULL," + + "hash VARCHAR(64) NOT NULL," + + "issuer VARCHAR(50) NOT NULL," + + "time INTEGER NOT NULL," + + "number INTEGER NOT NULL," + + "membersCount INTEGER NOT NULL," + + "issuersCount INTEGER NOT NULL," + + "issuersFrame INTEGER NOT NULL," + + "issuersFrameVar INTEGER NOT NULL," + + "issuerDiff INTEGER NULL," + + "avgBlockSize INTEGER NOT NULL," + + "medianTime INTEGER NOT NULL," + + "dividend INTEGER NOT NULL," + + "mass VARCHAR(100) NOT NULL," + + "unitBase INTEGER NOT NULL," + + "powMin INTEGER NOT NULL," + + "udTime INTEGER NOT NULL," + + "udReevalTime INTEGER NOT NULL," + + "diffNumber INTEGER NOT NULL," + + "speed FLOAT NOT NULL," + + "PRIMARY KEY (number)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_bindex_number ON b_index (number);" + + "CREATE INDEX IF NOT EXISTS idx_bindex_issuer ON b_index (issuer);" + + "COMMIT;" + ); } /** * Get HEAD~n * @param n Position */ - async head(n:number): Promise<DBHead> { + async head(n: number): Promise<DBHead> { if (!n) { - throw "Cannot read HEAD~0, which is the incoming block" + throw "Cannot read HEAD~0, which is the incoming block"; } - const headRecords = await this.query('SELECT * FROM ' + this.table + ' ORDER BY number DESC LIMIT 1 OFFSET ?', [n - 1]); + const headRecords = await this.query( + "SELECT * FROM " + this.table + " ORDER BY number DESC LIMIT 1 OFFSET ?", + [n - 1] + ); return headRecords[0]; } @@ -106,7 +112,10 @@ export class BIndexDAL extends AbstractSQLite<DBHead> { * Get the last record available in bindex */ async tail() { - const tailRecords = await this.query('SELECT * FROM ' + this.table + ' ORDER BY number ASC LIMIT 1', []); + const tailRecords = await this.query( + "SELECT * FROM " + this.table + " ORDER BY number ASC LIMIT 1", + [] + ); return tailRecords[0]; } @@ -115,16 +124,21 @@ export class BIndexDAL extends AbstractSQLite<DBHead> { * @param n * @param m */ - range(n:number, m:number) { + range(n: number, m: number) { const count = m - n + 1; - return this.query('SELECT * FROM ' + this.table + ' ORDER BY number DESC LIMIT ? OFFSET ?', [count, n - 1]); + return this.query( + "SELECT * FROM " + this.table + " ORDER BY number DESC LIMIT ? 
OFFSET ?", + [count, n - 1] + ); } - removeBlock(number:number) { - return this.exec('DELETE FROM ' + this.table + ' WHERE number = ' + number) + removeBlock(number: number) { + return this.exec("DELETE FROM " + this.table + " WHERE number = " + number); } - trimBlocks(maxnumber:number) { - return this.exec('DELETE FROM ' + this.table + ' WHERE number < ' + maxnumber) + trimBlocks(maxnumber: number) { + return this.exec( + "DELETE FROM " + this.table + " WHERE number < " + maxnumber + ); } } diff --git a/app/lib/dal/sqliteDAL/index/IIndexDAL.ts b/app/lib/dal/sqliteDAL/index/IIndexDAL.ts index 4f5dfdf2d58d44fd39894cc51710881bbbd45869..ff00dd433cc4d3bc269b81bc50c28f824a754d0f 100644 --- a/app/lib/dal/sqliteDAL/index/IIndexDAL.ts +++ b/app/lib/dal/sqliteDAL/index/IIndexDAL.ts @@ -11,164 +11,212 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {SQLiteDriver} from "../../drivers/SQLiteDriver" -import {FullIindexEntry, IindexEntry, Indexer} from "../../../indexer" -import {AbstractSQLite} from "../AbstractSQLite" +import { SQLiteDriver } from "../../drivers/SQLiteDriver"; +import { FullIindexEntry, IindexEntry, Indexer } from "../../../indexer"; +import { AbstractSQLite } from "../AbstractSQLite"; -const _ = require('underscore'); +const _ = require("underscore"); export interface OldIindexEntry extends IindexEntry { - pubkey: string - buid: string | null - revocation_sig:string | null + pubkey: string; + buid: string | null; + revocation_sig: string | null; } export class IIndexDAL extends AbstractSQLite<IindexEntry> { - - constructor(driver:SQLiteDriver) { + constructor(driver: SQLiteDriver) { super( driver, - 'i_index', + "i_index", // PK fields - ['op', 'pub', 'created_on', 'written_on'], + ["op", "pub", "created_on", "written_on"], // Fields [ - 'op', - 'uid', - 'pub', - 'hash', - 'sig', - 'created_on', - 'written_on', - 'writtenOn', - 'member', - 'wasMember', - 'kick', - 'wotb_id', - 'legacy' + "op", + "uid", + "pub", + "hash", + "sig", + "created_on", + "written_on", + "writtenOn", + "member", + "wasMember", + "kick", + "wotb_id", + "legacy", ], // Arrays [], // Booleans - ['member', 'wasMember', 'kick', 'legacy'], + ["member", "wasMember", "kick", "legacy"], // BigIntegers [], // Transient [] - ) + ); } init() { - return this.exec('BEGIN;' + - 'CREATE TABLE IF NOT EXISTS ' + this.table + ' (' + - 'op VARCHAR(10) NOT NULL,' + - 'uid VARCHAR(100) NULL,' + - 'pub VARCHAR(50) NOT NULL,' + - 'hash VARCHAR(80) NULL,' + - 'sig VARCHAR(80) NULL,' + - 'created_on VARCHAR(80) NULL,' + - 'written_on VARCHAR(80) NOT NULL,' + - 'writtenOn INTEGER NOT NULL,' + - 'member BOOLEAN NULL,' + - 'wasMember BOOLEAN NULL,' + - 'kick BOOLEAN NULL,' + - 'legacy BOOLEAN NOT NULL,' + - 'wotb_id INTEGER NULL,' + - 'PRIMARY KEY (op,pub,created_on,written_on)' + - ');' + - 'CREATE INDEX IF NOT EXISTS idx_iindex_pub ON i_index (pub);' + - 'COMMIT;') + return this.exec( + "BEGIN;" + + "CREATE TABLE IF NOT EXISTS " + + this.table + + " (" + + "op VARCHAR(10) NOT NULL," + + "uid VARCHAR(100) NULL," + + "pub VARCHAR(50) NOT NULL," + + "hash VARCHAR(80) NULL," + + "sig VARCHAR(80) NULL," + + "created_on VARCHAR(80) NULL," + + "written_on VARCHAR(80) NOT NULL," + + "writtenOn INTEGER NOT NULL," + + "member BOOLEAN NULL," + + "wasMember BOOLEAN NULL," + + "kick BOOLEAN NULL," + + "legacy BOOLEAN NOT NULL," + + "wotb_id INTEGER NULL," + + "PRIMARY KEY (op,pub,created_on,written_on)" + + ");" + + "CREATE INDEX IF NOT EXISTS idx_iindex_pub ON 
i_index (pub);" + + "COMMIT;" + ); } async getMembers() { // All those who has been subject to, or who are currently subject to kicking. Make one result per pubkey. - const pubkeys = await this.query('SELECT DISTINCT(pub) FROM ' + this.table); + const pubkeys = await this.query("SELECT DISTINCT(pub) FROM " + this.table); // We get the full representation for each member - const reduced = await Promise.all(pubkeys.map(async (entry) => { - const reducable = await this.reducable(entry.pub); - return Indexer.DUP_HELPERS.reduce(reducable); - })); + const reduced = await Promise.all( + pubkeys.map(async (entry) => { + const reducable = await this.reducable(entry.pub); + return Indexer.DUP_HELPERS.reduce(reducable); + }) + ); // Filter on those to be kicked, return their pubkey - const filtered = _.filter(reduced, (entry:IindexEntry) => entry.member); - return filtered.map((t:IindexEntry) => this.toCorrectEntity(t)) + const filtered = _.filter(reduced, (entry: IindexEntry) => entry.member); + return filtered.map((t: IindexEntry) => this.toCorrectEntity(t)); } getMembersPubkeys() { - return this.query('SELECT i1.pub ' + - 'FROM i_index i1 ' + - 'WHERE i1.member ' + - 'AND CAST(i1.written_on as int) = (' + - ' SELECT MAX(CAST(i2.written_on as int)) ' + - ' FROM i_index i2 ' + - ' WHERE i1.pub = i2.pub ' + - ' AND i2.member IS NOT NULL' + - ')') + return this.query( + "SELECT i1.pub " + + "FROM i_index i1 " + + "WHERE i1.member " + + "AND CAST(i1.written_on as int) = (" + + " SELECT MAX(CAST(i2.written_on as int)) " + + " FROM i_index i2 " + + " WHERE i1.pub = i2.pub " + + " AND i2.member IS NOT NULL" + + ")" + ); } async getToBeKickedPubkeys() { // All those who has been subject to, or who are currently subject to kicking. Make one result per pubkey. - const reducables = Indexer.DUP_HELPERS.reduceBy(await this.sqlFind({ kick: true }), ['pub']); + const reducables = Indexer.DUP_HELPERS.reduceBy( + await this.sqlFind({ kick: true }), + ["pub"] + ); // We get the full representation for each member - const reduced = await Promise.all(reducables.map(async (entry) => { - const reducable = await this.reducable(entry.pub); - return Indexer.DUP_HELPERS.reduce(reducable); - })) + const reduced = await Promise.all( + reducables.map(async (entry) => { + const reducable = await this.reducable(entry.pub); + return Indexer.DUP_HELPERS.reduce(reducable); + }) + ); // Filter on those to be kicked, return their pubkey - return _.filter(reduced, (entry:IindexEntry) => entry.kick).map((entry:IindexEntry) => entry.pub); - } - - async searchThoseMatching(search:string) { - const reducables = Indexer.DUP_HELPERS.reduceBy(await this.sqlFindLikeAny({ - pub: "%" + search + "%", - uid: "%" + search + "%" - }), ['pub']); + return _.filter(reduced, (entry: IindexEntry) => entry.kick).map( + (entry: IindexEntry) => entry.pub + ); + } + + async searchThoseMatching(search: string) { + const reducables = Indexer.DUP_HELPERS.reduceBy( + await this.sqlFindLikeAny({ + pub: "%" + search + "%", + uid: "%" + search + "%", + }), + ["pub"] + ); // We get the full representation for each member - return await Promise.all(reducables.map(async (entry) => { - return this.toCorrectEntity(Indexer.DUP_HELPERS.reduce(await this.reducable(entry.pub))) - })) + return await Promise.all( + reducables.map(async (entry) => { + return this.toCorrectEntity( + Indexer.DUP_HELPERS.reduce(await this.reducable(entry.pub)) + ); + }) + ); } - getFromPubkey(pubkey:string) { - return this.entityOrNull('pub', pubkey) as Promise<FullIindexEntry|null> + 
getFromPubkey(pubkey: string) { + return this.entityOrNull("pub", pubkey) as Promise<FullIindexEntry | null>; } - getFromUID(uid:string, retrieveOnPubkey = false) { - return this.entityOrNull('uid', uid, retrieveOnPubkey) + getFromUID(uid: string, retrieveOnPubkey = false) { + return this.entityOrNull("uid", uid, retrieveOnPubkey); } - getFullFromPubkey(pub:string): Promise<FullIindexEntry> { - return this.entityOrNull('pub', pub) as Promise<FullIindexEntry> + getFullFromPubkey(pub: string): Promise<FullIindexEntry> { + return this.entityOrNull("pub", pub) as Promise<FullIindexEntry>; } - getFullFromUID(uid:string): Promise<FullIindexEntry|null> { - return this.entityOrNull('uid', uid, true) as Promise<FullIindexEntry|null> + getFullFromUID(uid: string): Promise<FullIindexEntry | null> { + return this.entityOrNull( + "uid", + uid, + true + ) as Promise<FullIindexEntry | null>; } - getFullFromHash(hash:string): Promise<FullIindexEntry|null> { - return this.entityOrNull('hash', hash, true) as Promise<FullIindexEntry|null> + getFullFromHash(hash: string): Promise<FullIindexEntry | null> { + return this.entityOrNull( + "hash", + hash, + true + ) as Promise<FullIindexEntry | null>; } - reducable(pub:string) { - return this.query('SELECT * FROM ' + this.table + ' WHERE pub = ? ORDER BY CAST(written_on as integer) ASC', [pub]) + reducable(pub: string) { + return this.query( + "SELECT * FROM " + + this.table + + " WHERE pub = ? ORDER BY CAST(written_on as integer) ASC", + [pub] + ); } - removeBlock(blockstamp:string) { - return this.exec('DELETE FROM ' + this.table + ' WHERE written_on = \'' + blockstamp + '\'') + removeBlock(blockstamp: string) { + return this.exec( + "DELETE FROM " + this.table + " WHERE written_on = '" + blockstamp + "'" + ); } - private async entityOrNull(field:string, value:any, retrieveOnField:boolean = false) { - let reducable = await this.query('SELECT * FROM ' + this.table + ' WHERE ' + field + ' = ?', [value]); + private async entityOrNull( + field: string, + value: any, + retrieveOnField: boolean = false + ) { + let reducable = await this.query( + "SELECT * FROM " + this.table + " WHERE " + field + " = ?", + [value] + ); if (reducable.length) { if (retrieveOnField) { // Force full retrieval on `pub` field - reducable = await this.query('SELECT * FROM ' + this.table + ' WHERE pub = ? ORDER BY CAST(written_on as int) ASC', [reducable[0].pub]); + reducable = await this.query( + "SELECT * FROM " + + this.table + + " WHERE pub = ? 
ORDER BY CAST(written_on as int) ASC", + [reducable[0].pub] + ); } return this.toCorrectEntity(Indexer.DUP_HELPERS.reduce(reducable)); } return null; } - private toCorrectEntity(row:IindexEntry): OldIindexEntry { + private toCorrectEntity(row: IindexEntry): OldIindexEntry { // Old field return { pubkey: row.pub, @@ -187,15 +235,15 @@ export class IIndexDAL extends AbstractSQLite<IindexEntry> { index: row.index, op: row.op, writtenOn: row.writtenOn, - written_on: row.written_on - } + written_on: row.written_on, + }; } async getFromPubkeyOrUid(search: string) { - const idty = await this.getFromPubkey(search) + const idty = await this.getFromPubkey(search); if (idty) { - return idty + return idty; } - return this.getFromUID(search, true) as Promise<FullIindexEntry|null> + return this.getFromUID(search, true) as Promise<FullIindexEntry | null>; } } diff --git a/app/lib/db/DBBlock.ts b/app/lib/db/DBBlock.ts index 05e54ba8fca03f753d2153772d1b9f01666a45b9..7f8eb593bbf2d442eb432fcbf57cbd58ea35ad48 100644 --- a/app/lib/db/DBBlock.ts +++ b/app/lib/db/DBBlock.ts @@ -11,89 +11,89 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../dto/BlockDTO" -import {TransactionDTO} from "../dto/TransactionDTO" +import { BlockDTO } from "../dto/BlockDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; export class DBBlock { + version: number; + number: number; + currency: string; + hash: string; + inner_hash: string; + signature: string; + previousHash: string; + issuer: string; + previousIssuer: string; + time: number; + powMin: number; + unitbase: number; + membersCount: number; + issuersCount: number; + issuersFrame: number; + issuersFrameVar: number; + identities: string[]; + joiners: string[]; + actives: string[]; + leavers: string[]; + revoked: string[]; + excluded: string[]; + certifications: string[]; + transactions: TransactionDTO[]; + medianTime: number; + nonce: number; + fork: boolean; + parameters: string; + monetaryMass: number; + dividend: number | null; + UDTime: number; + writtenOn: number; + written_on: string; + wrong = false; - version: number - number: number - currency: string - hash: string - inner_hash: string - signature: string - previousHash: string - issuer: string - previousIssuer: string - time: number - powMin: number - unitbase: number - membersCount: number - issuersCount: number - issuersFrame: number - issuersFrameVar: number - identities: string[] - joiners: string[] - actives: string[] - leavers: string[] - revoked: string[] - excluded: string[] - certifications: string[] - transactions: TransactionDTO[] - medianTime: number - nonce: number - fork: boolean - parameters: string - monetaryMass: number - dividend: number | null - UDTime: number - writtenOn: number - written_on: string - wrong = false - - constructor( - ) { - } + constructor() {} toBlockDTO() { - return BlockDTO.fromJSONObject(this) + return BlockDTO.fromJSONObject(this); } - static fromBlockDTO(b:BlockDTO) { - const dbb = new DBBlock() - dbb.version = b.version - dbb.number = b.number - dbb.currency = b.currency - dbb.hash = b.hash - dbb.previousHash = b.previousHash - dbb.issuer = b.issuer - dbb.previousIssuer = b.previousIssuer - dbb.dividend = (b.dividend === null || b.dividend === undefined ? 
b.dividend : parseInt(String(b.dividend))) - dbb.time = b.time - dbb.powMin = b.powMin - dbb.unitbase = b.unitbase - dbb.membersCount = b.membersCount - dbb.issuersCount = b.issuersCount - dbb.issuersFrame = b.issuersFrame - dbb.issuersFrameVar = b.issuersFrameVar - dbb.identities = b.identities - dbb.joiners = b.joiners - dbb.actives = b.actives - dbb.leavers = b.leavers - dbb.revoked = b.revoked - dbb.excluded = b.excluded - dbb.certifications = b.certifications - dbb.transactions = b.transactions - dbb.medianTime = b.medianTime - dbb.fork = b.fork - dbb.parameters = b.parameters - dbb.inner_hash = b.inner_hash - dbb.signature = b.signature - dbb.nonce = b.nonce - dbb.UDTime = b.UDTime - dbb.monetaryMass = b.monetaryMass - dbb.writtenOn = b.number - dbb.written_on = [b.number, b.hash].join('-') - return dbb + static fromBlockDTO(b: BlockDTO) { + const dbb = new DBBlock(); + dbb.version = b.version; + dbb.number = b.number; + dbb.currency = b.currency; + dbb.hash = b.hash; + dbb.previousHash = b.previousHash; + dbb.issuer = b.issuer; + dbb.previousIssuer = b.previousIssuer; + dbb.dividend = + b.dividend === null || b.dividend === undefined + ? b.dividend + : parseInt(String(b.dividend)); + dbb.time = b.time; + dbb.powMin = b.powMin; + dbb.unitbase = b.unitbase; + dbb.membersCount = b.membersCount; + dbb.issuersCount = b.issuersCount; + dbb.issuersFrame = b.issuersFrame; + dbb.issuersFrameVar = b.issuersFrameVar; + dbb.identities = b.identities; + dbb.joiners = b.joiners; + dbb.actives = b.actives; + dbb.leavers = b.leavers; + dbb.revoked = b.revoked; + dbb.excluded = b.excluded; + dbb.certifications = b.certifications; + dbb.transactions = b.transactions; + dbb.medianTime = b.medianTime; + dbb.fork = b.fork; + dbb.parameters = b.parameters; + dbb.inner_hash = b.inner_hash; + dbb.signature = b.signature; + dbb.nonce = b.nonce; + dbb.UDTime = b.UDTime; + dbb.monetaryMass = b.monetaryMass; + dbb.writtenOn = b.number; + dbb.written_on = [b.number, b.hash].join("-"); + return dbb; } -} \ No newline at end of file +} diff --git a/app/lib/db/DBHead.ts b/app/lib/db/DBHead.ts index 8c97197e4a4a2ffadf81210cfad40c52ddedfbbe..9cc2a55195b1ba42d285160e1a98e85caa8e81b6 100644 --- a/app/lib/db/DBHead.ts +++ b/app/lib/db/DBHead.ts @@ -12,42 +12,40 @@ // GNU Affero General Public License for more details. 
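+// TODO: some properties are not registered in the DB, we should create another class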
export class DBHead { - // TODO: some properties are not registered in the DB, we should create another class - version: number - currency: string | null - bsize: number - avgBlockSize: number - udTime: number - udReevalTime: number - massReeval: number - mass: number - hash: string - previousHash: string | null - previousIssuer: string | null - issuer: string - time: number - medianTime: number - number: number - powMin: number - diffNumber: number - issuersCount: number - issuersFrame: number - issuersFrameVar: number - dtDiffEval: number - issuerDiff: number - powZeros: number - powRemainder: number - speed: number - unitBase: number - membersCount: number - dividend: number - new_dividend: number | null - issuerIsMember: boolean - written_on: string - writtenOn: number + version: number; + currency: string | null; + bsize: number; + avgBlockSize: number; + udTime: number; + udReevalTime: number; + massReeval: number; + mass: number; + hash: string; + previousHash: string | null; + previousIssuer: string | null; + issuer: string; + time: number; + medianTime: number; + number: number; + powMin: number; + diffNumber: number; + issuersCount: number; + issuersFrame: number; + issuersFrameVar: number; + dtDiffEval: number; + issuerDiff: number; + powZeros: number; + powRemainder: number; + speed: number; + unitBase: number; + membersCount: number; + dividend: number; + new_dividend: number | null; + issuerIsMember: boolean; + written_on: string; + writtenOn: number; - constructor( - ) {} -} \ No newline at end of file + constructor() {} +} diff --git a/app/lib/db/DBPeer.ts b/app/lib/db/DBPeer.ts index e4bbada5d9ac61ad12dfcc1bf1a23fcd71e26673..f115a713fdc95a1b2428ed119679545b19651067 100644 --- a/app/lib/db/DBPeer.ts +++ b/app/lib/db/DBPeer.ts @@ -1,23 +1,22 @@ -import {PeerDTO} from "../dto/PeerDTO" +import { PeerDTO } from "../dto/PeerDTO"; export class DBPeer { + version: number; + currency: string; + status: string; + statusTS: number; + hash: string; + first_down: number | null; + last_try: number | null; + lastContact: number = Math.floor(Date.now() / 1000); + pubkey: string; + block: string; + signature: string; + endpoints: string[]; + raw: string; + nonWoT: boolean = true; // Security measure: a peer is presumed nonWoT. - version: number - currency: string - status: string - statusTS: number - hash: string - first_down: number | null - last_try: number | null - lastContact: number = Math.floor(Date.now() / 1000) - pubkey: string - block: string - signature: string - endpoints: string[] - raw: string - nonWoT: boolean = true // Security measure: a peer is presumed nonWoT. 
- - static json(peer:DBPeer): JSONDBPeer { + static json(peer: DBPeer): JSONDBPeer { return { version: peer.version, currency: peer.currency, @@ -28,22 +27,22 @@ export class DBPeer { block: peer.block, signature: peer.signature, endpoints: peer.endpoints, - } + }; } - static fromPeerDTO(peer:PeerDTO): DBPeer { - return peer.toDBPeer() + static fromPeerDTO(peer: PeerDTO): DBPeer { + return peer.toDBPeer(); } } export class JSONDBPeer { - version: number - currency: string - status: string - first_down: number | null - last_try: number | null - pubkey: string - block: string - signature: string - endpoints: string[] + version: number; + currency: string; + status: string; + first_down: number | null; + last_try: number | null; + pubkey: string; + block: string; + signature: string; + endpoints: string[]; } diff --git a/app/lib/db/DBTx.ts b/app/lib/db/DBTx.ts index 1d47b7bb07530f1733ea58adc1b39f2fa2808b59..1f0fd0e64b4e2c173b8d69a2aa8c0082c81fb1cb 100644 --- a/app/lib/db/DBTx.ts +++ b/app/lib/db/DBTx.ts @@ -1,60 +1,60 @@ -import {TransactionDTO} from "../dto/TransactionDTO" +import { TransactionDTO } from "../dto/TransactionDTO"; export class DBTx { - hash: string - block_number: number | null - locktime: number - version: number - currency: string - comment: string - blockstamp: string - blockstampTime: number | null - time: number | null - inputs: string[] - unlocks: string[] - outputs: string[] - issuers: string[] - signatures: string[] - recipients: string[] - written: boolean - removed: boolean - received: number - output_base: number - output_amount: number - written_on: string - writtenOn: number + hash: string; + block_number: number | null; + locktime: number; + version: number; + currency: string; + comment: string; + blockstamp: string; + blockstampTime: number | null; + time: number | null; + inputs: string[]; + unlocks: string[]; + outputs: string[]; + issuers: string[]; + signatures: string[]; + recipients: string[]; + written: boolean; + removed: boolean; + received: number; + output_base: number; + output_amount: number; + written_on: string; + writtenOn: number; - static fromTransactionDTO(tx:TransactionDTO) { - const dbTx = new DBTx() - dbTx.hash = tx.hash - dbTx.locktime = tx.locktime - dbTx.version = tx.version - dbTx.currency = tx.currency - dbTx.blockstamp = tx.blockstamp - dbTx.blockstampTime = tx.blockstampTime - dbTx.comment = tx.comment || "" - dbTx.inputs = tx.inputs - dbTx.unlocks = tx.unlocks - dbTx.outputs = tx.outputs - dbTx.issuers = tx.issuers - dbTx.signatures = tx.signatures - dbTx.recipients = tx.outputsAsRecipients() - dbTx.written = false - dbTx.removed = false - dbTx.output_base = tx.output_base - dbTx.output_amount = tx.output_amount - return dbTx + static fromTransactionDTO(tx: TransactionDTO) { + const dbTx = new DBTx(); + dbTx.hash = tx.hash; + dbTx.locktime = tx.locktime; + dbTx.version = tx.version; + dbTx.currency = tx.currency; + dbTx.blockstamp = tx.blockstamp; + dbTx.blockstampTime = tx.blockstampTime; + dbTx.comment = tx.comment || ""; + dbTx.inputs = tx.inputs; + dbTx.unlocks = tx.unlocks; + dbTx.outputs = tx.outputs; + dbTx.issuers = tx.issuers; + dbTx.signatures = tx.signatures; + dbTx.recipients = tx.outputsAsRecipients(); + dbTx.written = false; + dbTx.removed = false; + dbTx.output_base = tx.output_base; + dbTx.output_amount = tx.output_amount; + return dbTx; } - static setRecipients(txs:DBTx[]) { + static setRecipients(txs: DBTx[]) { // Each transaction must have a good "recipients" field for future searchs - txs.forEach((tx) => 
tx.recipients = DBTx.outputs2recipients(tx)) + txs.forEach((tx) => (tx.recipients = DBTx.outputs2recipients(tx))); } - static outputs2recipients(tx:DBTx) { - return tx.outputs.map(function(out) { - const recipent = out.match('SIG\\((.*)\\)') - return (recipent && recipent[1]) || 'UNKNOWN' - }) + static outputs2recipients(tx: DBTx) { + return tx.outputs.map(function (out) { + const recipent = out.match("SIG\\((.*)\\)"); + return (recipent && recipent[1]) || "UNKNOWN"; + }); } } diff --git a/app/lib/db/DBWallet.ts b/app/lib/db/DBWallet.ts index d59c617d685ce73a1a4d0d7dc0ba7bf0394b8f23..b244513f5bba813bfb1d99d5df4ae3c6e347034a 100644 --- a/app/lib/db/DBWallet.ts +++ b/app/lib/db/DBWallet.ts @@ -1,4 +1,4 @@ export interface DBWallet { - conditions: string - balance: number + conditions: string; + balance: number; } diff --git a/app/lib/db/OldIindexEntry.ts b/app/lib/db/OldIindexEntry.ts index 308969f906b983034a717c6503d54ac9ca1eee61..9ae7e0d844ad243a8bcbefdc33fc1d84a060375d 100644 --- a/app/lib/db/OldIindexEntry.ts +++ b/app/lib/db/OldIindexEntry.ts @@ -1,7 +1,7 @@ -import {IindexEntry} from "../indexer" +import { IindexEntry } from "../indexer"; export interface OldIindexEntry extends IindexEntry { - pubkey: string - buid: string | null - revocation_sig:string | null + pubkey: string; + buid: string | null; + revocation_sig: string | null; } diff --git a/app/lib/debug/MonitorExecutionTime.ts b/app/lib/debug/MonitorExecutionTime.ts index 8c3b740aa3be77b4bf59aca5135b7202942c25c3..06946c3d7806f8fae648af0b4498ce80bfb28821 100644 --- a/app/lib/debug/MonitorExecutionTime.ts +++ b/app/lib/debug/MonitorExecutionTime.ts @@ -11,82 +11,100 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {getDurationInMicroSeconds, getMicrosecondsTime} from "../../ProcessCpuProfiler" -import {OtherConstants} from "../other_constants" -import {Underscore} from "../common-libs/underscore" +import { + getDurationInMicroSeconds, + getMicrosecondsTime, +} from "../../ProcessCpuProfiler"; +import { OtherConstants } from "../other_constants"; +import { Underscore } from "../common-libs/underscore"; const monitorings: { [k: string]: { times: { - time: number - }[] - } -} = {} + time: number; + }[]; + }; +} = {}; -process.on('exit', () => { - let traces: { name: string, times: number, avg: number, total: number }[] = [] - Object - .keys(monitorings) - .forEach(k => { - const m = monitorings[k] - const total = m.times.reduce((s, t) => s + t.time / 1000, 0) - const avg = m.times.length ? total / m.times.length : 0 - traces.push({ - name: k, - times: m.times.length, - avg, - total - }) - }) - traces = Underscore.sortBy(traces, t => t.total) - traces - .forEach(t => { - console.log('%s %s times %sms (average) %sms (total time)', - (t.name + ':').padEnd(50, ' '), - String(t.times).padStart(10, ' '), - t.avg.toFixed(3).padStart(10, ' '), - t.total.toFixed(0).padStart(10, ' ') - ) - }) -}) +process.on("exit", () => { + let traces: { + name: string; + times: number; + avg: number; + total: number; + }[] = []; + Object.keys(monitorings).forEach((k) => { + const m = monitorings[k]; + const total = m.times.reduce((s, t) => s + t.time / 1000, 0); + const avg = m.times.length ? 
total / m.times.length : 0; + traces.push({ + name: k, + times: m.times.length, + avg, + total, + }); + }); + traces = Underscore.sortBy(traces, (t) => t.total); + traces.forEach((t) => { + console.log( + "%s %s times %sms (average) %sms (total time)", + (t.name + ":").padEnd(50, " "), + String(t.times).padStart(10, " "), + t.avg.toFixed(3).padStart(10, " "), + t.total.toFixed(0).padStart(10, " ") + ); + }); +}); export const MonitorExecutionTime = function (idProperty?: string) { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { if (OtherConstants.ENABLE_MONITORING) { - const original = descriptor.value + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { descriptor.value = async function (...args: any[]) { - const start = getMicrosecondsTime() - const entities: any[] = await original.apply(this, args) - const duration = getDurationInMicroSeconds(start) - const k = target.constructor.name + '.' + propertyKey + (idProperty ? `[${(this as any)[idProperty]}]` : '') + const start = getMicrosecondsTime(); + const entities: any[] = await original.apply(this, args); + const duration = getDurationInMicroSeconds(start); + const k = + target.constructor.name + + "." + + propertyKey + + (idProperty ? `[${(this as any)[idProperty]}]` : ""); if (!monitorings[k]) { monitorings[k] = { - times: [] - } + times: [], + }; } monitorings[k].times.push({ - time: duration - }) - return entities - } + time: duration, + }); + return entities; + }; } else { descriptor.value = function (...args: any[]) { - const start = getMicrosecondsTime() - const entities: any[] = original.apply(this, args) - const duration = getDurationInMicroSeconds(start) - const k = target.constructor.name + '.' + propertyKey + (idProperty ? `[${(this as any)[idProperty]}]` : '') + const start = getMicrosecondsTime(); + const entities: any[] = original.apply(this, args); + const duration = getDurationInMicroSeconds(start); + const k = + target.constructor.name + + "." + + propertyKey + + (idProperty ? `[${(this as any)[idProperty]}]` : ""); if (!monitorings[k]) { monitorings[k] = { - times: [] - } + times: [], + }; } monitorings[k].times.push({ - time: duration - }) - return entities - } + time: duration, + }); + return entities; + }; } } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/MonitorFlushedIndex.ts b/app/lib/debug/MonitorFlushedIndex.ts index f2d083ccef8a0488a4f4cce955fe77cd6cf92072..35a361db0a7a5c7e242c4baa0b96496c93eb6634 100644 --- a/app/lib/debug/MonitorFlushedIndex.ts +++ b/app/lib/debug/MonitorFlushedIndex.ts @@ -11,42 +11,46 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
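+// Decorator: when the syncTrace program option holds a pubkey, logs every iindex/mindex/cindex/sindex entry touching that pubkey as JSON before the decorated flush method runs.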
-import {cliprogram} from "../common-libs/programOptions" -import {IndexBatch} from "../dal/fileDAL" +import { cliprogram } from "../common-libs/programOptions"; +import { IndexBatch } from "../dal/fileDAL"; export const MonitorFlushedIndex = function () { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { - const original = descriptor.value + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { - descriptor.value = async function (...args:any[]) { - const pub = cliprogram.syncTrace + descriptor.value = async function (...args: any[]) { + const pub = cliprogram.syncTrace; if (pub) { - const batch: IndexBatch = args[0] - batch.iindex.forEach(e => { + const batch: IndexBatch = args[0]; + batch.iindex.forEach((e) => { if (e.pub === pub) { - console.log(JSON.stringify(e)) + console.log(JSON.stringify(e)); } - }) - batch.mindex.forEach(e => { + }); + batch.mindex.forEach((e) => { if (e.pub === pub) { - console.log(JSON.stringify(e)) + console.log(JSON.stringify(e)); } - }) - batch.cindex.forEach(e => { + }); + batch.cindex.forEach((e) => { if (e.issuer === pub || e.receiver === pub) { - console.log(JSON.stringify(e)) + console.log(JSON.stringify(e)); } - }) - batch.sindex.forEach(e => { - if (e.conditions.indexOf(pub || '') !== -1) { - console.log(JSON.stringify(e)) + }); + batch.sindex.forEach((e) => { + if (e.conditions.indexOf(pub || "") !== -1) { + console.log(JSON.stringify(e)); } - }) + }); } - return await original.apply(this, args) - } + return await original.apply(this, args); + }; } else { - throw Error("Monitoring a synchronous function is not allowed.") + throw Error("Monitoring a synchronous function is not allowed."); } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/MonitorLokiExecutionTime.ts b/app/lib/debug/MonitorLokiExecutionTime.ts index 0491951af3e646bba96c9cfcc8ef5f55f0cb2297..3a30f586f98b7315d7cfd5bf9496e2d5eeb1b4e1 100644 --- a/app/lib/debug/MonitorLokiExecutionTime.ts +++ b/app/lib/debug/MonitorLokiExecutionTime.ts @@ -11,31 +11,46 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
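+// Decorator: when ENABLE_LOKI_MONITORING is set, traces the duration (in µs) of the decorated async Loki collection method, optionally dumping its first argument.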
-import {NewLogger} from "../logger" -import {getMicrosecondsTime} from "../../ProcessCpuProfiler" -import {OtherConstants} from "../other_constants" +import { NewLogger } from "../logger"; +import { getMicrosecondsTime } from "../../ProcessCpuProfiler"; +import { OtherConstants } from "../other_constants"; -const theLogger = NewLogger() +const theLogger = NewLogger(); export const MonitorLokiExecutionTime = function (dumpFirstParam = false) { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { if (OtherConstants.ENABLE_LOKI_MONITORING) { - const original = descriptor.value + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { - descriptor.value = async function (...args:any[]) { - const that :any = this - const now = getMicrosecondsTime() - const result = await original.apply(this, args) + descriptor.value = async function (...args: any[]) { + const that: any = this; + const now = getMicrosecondsTime(); + const result = await original.apply(this, args); if (dumpFirstParam) { - theLogger.trace('[loki][%s][%s] => %sµs', that.collectionName, propertyKey, (getMicrosecondsTime() - now), args && args[0]) + theLogger.trace( + "[loki][%s][%s] => %sµs", + that.collectionName, + propertyKey, + getMicrosecondsTime() - now, + args && args[0] + ); } else { - theLogger.trace('[loki][%s][%s] => %sµs', that.collectionName, propertyKey, (getMicrosecondsTime() - now)) + theLogger.trace( + "[loki][%s][%s] => %sµs", + that.collectionName, + propertyKey, + getMicrosecondsTime() - now + ); } - return result - } + return result; + }; } else { - throw Error("Monitoring a Loki synchronous function is not allowed.") + throw Error("Monitoring a Loki synchronous function is not allowed."); } } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/MonitorSQLExecutionTime.ts b/app/lib/debug/MonitorSQLExecutionTime.ts index 65724c527347f3df256933cbf1c1dcf582edd44d..72a4fcf64252baddf344131b2883de651cfabf24 100644 --- a/app/lib/debug/MonitorSQLExecutionTime.ts +++ b/app/lib/debug/MonitorSQLExecutionTime.ts @@ -11,29 +11,41 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
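+// Decorator: when ENABLE_SQL_MONITORING is set, traces each SQL query with its parameters and its duration in µs.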
-import {getDurationInMicroSeconds, getMicrosecondsTime} from "../../ProcessCpuProfiler" -import {NewLogger} from "../logger" -import {OtherConstants} from "../other_constants" +import { + getDurationInMicroSeconds, + getMicrosecondsTime, +} from "../../ProcessCpuProfiler"; +import { NewLogger } from "../logger"; +import { OtherConstants } from "../other_constants"; -const theLogger = NewLogger() +const theLogger = NewLogger(); export const MonitorSQLExecutionTime = function () { - return function (target: any, propertyKey: string, descriptor: PropertyDescriptor) { + return function ( + target: any, + propertyKey: string, + descriptor: PropertyDescriptor + ) { if (OtherConstants.ENABLE_SQL_MONITORING) { - const original = descriptor.value + const original = descriptor.value; if (original.__proto__.constructor.name === "AsyncFunction") { descriptor.value = async function (...args: any[]) { - const start = getMicrosecondsTime() - const sql: string = args[0] - const params: any[] = args[1] - const entities: any[] = await original.apply(this, args) - const duration = getDurationInMicroSeconds(start) - theLogger.trace('[sqlite][query] %s %s %sµs', sql, JSON.stringify(params || []), duration) - return entities - } + const start = getMicrosecondsTime(); + const sql: string = args[0]; + const params: any[] = args[1]; + const entities: any[] = await original.apply(this, args); + const duration = getDurationInMicroSeconds(start); + theLogger.trace( + "[sqlite][query] %s %s %sµs", + sql, + JSON.stringify(params || []), + duration + ); + return entities; + }; } else { - throw Error("Monitoring an SQL synchronous function is not allowed.") + throw Error("Monitoring an SQL synchronous function is not allowed."); } } - } -} \ No newline at end of file + }; +}; diff --git a/app/lib/debug/dump.ts b/app/lib/debug/dump.ts index ac8410429572b77d773d8273649a89e27a7782bb..620edbdd445090374e00ddab6e5fb087d3c1cb5e 100644 --- a/app/lib/debug/dump.ts +++ b/app/lib/debug/dump.ts @@ -1,66 +1,174 @@ -import {CindexEntry} from "../indexer" +import { CindexEntry } from "../indexer"; -const Table = require('cli-table') +const Table = require("cli-table"); export function dumpBindex(rows: CindexEntry[]) { - return dump(rows, ['version','bsize','hash','issuer','time','number','membersCount','issuersCount','issuersFrame','issuersFrameVar','issuerDiff','avgBlockSize','medianTime','dividend','mass','unitBase','powMin','udTime','udReevalTime','diffNumber','speed','massReeval']) + return dump(rows, [ + "version", + "bsize", + "hash", + "issuer", + "time", + "number", + "membersCount", + "issuersCount", + "issuersFrame", + "issuersFrameVar", + "issuerDiff", + "avgBlockSize", + "medianTime", + "dividend", + "mass", + "unitBase", + "powMin", + "udTime", + "udReevalTime", + "diffNumber", + "speed", + "massReeval", + ]); } export function dumpIindex(rows: CindexEntry[]) { - return dump(rows, ['op','uid','pub','hash','sig','created_on','written_on','member','wasMember','kick','wotb_id']) + return dump(rows, [ + "op", + "uid", + "pub", + "hash", + "sig", + "created_on", + "written_on", + "member", + "wasMember", + "kick", + "wotb_id", + ]); } export function dumpCindex(rows: CindexEntry[]) { - return dump(rows, ['op','issuer','receiver','created_on','written_on','sig','expires_on','expired_on','chainable_on','from_wid','to_wid','replayable_on']) + return dump(rows, [ + "op", + "issuer", + "receiver", + "created_on", + "written_on", + "sig", + "expires_on", + "expired_on", + "chainable_on", + "from_wid", + "to_wid", + "replayable_on", 
+ ]); } -export function dumpCindexPretty(rows: CindexEntry[], getUid: (pub: string) => Promise<string>) { - return dumpPretty(rows, ['row','op','issuer','created_on','written_on','expires_on','expired_on','chainable_on','replayable_on'], async (f, v) => { - if (f === 'issuer') { - return await getUid(v) - } - if (f === 'written_on') { - return String(v).substr(0, 15) +export function dumpCindexPretty( + rows: CindexEntry[], + getUid: (pub: string) => Promise<string> +) { + return dumpPretty( + rows, + [ + "row", + "op", + "issuer", + "created_on", + "written_on", + "expires_on", + "expired_on", + "chainable_on", + "replayable_on", + ], + async (f, v) => { + if (f === "issuer") { + return await getUid(v); + } + if (f === "written_on") { + return String(v).substr(0, 15); + } + return v; } - return v - }) + ); } export function dumpMindex(rows: CindexEntry[]) { - return dump(rows, ['op','pub','created_on','written_on','expires_on','expired_on','revokes_on','revoked_on','leaving','revocation','chainable_on']) + return dump(rows, [ + "op", + "pub", + "created_on", + "written_on", + "expires_on", + "expired_on", + "revokes_on", + "revoked_on", + "leaving", + "revocation", + "chainable_on", + ]); } export function dumpSindex(rows: CindexEntry[]) { - return dump(rows, ['op','tx','identifier','pos','created_on','amount','base','locktime','consumed','conditions', 'writtenOn']) + return dump(rows, [ + "op", + "tx", + "identifier", + "pos", + "created_on", + "amount", + "base", + "locktime", + "consumed", + "conditions", + "writtenOn", + ]); } -async function dumpPretty(rows: any[], columns: string[], transform: (field: string, value: any) => Promise<string> = (f, v) => Promise.resolve(v)) { - return dump(rows, columns, transform, {'mid': '', 'left-mid': '', 'mid-mid': '', 'right-mid': ''}) +async function dumpPretty( + rows: any[], + columns: string[], + transform: (field: string, value: any) => Promise<string> = (f, v) => + Promise.resolve(v) +) { + return dump(rows, columns, transform, { + mid: "", + "left-mid": "", + "mid-mid": "", + "right-mid": "", + }); } -async function dump(rows: any[], columns: string[], transform: (field: string, value: any) => Promise<string> = (f, v) => Promise.resolve(v), chars?: any) { +async function dump( + rows: any[], + columns: string[], + transform: (field: string, value: any) => Promise<string> = (f, v) => + Promise.resolve(v), + chars?: any +) { // Table columns - const t = chars ? new Table({ head: columns, chars }) : new Table({ head: columns }); + const t = chars + ? new Table({ head: columns, chars }) + : new Table({ head: columns }); let i = 0; for (const row of rows) { - t.push(await Promise.all(columns.map(async (c) => { - if (c === 'row') { - return i - } - else if (row[c] === null) { - return "NULL" - } - else if (row[c] === undefined) { - return 'NULL' - } - else if (typeof row[c] === 'boolean') { - const v = await transform(c, row[c] ? 1 : 0) - return v - } - const v = await transform(c, row[c]) - return v - }))); - i++ + t.push( + await Promise.all( + columns.map(async (c) => { + if (c === "row") { + return i; + } else if (row[c] === null) { + return "NULL"; + } else if (row[c] === undefined) { + return "NULL"; + } else if (typeof row[c] === "boolean") { + const v = await transform(c, row[c] ? 
1 : 0); + return v; + } + const v = await transform(c, row[c]); + return v; + }) + ) + ); + i++; } try { - const dumped = t.toString() - console.log(dumped) + const dumped = t.toString(); + console.log(dumped); } catch (e) { - console.error(e) + console.error(e); } } diff --git a/app/lib/dto/BlockDTO.ts b/app/lib/dto/BlockDTO.ts index 1dc973b9f2f65d17d1a9e2d00c81409a491b417e..91f4a438198c978cb6f172011e337086adb11be2 100644 --- a/app/lib/dto/BlockDTO.ts +++ b/app/lib/dto/BlockDTO.ts @@ -11,54 +11,52 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {TransactionDTO} from "./TransactionDTO" -import {CurrencyConfDTO} from "./ConfDTO" -import {hashf} from "../common" -import {Cloneable} from "./Cloneable" -import {MonitorExecutionTime} from "../debug/MonitorExecutionTime" +import { TransactionDTO } from "./TransactionDTO"; +import { CurrencyConfDTO } from "./ConfDTO"; +import { hashf } from "../common"; +import { Cloneable } from "./Cloneable"; +import { MonitorExecutionTime } from "../debug/MonitorExecutionTime"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export class BlockDTO implements Cloneable { - clone(): any { - return BlockDTO.fromJSONObject(this) + return BlockDTO.fromJSONObject(this); } - version: number - number: number - currency: string - hash: string - inner_hash: string - previousHash: string - issuer: string - previousIssuer: string - dividend: number|null - time: number - powMin: number - unitbase: number - membersCount: number - issuersCount: number - issuersFrame: number - issuersFrameVar: number - identities: string[] = [] - joiners: string[] = [] - actives: string[] = [] - leavers: string[] = [] - revoked: string[] = [] - excluded: string[] = [] - certifications: string[] = [] - transactions: TransactionDTO[] = [] - medianTime: number - nonce: number - fork: boolean - parameters: string - signature: string - monetaryMass: number - UDTime: number + version: number; + number: number; + currency: string; + hash: string; + inner_hash: string; + previousHash: string; + issuer: string; + previousIssuer: string; + dividend: number | null; + time: number; + powMin: number; + unitbase: number; + membersCount: number; + issuersCount: number; + issuersFrame: number; + issuersFrameVar: number; + identities: string[] = []; + joiners: string[] = []; + actives: string[] = []; + leavers: string[] = []; + revoked: string[] = []; + excluded: string[] = []; + certifications: string[] = []; + transactions: TransactionDTO[] = []; + medianTime: number; + nonce: number; + fork: boolean; + parameters: string; + signature: string; + monetaryMass: number; + UDTime: number; - constructor() { - } + constructor() {} json() { return { @@ -103,27 +101,29 @@ export class BlockDTO implements Cloneable { outputs: tx.outputs, unlocks: tx.unlocks, signatures: tx.signatures, - comment: tx.comment - } - }) - } + comment: tx.comment, + }; + }), + }; } get len() { - return this.identities.length + + return ( + this.identities.length + this.joiners.length + this.actives.length + this.leavers.length + this.revoked.length + this.certifications.length + this.transactions.reduce((sum, tx) => sum + tx.getLen(), 0) + ); } - getInlineIdentity(pubkey:string): string | null { + getInlineIdentity(pubkey: string): string | null { let i = 0; let found = null; while (!found && i < this.identities.length) { - if (this.identities[i].match(new RegExp('^' + pubkey))) + if (this.identities[i].match(new RegExp("^" + 
pubkey))) found = this.identities[i]; i++; } @@ -131,20 +131,21 @@ export class BlockDTO implements Cloneable { } getRawUnSigned() { - return this.getRawInnerPart() + this.getSignedPart() + return this.getRawInnerPart() + this.getSignedPart(); } getRawSigned() { - return this.getRawUnSigned() + this.signature + "\n" + return this.getRawUnSigned() + this.signature + "\n"; } getSignedPart() { - return "InnerHash: " + this.inner_hash + "\n" + - "Nonce: " + this.nonce + "\n" + return ( + "InnerHash: " + this.inner_hash + "\n" + "Nonce: " + this.nonce + "\n" + ); } getSignedPartSigned() { - return this.getSignedPart() + this.signature + "\n" + return this.getSignedPart() + this.signature + "\n"; } getRawInnerPart() { @@ -156,107 +157,106 @@ export class BlockDTO implements Cloneable { raw += "PoWMin: " + this.powMin + "\n"; raw += "Time: " + this.time + "\n"; raw += "MedianTime: " + this.medianTime + "\n"; - if (this.dividend) - raw += "UniversalDividend: " + this.dividend + "\n"; + if (this.dividend) raw += "UniversalDividend: " + this.dividend + "\n"; raw += "UnitBase: " + this.unitbase + "\n"; raw += "Issuer: " + this.issuer + "\n"; raw += "IssuersFrame: " + this.issuersFrame + "\n"; raw += "IssuersFrameVar: " + this.issuersFrameVar + "\n"; raw += "DifferentIssuersCount: " + this.issuersCount + "\n"; - if(this.previousHash) - raw += "PreviousHash: " + this.previousHash + "\n"; - if(this.previousIssuer) + if (this.previousHash) raw += "PreviousHash: " + this.previousHash + "\n"; + if (this.previousIssuer) raw += "PreviousIssuer: " + this.previousIssuer + "\n"; - if(this.parameters) - raw += "Parameters: " + this.parameters + "\n"; + if (this.parameters) raw += "Parameters: " + this.parameters + "\n"; raw += "MembersCount: " + this.membersCount + "\n"; raw += "Identities:\n"; - for (const idty of (this.identities || [])){ + for (const idty of this.identities || []) { raw += idty + "\n"; } raw += "Joiners:\n"; - for (const joiner of (this.joiners || [])){ + for (const joiner of this.joiners || []) { raw += joiner + "\n"; } raw += "Actives:\n"; - for (const active of (this.actives || [])){ + for (const active of this.actives || []) { raw += active + "\n"; } raw += "Leavers:\n"; - for (const leaver of (this.leavers || [])){ + for (const leaver of this.leavers || []) { raw += leaver + "\n"; } raw += "Revoked:\n"; - for (const revoked of (this.revoked || [])){ + for (const revoked of this.revoked || []) { raw += revoked + "\n"; } raw += "Excluded:\n"; - for (const excluded of (this.excluded || [])){ + for (const excluded of this.excluded || []) { raw += excluded + "\n"; } raw += "Certifications:\n"; - for (const cert of (this.certifications || [])){ + for (const cert of this.certifications || []) { raw += cert + "\n"; } raw += "Transactions:\n"; - for (const tx of (this.transactions || [])){ + for (const tx of this.transactions || []) { raw += tx.getCompactVersion(); } - return raw + return raw; } getHash() { - return hashf(this.getSignedPartSigned()) + return hashf(this.getSignedPartSigned()); } get blockstamp() { - return BlockDTO.blockstamp({ number: this.number, hash: this.getHash() }) + return BlockDTO.blockstamp({ number: this.number, hash: this.getHash() }); } @MonitorExecutionTime() - static fromJSONObject(obj:any) { - const dto = new BlockDTO() - dto.version = parseInt(obj.version) || DEFAULT_DOCUMENT_VERSION - dto.number = parseInt(obj.number) - dto.currency = obj.currency || "" - dto.hash = obj.hash || "" - dto.inner_hash = obj.inner_hash - dto.previousHash = obj.previousHash - 
dto.issuer = obj.issuer || "" - dto.previousIssuer = obj.previousIssuer - dto.dividend = obj.dividend || null - dto.time = parseInt(obj.time) - dto.powMin = parseInt(obj.powMin) - dto.monetaryMass = parseInt(obj.monetaryMass) + static fromJSONObject(obj: any) { + const dto = new BlockDTO(); + dto.version = parseInt(obj.version) || DEFAULT_DOCUMENT_VERSION; + dto.number = parseInt(obj.number); + dto.currency = obj.currency || ""; + dto.hash = obj.hash || ""; + dto.inner_hash = obj.inner_hash; + dto.previousHash = obj.previousHash; + dto.issuer = obj.issuer || ""; + dto.previousIssuer = obj.previousIssuer; + dto.dividend = obj.dividend || null; + dto.time = parseInt(obj.time); + dto.powMin = parseInt(obj.powMin); + dto.monetaryMass = parseInt(obj.monetaryMass); if (isNaN(dto.monetaryMass) && obj.mass !== undefined) { - dto.monetaryMass = parseInt(obj.mass) + dto.monetaryMass = parseInt(obj.mass); } if (isNaN(dto.monetaryMass)) { - dto.monetaryMass = 0 + dto.monetaryMass = 0; } - dto.unitbase = parseInt(obj.unitbase) - dto.membersCount = parseInt(obj.membersCount) - dto.issuersCount = parseInt(obj.issuersCount) - dto.issuersFrame = parseInt(obj.issuersFrame) - dto.issuersFrameVar = parseInt(obj.issuersFrameVar) - dto.identities = obj.identities || [] - dto.joiners = obj.joiners || [] - dto.actives = obj.actives || [] - dto.leavers = obj.leavers || [] - dto.revoked = obj.revoked || [] - dto.excluded = obj.excluded || [] - dto.certifications = obj.certifications || [] - dto.transactions = (obj.transactions || []).map((tx:any) => TransactionDTO.fromJSONObject(tx)) - dto.medianTime = parseInt(obj.medianTime) - dto.fork = !!obj.fork - dto.parameters = obj.parameters || "" - dto.signature = obj.signature || "" - dto.nonce = parseInt(obj.nonce) - return dto + dto.unitbase = parseInt(obj.unitbase); + dto.membersCount = parseInt(obj.membersCount); + dto.issuersCount = parseInt(obj.issuersCount); + dto.issuersFrame = parseInt(obj.issuersFrame); + dto.issuersFrameVar = parseInt(obj.issuersFrameVar); + dto.identities = obj.identities || []; + dto.joiners = obj.joiners || []; + dto.actives = obj.actives || []; + dto.leavers = obj.leavers || []; + dto.revoked = obj.revoked || []; + dto.excluded = obj.excluded || []; + dto.certifications = obj.certifications || []; + dto.transactions = (obj.transactions || []).map((tx: any) => + TransactionDTO.fromJSONObject(tx) + ); + dto.medianTime = parseInt(obj.medianTime); + dto.fork = !!obj.fork; + dto.parameters = obj.parameters || ""; + dto.signature = obj.signature || ""; + dto.nonce = parseInt(obj.nonce); + return dto; } - static getConf(block:BlockDTO): CurrencyConfDTO { - const sp = block.parameters.split(':'); + static getConf(block: BlockDTO): CurrencyConfDTO { + const sp = block.parameters.split(":"); return { currency: block.currency, c: parseFloat(sp[0]), @@ -282,18 +282,18 @@ export class BlockDTO implements Cloneable { // New parameters, defaults to msWindow msPeriod: parseInt(sp[9]), sigReplay: parseInt(sp[9]), - } + }; } - static getLen(block:any) { - return BlockDTO.fromJSONObject(block).len + static getLen(block: any) { + return BlockDTO.fromJSONObject(block).len; } - static getHash(block:any) { - return BlockDTO.fromJSONObject(block).getHash() + static getHash(block: any) { + return BlockDTO.fromJSONObject(block).getHash(); } - static blockstamp(b: { number: number, hash: string }) { - return [b.number, b.hash].join('-') + static blockstamp(b: { number: number; hash: string }) { + return [b.number, b.hash].join("-"); } } diff --git 
a/app/lib/dto/CertificationDTO.ts b/app/lib/dto/CertificationDTO.ts index 66b76c92d9df315a8267dfece93d8cf622e3f7c5..047206d56a4b07cbe4d7a38a79285e86326083ae 100644 --- a/app/lib/dto/CertificationDTO.ts +++ b/app/lib/dto/CertificationDTO.ts @@ -11,15 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {IdentityDTO} from "./IdentityDTO" -import {Buid} from "../common-libs/buid" -import {Cloneable} from "./Cloneable"; -import {hashf} from "../common"; +import { IdentityDTO } from "./IdentityDTO"; +import { Buid } from "../common-libs/buid"; +import { Cloneable } from "./Cloneable"; +import { hashf } from "../common"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export class ShortCertificationDTO { - constructor( public pubkey: string, public block_number: number, @@ -28,22 +27,22 @@ export class ShortCertificationDTO { ) {} get issuer() { - return this.pubkey + return this.pubkey; } get from() { - return this.pubkey + return this.pubkey; } get to() { - return this.idty_issuer + return this.idty_issuer; } } -export class CertificationDTO extends ShortCertificationDTO implements Cloneable { - +export class CertificationDTO extends ShortCertificationDTO + implements Cloneable { clone(): any { - return CertificationDTO.fromJSONObject(this) + return CertificationDTO.fromJSONObject(this); } constructor( @@ -52,20 +51,20 @@ export class CertificationDTO extends ShortCertificationDTO implements Cloneable public pubkey: string, public buid: string, public sig: string, - public idty_issuer:string, - public idty_uid:string, - public idty_buid:string, - public idty_sig:string + public idty_issuer: string, + public idty_uid: string, + public idty_buid: string, + public idty_sig: string ) { - super(pubkey, parseInt(buid.split(':')[0]), sig, idty_issuer) + super(pubkey, parseInt(buid.split(":")[0]), sig, idty_issuer); } getTargetHash() { return IdentityDTO.getTargetHash({ uid: this.idty_uid, created_on: this.idty_buid, - pub: this.idty_issuer - }) + pub: this.idty_issuer, + }); } getRawUnSigned() { @@ -74,42 +73,42 @@ export class CertificationDTO extends ShortCertificationDTO implements Cloneable raw += "Type: Certification\n"; raw += "Currency: " + this.currency + "\n"; raw += "Issuer: " + this.pubkey + "\n"; - raw += "IdtyIssuer: " + this.idty_issuer + '\n'; - raw += "IdtyUniqueID: " + this.idty_uid + '\n'; - raw += "IdtyTimestamp: " + this.idty_buid + '\n'; - raw += "IdtySignature: " + this.idty_sig + '\n'; - raw += "CertTimestamp: " + this.buid + '\n'; - return raw + raw += "IdtyIssuer: " + this.idty_issuer + "\n"; + raw += "IdtyUniqueID: " + this.idty_uid + "\n"; + raw += "IdtyTimestamp: " + this.idty_buid + "\n"; + raw += "IdtySignature: " + this.idty_sig + "\n"; + raw += "CertTimestamp: " + this.buid + "\n"; + return raw; } getRawSigned() { - return this.getRawUnSigned() + this.sig + '\n' + return this.getRawUnSigned() + this.sig + "\n"; } json() { return { - "issuer": this.pubkey, - "timestamp": this.buid, - "sig": this.sig, - "target": { - "issuer": this.idty_issuer, - "uid": this.idty_uid, - "timestamp": this.idty_buid, - "sig": this.idty_sig - } - } + issuer: this.pubkey, + timestamp: this.buid, + sig: this.sig, + target: { + issuer: this.idty_issuer, + uid: this.idty_uid, + timestamp: this.idty_buid, + sig: this.idty_sig, + }, + }; } inline() { - return [this.pubkey, this.to, this.block_number, this.sig].join(':') + return [this.pubkey, this.to, this.block_number, 
this.sig].join(":"); } - static fromInline(inline:string): ShortCertificationDTO { - const [pubkey, to, block_number, sig]: string[] = inline.split(':') - return new ShortCertificationDTO(pubkey, parseInt(block_number), sig, to) + static fromInline(inline: string): ShortCertificationDTO { + const [pubkey, to, block_number, sig]: string[] = inline.split(":"); + return new ShortCertificationDTO(pubkey, parseInt(block_number), sig, to); } - static fromJSONObject(obj:any) { + static fromJSONObject(obj: any) { return new CertificationDTO( obj.version || DEFAULT_DOCUMENT_VERSION, obj.currency, @@ -120,10 +119,10 @@ export class CertificationDTO extends ShortCertificationDTO implements Cloneable obj.idty_uid, obj.idty_buid, obj.idty_sig - ) + ); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } -} \ No newline at end of file +} diff --git a/app/lib/dto/Cloneable.ts b/app/lib/dto/Cloneable.ts index 358cc75ac8030c014e53270f5834da334536f5c4..8966718703a47b89c9c93e56d94ad431e277b41e 100644 --- a/app/lib/dto/Cloneable.ts +++ b/app/lib/dto/Cloneable.ts @@ -12,5 +12,5 @@ // GNU Affero General Public License for more details. export interface Cloneable { - clone(): any -} \ No newline at end of file + clone(): any; +} diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts index 59c229f19a724175c2e1077d5f7630351e7dda11..7d5d8247423998f526d6ef3059966f5d492309f0 100644 --- a/app/lib/dto/ConfDTO.ts +++ b/app/lib/dto/ConfDTO.ts @@ -11,108 +11,115 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../common-libs/constants" -import {ProxiesConf} from '../proxy'; -import {Underscore} from "../common-libs/underscore" +import { CommonConstants } from "../common-libs/constants"; +import { ProxiesConf } from "../proxy"; +import { Underscore } from "../common-libs/underscore"; -const constants = require('../constants'); +const constants = require("../constants"); export interface Keypair { - pub: string - sec: string + pub: string; + sec: string; } export interface StorageDTO { storage?: { - transactions?:boolean - wotwizard?:boolean - } + transactions?: boolean; + wotwizard?: boolean; + }; } export interface PowDTO { - powNoSecurity:boolean + powNoSecurity: boolean; } export interface BranchingDTO { - switchOnHeadAdvance:number - avgGenTime:number - forksize:number + switchOnHeadAdvance: number; + avgGenTime: number; + forksize: number; } export interface CurrencyConfDTO { - currency: string - c: number - dt: number - ud0: number - sigPeriod: number - sigReplay: number - sigStock: number - sigWindow: number - sigValidity: number - sigQty: number - idtyWindow: number - msWindow: number - msPeriod: number - xpercent: number - msValidity: number - stepMax: number - medianTimeBlocks: number - avgGenTime: number - dtDiffEval: number - percentRot: number - udTime0: number - udReevalTime0: number - dtReeval: number + currency: string; + c: number; + dt: number; + ud0: number; + sigPeriod: number; + sigReplay: number; + sigStock: number; + sigWindow: number; + sigValidity: number; + sigQty: number; + idtyWindow: number; + msWindow: number; + msPeriod: number; + xpercent: number; + msValidity: number; + stepMax: number; + medianTimeBlocks: number; + avgGenTime: number; + dtDiffEval: number; + percentRot: number; + udTime0: number; + udReevalTime0: number; + dtReeval: number; } export interface KeypairConfDTO { - pair: Keypair - oldPair: Keypair|null - salt: string - passwd: 
string + pair: Keypair; + oldPair: Keypair | null; + salt: string; + passwd: string; } export interface NetworkConfDTO { - proxiesConf: ProxiesConf|undefined - nobma: boolean - bmaWithCrawler: boolean - remoteport: number - remotehost: string|null - remoteipv4: string|null - remoteipv6: string|null - port: number - ipv4: string - ipv6: string - dos:any - upnp:boolean - httplogs:boolean - nonWoTPeersLimit: number + proxiesConf: ProxiesConf | undefined; + nobma: boolean; + bmaWithCrawler: boolean; + remoteport: number; + remotehost: string | null; + remoteipv4: string | null; + remoteipv6: string | null; + port: number; + ipv4: string; + ipv6: string; + dos: any; + upnp: boolean; + httplogs: boolean; + nonWoTPeersLimit: number; } export interface WS2PConfDTO { ws2p?: { - privateAccess?: boolean - publicAccess?: boolean - sync?: boolean - uuid?: string - upnp?: boolean - remotehost?: string|null - remoteport?: number|null - remotepath?: string - port?: number - host?: string - maxPublic?:number - maxPrivate?:number - preferedNodes?: string[] - preferedOnly: boolean - privilegedNodes?: string[] - privilegedOnly: boolean - syncLimit?: number - } + privateAccess?: boolean; + publicAccess?: boolean; + sync?: boolean; + uuid?: string; + upnp?: boolean; + remotehost?: string | null; + remoteport?: number | null; + remotepath?: string; + port?: number; + host?: string; + maxPublic?: number; + maxPrivate?: number; + preferedNodes?: string[]; + preferedOnly: boolean; + privilegedNodes?: string[]; + privilegedOnly: boolean; + syncLimit?: number; + }; } -export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, NetworkConfDTO, BranchingDTO, WS2PConfDTO, PowDTO { - +export class ConfDTO + implements + StorageDTO, + CurrencyConfDTO, + KeypairConfDTO, + NetworkConfDTO, + BranchingDTO, + WS2PConfDTO, + PowDTO { constructor( public loglevel: string, public currency: string, @@ -154,13 +161,13 @@ export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, Net public sigWindow: number, public switchOnHeadAdvance: number, public pair: Keypair, - public oldPair: Keypair|null, + public oldPair: Keypair | null, public salt: string, public passwd: string, public remoteport: number, - public remotehost: string|null, - public remoteipv4: string|null, - public remoteipv6: string|null, + public remotehost: string | null, + public remoteipv4: string | null, + public remoteipv6: string | null, public host: string, public port: number, public ipv4: string, @@ -172,70 +179,131 @@ export class ConfDTO implements StorageDTO, CurrencyConfDTO, KeypairConfDTO, Net public nobma: boolean, public bmaWithCrawler: boolean, public nonWoTPeersLimit: number, - public proxiesConf: ProxiesConf|undefined, + public proxiesConf: ProxiesConf | undefined, public ws2p?: { - privateAccess?: boolean - publicAccess?: boolean - uuid?: string - upnp?: boolean - remotehost?: string|null - remoteport?: number|null - remotepath?: string - port?: number - host?: string - preferedNodes?: string[] - preferedOnly: boolean - privilegedNodes?: string[] - privilegedOnly: boolean - maxPublic?:number - maxPrivate?:number - syncLimit?:number + privateAccess?: boolean; + publicAccess?: boolean; + uuid?: string; + upnp?: boolean; + remotehost?: string | null; + remoteport?: number | null; + remotepath?: string; + port?: number; + host?: string; + preferedNodes?: string[]; + preferedOnly: boolean; + privilegedNodes?: string[]; + privilegedOnly: boolean; + maxPublic?: number; + maxPrivate?: number; + syncLimit?: number; }, public 
powNoSecurity = false, public storage = { transactions: false, wotwizard: false, - }, -) {} + } + ) {} static mock() { - return new ConfDTO("", "", [], [], 0, 3600 * 1000, constants.PROOF_OF_WORK.DEFAULT.CPU, 1, constants.PROOF_OF_WORK.DEFAULT.PREFIX, 0, 0, constants.CONTRACT.DEFAULT.C, constants.CONTRACT.DEFAULT.DT, constants.CONTRACT.DEFAULT.DT_REEVAL, 0, constants.CONTRACT.DEFAULT.UD0, 0, 0, constants.CONTRACT.DEFAULT.STEPMAX, constants.CONTRACT.DEFAULT.SIGPERIOD, constants.CONTRACT.DEFAULT.SIGREPLAY, 0, constants.CONTRACT.DEFAULT.SIGVALIDITY, constants.CONTRACT.DEFAULT.MSVALIDITY, constants.CONTRACT.DEFAULT.SIGQTY, constants.CONTRACT.DEFAULT.SIGSTOCK, constants.CONTRACT.DEFAULT.X_PERCENT, constants.CONTRACT.DEFAULT.PERCENTROT, constants.CONTRACT.DEFAULT.POWDELAY, constants.CONTRACT.DEFAULT.AVGGENTIME, constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, false, 3000, false, constants.BRANCHES.DEFAULT_WINDOW_SIZE, constants.CONTRACT.DEFAULT.IDTYWINDOW, constants.CONTRACT.DEFAULT.MSWINDOW, constants.CONTRACT.DEFAULT.SIGWINDOW, 0, { pub:'', sec:'' }, null, "", "", 0, "", "", "", "", 0, "", "", null, false, "", true, true, false, 100, new ProxiesConf(), undefined) + return new ConfDTO( + "", + "", + [], + [], + 0, + 3600 * 1000, + constants.PROOF_OF_WORK.DEFAULT.CPU, + 1, + constants.PROOF_OF_WORK.DEFAULT.PREFIX, + 0, + 0, + constants.CONTRACT.DEFAULT.C, + constants.CONTRACT.DEFAULT.DT, + constants.CONTRACT.DEFAULT.DT_REEVAL, + 0, + constants.CONTRACT.DEFAULT.UD0, + 0, + 0, + constants.CONTRACT.DEFAULT.STEPMAX, + constants.CONTRACT.DEFAULT.SIGPERIOD, + constants.CONTRACT.DEFAULT.SIGREPLAY, + 0, + constants.CONTRACT.DEFAULT.SIGVALIDITY, + constants.CONTRACT.DEFAULT.MSVALIDITY, + constants.CONTRACT.DEFAULT.SIGQTY, + constants.CONTRACT.DEFAULT.SIGSTOCK, + constants.CONTRACT.DEFAULT.X_PERCENT, + constants.CONTRACT.DEFAULT.PERCENTROT, + constants.CONTRACT.DEFAULT.POWDELAY, + constants.CONTRACT.DEFAULT.AVGGENTIME, + constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, + false, + 3000, + false, + constants.BRANCHES.DEFAULT_WINDOW_SIZE, + constants.CONTRACT.DEFAULT.IDTYWINDOW, + constants.CONTRACT.DEFAULT.MSWINDOW, + constants.CONTRACT.DEFAULT.SIGWINDOW, + 0, + { pub: "", sec: "" }, + null, + "", + "", + 0, + "", + "", + "", + "", + 0, + "", + "", + null, + false, + "", + true, + true, + false, + 100, + new ProxiesConf(), + undefined + ); } static defaultConf() { /*return new ConfDTO("", "", [], [], 0, 3600 * 1000, constants.PROOF_OF_WORK.DEFAULT.CPU, 1, constants.PROOF_OF_WORK.DEFAULT.PREFIX, 0, 0, constants.CONTRACT.DEFAULT.C, constants.CONTRACT.DEFAULT.DT, constants.CONTRACT.DEFAULT.DT_REEVAL, 0, constants.CONTRACT.DEFAULT.UD0, 0, 0, constants.CONTRACT.DEFAULT.STEPMAX, constants.CONTRACT.DEFAULT.SIGPERIOD, 0, constants.CONTRACT.DEFAULT.SIGVALIDITY, constants.CONTRACT.DEFAULT.MSVALIDITY, constants.CONTRACT.DEFAULT.SIGQTY, constants.CONTRACT.DEFAULT.SIGSTOCK, constants.CONTRACT.DEFAULT.X_PERCENT, constants.CONTRACT.DEFAULT.PERCENTROT, constants.CONTRACT.DEFAULT.POWDELAY, constants.CONTRACT.DEFAULT.AVGGENTIME, constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, false, 3000, false, constants.BRANCHES.DEFAULT_WINDOW_SIZE, constants.CONTRACT.DEFAULT.IDTYWINDOW, constants.CONTRACT.DEFAULT.MSWINDOW, constants.CONTRACT.DEFAULT.SIGWINDOW, 0, { pub:'', sec:'' }, null, "", "", 0, "", "", "", "", 0, "", "", null, false, "", true, true)*/ return { - "currency": null, - "endpoints": [], - "rmEndpoints": [], - "upInterval": 3600 * 1000, - "c": constants.CONTRACT.DEFAULT.C, - "dt": constants.CONTRACT.DEFAULT.DT, - "dtReeval": 
constants.CONTRACT.DEFAULT.DT_REEVAL, - "ud0": constants.CONTRACT.DEFAULT.UD0, - "stepMax": constants.CONTRACT.DEFAULT.STEPMAX, - "sigPeriod": constants.CONTRACT.DEFAULT.SIGPERIOD, - "sigReplay": constants.CONTRACT.DEFAULT.SIGREPLAY, - "sigValidity": constants.CONTRACT.DEFAULT.SIGVALIDITY, - "msValidity": constants.CONTRACT.DEFAULT.MSVALIDITY, - "sigQty": constants.CONTRACT.DEFAULT.SIGQTY, - "xpercent": constants.CONTRACT.DEFAULT.X_PERCENT, - "percentRot": constants.CONTRACT.DEFAULT.PERCENTROT, - "powDelay": constants.CONTRACT.DEFAULT.POWDELAY, - "avgGenTime": constants.CONTRACT.DEFAULT.AVGGENTIME, - "dtDiffEval": constants.CONTRACT.DEFAULT.DTDIFFEVAL, - "medianTimeBlocks": constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, - "httplogs": false, - "udid2": false, - "timeout": 3000, - "isolate": false, - "forksize": constants.BRANCHES.DEFAULT_WINDOW_SIZE, - "switchOnHeadAdvance": CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS, - "nonWoTPeersLimit": CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT, + currency: null, + endpoints: [], + rmEndpoints: [], + upInterval: 3600 * 1000, + c: constants.CONTRACT.DEFAULT.C, + dt: constants.CONTRACT.DEFAULT.DT, + dtReeval: constants.CONTRACT.DEFAULT.DT_REEVAL, + ud0: constants.CONTRACT.DEFAULT.UD0, + stepMax: constants.CONTRACT.DEFAULT.STEPMAX, + sigPeriod: constants.CONTRACT.DEFAULT.SIGPERIOD, + sigReplay: constants.CONTRACT.DEFAULT.SIGREPLAY, + sigValidity: constants.CONTRACT.DEFAULT.SIGVALIDITY, + msValidity: constants.CONTRACT.DEFAULT.MSVALIDITY, + sigQty: constants.CONTRACT.DEFAULT.SIGQTY, + xpercent: constants.CONTRACT.DEFAULT.X_PERCENT, + percentRot: constants.CONTRACT.DEFAULT.PERCENTROT, + powDelay: constants.CONTRACT.DEFAULT.POWDELAY, + avgGenTime: constants.CONTRACT.DEFAULT.AVGGENTIME, + dtDiffEval: constants.CONTRACT.DEFAULT.DTDIFFEVAL, + medianTimeBlocks: constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, + httplogs: false, + udid2: false, + timeout: 3000, + isolate: false, + forksize: constants.BRANCHES.DEFAULT_WINDOW_SIZE, + switchOnHeadAdvance: CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS, + nonWoTPeersLimit: CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT, }; } - static complete(conf:any) { - return Underscore.extend(ConfDTO.defaultConf(), conf) + static complete(conf: any) { + return Underscore.extend(ConfDTO.defaultConf(), conf); } -} \ No newline at end of file +} diff --git a/app/lib/dto/IdentityDTO.ts b/app/lib/dto/IdentityDTO.ts index 5671987b0c2801dd1f820cae2d821a408ec48048..7ce0f76dc2bbcb93701a71109885fe3f475dabe5 100644 --- a/app/lib/dto/IdentityDTO.ts +++ b/app/lib/dto/IdentityDTO.ts @@ -11,37 +11,36 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
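+// Identity document DTO: the inline form is pubkey:sig:buid:uid, and the target hash is hashf(uid + buid + pubkey).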
-import {RevocationDTO} from "./RevocationDTO" -import {hashf} from "../common" -import {DBIdentity, NewDBIdentity} from "../dal/sqliteDAL/IdentityDAL" +import { RevocationDTO } from "./RevocationDTO"; +import { hashf } from "../common"; +import { DBIdentity, NewDBIdentity } from "../dal/sqliteDAL/IdentityDAL"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export interface HashableIdentity { - created_on: string - uid: string - pub: string + created_on: string; + uid: string; + pub: string; } export interface BasicIdentity { - buid: string - uid: string - pubkey: string - sig: string + buid: string; + uid: string; + pubkey: string; + sig: string; } export interface BasicRevocableIdentity { - buid: string - uid: string - pubkey: string - sig: string - member: boolean - wasMember: boolean - expires_on: number + buid: string; + uid: string; + pubkey: string; + sig: string; + member: boolean; + wasMember: boolean; + expires_on: number; } export class IdentityDTO { - constructor( public version: number, public currency: string, @@ -52,38 +51,38 @@ export class IdentityDTO { ) {} get hash() { - return this.getTargetHash() + return this.getTargetHash(); } private getTargetHash() { - return hashf(this.uid + this.buid + this.pubkey) + return hashf(this.uid + this.buid + this.pubkey); } inline() { - return [this.pubkey, this.sig, this.buid, this.uid].join(':') + return [this.pubkey, this.sig, this.buid, this.uid].join(":"); } rawWithoutSig() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Identity\n" - raw += "Currency: " + this.currency + "\n" - raw += "Issuer: " + this.pubkey + "\n" - raw += "UniqueID: " + this.uid + '\n' - raw += "Timestamp: " + this.buid + '\n' - return raw + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Identity\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Issuer: " + this.pubkey + "\n"; + raw += "UniqueID: " + this.uid + "\n"; + raw += "Timestamp: " + this.buid + "\n"; + return raw; } getRawUnSigned() { - return this.rawWithoutSig() + return this.rawWithoutSig(); } getRawSigned() { - return this.rawWithoutSig() + this.sig + "\n" + return this.rawWithoutSig() + this.sig + "\n"; } - static fromInline(inline:string, currency:string = ""): IdentityDTO { - const [pubkey, sig, buid, uid] = inline.split(':') + static fromInline(inline: string, currency: string = ""): IdentityDTO { + const [pubkey, sig, buid, uid] = inline.split(":"); return new IdentityDTO( DEFAULT_DOCUMENT_VERSION, currency, @@ -91,14 +90,14 @@ export class IdentityDTO { sig, buid, uid - ) + ); } - static getTargetHash(idty:HashableIdentity) { - return hashf(idty.uid + idty.created_on + idty.pub) + static getTargetHash(idty: HashableIdentity) { + return hashf(idty.uid + idty.created_on + idty.pub); } - static fromJSONObject(obj:any) { + static fromJSONObject(obj: any) { return new IdentityDTO( obj.version || DEFAULT_DOCUMENT_VERSION, obj.currency, @@ -106,10 +105,10 @@ export class IdentityDTO { obj.signature || obj.sig, obj.buid || obj.blockstamp, obj.uid - ) + ); } - static fromBasicIdentity(basic:BasicIdentity): DBIdentity { + static fromBasicIdentity(basic: BasicIdentity): DBIdentity { return new NewDBIdentity( basic.pubkey, basic.sig, @@ -118,12 +117,12 @@ export class IdentityDTO { IdentityDTO.getTargetHash({ pub: basic.pubkey, created_on: basic.buid, - uid: basic.uid + uid: basic.uid, }) - ) + ); } - static fromRevocation(revoc:RevocationDTO): DBIdentity { + static fromRevocation(revoc: RevocationDTO): 
DBIdentity { return new NewDBIdentity( revoc.pubkey, revoc.idty_sig, @@ -132,12 +131,12 @@ export class IdentityDTO { IdentityDTO.getTargetHash({ pub: revoc.pubkey, created_on: revoc.idty_buid, - uid: revoc.idty_uid + uid: revoc.idty_uid, }) - ) + ); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } -} \ No newline at end of file +} diff --git a/app/lib/dto/Jsonable.ts b/app/lib/dto/Jsonable.ts index fa028a88d5cfa103ae7cc2a6bfb3ff9b9a2737a6..1a5ba0a0dd2c810d9c2ae7c67842f3f96ec94e03 100644 --- a/app/lib/dto/Jsonable.ts +++ b/app/lib/dto/Jsonable.ts @@ -12,5 +12,5 @@ // GNU Affero General Public License for more details. export interface Jsonable { - json(): any -} \ No newline at end of file + json(): any; +} diff --git a/app/lib/dto/MembershipDTO.ts b/app/lib/dto/MembershipDTO.ts index 267106ab0d634bd17bf9162b771fcfcfda748db6..e2b1de529790cf68b572ee81ec522740be75c525 100644 --- a/app/lib/dto/MembershipDTO.ts +++ b/app/lib/dto/MembershipDTO.ts @@ -11,59 +11,58 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as moment from "moment" -import {IdentityDTO} from "./IdentityDTO" -import {Cloneable} from "./Cloneable"; -import {hashf} from "../common"; +import * as moment from "moment"; +import { IdentityDTO } from "./IdentityDTO"; +import { Cloneable } from "./Cloneable"; +import { hashf } from "../common"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export class MembershipDTO implements Cloneable { - clone(): any { - return MembershipDTO.fromJSONObject(this) + return MembershipDTO.fromJSONObject(this); } - sigDate?:number - date?:number + sigDate?: number; + date?: number; constructor( public version: number, public currency: string, public issuer: string, public type: string, - public blockstamp:string, - public userid:string, - public certts:string, - public signature:string + public blockstamp: string, + public userid: string, + public certts: string, + public signature: string ) {} get pubkey() { - return this.issuer + return this.issuer; } get pub() { - return this.issuer + return this.issuer; } get membership() { - return this.type + return this.type; } get fpr() { - return this.blockstamp.split('-')[1] + return this.blockstamp.split("-")[1]; } get number() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } get block_number() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } get block_hash() { - return this.blockstamp.split('-')[1] + return this.blockstamp.split("-")[1]; } inline() { @@ -72,33 +71,33 @@ export class MembershipDTO implements Cloneable { this.signature, this.blockstamp, this.certts, - this.userid - ].join(':') + this.userid, + ].join(":"); } getIdtyHash() { return IdentityDTO.getTargetHash({ created_on: this.certts, uid: this.userid, - pub: this.issuer - }) + pub: this.issuer, + }); } getRaw() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Membership\n" - raw += "Currency: " + this.currency + "\n" - raw += "Issuer: " + this.issuer + "\n" - raw += "Block: " + this.blockstamp + "\n" - raw += "Membership: " + this.type + "\n" - raw += "UserID: " + this.userid + "\n" - raw += "CertTS: " + this.certts + "\n" - return raw + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Membership\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Issuer: " + this.issuer + "\n"; + raw += "Block: " + this.blockstamp + "\n"; + raw += 
"Membership: " + this.type + "\n"; + raw += "UserID: " + this.userid + "\n"; + raw += "CertTS: " + this.certts + "\n"; + return raw; } getRawSigned() { - return this.getRaw() + this.signature + "\n" + return this.getRaw() + this.signature + "\n"; } json() { @@ -111,13 +110,17 @@ export class MembershipDTO implements Cloneable { membership: this.type, date: this.date && moment(this.date).unix(), sigDate: this.sigDate && moment(this.sigDate).unix(), - raw: this.getRaw() - } + raw: this.getRaw(), + }, }; } - static fromInline(inlineMS:string, type:string = "", currency:string = "") { - const [issuer, sig, blockstamp, certts, userid] = inlineMS.split(':'); + static fromInline( + inlineMS: string, + type: string = "", + currency: string = "" + ) { + const [issuer, sig, blockstamp, certts, userid] = inlineMS.split(":"); return new MembershipDTO( DEFAULT_DOCUMENT_VERSION, currency, @@ -127,23 +130,23 @@ export class MembershipDTO implements Cloneable { userid, certts, sig - ) + ); } - static fromJSONObject(obj:any) { + static fromJSONObject(obj: any) { return new MembershipDTO( obj.version || DEFAULT_DOCUMENT_VERSION, obj.currency, obj.issuer || obj.pubkey, - obj.type || obj.membership, - obj.blockstamp || obj.block, + obj.type || obj.membership, + obj.blockstamp || obj.block, obj.userid, obj.certts, obj.signature - ) + ); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } } diff --git a/app/lib/dto/MerkleDTO.ts b/app/lib/dto/MerkleDTO.ts index 1d6d136343fa40d2cf3f0b528959804be82b8bec..cdc62e8d5275580b38ffd4fd2a62e371bc5436ed 100644 --- a/app/lib/dto/MerkleDTO.ts +++ b/app/lib/dto/MerkleDTO.ts @@ -13,16 +13,15 @@ "use strict"; -const merkle = require('merkle'); +const merkle = require("merkle"); export class MerkleDTO { + private levels: any[]; + nodes: any[]; + depth: number; - private levels:any[] - nodes:any[] - depth:number - - initialize(leaves:string[]) { - const tree = merkle('sha256').sync(leaves); + initialize(leaves: string[]) { + const tree = merkle("sha256").sync(leaves); this.depth = tree.depth(); this.nodes = tree.nodes(); this.levels = []; @@ -32,12 +31,12 @@ export class MerkleDTO { return this; } - remove(leaf:string) { + remove(leaf: string) { // If leaf IS present - if(~this.levels[this.depth].indexOf(leaf)){ + if (~this.levels[this.depth].indexOf(leaf)) { const leaves = this.leaves(); const index = leaves.indexOf(leaf); - if(~index){ + if (~index) { // Replacement: remove previous hash leaves.splice(index, 1); } @@ -46,13 +45,13 @@ export class MerkleDTO { } } - removeMany(leaves:string[]) { - leaves.forEach((leaf:string) => { + removeMany(leaves: string[]) { + leaves.forEach((leaf: string) => { // If leaf IS present - if(~this.levels[this.depth].indexOf(leaf)){ + if (~this.levels[this.depth].indexOf(leaf)) { const theLeaves = this.leaves(); const index = theLeaves.indexOf(leaf); - if(~index){ + if (~index) { // Replacement: remove previous hash theLeaves.splice(index, 1); } @@ -60,16 +59,16 @@ export class MerkleDTO { }); leaves.sort(); this.initialize(leaves); - }; + } - push(leaf:string, previous:string) { + push(leaf: string, previous: string) { // If leaf is not present - if(this.levels[this.depth].indexOf(leaf) == -1){ + if (this.levels[this.depth].indexOf(leaf) == -1) { const leaves = this.leaves(); // Update or replacement ? 
- if(previous && leaf != previous){ + if (previous && leaf != previous) { const index = leaves.indexOf(previous); - if(~index){ + if (~index) { // Replacement: remove previous hash leaves.splice(index, 1); } @@ -80,10 +79,10 @@ export class MerkleDTO { } } - pushMany(leaves:string[]) { + pushMany(leaves: string[]) { leaves.forEach((leaf) => { // If leaf is not present - if(this.levels[this.depth].indexOf(leaf) == -1){ + if (this.levels[this.depth].indexOf(leaf) == -1) { this.leaves().push(leaf); } }); @@ -92,14 +91,14 @@ export class MerkleDTO { } root() { - return this.levels.length > 0 ? this.levels[0][0] : '' + return this.levels.length > 0 ? this.levels[0][0] : ""; } leaves() { - return this.levels[this.depth] + return this.levels[this.depth]; } count() { - return this.leaves().length + return this.leaves().length; } } diff --git a/app/lib/dto/PeerDTO.ts b/app/lib/dto/PeerDTO.ts index c241736d526a8bf8f3d09d3742c355320cd505f8..50c9b109ca56623ea955084adced5ef366bb6bb8 100644 --- a/app/lib/dto/PeerDTO.ts +++ b/app/lib/dto/PeerDTO.ts @@ -11,73 +11,74 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {hashf} from "../common" -import {CommonConstants} from "../common-libs/constants" -import {Cloneable} from "./Cloneable" -import {DBPeer} from "../db/DBPeer" +import { hashf } from "../common"; +import { CommonConstants } from "../common-libs/constants"; +import { Cloneable } from "./Cloneable"; +import { DBPeer } from "../db/DBPeer"; export interface WS2PEndpoint { - version:number - uuid:string - host:string - port:number - path:string + version: number; + uuid: string; + host: string; + port: number; + path: string; } export class PeerDTO implements Cloneable { - clone(): any { - return PeerDTO.fromJSONObject(this) + return PeerDTO.fromJSONObject(this); } - member = false + member = false; constructor( - public version:number, - public currency:string, - public pubkey:string, - public blockstamp:string, - public endpoints:string[], - public signature:string, - public status:string, - public statusTS:number, + public version: number, + public currency: string, + public pubkey: string, + public blockstamp: string, + public endpoints: string[], + public signature: string, + public status: string, + public statusTS: number, member = false ) { - this.member = member + this.member = member; } get block() { - return this.blockstamp + return this.blockstamp; } blockNumber() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } keyID() { - return this.pubkey && this.pubkey.length > 10 ? this.pubkey.substring(0, 10) : "Unknown" + return this.pubkey && this.pubkey.length > 10 + ? 
this.pubkey.substring(0, 10) + : "Unknown"; } getRawUnsigned() { - return this.getRaw() + return this.getRaw(); } getRaw() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Peer\n" - raw += "Currency: " + this.currency + "\n" - raw += "PublicKey: " + this.pubkey + "\n" - raw += "Block: " + this.blockstamp + "\n" - raw += "Endpoints:" + "\n" - for(const ep of this.endpoints) { - raw += ep + "\n" + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Peer\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "PublicKey: " + this.pubkey + "\n"; + raw += "Block: " + this.blockstamp + "\n"; + raw += "Endpoints:" + "\n"; + for (const ep of this.endpoints) { + raw += ep + "\n"; } - return raw + return raw; } getRawSigned() { - return this.getRaw() + this.signature + "\n" + return this.getRaw() + this.signature + "\n"; } json() { @@ -89,113 +90,152 @@ export class PeerDTO implements Cloneable { block: this.block, signature: this.signature, raw: this.getRawSigned(), - pubkey: this.pubkey - } + pubkey: this.pubkey, + }; } getBMA() { - let bma: { dns?: string, ipv4?: string, ipv6?: string, port?: number, path?: string } = {} - let notFound = true + let bma: { + dns?: string; + ipv4?: string; + ipv6?: string; + port?: number; + path?: string; + } = {}; + let notFound = true; this.endpoints.forEach((ep) => { const matchesBMA = notFound && ep.match(CommonConstants.BMA_REGEXP); const matchesBMAS = notFound && ep.match(CommonConstants.BMAS_REGEXP); if (matchesBMA) { - notFound = false + notFound = false; bma = { - "dns": matchesBMA[2] || '', - "ipv4": matchesBMA[4] || '', - "ipv6": matchesBMA[6] || '', - "port": parseInt(matchesBMA[8]) || 9101 + dns: matchesBMA[2] || "", + ipv4: matchesBMA[4] || "", + ipv6: matchesBMA[6] || "", + port: parseInt(matchesBMA[8]) || 9101, }; - } - else if (matchesBMAS) { - notFound = false + } else if (matchesBMAS) { + notFound = false; bma = { - "dns": matchesBMAS[2] || '', - "ipv4": matchesBMAS[4] || '', - "ipv6": matchesBMAS[6] || '', - "port": parseInt(matchesBMAS[8]) || 9101, - "path": matchesBMAS[10] || '' + dns: matchesBMAS[2] || "", + ipv4: matchesBMAS[4] || "", + ipv6: matchesBMAS[6] || "", + port: parseInt(matchesBMAS[8]) || 9101, + path: matchesBMAS[10] || "", }; } }); - return bma + return bma; } - getOnceWS2PEndpoint(canReachTorEp:boolean, canReachClearEp:boolean, uuidExcluded:string[] = []) { - let api:WS2PEndpoint|null = null - let bestWS2PVersionAvailable:number = 0 - let bestWS2PTORVersionAvailable:number = 0 + getOnceWS2PEndpoint( + canReachTorEp: boolean, + canReachClearEp: boolean, + uuidExcluded: string[] = [] + ) { + let api: WS2PEndpoint | null = null; + let bestWS2PVersionAvailable: number = 0; + let bestWS2PTORVersionAvailable: number = 0; for (const ep of this.endpoints) { if (canReachTorEp) { - let matches:RegExpMatchArray | null = ep.match(CommonConstants.WS2PTOR_V2_REGEXP) - if (matches && parseInt(matches[1]) > bestWS2PTORVersionAvailable && (uuidExcluded.indexOf(matches[2]) === -1)) { - bestWS2PTORVersionAvailable = parseInt(matches[1]) + let matches: RegExpMatchArray | null = ep.match( + CommonConstants.WS2PTOR_V2_REGEXP + ); + if ( + matches && + parseInt(matches[1]) > bestWS2PTORVersionAvailable && + uuidExcluded.indexOf(matches[2]) === -1 + ) { + bestWS2PTORVersionAvailable = parseInt(matches[1]); api = { version: parseInt(matches[1]), uuid: matches[2], - host: matches[3] || '', + host: matches[3] || "", port: parseInt(matches[4]) || 0, - path: matches[5] - } + path: matches[5], + }; } else 
{ - matches = ep.match(CommonConstants.WS2PTOR_REGEXP) - if (matches && bestWS2PTORVersionAvailable == 0 && (uuidExcluded.indexOf(matches[1]) === -1)) { - bestWS2PTORVersionAvailable = 1 + matches = ep.match(CommonConstants.WS2PTOR_REGEXP); + if ( + matches && + bestWS2PTORVersionAvailable == 0 && + uuidExcluded.indexOf(matches[1]) === -1 + ) { + bestWS2PTORVersionAvailable = 1; api = { version: 1, uuid: matches[1], - host: matches[2] || '', + host: matches[2] || "", port: parseInt(matches[3]) || 0, - path: matches[4] - } + path: matches[4], + }; } } } // If can reach clear endpoint and not found tor endpoint if (canReachClearEp && bestWS2PTORVersionAvailable == 0) { - let matches:any = ep.match(CommonConstants.WS2P_V2_REGEXP) - if (matches && parseInt(matches[1]) > bestWS2PVersionAvailable && (uuidExcluded.indexOf(matches[2]) === -1)) { - bestWS2PVersionAvailable = parseInt(matches[1]) + let matches: any = ep.match(CommonConstants.WS2P_V2_REGEXP); + if ( + matches && + parseInt(matches[1]) > bestWS2PVersionAvailable && + uuidExcluded.indexOf(matches[2]) === -1 + ) { + bestWS2PVersionAvailable = parseInt(matches[1]); api = { version: parseInt(matches[1]), uuid: matches[2], - host: matches[3] || '', + host: matches[3] || "", port: parseInt(matches[4]) || 0, - path: matches[5] - } + path: matches[5], + }; } else { - matches = ep.match(CommonConstants.WS2P_REGEXP) - if (matches && bestWS2PVersionAvailable == 0 && (uuidExcluded.indexOf(matches[1]) === -1)) { - bestWS2PVersionAvailable = 1 + matches = ep.match(CommonConstants.WS2P_REGEXP); + if ( + matches && + bestWS2PVersionAvailable == 0 && + uuidExcluded.indexOf(matches[1]) === -1 + ) { + bestWS2PVersionAvailable = 1; api = { version: 1, uuid: matches[1], - host: matches[2] || '', + host: matches[2] || "", port: parseInt(matches[3]) || 0, - path: matches[4] - } + path: matches[4], + }; } } } } - return api || null + return api || null; } - getAllWS2PEndpoints(canReachTorEp:boolean, canReachClearEp:boolean, myUUID:string) { - let apis:WS2PEndpoint[] = [] - let uuidExcluded:string[] = [myUUID] - let api = this.getOnceWS2PEndpoint(canReachTorEp, canReachClearEp, uuidExcluded) + getAllWS2PEndpoints( + canReachTorEp: boolean, + canReachClearEp: boolean, + myUUID: string + ) { + let apis: WS2PEndpoint[] = []; + let uuidExcluded: string[] = [myUUID]; + let api = this.getOnceWS2PEndpoint( + canReachTorEp, + canReachClearEp, + uuidExcluded + ); while (api !== null) { - uuidExcluded.push(api.uuid) - apis.push(api) - api = this.getOnceWS2PEndpoint(canReachTorEp, canReachClearEp, uuidExcluded) + uuidExcluded.push(api.uuid); + apis.push(api); + api = this.getOnceWS2PEndpoint( + canReachTorEp, + canReachClearEp, + uuidExcluded + ); } - return apis + return apis; } getFirstNonTorWS2P() { - return this.getOnceWS2PEndpoint(false, true) + return this.getOnceWS2PEndpoint(false, true); } getDns() { @@ -225,102 +265,116 @@ export class PeerDTO implements Cloneable { getHostPreferDNS() { const bma = this.getBMA(); - return (bma.dns ? bma.dns : - (bma.ipv4 ? bma.ipv4 : - (bma.ipv6 ? bma.ipv6 : ''))) + return bma.dns ? bma.dns : bma.ipv4 ? bma.ipv4 : bma.ipv6 ? 
bma.ipv6 : ""; } getURL() { const bma = this.getBMA(); let base = this.getHostPreferDNS(); - if(bma.port) - base += ':' + bma.port; + if (bma.port) base += ":" + bma.port; return base; } - hasValid4(bma:any) { - return !!(bma.ipv4 && !bma.ipv4.match(/^127.0/) && !bma.ipv4.match(/^192.168/)) + hasValid4(bma: any) { + return !!( + bma.ipv4 && + !bma.ipv4.match(/^127.0/) && + !bma.ipv4.match(/^192.168/) + ); } getNamedURL() { - return this.getURL() + return this.getURL(); } isReachable() { - return !!(this.getURL()) + return !!this.getURL(); } - containsEndpoint(ep:string) { - return this.endpoints.reduce((found:boolean, endpoint:string) => found || endpoint == ep, false) + containsEndpoint(ep: string) { + return this.endpoints.reduce( + (found: boolean, endpoint: string) => found || endpoint == ep, + false + ); } - containsAllEndpoints(endpoints:string[]) { + containsAllEndpoints(endpoints: string[]) { for (const ep of endpoints) { if (!this.containsEndpoint(ep)) { - return false + return false; } } - return true + return true; } endpointSum() { - return this.endpoints.join('_') + return this.endpoints.join("_"); } getHash() { - return hashf(this.getRawSigned()) + return hashf(this.getRawSigned()); } toDBPeer(): DBPeer { - const p = new DBPeer() - p.version = this.version - p.currency = this.currency - p.status = this.status || "DOWN" - p.statusTS = this.statusTS || 0 - p.hash = this.getHash() - p.first_down = 0 - p.last_try = 0 - p.pubkey = this.pubkey - p.block = this.block - p.signature = this.signature - p.endpoints = this.endpoints - p.raw = this.getRawSigned() - return p - } - - static blockNumber(blockstamp:string) { - return parseInt(blockstamp) - } - - static fromDBPeer(p:DBPeer) { - return new PeerDTO(p.version, p.currency, p.pubkey, p.block, p.endpoints, p.signature, p.status, p.statusTS, false) - } - - static fromJSONObject(obj:any) { + const p = new DBPeer(); + p.version = this.version; + p.currency = this.currency; + p.status = this.status || "DOWN"; + p.statusTS = this.statusTS || 0; + p.hash = this.getHash(); + p.first_down = 0; + p.last_try = 0; + p.pubkey = this.pubkey; + p.block = this.block; + p.signature = this.signature; + p.endpoints = this.endpoints; + p.raw = this.getRawSigned(); + return p; + } + + static blockNumber(blockstamp: string) { + return parseInt(blockstamp); + } + + static fromDBPeer(p: DBPeer) { + return new PeerDTO( + p.version, + p.currency, + p.pubkey, + p.block, + p.endpoints, + p.signature, + p.status, + p.statusTS, + false + ); + } + + static fromJSONObject(obj: any) { return new PeerDTO( parseInt(obj.version), obj.currency || "", obj.pubkey || obj.pub || obj.issuer || "", obj.blockstamp || obj.block, - obj.endpoints || [], + obj.endpoints || [], obj.signature || obj.sig, obj.status || "DOWN", obj.statusTS || 0, obj.member - ) + ); } - static endpoint2host(endpoint:string) { - return PeerDTO.fromJSONObject({ endpoints: [endpoint] }).getURL() + static endpoint2host(endpoint: string) { + return PeerDTO.fromJSONObject({ endpoints: [endpoint] }).getURL(); } - static indexOfFirst(endpoints:string[], intoEndpoints:string[]) { + static indexOfFirst(endpoints: string[], intoEndpoints: string[]) { for (let i = 0; i < intoEndpoints.length; i++) { - const index = endpoints.indexOf(intoEndpoints[i]) + const index = endpoints.indexOf(intoEndpoints[i]); if (index !== -1) { - return index + return index; } } - return 0 + return 0; } -} \ No newline at end of file +} diff --git a/app/lib/dto/RevocationDTO.ts b/app/lib/dto/RevocationDTO.ts index 
1185188dafe53668ee0b3aa4d05112795a63f0a7..284294ce2d941eba4dd789d78d11e9cde7f514b0 100644 --- a/app/lib/dto/RevocationDTO.ts +++ b/app/lib/dto/RevocationDTO.ts @@ -11,20 +11,19 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {Cloneable} from "./Cloneable"; -import {hashf} from "../common"; +import { Cloneable } from "./Cloneable"; +import { hashf } from "../common"; -const DEFAULT_DOCUMENT_VERSION = 10 +const DEFAULT_DOCUMENT_VERSION = 10; export interface ShortRevocation { - pubkey: string - revocation: string + pubkey: string; + revocation: string; } export class RevocationDTO implements ShortRevocation, Cloneable { - clone(): any { - return RevocationDTO.fromJSONObject(this) + return RevocationDTO.fromJSONObject(this); } constructor( @@ -38,38 +37,38 @@ export class RevocationDTO implements ShortRevocation, Cloneable { ) {} rawWithoutSig() { - let raw = "" - raw += "Version: " + this.version + "\n" - raw += "Type: Revocation\n" - raw += "Currency: " + this.currency + "\n" - raw += "Issuer: " + this.pubkey + "\n" - raw += "IdtyUniqueID: " + this.idty_uid+ '\n' - raw += "IdtyTimestamp: " + this.idty_buid + '\n' - raw += "IdtySignature: " + this.idty_sig + '\n' - return raw + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Revocation\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Issuer: " + this.pubkey + "\n"; + raw += "IdtyUniqueID: " + this.idty_uid + "\n"; + raw += "IdtyTimestamp: " + this.idty_buid + "\n"; + raw += "IdtySignature: " + this.idty_sig + "\n"; + return raw; } getRaw() { - return this.rawWithoutSig() + this.revocation + "\n" + return this.rawWithoutSig() + this.revocation + "\n"; } getRawUnsigned() { - return this.rawWithoutSig() + return this.rawWithoutSig(); } // TODO: to remove when BMA has been merged in duniter/duniter repo json() { return { - result: true - } + result: true, + }; } - static fromInline(inline:string): ShortRevocation { - const [pubkey, revocation] = inline.split(':') - return { pubkey, revocation } + static fromInline(inline: string): ShortRevocation { + const [pubkey, revocation] = inline.split(":"); + return { pubkey, revocation }; } - static fromJSONObject(json:any) { + static fromJSONObject(json: any) { return new RevocationDTO( json.version || DEFAULT_DOCUMENT_VERSION, json.currency, @@ -78,10 +77,10 @@ export class RevocationDTO implements ShortRevocation, Cloneable { json.idty_buid || json.buid, json.idty_sig || json.sig, json.revocation || json.revocation - ) + ); } getHash() { - return hashf(this.getRaw()) + return hashf(this.getRaw()); } -} \ No newline at end of file +} diff --git a/app/lib/dto/TransactionDTO.ts b/app/lib/dto/TransactionDTO.ts index 45e6b3749e5ffdf9c35e7e3236fc8dd6cae37af2..d58d660fe7afba0867124594fdc73bc4da41a9a3 100644 --- a/app/lib/dto/TransactionDTO.ts +++ b/app/lib/dto/TransactionDTO.ts @@ -11,19 +11,19 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
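A short sketch of the TransactionDTO string helpers below, assuming only what the class itself defines; the identifier and condition values are invented samples:

import { TransactionDTO } from "./TransactionDTO";

// Inputs are "amount:base:type:identifier:pos"; only pos is parsed to a number.
const input = TransactionDTO.inputStr2Obj("25:2:T:TXHASH:1");
// Outputs are "amount:base:conditions"; amount and base are parsed here.
const output = TransactionDTO.outputStr2Obj("25:2:SIG(PUBKEY)");
// Unlocks are "inputIndex:params"; unlock2params() returns the space-split params.
const params = TransactionDTO.unlock2params("0:SIG(0)"); // ["SIG(0)"]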
-import {hashf} from "../common" -import {Cloneable} from "./Cloneable" -import {verify} from "duniteroxyde" +import { hashf } from "../common"; +import { Cloneable } from "./Cloneable"; +import { verify } from "duniteroxyde"; export interface BaseDTO { - base: number + base: number; } export class InputDTO implements BaseDTO { constructor( public amount: number, public base: number, - public type: 'T'|'D', + public type: "T" | "D", public identifier: string, public pos: number, public raw: string @@ -40,35 +40,33 @@ export class OutputDTO implements BaseDTO { } export interface TxSignatureResult { - sigs:{ - k:string - ok:boolean - }[] + sigs: { + k: string; + ok: boolean; + }[]; } export class TxSignatureResultImpl implements TxSignatureResult { - // The signature results - public sigs:{ - k:string - ok:boolean - }[] - - constructor(issuers:string[]) { - this.sigs = issuers.map(k => { - return { k, ok: false } - }) + public sigs: { + k: string; + ok: boolean; + }[]; + + constructor(issuers: string[]) { + this.sigs = issuers.map((k) => { + return { k, ok: false }; + }); } get allMatching() { - return this.sigs.reduce((ok, s) => ok && s.ok, true) + return this.sigs.reduce((ok, s) => ok && s.ok, true); } } export class TransactionDTO implements Cloneable { - clone(): any { - return TransactionDTO.fromJSONObject(this) + return TransactionDTO.fromJSONObject(this); } constructor( @@ -87,132 +85,149 @@ export class TransactionDTO implements Cloneable { ) { // Compute the hash if not given if (!hash) { - this.hash = this.getHash() + this.hash = this.getHash(); } } get signature() { - return this.signatures[0] + return this.signatures[0]; } get output_amount() { - return this.outputs.reduce((maxBase, output) => Math.max(maxBase, parseInt(output.split(':')[0])), 0) + return this.outputs.reduce( + (maxBase, output) => Math.max(maxBase, parseInt(output.split(":")[0])), + 0 + ); } get output_base() { - return this.outputs.reduce((sum, output) => sum + parseInt(output.split(':')[1]), 0) + return this.outputs.reduce( + (sum, output) => sum + parseInt(output.split(":")[1]), + 0 + ); } get blockNumber() { - return parseInt(this.blockstamp) + return parseInt(this.blockstamp); } get block_hash() { - return this.blockstamp.split('-')[1] + return this.blockstamp.split("-")[1]; } getLen() { - return 2 // header + blockstamp - + this.issuers.length * 2 // issuers + signatures - + this.inputs.length * 2 // inputs + unlocks - + (this.comment ? 1 : 0) - + this.outputs.length + return ( + 2 + // header + blockstamp + this.issuers.length * 2 + // issuers + signatures + this.inputs.length * 2 + // inputs + unlocks + (this.comment ? 
1 : 0) + + this.outputs.length + ); } getHash() { - const raw = TransactionDTO.toRAW(this) - return hashf(raw) + const raw = TransactionDTO.toRAW(this); + return hashf(raw); } getRawTxNoSig() { - return TransactionDTO.toRAW(this, true) + return TransactionDTO.toRAW(this, true); } inputsAsObjects(): InputDTO[] { - return this.inputs.map(input => { - const [amount, base, type, identifier, pos] = input.split(':') + return this.inputs.map((input) => { + const [amount, base, type, identifier, pos] = input.split(":"); return new InputDTO( parseInt(amount), parseInt(base), - type as 'T'|'D', + type as "T" | "D", identifier, parseInt(pos), input - ) - }) + ); + }); } outputsAsObjects(): OutputDTO[] { - return this.outputs.map(output => { - const [amount, base, conditions] = output.split(':') + return this.outputs.map((output) => { + const [amount, base, conditions] = output.split(":"); return new OutputDTO( parseInt(amount), parseInt(base), conditions, output - ) - }) + ); + }); } outputsAsRecipients(): string[] { return this.outputs.map((out) => { - const recipent = out.match('SIG\\((.*)\\)'); - return (recipent && recipent[1]) || 'UNKNOWN'; - }) + const recipent = out.match("SIG\\((.*)\\)"); + return (recipent && recipent[1]) || "UNKNOWN"; + }); } getRaw() { - let raw = "" - raw += "Version: " + (this.version) + "\n" - raw += "Type: Transaction\n" - raw += "Currency: " + this.currency + "\n" - raw += "Blockstamp: " + this.blockstamp + "\n" - raw += "Locktime: " + this.locktime + "\n" + let raw = ""; + raw += "Version: " + this.version + "\n"; + raw += "Type: Transaction\n"; + raw += "Currency: " + this.currency + "\n"; + raw += "Blockstamp: " + this.blockstamp + "\n"; + raw += "Locktime: " + this.locktime + "\n"; raw += "Issuers:\n"; (this.issuers || []).forEach((issuer) => { - raw += issuer + '\n' - }) + raw += issuer + "\n"; + }); raw += "Inputs:\n"; this.inputs.forEach((input) => { - raw += input + '\n' - }) + raw += input + "\n"; + }); raw += "Unlocks:\n"; this.unlocks.forEach((unlock) => { - raw += unlock + '\n' - }) + raw += unlock + "\n"; + }); raw += "Outputs:\n"; this.outputs.forEach((output) => { - raw += output + '\n' - }) + raw += output + "\n"; + }); raw += "Comment: " + (this.comment || "") + "\n"; this.signatures.forEach((signature) => { - raw += signature + '\n' - }) - return raw + raw += signature + "\n"; + }); + return raw; } getCompactVersion() { let issuers = this.issuers; - let raw = ["TX", this.version, issuers.length, this.inputs.length, this.unlocks.length, this.outputs.length, this.comment ? 1 : 0, this.locktime || 0].join(':') + '\n'; + let raw = + [ + "TX", + this.version, + issuers.length, + this.inputs.length, + this.unlocks.length, + this.outputs.length, + this.comment ? 
1 : 0, + this.locktime || 0, + ].join(":") + "\n"; raw += this.blockstamp + "\n"; (issuers || []).forEach((issuer) => { - raw += issuer + '\n'; + raw += issuer + "\n"; }); (this.inputs || []).forEach((input) => { - raw += input + '\n'; + raw += input + "\n"; }); (this.unlocks || []).forEach((input) => { - raw += input + '\n'; + raw += input + "\n"; }); (this.outputs || []).forEach((output) => { - raw += output + '\n'; + raw += output + "\n"; }); - if (this.comment) - raw += this.comment + '\n'; + if (this.comment) raw += this.comment + "\n"; (this.signatures || []).forEach((signature) => { - raw += signature + '\n' - }) - return raw + raw += signature + "\n"; + }); + return raw; } computeAllHashes() { @@ -221,47 +236,47 @@ export class TransactionDTO implements Cloneable { json() { return { - 'version': this.version, - 'currency': this.currency, - 'issuers': this.issuers, - 'inputs': this.inputs, - 'unlocks': this.unlocks, - 'outputs': this.outputs, - 'comment': this.comment, - 'locktime': this.locktime, - 'blockstamp': this.blockstamp, - 'blockstampTime': this.blockstampTime, - 'signatures': this.signatures, - 'raw': this.getRaw(), - 'hash': this.hash - } + version: this.version, + currency: this.currency, + issuers: this.issuers, + inputs: this.inputs, + unlocks: this.unlocks, + outputs: this.outputs, + comment: this.comment, + locktime: this.locktime, + blockstamp: this.blockstamp, + blockstampTime: this.blockstampTime, + signatures: this.signatures, + raw: this.getRaw(), + hash: this.hash, + }; } getTransactionSigResult(dubp_version: number) { - const sigResult = new TxSignatureResultImpl(this.issuers.slice()) - let i = 0 - const raw = this.getRawTxNoSig() - let matching = true + const sigResult = new TxSignatureResultImpl(this.issuers.slice()); + let i = 0; + const raw = this.getRawTxNoSig(); + let matching = true; while (matching && i < this.signatures.length) { - const sig = this.signatures[i] - const pub = this.issuers[i] + const sig = this.signatures[i]; + const pub = this.issuers[i]; if (dubp_version >= 12) { - sigResult.sigs[i].ok = verify(raw, sig, pub) + sigResult.sigs[i].ok = verify(raw, sig, pub); } else { // TODO ESZ list all invalid transactions - sigResult.sigs[i].ok = verify(raw, sig, pub) + sigResult.sigs[i].ok = verify(raw, sig, pub); } - matching = sigResult.sigs[i].ok - i++ + matching = sigResult.sigs[i].ok; + i++; } - return sigResult + return sigResult; } checkSignatures(dubp_version: number) { - return this.getTransactionSigResult(dubp_version).allMatching + return this.getTransactionSigResult(dubp_version).allMatching; } - static fromJSONObject(obj:any, currency:string = "") { + static fromJSONObject(obj: any, currency: string = "") { return new TransactionDTO( obj.version || 10, currency || obj.currency || "", @@ -275,80 +290,80 @@ export class TransactionDTO implements Cloneable { obj.unlocks || [], obj.signatures || [], obj.comment || "" - ) + ); } - static toRAW(json:TransactionDTO, noSig = false) { - let raw = "" - raw += "Version: " + (json.version) + "\n" - raw += "Type: Transaction\n" - raw += "Currency: " + json.currency + "\n" - raw += "Blockstamp: " + json.blockstamp + "\n" - raw += "Locktime: " + json.locktime + "\n" + static toRAW(json: TransactionDTO, noSig = false) { + let raw = ""; + raw += "Version: " + json.version + "\n"; + raw += "Type: Transaction\n"; + raw += "Currency: " + json.currency + "\n"; + raw += "Blockstamp: " + json.blockstamp + "\n"; + raw += "Locktime: " + json.locktime + "\n"; raw += "Issuers:\n"; (json.issuers || 
[]).forEach((issuer) => { - raw += issuer + '\n' - }) + raw += issuer + "\n"; + }); raw += "Inputs:\n"; (json.inputs || []).forEach((input) => { - raw += input + '\n' - }) + raw += input + "\n"; + }); raw += "Unlocks:\n"; (json.unlocks || []).forEach((unlock) => { - raw += unlock + '\n' - }) + raw += unlock + "\n"; + }); raw += "Outputs:\n"; - (json.outputs || []).forEach((output) => { - raw += output + '\n' - }) + (json.outputs || []).forEach((output) => { + raw += output + "\n"; + }); raw += "Comment: " + (json.comment || "") + "\n"; if (!noSig) { (json.signatures || []).forEach((signature) => { - raw += signature + '\n' - }) + raw += signature + "\n"; + }); } - return raw + return raw; } - static outputObj2Str(o:OutputDTO) { - return [o.amount, o.base, o.conditions].join(':') + static outputObj2Str(o: OutputDTO) { + return [o.amount, o.base, o.conditions].join(":"); } - static inputObj2Str(i:InputDTO) { - return [i.amount, i.base, i.type, i.identifier, i.pos].join(':') + static inputObj2Str(i: InputDTO) { + return [i.amount, i.base, i.type, i.identifier, i.pos].join(":"); } - static outputStr2Obj(outputStr:string) { - const sp = outputStr.split(':'); + static outputStr2Obj(outputStr: string) { + const sp = outputStr.split(":"); return { amount: parseInt(sp[0]), base: parseInt(sp[1]), conditions: sp[2], - raw: outputStr + raw: outputStr, }; } - static inputStr2Obj(inputStr:string) { - const sp = inputStr.split(':') + static inputStr2Obj(inputStr: string) { + const sp = inputStr.split(":"); return { - amount: sp[0], - base: sp[1], - type: sp[2], + amount: sp[0], + base: sp[1], + type: sp[2], identifier: sp[3], - pos: parseInt(sp[4]), - raw: inputStr - } + pos: parseInt(sp[4]), + raw: inputStr, + }; } - static unlock2params(unlock:string) { - const match = unlock.match(/^\d+:(.*)$/) + static unlock2params(unlock: string) { + const match = unlock.match(/^\d+:(.*)$/); if (match) { - return match[1].split(' ') + return match[1].split(" "); } - return [] + return []; } static mock() { - return new TransactionDTO(1, "", 0, "", "", 0, [], [], [], [], [], "") + return new TransactionDTO(1, "", 0, "", "", 0, [], [], [], [], [], ""); } } diff --git a/app/lib/helpers/merkle.ts b/app/lib/helpers/merkle.ts index db89f0b735164b8e5e4347b3a80c305ff1256937..81c2e9784c51636bd69e65fd058f907c5ce68c4e 100644 --- a/app/lib/helpers/merkle.ts +++ b/app/lib/helpers/merkle.ts @@ -11,14 +11,18 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -export const processForURL = async (req:any, merkle:any, valueCoroutine:any) => { +export const processForURL = async ( + req: any, + merkle: any, + valueCoroutine: any +) => { // Result - const json:any = { - "depth": merkle.depth, - "nodesCount": merkle.nodes, - "leavesCount": merkle.levels[merkle.depth].length, - "root": merkle.levels[0][0] || "", - "leaves": [] + const json: any = { + depth: merkle.depth, + nodesCount: merkle.nodes, + leavesCount: merkle.levels[merkle.depth].length, + root: merkle.levels[0][0] || "", + leaves: [], }; if (req.query.leaves) { // Leaves @@ -26,18 +30,18 @@ export const processForURL = async (req:any, merkle:any, valueCoroutine:any) => return json; } else if (req.query.leaf) { // Extract of a leaf - json.leaves = [] + json.leaves = []; const hashes = [req.query.leaf]; // This code is in a loop for historic reasons. Should be set to non-loop style. 
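// A non-loop equivalent, as the comment above suggests (hedged sketch:
// hashes always holds exactly the single req.query.leaf here):
//   const hash = req.query.leaf;
//   const values = await valueCoroutine([hash]);
//   json.leaf = { hash, value: values[hash] || "" };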
const values = await valueCoroutine(hashes); hashes.forEach((hash) => { json.leaf = { - "hash": hash, - "value": values[hash] || "" + hash: hash, + value: values[hash] || "", }; }); return json; } else { return json; } -} +}; diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts index ac397110faff7e0c41d57c1fa5817c9e9010351f..8ce3c8f368baa6591a7b992740c9c7d45d89541b 100644 --- a/app/lib/indexer.ts +++ b/app/lib/indexer.ts @@ -11,262 +11,263 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "./dto/BlockDTO" -import {ConfDTO, CurrencyConfDTO} from "./dto/ConfDTO" -import {IdentityDTO} from "./dto/IdentityDTO" -import {RevocationDTO} from "./dto/RevocationDTO" -import {CertificationDTO} from "./dto/CertificationDTO" -import {TransactionDTO} from "./dto/TransactionDTO" -import {DBHead} from "./db/DBHead" -import {verify} from "duniteroxyde" -import {rawer, txunlock} from "./common-libs/index" -import {CommonConstants} from "./common-libs/constants" -import {MembershipDTO} from "./dto/MembershipDTO" -import {UnlockMetadata} from "./common-libs/txunlock" -import {FileDAL} from "./dal/fileDAL" -import {DBBlock} from "./db/DBBlock" -import {DBWallet} from "./db/DBWallet" -import {Tristamp} from "./common/Tristamp" -import {Underscore} from "./common-libs/underscore" -import {DataErrors} from "./common-libs/errors" -import {MonitorExecutionTime} from "./debug/MonitorExecutionTime" -import {NewLogger} from "./logger" -import { WotBuilder } from "duniteroxyde" - -const constants = CommonConstants +import { BlockDTO } from "./dto/BlockDTO"; +import { ConfDTO, CurrencyConfDTO } from "./dto/ConfDTO"; +import { IdentityDTO } from "./dto/IdentityDTO"; +import { RevocationDTO } from "./dto/RevocationDTO"; +import { CertificationDTO } from "./dto/CertificationDTO"; +import { TransactionDTO } from "./dto/TransactionDTO"; +import { DBHead } from "./db/DBHead"; +import { verify } from "duniteroxyde"; +import { rawer, txunlock } from "./common-libs/index"; +import { CommonConstants } from "./common-libs/constants"; +import { MembershipDTO } from "./dto/MembershipDTO"; +import { UnlockMetadata } from "./common-libs/txunlock"; +import { FileDAL } from "./dal/fileDAL"; +import { DBBlock } from "./db/DBBlock"; +import { DBWallet } from "./db/DBWallet"; +import { Tristamp } from "./common/Tristamp"; +import { Underscore } from "./common-libs/underscore"; +import { DataErrors } from "./common-libs/errors"; +import { MonitorExecutionTime } from "./debug/MonitorExecutionTime"; +import { NewLogger } from "./logger"; +import { WotBuilder } from "duniteroxyde"; + +const constants = CommonConstants; export interface IndexEntry { - index: string, - op: string, - writtenOn: number, - written_on: string, + index: string; + op: string; + writtenOn: number; + written_on: string; } export interface MindexEntry extends IndexEntry { - pub: string, - created_on: string, - type: string | null, - expires_on: number | null, - expired_on: number | null, - revocation: string | null, - revokes_on: number | null, - chainable_on: number | null, - revoked_on: string | null, - leaving: boolean | null, - age: number, - isBeingRevoked?: boolean, - unchainables: number, - numberFollowing?: boolean, - distanceOK?: boolean, - onRevoked?: boolean, - joinsTwice?: boolean, - enoughCerts?: boolean, - leaverIsMember?: boolean, - activeIsMember?: boolean, - revokedIsMember?: boolean, - alreadyRevoked?: boolean, - revocationSigOK?: boolean, - created_on_ref?: 
{ medianTime: number, number:number, hash:string } + pub: string; + created_on: string; + type: string | null; + expires_on: number | null; + expired_on: number | null; + revocation: string | null; + revokes_on: number | null; + chainable_on: number | null; + revoked_on: string | null; + leaving: boolean | null; + age: number; + isBeingRevoked?: boolean; + unchainables: number; + numberFollowing?: boolean; + distanceOK?: boolean; + onRevoked?: boolean; + joinsTwice?: boolean; + enoughCerts?: boolean; + leaverIsMember?: boolean; + activeIsMember?: boolean; + revokedIsMember?: boolean; + alreadyRevoked?: boolean; + revocationSigOK?: boolean; + created_on_ref?: { medianTime: number; number: number; hash: string }; } export interface FullMindexEntry { - op: string - pub: string - created_on: string - written_on: string - expires_on: number - expired_on: null|number - revokes_on: number - revoked_on: null|string - leaving: boolean - revocation: null|string - chainable_on: number - writtenOn: number + op: string; + pub: string; + created_on: string; + written_on: string; + expires_on: number; + expired_on: null | number; + revokes_on: number; + revoked_on: null | string; + leaving: boolean; + revocation: null | string; + chainable_on: number; + writtenOn: number; } export interface IindexEntry extends IndexEntry { - uid: string | null, - pub: string, - hash: string | null, - sig: string | null, - created_on: string | null, - member: boolean|null, - wasMember: boolean | null, - kick: boolean | null, - wotb_id: number | null, - age: number, - pubUnique?: boolean, - excludedIsMember?: boolean, - isBeingKicked?: boolean, - uidUnique?: boolean, - hasToBeExcluded?: boolean, + uid: string | null; + pub: string; + hash: string | null; + sig: string | null; + created_on: string | null; + member: boolean | null; + wasMember: boolean | null; + kick: boolean | null; + wotb_id: number | null; + age: number; + pubUnique?: boolean; + excludedIsMember?: boolean; + isBeingKicked?: boolean; + uidUnique?: boolean; + hasToBeExcluded?: boolean; } export interface FullIindexEntry { - op: string - uid: string - pub: string - hash: string - sig: string - created_on: string - written_on: string - writtenOn: number - member: boolean - wasMember: boolean - kick: boolean - wotb_id: number + op: string; + uid: string; + pub: string; + hash: string; + sig: string; + created_on: string; + written_on: string; + writtenOn: number; + member: boolean; + wasMember: boolean; + kick: boolean; + wotb_id: number; } export interface CindexEntry extends IndexEntry { - issuer: string, - receiver: string, - created_on: number, - sig: string, - chainable_on: number, - replayable_on: number, - expires_on: number, - expired_on: number, - from_wid: null, // <-These 2 fields are useless - to_wid: null, // <-' - unchainables: number, - age: number, - stock: number, - fromMember?: boolean, - toMember?: boolean, - toNewcomer?: boolean, - toLeaver?: boolean, - isReplay?: boolean, - isReplayable?: boolean, - sigOK?: boolean, - created_on_ref?: { medianTime: number }, + issuer: string; + receiver: string; + created_on: number; + sig: string; + chainable_on: number; + replayable_on: number; + expires_on: number; + expired_on: number; + from_wid: null; // <-These 2 fields are useless + to_wid: null; // <-' + unchainables: number; + age: number; + stock: number; + fromMember?: boolean; + toMember?: boolean; + toNewcomer?: boolean; + toLeaver?: boolean; + isReplay?: boolean; + isReplayable?: boolean; + sigOK?: boolean; + created_on_ref?: { medianTime: 
number }; } export interface FullCindexEntry { - issuer: string - receiver: string - created_on: number - sig: string - chainable_on: number - expires_on: number - expired_on: number - replayable_on: number + issuer: string; + receiver: string; + created_on: number; + sig: string; + chainable_on: number; + expires_on: number; + expired_on: number; + replayable_on: number; } export interface SindexEntry extends IndexEntry { - srcType: 'T'|'D' - tx: string | null, - identifier: string, - pos: number, - created_on: string | null, - written_time: number, - locktime: number, - unlock: string | null, - amount: number, - base: number, - conditions: string, - consumed: boolean, - txObj: TransactionDTO, - age: number, - type?: string, - available?: boolean, - isLocked?: boolean, - isTimeLocked?: boolean, + srcType: "T" | "D"; + tx: string | null; + identifier: string; + pos: number; + created_on: string | null; + written_time: number; + locktime: number; + unlock: string | null; + amount: number; + base: number; + conditions: string; + consumed: boolean; + txObj: TransactionDTO; + age: number; + type?: string; + available?: boolean; + isLocked?: boolean; + isTimeLocked?: boolean; } export interface FullSindexEntry { - tx: string | null - identifier: string - pos: number - created_on: string | null - written_time: number - locktime: number - unlock: string | null - amount: number - base: number - conditions: string - consumed: boolean + tx: string | null; + identifier: string; + pos: number; + created_on: string | null; + written_time: number; + locktime: number; + unlock: string | null; + amount: number; + base: number; + conditions: string; + consumed: boolean; } export interface SimpleTxInput { - conditions: string - consumed: boolean - written_time: number - amount: number - base: number + conditions: string; + consumed: boolean; + written_time: number; + amount: number; + base: number; } export interface BasedAmount { - amount: number - base: number + amount: number; + base: number; } export interface SimpleSindexEntryForWallet { - op: string - srcType: 'T'|'D' - conditions: string - amount: number - base: number - identifier: string - pos: number + op: string; + srcType: "T" | "D"; + conditions: string; + amount: number; + base: number; + identifier: string; + pos: number; } export interface SimpleTxEntryForWallet extends SimpleSindexEntryForWallet { - srcType: 'T' + srcType: "T"; } export interface SimpleUdEntryForWallet extends SimpleSindexEntryForWallet { - srcType: 'D' + srcType: "D"; } export interface Ranger { - (n:number, m:number): Promise<DBHead[]> + (n: number, m: number): Promise<DBHead[]>; } export interface ExclusionByCert { - op: 'UPDATE' - pub: string - written_on: string - writtenOn: number - kick: true + op: "UPDATE"; + pub: string; + written_on: string; + writtenOn: number; + kick: true; } function pushIindex(index: IndexEntry[], entry: IindexEntry): void { - index.push(entry) + index.push(entry); } function pushMindex(index: IndexEntry[], entry: MindexEntry): void { - index.push(entry) + index.push(entry); } function pushCindex(index: IndexEntry[], entry: CindexEntry): void { - index.push(entry) + index.push(entry); } export interface AccountsGarbagingDAL { - getWallet: (conditions: string) => Promise<DBWallet> - saveWallet: (wallet: DBWallet) => Promise<void> + getWallet: (conditions: string) => Promise<DBWallet>; + saveWallet: (wallet: DBWallet) => Promise<void>; sindexDAL: { - getAvailableForConditions: (conditions: string) => Promise<SindexEntry[]> - } + 
getAvailableForConditions: (conditions: string) => Promise<SindexEntry[]>; + }; } export interface BlockchainBlocksDAL { - getBlock(number: number): Promise<BlockDTO> - getBlockByBlockstamp(blockstamp: string): Promise<BlockDTO> + getBlock(number: number): Promise<BlockDTO>; + getBlockByBlockstamp(blockstamp: string): Promise<BlockDTO>; } export class Indexer { - @MonitorExecutionTime() - static localIndex(block:BlockDTO, conf:{ - sigValidity:number, - msValidity:number, - msPeriod:number, - sigPeriod:number, - sigReplay:number, - sigStock:number - }): IndexEntry[] { - + static localIndex( + block: BlockDTO, + conf: { + sigValidity: number; + msValidity: number; + msPeriod: number; + sigPeriod: number; + sigReplay: number; + sigStock: number; + } + ): IndexEntry[] { /******************** * GENERAL BEHAVIOR * @@ -299,14 +300,14 @@ export class Indexer { hash: idty.hash, sig: idty.sig, created_on: idty.buid, - written_on: [block.number, block.hash].join('-'), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, member: true, wasMember: true, kick: false, - wotb_id: null - }) + wotb_id: null, + }); } /**************************** @@ -315,47 +316,52 @@ export class Indexer { // Joiners (newcomer or join back) for (const inlineMS of block.joiners) { const ms = MembershipDTO.fromInline(inlineMS); - const matchesANewcomer = Underscore.filter(index, (row: IindexEntry) => row.index == constants.I_INDEX && row.pub == ms.issuer).length > 0; + const matchesANewcomer = + Underscore.filter( + index, + (row: IindexEntry) => + row.index == constants.I_INDEX && row.pub == ms.issuer + ).length > 0; if (matchesANewcomer) { // Newcomer pushMindex(index, { index: constants.M_INDEX, op: constants.IDX_CREATE, pub: ms.issuer, - created_on: [ms.number, ms.fpr].join('-'), - written_on: [block.number, block.hash].join('-'), + created_on: [ms.number, ms.fpr].join("-"), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, unchainables: 0, - type: 'JOIN', + type: "JOIN", expires_on: conf.msValidity, expired_on: null, revokes_on: conf.msValidity * constants.REVOCATION_FACTOR, revocation: null, chainable_on: block.medianTime + conf.msPeriod, revoked_on: null, - leaving: false - }) + leaving: false, + }); } else { // Join back pushMindex(index, { index: constants.M_INDEX, op: constants.IDX_UPDATE, pub: ms.issuer, - created_on: [ms.number, ms.fpr].join('-'), - written_on: [block.number, block.hash].join('-'), + created_on: [ms.number, ms.fpr].join("-"), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, unchainables: 0, - type: 'JOIN', + type: "JOIN", expires_on: conf.msValidity, expired_on: 0, revokes_on: conf.msValidity * constants.REVOCATION_FACTOR, revocation: null, chainable_on: block.medianTime + conf.msPeriod, revoked_on: null, - leaving: null - }) + leaving: null, + }); pushIindex(index, { index: constants.I_INDEX, op: constants.IDX_UPDATE, @@ -364,14 +370,14 @@ export class Indexer { hash: null, sig: null, created_on: null, - written_on: [block.number, block.hash].join('-'), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, member: true, wasMember: null, kick: null, - wotb_id: null - }) + wotb_id: null, + }); } } // Actives @@ -382,20 +388,20 @@ export class Indexer { index: constants.M_INDEX, op: constants.IDX_UPDATE, pub: ms.issuer, - created_on: [ms.number, ms.fpr].join('-'), - written_on: [block.number, block.hash].join('-'), + created_on: [ms.number, ms.fpr].join("-"), + 
written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, unchainables: 0, - type: 'ACTIVE', + type: "ACTIVE", expires_on: conf.msValidity, expired_on: null, revokes_on: conf.msValidity * constants.REVOCATION_FACTOR, revocation: null, chainable_on: block.medianTime + conf.msPeriod, revoked_on: null, - leaving: null - }) + leaving: null, + }); } // Leavers for (const inlineMS of block.leavers) { @@ -404,30 +410,30 @@ export class Indexer { index: constants.M_INDEX, op: constants.IDX_UPDATE, pub: ms.issuer, - created_on: [ms.number, ms.fpr].join('-'), - written_on: [block.number, block.hash].join('-'), + created_on: [ms.number, ms.fpr].join("-"), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, unchainables: 0, - type: 'LEAVE', + type: "LEAVE", expires_on: null, expired_on: null, revokes_on: null, revocation: null, chainable_on: block.medianTime + conf.msPeriod, revoked_on: null, - leaving: true - }) + leaving: true, + }); } // Revoked for (const inlineRevocation of block.revoked) { - const revocation = RevocationDTO.fromInline(inlineRevocation) + const revocation = RevocationDTO.fromInline(inlineRevocation); pushMindex(index, { index: constants.M_INDEX, op: constants.IDX_UPDATE, pub: revocation.pubkey, - created_on: [block.number, block.hash].join('-'), - written_on: [block.number, block.hash].join('-'), + created_on: [block.number, block.hash].join("-"), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, unchainables: 0, @@ -437,9 +443,9 @@ export class Indexer { revokes_on: null, revocation: revocation.revocation, chainable_on: block.medianTime + conf.msPeriod, // Note: this is useless, because a revoked identity cannot join back. But we let this property for data consistency - revoked_on: [block.number, block.hash].join('-'), - leaving: false - }) + revoked_on: [block.number, block.hash].join("-"), + leaving: false, + }); } // Excluded for (const excluded of block.excluded) { @@ -451,13 +457,13 @@ export class Indexer { hash: null, sig: null, created_on: null, - written_on: [block.number, block.hash].join('-'), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, member: false, wasMember: null, kick: false, - wotb_id: null + wotb_id: null, }); } @@ -472,32 +478,32 @@ export class Indexer { issuer: cert.pubkey, receiver: cert.to, created_on: cert.block_number, - written_on: [block.number, block.hash].join('-'), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, stock: conf.sigStock, unchainables: 0, sig: cert.sig, - chainable_on: block.medianTime + conf.sigPeriod, - replayable_on: block.medianTime + conf.sigReplay, + chainable_on: block.medianTime + conf.sigPeriod, + replayable_on: block.medianTime + conf.sigReplay, expires_on: conf.sigValidity, expired_on: 0, from_wid: null, - to_wid: null - }) + to_wid: null, + }); } return index.concat(Indexer.localSIndex(block)); } - static localSIndex(block:BlockDTO): SindexEntry[] { + static localSIndex(block: BlockDTO): SindexEntry[] { /******************************* * SOURCES INDEX (SINDEX) ******************************/ const index: SindexEntry[] = []; for (const tx of block.transactions) { tx.currency = block.currency || tx.currency; - const txHash = tx.getHash() + const txHash = tx.getHash(); let k = 0; for (const input of tx.inputsAsObjects()) { index.push({ @@ -508,7 +514,7 @@ export class Indexer { identifier: input.identifier, pos: input.pos, created_on: tx.blockstamp, - 
written_on: [block.number, block.hash].join('-'), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, written_time: block.medianTime, @@ -518,7 +524,7 @@ export class Indexer { base: input.base, conditions: "", // Is overriden thereafter consumed: true, - txObj: tx + txObj: tx, }); k++; } @@ -528,12 +534,12 @@ export class Indexer { index.push({ index: constants.S_INDEX, op: constants.IDX_CREATE, - srcType: 'T', + srcType: "T", tx: txHash, identifier: txHash, pos: i++, created_on: null, - written_on: [block.number, block.hash].join('-'), + written_on: [block.number, block.hash].join("-"), writtenOn: block.number, age: 0, written_time: block.medianTime, @@ -543,7 +549,7 @@ export class Indexer { base: output.base, conditions: output.conditions, consumed: false, - txObj: tx + txObj: tx, }); } } @@ -551,39 +557,46 @@ export class Indexer { } @MonitorExecutionTime() - static async quickCompleteGlobalScope(block: BlockDTO, conf: CurrencyConfDTO, bindex: DBHead[], iindex: IindexEntry[], mindex: MindexEntry[], cindex: CindexEntry[], dal:FileDAL) { - + static async quickCompleteGlobalScope( + block: BlockDTO, + conf: CurrencyConfDTO, + bindex: DBHead[], + iindex: IindexEntry[], + mindex: MindexEntry[], + cindex: CindexEntry[], + dal: FileDAL + ) { async function range(start: number, end: number) { - let theRange:DBHead[] = [] + let theRange: DBHead[] = []; end = Math.min(end, bindex.length); if (start == 1) { theRange = bindex.slice(-end); } else { theRange = bindex.slice(-end, -start + 1); } - theRange.reverse() - return theRange + theRange.reverse(); + return theRange; } - async function head(n:number) { - return (await range(n, n))[0] + async function head(n: number) { + return (await range(n, n))[0]; } - const HEAD = new DBHead() + const HEAD = new DBHead(); - HEAD.version = block.version - HEAD.currency = block.currency - HEAD.bsize = BlockDTO.getLen(block) - HEAD.hash = BlockDTO.getHash(block) - HEAD.issuer = block.issuer - HEAD.time = block.time - HEAD.medianTime = block.medianTime - HEAD.number = block.number - HEAD.powMin = block.powMin - HEAD.unitBase = block.unitbase - HEAD.membersCount = block.membersCount - HEAD.dividend = block.dividend || 0 - HEAD.new_dividend = null + HEAD.version = block.version; + HEAD.currency = block.currency; + HEAD.bsize = BlockDTO.getLen(block); + HEAD.hash = BlockDTO.getHash(block); + HEAD.issuer = block.issuer; + HEAD.time = block.time; + HEAD.medianTime = block.medianTime; + HEAD.number = block.number; + HEAD.powMin = block.powMin; + HEAD.unitBase = block.unitbase; + HEAD.membersCount = block.membersCount; + HEAD.dividend = block.dividend || 0; + HEAD.new_dividend = null; const HEAD_1 = await head(1); @@ -600,13 +613,13 @@ export class Indexer { await Indexer.prepareAvgBlockSize(HEAD, range); // BR_G09 - Indexer.prepareDiffNumber(HEAD, HEAD_1, conf) + Indexer.prepareDiffNumber(HEAD, HEAD_1, conf); // BR_G11 - Indexer.prepareUDTime(HEAD, HEAD_1, conf) + Indexer.prepareUDTime(HEAD, HEAD_1, conf); // BR_G13 - Indexer.prepareDividend(HEAD, HEAD_1, conf) + Indexer.prepareDividend(HEAD, HEAD_1, conf); // BR_G14 Indexer.prepareUnitBase(HEAD); @@ -615,7 +628,7 @@ export class Indexer { Indexer.prepareMass(HEAD, HEAD_1); // BR_G16 - await Indexer.prepareSpeed(HEAD, head, conf) + await Indexer.prepareSpeed(HEAD, head, conf); // BR_G19 await Indexer.prepareIdentitiesAge(iindex, HEAD, HEAD_1, conf, dal); @@ -633,30 +646,34 @@ export class Indexer { await Indexer.ruleIndexCorrectCertificationExpiryDate(HEAD, cindex, dal); // Cleaning - 
cindex.forEach(c => c.created_on_ref = undefined) - mindex.forEach(m => m.created_on_ref = undefined) + cindex.forEach((c) => (c.created_on_ref = undefined)); + mindex.forEach((m) => (m.created_on_ref = undefined)); return HEAD; } - static async completeGlobalScope(block: BlockDTO, conf: ConfDTO, index: IndexEntry[], dal:FileDAL) { - + static async completeGlobalScope( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[], + dal: FileDAL + ) { const iindex = Indexer.iindex(index); const mindex = Indexer.mindex(index); const cindex = Indexer.cindex(index); const sindex = Indexer.sindex(index); - const range = (n:number,m:number,p = "") => dal.range(n, m, p) - const head = (n:number) => dal.head(n) + const range = (n: number, m: number, p = "") => dal.range(n, m, p); + const head = (n: number) => dal.head(n); - const HEAD = new DBHead() + const HEAD = new DBHead(); - HEAD.version = block.version - HEAD.bsize = BlockDTO.getLen(block) - HEAD.hash = BlockDTO.getHash(block) - HEAD.issuer = block.issuer - HEAD.time = block.time - HEAD.powMin = block.powMin + HEAD.version = block.version; + HEAD.bsize = BlockDTO.getLen(block); + HEAD.hash = BlockDTO.getHash(block); + HEAD.issuer = block.issuer; + HEAD.time = block.time; + HEAD.powMin = block.powMin; const HEAD_1 = await head(1); if (HEAD_1) { @@ -689,9 +706,12 @@ export class Indexer { // BR_G03 if (HEAD.number > 0) { - HEAD.issuerIsMember = !!reduce(await dal.iindexDAL.reducable(HEAD.issuer)).member; + HEAD.issuerIsMember = !!reduce(await dal.iindexDAL.reducable(HEAD.issuer)) + .member; } else { - HEAD.issuerIsMember = !!reduce(Underscore.where(iindex, { pub: HEAD.issuer })).member; + HEAD.issuerIsMember = !!reduce( + Underscore.where(iindex, { pub: HEAD.issuer }) + ).member; } // BR_G04 @@ -708,25 +728,43 @@ export class Indexer { // BR_G08 if (HEAD.number > 0) { - HEAD.medianTime = Math.max(HEAD_1.medianTime, average(await range(1, Math.min(conf.medianTimeBlocks, HEAD.number), 'time'))); + HEAD.medianTime = Math.max( + HEAD_1.medianTime, + average( + await range(1, Math.min(conf.medianTimeBlocks, HEAD.number), "time") + ) + ); } else { HEAD.medianTime = HEAD.time; } // BR_G09 - Indexer.prepareDiffNumber(HEAD, HEAD_1, conf) + Indexer.prepareDiffNumber(HEAD, HEAD_1, conf); // BR_G10 if (HEAD.number == 0) { - HEAD.membersCount = count(Underscore.filter(iindex, (entry:IindexEntry) => entry.member === true)); + HEAD.membersCount = count( + Underscore.filter(iindex, (entry: IindexEntry) => entry.member === true) + ); } else { - HEAD.membersCount = HEAD_1.membersCount - + count(Underscore.filter(iindex, (entry:IindexEntry) => entry.member === true)) - - count(Underscore.filter(iindex, (entry:IindexEntry) => entry.member === false)); + HEAD.membersCount = + HEAD_1.membersCount + + count( + Underscore.filter( + iindex, + (entry: IindexEntry) => entry.member === true + ) + ) - + count( + Underscore.filter( + iindex, + (entry: IindexEntry) => entry.member === false + ) + ); } // BR_G11 - Indexer.prepareUDTime(HEAD, HEAD_1, conf) + Indexer.prepareUDTime(HEAD, HEAD_1, conf); // BR_G12 if (HEAD.number == 0) { @@ -736,7 +774,7 @@ export class Indexer { } // BR_G13 - Indexer.prepareDividend(HEAD, HEAD_1, conf) + Indexer.prepareDividend(HEAD, HEAD_1, conf); // BR_G14 Indexer.prepareUnitBase(HEAD); @@ -745,24 +783,37 @@ export class Indexer { Indexer.prepareMass(HEAD, HEAD_1); // BR_G16 - await Indexer.prepareSpeed(HEAD, head, conf) + await Indexer.prepareSpeed(HEAD, head, conf); // BR_G17 if (HEAD.number > 0) { - const ratio = 
constants.POW_DIFFICULTY_RANGE_RATIO; const maxGenTime = Math.ceil(conf.avgGenTime * ratio); const minGenTime = Math.floor(conf.avgGenTime / ratio); const minSpeed = 1 / maxGenTime; const maxSpeed = 1 / minGenTime; - if (HEAD.diffNumber != HEAD_1.diffNumber && HEAD.speed >= maxSpeed && (HEAD_1.powMin + 2) % 16 == 0) { + if ( + HEAD.diffNumber != HEAD_1.diffNumber && + HEAD.speed >= maxSpeed && + (HEAD_1.powMin + 2) % 16 == 0 + ) { HEAD.powMin = HEAD_1.powMin + 2; - } else if (HEAD.diffNumber != HEAD_1.diffNumber && HEAD.speed >= maxSpeed) { + } else if ( + HEAD.diffNumber != HEAD_1.diffNumber && + HEAD.speed >= maxSpeed + ) { HEAD.powMin = HEAD_1.powMin + 1; - } else if (HEAD.diffNumber != HEAD_1.diffNumber && HEAD.speed <= minSpeed && HEAD_1.powMin % 16 == 0) { + } else if ( + HEAD.diffNumber != HEAD_1.diffNumber && + HEAD.speed <= minSpeed && + HEAD_1.powMin % 16 == 0 + ) { HEAD.powMin = Math.max(0, HEAD_1.powMin - 2); - } else if (HEAD.diffNumber != HEAD_1.diffNumber && HEAD.speed <= minSpeed) { + } else if ( + HEAD.diffNumber != HEAD_1.diffNumber && + HEAD.speed <= minSpeed + ) { HEAD.powMin = Math.max(0, HEAD_1.powMin - 1); } else { HEAD.powMin = HEAD_1.powMin; @@ -770,96 +821,131 @@ export class Indexer { } // BR_G18 - await Indexer.preparePersonalizedPoW(HEAD, HEAD_1, range, conf) + await Indexer.preparePersonalizedPoW(HEAD, HEAD_1, range, conf); // BR_G19 await Indexer.prepareIdentitiesAge(iindex, HEAD, HEAD_1, conf, dal); // BR_G20 - await Promise.all(iindex.map(async (ENTRY: IindexEntry) => { - if (ENTRY.op == constants.IDX_CREATE) { - ENTRY.uidUnique = count(await dal.iindexDAL.findByUid(ENTRY.uid as string)) == 0; - } else { - ENTRY.uidUnique = true; - } - })) + await Promise.all( + iindex.map(async (ENTRY: IindexEntry) => { + if (ENTRY.op == constants.IDX_CREATE) { + ENTRY.uidUnique = + count(await dal.iindexDAL.findByUid(ENTRY.uid as string)) == 0; + } else { + ENTRY.uidUnique = true; + } + }) + ); // BR_G21 - await Promise.all(iindex.map(async (ENTRY: IindexEntry) => { - if (ENTRY.op == constants.IDX_CREATE) { - ENTRY.pubUnique = count(await dal.iindexDAL.findByPub(ENTRY.pub)) == 0; - } else { - ENTRY.pubUnique = true; - } - })) + await Promise.all( + iindex.map(async (ENTRY: IindexEntry) => { + if (ENTRY.op == constants.IDX_CREATE) { + ENTRY.pubUnique = + count(await dal.iindexDAL.findByPub(ENTRY.pub)) == 0; + } else { + ENTRY.pubUnique = true; + } + }) + ); // BR_G33 - await Promise.all(iindex.map(async (ENTRY: IindexEntry) => { - if (ENTRY.member !== false) { - ENTRY.excludedIsMember = true; - } else { - ENTRY.excludedIsMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.pub)).member; - } - })) + await Promise.all( + iindex.map(async (ENTRY: IindexEntry) => { + if (ENTRY.member !== false) { + ENTRY.excludedIsMember = true; + } else { + ENTRY.excludedIsMember = !!reduce( + await dal.iindexDAL.reducable(ENTRY.pub) + ).member; + } + }) + ); // BR_G34 mindex.map((ENTRY: MindexEntry) => { ENTRY.isBeingRevoked = !!ENTRY.revoked_on; - }) + }); // BR_G107 if (HEAD.number > 0) { - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (ENTRY.revocation === null) { - // This rule will be enabled on - if (HEAD.medianTime >= 1498860000) { - const rows = await dal.mindexDAL.findByPubAndChainableOnGt(ENTRY.pub, HEAD_1.medianTime) - ENTRY.unchainables = count(rows); + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (ENTRY.revocation === null) { + // This rule will be enabled on + if (HEAD.medianTime >= 1498860000) { + const rows = await 
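// BR_G17 as a pure function (an illustrative sketch, not part of the patch):
// powMin only moves on a block where diffNumber changed, with the speed
// bounds derived from avgGenTime and POW_DIFFICULTY_RANGE_RATIO as above.
// It steps by 2 instead of 1 whenever a step of 1 would land on a value
// congruent to 15 mod 16, which the difficulty encoding skips (cf. the
// `(HEAD.issuerDiff + 1) % 16 == 0` adjustment in BR_G18 below).
function nextPowMin(
  prev: number,
  diffChanged: boolean,
  speed: number,
  minSpeed: number,
  maxSpeed: number
): number {
  if (diffChanged && speed >= maxSpeed && (prev + 2) % 16 == 0) return prev + 2;
  if (diffChanged && speed >= maxSpeed) return prev + 1;
  if (diffChanged && speed <= minSpeed && prev % 16 == 0)
    return Math.max(0, prev - 2);
  if (diffChanged && speed <= minSpeed) return Math.max(0, prev - 1);
  return prev;
}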
dal.mindexDAL.findByPubAndChainableOnGt( + ENTRY.pub, + HEAD_1.medianTime + ); + ENTRY.unchainables = count(rows); + } } - } - })) + }) + ); } // BR_G35 - await Promise.all(iindex.map(async (ENTRY: IindexEntry) => { - ENTRY.isBeingKicked = ENTRY.member === false - })) + await Promise.all( + iindex.map(async (ENTRY: IindexEntry) => { + ENTRY.isBeingKicked = ENTRY.member === false; + }) + ); // BR_G36 - await Promise.all(iindex.map(async (ENTRY: IindexEntry) => { - const isMarkedAsToKick = reduce(await dal.iindexDAL.reducable(ENTRY.pub)).kick; - const isBeingRevoked = count(Underscore.filter(mindex, m => !!(m.isBeingRevoked && m.pub == ENTRY.pub))) == 1 - ENTRY.hasToBeExcluded = isMarkedAsToKick || isBeingRevoked; - })) + await Promise.all( + iindex.map(async (ENTRY: IindexEntry) => { + const isMarkedAsToKick = reduce( + await dal.iindexDAL.reducable(ENTRY.pub) + ).kick; + const isBeingRevoked = + count( + Underscore.filter( + mindex, + (m) => !!(m.isBeingRevoked && m.pub == ENTRY.pub) + ) + ) == 1; + ENTRY.hasToBeExcluded = isMarkedAsToKick || isBeingRevoked; + }) + ); // BR_G22 await Indexer.prepareMembershipsAge(mindex, HEAD, HEAD_1, conf, dal); // BR_G23 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (!ENTRY.revoked_on) { - const created_on = reduce(await dal.mindexDAL.reducable(ENTRY.pub)).created_on; - if (created_on != null) { - ENTRY.numberFollowing = number(ENTRY.created_on) > number(created_on); + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (!ENTRY.revoked_on) { + const created_on = reduce(await dal.mindexDAL.reducable(ENTRY.pub)) + .created_on; + if (created_on != null) { + ENTRY.numberFollowing = + number(ENTRY.created_on) > number(created_on); + } else { + ENTRY.numberFollowing = true; // Follows nothing + } } else { - ENTRY.numberFollowing = true; // Follows nothing + ENTRY.numberFollowing = true; } - } else { - ENTRY.numberFollowing = true; - } - })) + }) + ); // BR_G24 // Global testing, because of wotb const oneIsOutdistanced = await checkPeopleAreNotOudistanced( - Underscore.filter(mindex, (entry: MindexEntry) => !entry.revoked_on).map((entry: MindexEntry) => entry.pub), + Underscore.filter(mindex, (entry: MindexEntry) => !entry.revoked_on).map( + (entry: MindexEntry) => entry.pub + ), cindex.reduce((newLinks, c: CindexEntry) => { newLinks[c.receiver] = newLinks[c.receiver] || []; newLinks[c.receiver].push(c.issuer); return newLinks; - }, <{ [k:string]: string[] }>{}), + }, <{ [k: string]: string[] }>{}), // Newcomers - Underscore.where(iindex, { op: constants.IDX_CREATE }).map((entry: IindexEntry) => entry.pub), + Underscore.where(iindex, { op: constants.IDX_CREATE }).map( + (entry: IindexEntry) => entry.pub + ), conf, dal ); @@ -872,197 +958,316 @@ export class Indexer { }); // BR_G25 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - ENTRY.onRevoked = reduce(await dal.mindexDAL.reducable(ENTRY.pub)).revoked_on != null; - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + ENTRY.onRevoked = + reduce(await dal.mindexDAL.reducable(ENTRY.pub)).revoked_on != null; + }) + ); // BR_G26 - await Promise.all(Underscore.filter(mindex, (entry: MindexEntry) => entry.op == constants.IDX_UPDATE && entry.expired_on === 0).map(async (ENTRY: MindexEntry) => { - ENTRY.joinsTwice = reduce(await dal.iindexDAL.reducable(ENTRY.pub)).member == true; - })) + await Promise.all( + Underscore.filter( + mindex, + (entry: MindexEntry) => + entry.op == constants.IDX_UPDATE && entry.expired_on === 0 + ).map(async (ENTRY: 
MindexEntry) => { + ENTRY.joinsTwice = + reduce(await dal.iindexDAL.reducable(ENTRY.pub)).member == true; + }) + ); // BR_G27 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (ENTRY.type == 'JOIN' || ENTRY.type == 'ACTIVE') { - const existing = (await dal.cindexDAL.findByReceiverAndExpiredOn(ENTRY.pub, 0)).map(value => value.issuer) - const pending = Underscore.filter(cindex, (c:CindexEntry) => c.receiver == ENTRY.pub && c.expired_on == 0).map(value => value.issuer) - const uniqIssuers = Underscore.uniq(existing.concat(pending)) - ENTRY.enoughCerts = count(uniqIssuers) >= conf.sigQty; - } else { - ENTRY.enoughCerts = true; - } - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (ENTRY.type == "JOIN" || ENTRY.type == "ACTIVE") { + const existing = ( + await dal.cindexDAL.findByReceiverAndExpiredOn(ENTRY.pub, 0) + ).map((value) => value.issuer); + const pending = Underscore.filter( + cindex, + (c: CindexEntry) => c.receiver == ENTRY.pub && c.expired_on == 0 + ).map((value) => value.issuer); + const uniqIssuers = Underscore.uniq(existing.concat(pending)); + ENTRY.enoughCerts = count(uniqIssuers) >= conf.sigQty; + } else { + ENTRY.enoughCerts = true; + } + }) + ); // BR_G28 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (ENTRY.type == 'LEAVE') { - ENTRY.leaverIsMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.pub)).member - } else { - ENTRY.leaverIsMember = true; - } - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (ENTRY.type == "LEAVE") { + ENTRY.leaverIsMember = !!reduce( + await dal.iindexDAL.reducable(ENTRY.pub) + ).member; + } else { + ENTRY.leaverIsMember = true; + } + }) + ); // BR_G29 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (ENTRY.type == 'ACTIVE') { - const reducable = await dal.iindexDAL.reducable(ENTRY.pub) - ENTRY.activeIsMember = !!reduce(reducable).member; - } else { - ENTRY.activeIsMember = true; - } - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (ENTRY.type == "ACTIVE") { + const reducable = await dal.iindexDAL.reducable(ENTRY.pub); + ENTRY.activeIsMember = !!reduce(reducable).member; + } else { + ENTRY.activeIsMember = true; + } + }) + ); // BR_G30 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (!ENTRY.revoked_on) { - ENTRY.revokedIsMember = true; - } else { - ENTRY.revokedIsMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.pub)).member - } - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (!ENTRY.revoked_on) { + ENTRY.revokedIsMember = true; + } else { + ENTRY.revokedIsMember = !!reduce( + await dal.iindexDAL.reducable(ENTRY.pub) + ).member; + } + }) + ); // BR_G31 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (!ENTRY.revoked_on) { - ENTRY.alreadyRevoked = false; - } else { - ENTRY.alreadyRevoked = !!(reduce(await dal.mindexDAL.reducable(ENTRY.pub)).revoked_on) - } - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (!ENTRY.revoked_on) { + ENTRY.alreadyRevoked = false; + } else { + ENTRY.alreadyRevoked = !!reduce( + await dal.mindexDAL.reducable(ENTRY.pub) + ).revoked_on; + } + }) + ); // BR_G32 - await Promise.all(mindex.map(async (ENTRY: MindexEntry) => { - if (!ENTRY.revoked_on) { - ENTRY.revocationSigOK = true; - } else { - ENTRY.revocationSigOK = await sigCheckRevoke(ENTRY, dal, block.currency); - } - })) + await Promise.all( + mindex.map(async (ENTRY: MindexEntry) => { + if (!ENTRY.revoked_on) { + ENTRY.revocationSigOK 
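// BR_G27 condensed (an illustrative sketch, not part of the patch): a
// JOIN/ACTIVE membership needs at least sigQty distinct certifiers, counting
// both valid certifications already written and those pending in the same
// block. Standalone version:
function hasEnoughCerts(
  existingIssuers: string[], // already written, not expired
  pendingIssuers: string[], // cindex entries of the current block
  sigQty: number
): boolean {
  return new Set(existingIssuers.concat(pendingIssuers)).size >= sigQty;
}
// hasEnoughCerts(["A", "B"], ["B", "C"], 3) == true  (A, B and C)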
= true; + } else { + ENTRY.revocationSigOK = await sigCheckRevoke( + ENTRY, + dal, + block.currency + ); + } + }) + ); // BR_G37 await Indexer.prepareCertificationsAge(cindex, HEAD, HEAD_1, conf, dal); // BR_G38 if (HEAD.number > 0) { - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - const rows = await dal.cindexDAL.findByIssuerAndChainableOnGt(ENTRY.issuer, HEAD_1.medianTime) - ENTRY.unchainables = count(rows); - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + const rows = await dal.cindexDAL.findByIssuerAndChainableOnGt( + ENTRY.issuer, + HEAD_1.medianTime + ); + ENTRY.unchainables = count(rows); + }) + ); } // BR_G39 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - ENTRY.stock = count(await dal.cindexDAL.getValidLinksFrom(ENTRY.issuer)) - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + ENTRY.stock = count( + await dal.cindexDAL.getValidLinksFrom(ENTRY.issuer) + ); + }) + ); // BR_G40 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - ENTRY.fromMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.issuer)).member - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + ENTRY.fromMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.issuer)) + .member; + }) + ); // BR_G41 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - ENTRY.toMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.receiver)).member - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + ENTRY.toMember = !!reduce(await dal.iindexDAL.reducable(ENTRY.receiver)) + .member; + }) + ); // BR_G42 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - ENTRY.toNewcomer = count(Underscore.where(iindex, { member: true, pub: ENTRY.receiver })) > 0; - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + ENTRY.toNewcomer = + count( + Underscore.where(iindex, { member: true, pub: ENTRY.receiver }) + ) > 0; + }) + ); // BR_G43 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - ENTRY.toLeaver = !!(reduce(await dal.mindexDAL.reducable(ENTRY.receiver)).leaving) - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + ENTRY.toLeaver = !!reduce(await dal.mindexDAL.reducable(ENTRY.receiver)) + .leaving; + }) + ); // BR_G44 + 44.2 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - const reducable = await dal.cindexDAL.findByIssuerAndReceiver(ENTRY.issuer, ENTRY.receiver) - ENTRY.isReplay = count(reducable) > 0 && reduce(reducable).expired_on === 0 - if (HEAD.number > 0 && HEAD_1.version > 10) { - ENTRY.isReplayable = count(reducable) === 0 || reduce(reducable).replayable_on < HEAD_1.medianTime - } - else { - // v10 blocks do not allow certification replay - ENTRY.isReplayable = false - } - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + const reducable = await dal.cindexDAL.findByIssuerAndReceiver( + ENTRY.issuer, + ENTRY.receiver + ); + ENTRY.isReplay = + count(reducable) > 0 && reduce(reducable).expired_on === 0; + if (HEAD.number > 0 && HEAD_1.version > 10) { + ENTRY.isReplayable = + count(reducable) === 0 || + reduce(reducable).replayable_on < HEAD_1.medianTime; + } else { + // v10 blocks do not allow certification replay + ENTRY.isReplayable = false; + } + }) + ); // BR_G45 - await Promise.all(cindex.map(async (ENTRY: CindexEntry) => { - ENTRY.sigOK = await checkCertificationIsValid(block, ENTRY, async (block:BlockDTO,pub:string,dal:FileDAL) => { - let localInlineIdty = block.getInlineIdentity(pub); - 
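// BR_G44 + 44.2 condensed (an illustrative sketch, not part of the patch;
// PrevCert is a pared-down stand-in for the reduced CindexEntry): a cert
// between the same issuer/receiver pair is a replay while the previous one
// is unexpired, and since protocol v11 it becomes acceptable again once
// replayable_on has passed.
interface PrevCert {
  expired_on: number;
  replayable_on: number;
}
function certReplayState(
  previous: PrevCert | null, // reduced existing cert issuer->receiver, if any
  prevVersion: number, // HEAD_1.version
  prevMedianTime: number // HEAD_1.medianTime
): { isReplay: boolean; isReplayable: boolean } {
  const isReplay = previous !== null && previous.expired_on === 0;
  const isReplayable =
    prevVersion > 10
      ? previous === null || previous.replayable_on < prevMedianTime
      : false; // v10 blocks do not allow certification replay
  return { isReplay, isReplayable };
}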
if (localInlineIdty) { - return IdentityDTO.fromInline(localInlineIdty) - } - const idty = await dal.getWrittenIdtyByPubkeyForCertificationCheck(pub) - if (!idty) { - return null - } - return { - pubkey: idty.pub, - uid: idty.uid, - sig: idty.sig, - buid: idty.created_on - } - }, conf, dal); - })) + await Promise.all( + cindex.map(async (ENTRY: CindexEntry) => { + ENTRY.sigOK = await checkCertificationIsValid( + block, + ENTRY, + async (block: BlockDTO, pub: string, dal: FileDAL) => { + let localInlineIdty = block.getInlineIdentity(pub); + if (localInlineIdty) { + return IdentityDTO.fromInline(localInlineIdty); + } + const idty = await dal.getWrittenIdtyByPubkeyForCertificationCheck( + pub + ); + if (!idty) { + return null; + } + return { + pubkey: idty.pub, + uid: idty.uid, + sig: idty.sig, + buid: idty.created_on, + }; + }, + conf, + dal + ); + }) + ); // BR_G102 - await Promise.all(Underscore.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => { - if (HEAD.number == 0 && ENTRY.created_on == '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855') { - ENTRY.age = 0; - } else { - let ref = await dal.getAbsoluteValidBlockInForkWindowByBlockstamp(ENTRY.created_on as string); - if (ref && blockstamp(ref.number, ref.hash) == ENTRY.created_on) { - ENTRY.age = HEAD_1.medianTime - ref.medianTime; - } else { - ENTRY.age = constants.TX_WINDOW + 1; + await Promise.all( + Underscore.where(sindex, { op: constants.IDX_UPDATE }).map( + async (ENTRY: SindexEntry) => { + if ( + HEAD.number == 0 && + ENTRY.created_on == + "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" + ) { + ENTRY.age = 0; + } else { + let ref = await dal.getAbsoluteValidBlockInForkWindowByBlockstamp( + ENTRY.created_on as string + ); + if (ref && blockstamp(ref.number, ref.hash) == ENTRY.created_on) { + ENTRY.age = HEAD_1.medianTime - ref.medianTime; + } else { + ENTRY.age = constants.TX_WINDOW + 1; + } + } } - } - })) - - const getInputLocalFirstOrFallbackGlobally = async (sindex:SindexEntry[], ENTRY:SindexEntry): Promise<SimpleTxInput> => { - let source: SimpleTxInput|null = Underscore.filter(sindex, src => - src.identifier == ENTRY.identifier - && src.pos == ENTRY.pos - && src.conditions !== '' - && src.op === constants.IDX_CREATE)[0]; + ) + ); + + const getInputLocalFirstOrFallbackGlobally = async ( + sindex: SindexEntry[], + ENTRY: SindexEntry + ): Promise<SimpleTxInput> => { + let source: SimpleTxInput | null = Underscore.filter( + sindex, + (src) => + src.identifier == ENTRY.identifier && + src.pos == ENTRY.pos && + src.conditions !== "" && + src.op === constants.IDX_CREATE + )[0]; if (!source) { const reducable = await dal.findByIdentifierPosAmountBase( ENTRY.identifier, ENTRY.pos, ENTRY.amount, ENTRY.base, - ENTRY.srcType === 'D' + ENTRY.srcType === "D" ); if (!reducable.length) { - NewLogger().debug('Source %s:%s NOT FOUND', ENTRY.identifier, ENTRY.pos) + NewLogger().debug( + "Source %s:%s NOT FOUND", + ENTRY.identifier, + ENTRY.pos + ); } - source = reduce(reducable) + source = reduce(reducable); } - return source - } + return source; + }; // BR_G46 - await Promise.all(Underscore.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => { - const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY) - ENTRY.conditions = source.conditions; // We valuate the input conditions, so we can map these records to a same account - ENTRY.available = !source.consumed - })) + await Promise.all( + Underscore.where(sindex, { op: 
constants.IDX_UPDATE }).map( + async (ENTRY: SindexEntry) => { + const source = await getInputLocalFirstOrFallbackGlobally( + sindex, + ENTRY + ); + ENTRY.conditions = source.conditions; // We valuate the input conditions, so we can map these records to a same account + ENTRY.available = !source.consumed; + } + ) + ); // BR_G47 - await Promise.all(Underscore.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => { - const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY) - ENTRY.conditions = source.conditions; - ENTRY.isLocked = !txSourceUnlock(ENTRY, source, HEAD); - })) + await Promise.all( + Underscore.where(sindex, { op: constants.IDX_UPDATE }).map( + async (ENTRY: SindexEntry) => { + const source = await getInputLocalFirstOrFallbackGlobally( + sindex, + ENTRY + ); + ENTRY.conditions = source.conditions; + ENTRY.isLocked = !txSourceUnlock(ENTRY, source, HEAD); + } + ) + ); // BR_G48 - await Promise.all(Underscore.where(sindex, { op: constants.IDX_UPDATE }).map(async (ENTRY: SindexEntry) => { - const source = await getInputLocalFirstOrFallbackGlobally(sindex, ENTRY) - ENTRY.isTimeLocked = ENTRY.written_time - source.written_time < ENTRY.locktime; - })) + await Promise.all( + Underscore.where(sindex, { op: constants.IDX_UPDATE }).map( + async (ENTRY: SindexEntry) => { + const source = await getInputLocalFirstOrFallbackGlobally( + sindex, + ENTRY + ); + ENTRY.isTimeLocked = + ENTRY.written_time - source.written_time < ENTRY.locktime; + } + ) + ); return HEAD; } @@ -1077,11 +1282,17 @@ export class Indexer { } // BR_G04 - static async prepareIssuersCount(HEAD: DBHead, range:Ranger, HEAD_1: DBHead) { + static async prepareIssuersCount( + HEAD: DBHead, + range: Ranger, + HEAD_1: DBHead + ) { if (HEAD.number == 0) { HEAD.issuersCount = 0; } else { - HEAD.issuersCount = count(uniq(Underscore.pluck(await range(1, HEAD_1.issuersFrame), 'issuer'))) + HEAD.issuersCount = count( + uniq(Underscore.pluck(await range(1, HEAD_1.issuersFrame), "issuer")) + ); } } @@ -1090,11 +1301,11 @@ export class Indexer { if (HEAD.number == 0) { HEAD.issuersFrame = 1; } else if (HEAD_1.issuersFrameVar > 0) { - HEAD.issuersFrame = HEAD_1.issuersFrame + 1 + HEAD.issuersFrame = HEAD_1.issuersFrame + 1; } else if (HEAD_1.issuersFrameVar < 0) { - HEAD.issuersFrame = HEAD_1.issuersFrame - 1 + HEAD.issuersFrame = HEAD_1.issuersFrame - 1; } else { - HEAD.issuersFrame = HEAD_1.issuersFrame + HEAD.issuersFrame = HEAD_1.issuersFrame; } } @@ -1103,7 +1314,7 @@ export class Indexer { if (HEAD.number == 0) { HEAD.issuersFrameVar = 0; } else { - const issuersVar = (HEAD.issuersCount - HEAD_1.issuersCount); + const issuersVar = HEAD.issuersCount - HEAD_1.issuersCount; if (HEAD_1.issuersFrameVar > 0) { HEAD.issuersFrameVar = HEAD_1.issuersFrameVar + 5 * issuersVar - 1; } else if (HEAD_1.issuersFrameVar < 0) { @@ -1115,12 +1326,18 @@ export class Indexer { } // BR_G07 - static async prepareAvgBlockSize(HEAD: DBHead, range:Ranger) { - HEAD.avgBlockSize = average(Underscore.pluck(await range(1, HEAD.issuersCount), 'bsize')) + static async prepareAvgBlockSize(HEAD: DBHead, range: Ranger) { + HEAD.avgBlockSize = average( + Underscore.pluck(await range(1, HEAD.issuersCount), "bsize") + ); } // BR_G09 - static prepareDiffNumber(HEAD: DBHead, HEAD_1: DBHead, conf: CurrencyConfDTO) { + static prepareDiffNumber( + HEAD: DBHead, + HEAD_1: DBHead, + conf: CurrencyConfDTO + ) { if (HEAD.number == 0) { HEAD.diffNumber = HEAD.number + conf.dtDiffEval; } else if (HEAD_1.diffNumber <= HEAD.number) { @@ -1158,7 
+1375,13 @@ export class Indexer { } else if (HEAD.udReevalTime != HEAD_1.udReevalTime) { // DUG const previousUB = HEAD_1.unitBase; - HEAD.dividend = Math.ceil(HEAD_1.dividend + Math.pow(conf.c, 2) * Math.ceil(HEAD_1.massReeval / Math.pow(10, previousUB)) / HEAD.membersCount / (conf.dtReeval / conf.dt)); + HEAD.dividend = Math.ceil( + HEAD_1.dividend + + (Math.pow(conf.c, 2) * + Math.ceil(HEAD_1.massReeval / Math.pow(10, previousUB))) / + HEAD.membersCount / + (conf.dtReeval / conf.dt) + ); } else { HEAD.dividend = HEAD_1.dividend; } @@ -1186,17 +1409,17 @@ export class Indexer { // Mass if (HEAD.number == 0) { HEAD.mass = 0; - } - else if (HEAD.udTime != HEAD_1.udTime) { - HEAD.mass = HEAD_1.mass + HEAD.dividend * Math.pow(10, HEAD.unitBase) * HEAD.membersCount; + } else if (HEAD.udTime != HEAD_1.udTime) { + HEAD.mass = + HEAD_1.mass + + HEAD.dividend * Math.pow(10, HEAD.unitBase) * HEAD.membersCount; } else { HEAD.mass = HEAD_1.mass; } // Mass on re-evaluation if (HEAD.number == 0) { HEAD.massReeval = 0; - } - else if (HEAD.udReevalTime != HEAD_1.udReevalTime) { + } else if (HEAD.udReevalTime != HEAD_1.udReevalTime) { HEAD.massReeval = HEAD_1.mass; } else { HEAD.massReeval = HEAD_1.massReeval; @@ -1204,12 +1427,16 @@ export class Indexer { } // BR_G16 - static async prepareSpeed(HEAD: DBHead, head: (n:number) => Promise<DBHead>, conf: CurrencyConfDTO) { + static async prepareSpeed( + HEAD: DBHead, + head: (n: number) => Promise<DBHead>, + conf: CurrencyConfDTO + ) { if (HEAD.number == 0) { HEAD.speed = 0; } else { const quantity = Math.min(conf.dtDiffEval, HEAD.number); - const elapsed = (HEAD.medianTime - (await head(quantity)).medianTime); + const elapsed = HEAD.medianTime - (await head(quantity)).medianTime; if (!elapsed) { HEAD.speed = 100; } else { @@ -1219,19 +1446,35 @@ export class Indexer { } // BR_G18 - static async preparePersonalizedPoW(HEAD: DBHead, HEAD_1: DBHead, range: (n:number,m:number)=>Promise<DBHead[]>, conf: ConfDTO) { + static async preparePersonalizedPoW( + HEAD: DBHead, + HEAD_1: DBHead, + range: (n: number, m: number) => Promise<DBHead[]>, + conf: ConfDTO + ) { let nbPersonalBlocksInFrame, medianOfBlocksInFrame, blocksOfIssuer; - let nbPreviousIssuers = 0, nbBlocksSince = 0; + let nbPreviousIssuers = 0, + nbBlocksSince = 0; if (HEAD.number == 0) { nbPersonalBlocksInFrame = 0; medianOfBlocksInFrame = 1; } else { - const ranged = await range(1, HEAD_1.issuersFrame) - const blocksInFrame = Underscore.filter(ranged, (b:DBHead) => b.number <= HEAD_1.number) - const issuersInFrame = blocksInFrame.map(b => b.issuer) - blocksOfIssuer = Underscore.filter(blocksInFrame, entry => entry.issuer == HEAD.issuer) + const ranged = await range(1, HEAD_1.issuersFrame); + const blocksInFrame = Underscore.filter( + ranged, + (b: DBHead) => b.number <= HEAD_1.number + ); + const issuersInFrame = blocksInFrame.map((b) => b.issuer); + blocksOfIssuer = Underscore.filter( + blocksInFrame, + (entry) => entry.issuer == HEAD.issuer + ); nbPersonalBlocksInFrame = count(blocksOfIssuer); - const blocksPerIssuerInFrame = uniq(issuersInFrame).map((issuer:string) => count(Underscore.where(blocksInFrame, { issuer }))); + const blocksPerIssuerInFrame = uniq( + issuersInFrame + ).map((issuer: string) => + count(Underscore.where(blocksInFrame, { issuer })) + ); medianOfBlocksInFrame = Math.max(1, median(blocksPerIssuerInFrame)); if (nbPersonalBlocksInFrame == 0) { nbPreviousIssuers = 0; @@ -1244,73 +1487,130 @@ export class Indexer { } // V0.6 Hardness - const PERSONAL_EXCESS = Math.max(0, ( 
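// BR_G13 re-evaluation condensed (an illustrative sketch, not part of the
// patch): on a re-evaluation block the dividend becomes
//   UD' = ceil(UD + c^2 * (M / 10^base) / N / (dtReeval / dt))
// with M the mass at last re-evaluation (its unit-base scaling rounded up)
// and N the members count. Standalone mirror of the expression above:
function reevaluatedDividend(
  prevDividend: number,
  c: number, // growth rate, e.g. conf.c
  massReeval: number,
  previousUnitBase: number,
  membersCount: number,
  dtReeval: number,
  dt: number
): number {
  return Math.ceil(
    prevDividend +
      (Math.pow(c, 2) *
        Math.ceil(massReeval / Math.pow(10, previousUnitBase))) /
        membersCount /
        (dtReeval / dt)
  );
}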
(nbPersonalBlocksInFrame + 1) / medianOfBlocksInFrame) - 1); - const PERSONAL_HANDICAP = Math.floor(Math.log(1 + PERSONAL_EXCESS) / Math.log(1.189)); - HEAD.issuerDiff = Math.max(HEAD.powMin, HEAD.powMin * Math.floor(conf.percentRot * nbPreviousIssuers / (1 + nbBlocksSince))) + PERSONAL_HANDICAP; + const PERSONAL_EXCESS = Math.max( + 0, + (nbPersonalBlocksInFrame + 1) / medianOfBlocksInFrame - 1 + ); + const PERSONAL_HANDICAP = Math.floor( + Math.log(1 + PERSONAL_EXCESS) / Math.log(1.189) + ); + HEAD.issuerDiff = + Math.max( + HEAD.powMin, + HEAD.powMin * + Math.floor( + (conf.percentRot * nbPreviousIssuers) / (1 + nbBlocksSince) + ) + ) + PERSONAL_HANDICAP; if ((HEAD.issuerDiff + 1) % 16 == 0) { HEAD.issuerDiff += 1; } - HEAD.powRemainder = HEAD.issuerDiff % 16; + HEAD.powRemainder = HEAD.issuerDiff % 16; HEAD.powZeros = (HEAD.issuerDiff - HEAD.powRemainder) / 16; } // BR_G19 - static async prepareIdentitiesAge(iindex: IindexEntry[], HEAD: DBHead, HEAD_1: DBHead, conf: CurrencyConfDTO, dal:FileDAL) { - await Promise.all(Underscore.where(iindex, { op: constants.IDX_CREATE }).map(async (ENTRY: IindexEntry) => { - if (HEAD.number == 0 && ENTRY.created_on == '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855') { - ENTRY.age = 0; - } else { - let ref = await dal.getAbsoluteValidBlockInForkWindowByBlockstamp(ENTRY.created_on as string); - if (ref && blockstamp(ref.number, ref.hash) == ENTRY.created_on) { - ENTRY.age = HEAD_1.medianTime - ref.medianTime; - } else { - ENTRY.age = conf.idtyWindow + 1; + static async prepareIdentitiesAge( + iindex: IindexEntry[], + HEAD: DBHead, + HEAD_1: DBHead, + conf: CurrencyConfDTO, + dal: FileDAL + ) { + await Promise.all( + Underscore.where(iindex, { op: constants.IDX_CREATE }).map( + async (ENTRY: IindexEntry) => { + if ( + HEAD.number == 0 && + ENTRY.created_on == + "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" + ) { + ENTRY.age = 0; + } else { + let ref = await dal.getAbsoluteValidBlockInForkWindowByBlockstamp( + ENTRY.created_on as string + ); + if (ref && blockstamp(ref.number, ref.hash) == ENTRY.created_on) { + ENTRY.age = HEAD_1.medianTime - ref.medianTime; + } else { + ENTRY.age = conf.idtyWindow + 1; + } + } } - } - })) + ) + ); } // BR_G22 - static async prepareMembershipsAge(mindex: MindexEntry[], HEAD: DBHead, HEAD_1: DBHead, conf: CurrencyConfDTO, dal:FileDAL) { - await Promise.all(Underscore.filter(mindex, (entry: MindexEntry) => !entry.revoked_on).map(async (ENTRY:MindexEntry) => { - if (HEAD.number == 0 && ENTRY.created_on == '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855') { - ENTRY.age = 0; - } else { - let ref = ENTRY.created_on_ref || await dal.getAbsoluteValidBlockInForkWindowByBlockstamp(ENTRY.created_on) - if (ref && blockstamp(ref.number, ref.hash) == ENTRY.created_on) { - ENTRY.created_on_ref = ref - ENTRY.age = HEAD_1.medianTime - ref.medianTime; - } else { - ENTRY.age = conf.msWindow + 1; + static async prepareMembershipsAge( + mindex: MindexEntry[], + HEAD: DBHead, + HEAD_1: DBHead, + conf: CurrencyConfDTO, + dal: FileDAL + ) { + await Promise.all( + Underscore.filter(mindex, (entry: MindexEntry) => !entry.revoked_on).map( + async (ENTRY: MindexEntry) => { + if ( + HEAD.number == 0 && + ENTRY.created_on == + "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" + ) { + ENTRY.age = 0; + } else { + let ref = + ENTRY.created_on_ref || + (await dal.getAbsoluteValidBlockInForkWindowByBlockstamp( + ENTRY.created_on + )); + if (ref && blockstamp(ref.number, 
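// BR_G18's handicap in isolation (an illustrative sketch, not part of the
// patch): an issuer forging more than the median number of blocks in the
// frame gets a log-scaled malus; the base 1.189 (about 2^(1/4)) means the
// handicap grows by one unit each time (1 + excess) gains a factor of ~1.19.
function personalHandicap(
  personalBlocksInFrame: number,
  medianPerIssuer: number // already clamped to >= 1 by the caller
): number {
  const excess = Math.max(0, (personalBlocksInFrame + 1) / medianPerIssuer - 1);
  return Math.floor(Math.log(1 + excess) / Math.log(1.189));
}
// personalHandicap(0, 1) == 0;  personalHandicap(3, 2) == 4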
ref.hash) == ENTRY.created_on) { + ENTRY.created_on_ref = ref; + ENTRY.age = HEAD_1.medianTime - ref.medianTime; + } else { + ENTRY.age = conf.msWindow + 1; + } + } } - } - })) + ) + ); } // BR_G37 - static async prepareCertificationsAge(cindex: CindexEntry[], HEAD: DBHead, HEAD_1: DBHead, conf: CurrencyConfDTO, dal:FileDAL) { - await Promise.all(cindex.map(async (ENTRY) => { - if (HEAD.number == 0) { - ENTRY.age = 0; - } else { - let ref = ENTRY.created_on_ref || await dal.getTristampOf(ENTRY.created_on) - if (ref) { - if (!ENTRY.created_on_ref) { - ENTRY.created_on_ref = ref - } - ENTRY.age = HEAD_1.medianTime - ref.medianTime; + static async prepareCertificationsAge( + cindex: CindexEntry[], + HEAD: DBHead, + HEAD_1: DBHead, + conf: CurrencyConfDTO, + dal: FileDAL + ) { + await Promise.all( + cindex.map(async (ENTRY) => { + if (HEAD.number == 0) { + ENTRY.age = 0; } else { - ENTRY.age = conf.sigWindow + 1; + let ref = + ENTRY.created_on_ref || (await dal.getTristampOf(ENTRY.created_on)); + if (ref) { + if (!ENTRY.created_on_ref) { + ENTRY.created_on_ref = ref; + } + ENTRY.age = HEAD_1.medianTime - ref.medianTime; + } else { + ENTRY.age = conf.sigWindow + 1; + } } - } - })) + }) + ); } // BR_G49 static ruleVersion(HEAD: DBHead, HEAD_1: DBHead) { if (HEAD.number > 0) { - return HEAD.version == HEAD_1.version || HEAD.version == HEAD_1.version + 1; + return ( + HEAD.version == HEAD_1.version || HEAD.version == HEAD_1.version + 1 + ); } return true; } @@ -1324,7 +1624,7 @@ export class Indexer { } // BR_G98 - static ruleCurrency(block:BlockDTO, HEAD: DBHead) { + static ruleCurrency(block: BlockDTO, HEAD: DBHead) { if (HEAD.number > 0) { return block.currency === HEAD.currency; } @@ -1332,58 +1632,64 @@ export class Indexer { } // BR_G51 - static ruleNumber(block:BlockDTO, HEAD: DBHead) { - return block.number == HEAD.number + static ruleNumber(block: BlockDTO, HEAD: DBHead) { + return block.number == HEAD.number; } // BR_G52 - static rulePreviousHash(block:BlockDTO, HEAD: DBHead) { - return block.previousHash == HEAD.previousHash || (!block.previousHash && !HEAD.previousHash) + static rulePreviousHash(block: BlockDTO, HEAD: DBHead) { + return ( + block.previousHash == HEAD.previousHash || + (!block.previousHash && !HEAD.previousHash) + ); } // BR_G53 - static rulePreviousIssuer(block:BlockDTO, HEAD: DBHead) { - return block.previousIssuer == HEAD.previousIssuer || (!block.previousIssuer && !HEAD.previousIssuer) + static rulePreviousIssuer(block: BlockDTO, HEAD: DBHead) { + return ( + block.previousIssuer == HEAD.previousIssuer || + (!block.previousIssuer && !HEAD.previousIssuer) + ); } // BR_G101 static ruleIssuerIsMember(HEAD: DBHead) { - return HEAD.issuerIsMember == true + return HEAD.issuerIsMember == true; } // BR_G54 - static ruleIssuersCount(block:BlockDTO, HEAD: DBHead) { - return block.issuersCount == HEAD.issuersCount + static ruleIssuersCount(block: BlockDTO, HEAD: DBHead) { + return block.issuersCount == HEAD.issuersCount; } // BR_G55 - static ruleIssuersFrame(block:BlockDTO, HEAD: DBHead) { - return block.issuersFrame == HEAD.issuersFrame + static ruleIssuersFrame(block: BlockDTO, HEAD: DBHead) { + return block.issuersFrame == HEAD.issuersFrame; } // BR_G56 - static ruleIssuersFrameVar(block:BlockDTO, HEAD: DBHead) { - return block.issuersFrameVar == HEAD.issuersFrameVar + static ruleIssuersFrameVar(block: BlockDTO, HEAD: DBHead) { + return block.issuersFrameVar == HEAD.issuersFrameVar; } // BR_G57 - static ruleMedianTime(block:BlockDTO, HEAD: DBHead) { - return block.medianTime 
== HEAD.medianTime + static ruleMedianTime(block: BlockDTO, HEAD: DBHead) { + return block.medianTime == HEAD.medianTime; } // BR_G58 - static ruleDividend(block:BlockDTO, HEAD: DBHead) { - return block.dividend == HEAD.new_dividend + static ruleDividend(block: BlockDTO, HEAD: DBHead) { + return block.dividend == HEAD.new_dividend; } // BR_G59 - static ruleUnitBase(block:BlockDTO, HEAD: DBHead) { - return block.unitbase == HEAD.unitBase + static ruleUnitBase(block: BlockDTO, HEAD: DBHead) { + return block.unitbase == HEAD.unitBase; } // BR_G60 - static ruleMembersCount(block:BlockDTO, HEAD: DBHead) { - return block.membersCount == HEAD.membersCount + static ruleMembersCount(block: BlockDTO, HEAD: DBHead) { + return block.membersCount == HEAD.membersCount; } // BR_G61 @@ -1391,7 +1697,7 @@ export class Indexer { if (HEAD.number > 0) { return block.powMin == HEAD.powMin; } - return true + return true; } // BR_G62 @@ -1400,17 +1706,32 @@ export class Indexer { const remainder = HEAD.powRemainder; const nbZerosReq = HEAD.powZeros; const highMark = constants.PROOF_OF_WORK.UPPER_BOUND[remainder]; - const powRegexp = new RegExp('^0{' + nbZerosReq + '}' + '[0-' + highMark + ']'); + const powRegexp = new RegExp( + "^0{" + nbZerosReq + "}" + "[0-" + highMark + "]" + ); try { if (!HEAD.hash.match(powRegexp)) { - const match = HEAD.hash.match(/^0*/) - const givenZeros = Math.max(0, Math.min(nbZerosReq, (match && match[0].length) || 0)) + const match = HEAD.hash.match(/^0*/); + const givenZeros = Math.max( + 0, + Math.min(nbZerosReq, (match && match[0].length) || 0) + ); const c = HEAD.hash.substr(givenZeros, 1); - throw Error('Wrong proof-of-work level: given ' + givenZeros + ' zeros and \'' + c + '\', required was ' + nbZerosReq + ' zeros and an hexa char between [0-' + highMark + ']'); + throw Error( + "Wrong proof-of-work level: given " + + givenZeros + + " zeros and '" + + c + + "', required was " + + nbZerosReq + + " zeros and an hexa char between [0-" + + highMark + + "]" + ); } return true; } catch (e) { - console.error(e) + console.error(e); return false; } } @@ -1420,7 +1741,7 @@ export class Indexer { for (const ENTRY of iindex) { if (ENTRY.age > conf.idtyWindow) return false; } - return true + return true; } // BR_G64 @@ -1428,7 +1749,7 @@ export class Indexer { for (const ENTRY of mindex) { if (ENTRY.age > conf.msWindow) return false; } - return true + return true; } // BR_G108 @@ -1436,7 +1757,7 @@ export class Indexer { for (const ENTRY of mindex) { if (ENTRY.unchainables > 0) return false; } - return true + return true; } // BR_G65 @@ -1444,7 +1765,7 @@ export class Indexer { for (const ENTRY of cindex) { if (ENTRY.age > conf.sigWindow) return false; } - return true + return true; } // BR_G66 @@ -1452,7 +1773,7 @@ export class Indexer { for (const ENTRY of cindex) { if (ENTRY.stock > conf.sigStock) return false; } - return true + return true; } // BR_G67 @@ -1460,7 +1781,7 @@ export class Indexer { for (const ENTRY of cindex) { if (ENTRY.unchainables > 0) return false; } - return true + return true; } // BR_G68 @@ -1470,7 +1791,7 @@ export class Indexer { if (!ENTRY.fromMember) return false; } } - return true + return true; } // BR_G69 @@ -1478,7 +1799,7 @@ export class Indexer { for (const ENTRY of cindex) { if (!ENTRY.toMember && !ENTRY.toNewcomer) return false; } - return true + return true; } // BR_G70 @@ -1486,25 +1807,25 @@ export class Indexer { for (const ENTRY of cindex) { if (ENTRY.toLeaver) return false; } - return true + return true; } // BR_G71 static 
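// BR_G62 condensed (an illustrative sketch, not part of the patch):
// issuerDiff = 16 * powZeros + powRemainder, and the hash must start with
// powZeros zeros followed by one hex char no greater than
// UPPER_BOUND[powRemainder]. Standalone check, with the bounds table passed
// in rather than taken from constants:
function matchesPow(
  hash: string,
  issuerDiff: number,
  upperBounds: string[]
): boolean {
  const remainder = issuerDiff % 16;
  const zeros = (issuerDiff - remainder) / 16;
  const powRegexp = new RegExp(
    "^0{" + zeros + "}[0-" + upperBounds[remainder] + "]"
  );
  return powRegexp.test(hash);
}
// e.g. issuerDiff = 52 => 3 leading zeros, then a char in [0-upperBounds[4]]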
ruleCertificationReplay(cindex: CindexEntry[]) { for (const ENTRY of cindex) { - if (ENTRY.isReplay && !ENTRY.isReplayable) return false + if (ENTRY.isReplay && !ENTRY.isReplayable) return false; } - return true + return true; } // BR_G72 static ruleCertificationSignature(cindex: CindexEntry[]) { for (const ENTRY of cindex) { if (!ENTRY.sigOK) { - return false + return false; } } - return true + return true; } // BR_G73 @@ -1512,7 +1833,7 @@ export class Indexer { for (const ENTRY of iindex) { if (!ENTRY.uidUnique) return false; } - return true + return true; } // BR_G74 @@ -1520,7 +1841,7 @@ export class Indexer { for (const ENTRY of iindex) { if (!ENTRY.pubUnique) return false; } - return true + return true; } // BR_G75 @@ -1528,28 +1849,29 @@ export class Indexer { for (const ENTRY of mindex) { if (!ENTRY.numberFollowing) return false; } - return true + return true; } // BR_G76 static ruleMembershipDistance(HEAD: DBHead, mindex: MindexEntry[]) { for (const ENTRY of mindex) { - if (HEAD.currency == 'gtest' - && !ENTRY.distanceOK + if ( + HEAD.currency == "gtest" && + !ENTRY.distanceOK && // && HEAD.number != 8450 // && HEAD.number != 9775 // && HEAD.number != 10893 // && HEAD.number != 11090 // && HEAD.number != 11263 // && HEAD.number != 11392 - && HEAD.number < 11512) { + HEAD.number < 11512 + ) { return false; - } - else if (HEAD.currency != 'gtest' && !ENTRY.distanceOK) { + } else if (HEAD.currency != "gtest" && !ENTRY.distanceOK) { return false; } } - return true + return true; } // BR_G77 @@ -1557,7 +1879,7 @@ export class Indexer { for (const ENTRY of mindex) { if (ENTRY.onRevoked) return false; } - return true + return true; } // BR_G78 @@ -1565,7 +1887,7 @@ export class Indexer { for (const ENTRY of mindex) { if (ENTRY.joinsTwice) return false; } - return true + return true; } // BR_G79 @@ -1573,7 +1895,7 @@ export class Indexer { for (const ENTRY of mindex) { if (!ENTRY.enoughCerts) return false; } - return true + return true; } // BR_G80 @@ -1581,7 +1903,7 @@ export class Indexer { for (const ENTRY of mindex) { if (!ENTRY.leaverIsMember) return false; } - return true + return true; } // BR_G81 @@ -1589,7 +1911,7 @@ export class Indexer { for (const ENTRY of mindex) { if (!ENTRY.activeIsMember) return false; } - return true + return true; } // BR_G82 @@ -1597,7 +1919,7 @@ export class Indexer { for (const ENTRY of mindex) { if (!ENTRY.revokedIsMember) return false; } - return true + return true; } // BR_G83 @@ -1605,7 +1927,7 @@ export class Indexer { for (const ENTRY of mindex) { if (ENTRY.alreadyRevoked) return false; } - return true + return true; } // BR_G84 @@ -1613,7 +1935,7 @@ export class Indexer { for (const ENTRY of mindex) { if (!ENTRY.revocationSigOK) return false; } - return true + return true; } // BR_G85 @@ -1621,24 +1943,31 @@ export class Indexer { for (const ENTRY of iindex) { if (!ENTRY.excludedIsMember) return false; } - return true + return true; } // BR_G86 - static async ruleToBeKickedArePresent(iindex: IindexEntry[], dal:FileDAL) { + static async ruleToBeKickedArePresent(iindex: IindexEntry[], dal: FileDAL) { const toBeKicked = await dal.iindexDAL.getToBeKickedPubkeys(); for (const toKick of toBeKicked) { - if (count(Underscore.where(iindex, { pub: toKick, isBeingKicked: true })) !== 1) { + if ( + count( + Underscore.where(iindex, { pub: toKick, isBeingKicked: true }) + ) !== 1 + ) { return false; } } - const beingKicked = Underscore.filter(iindex, (i:IindexEntry) => i.member === false); + const beingKicked = Underscore.filter( + iindex, + (i: 
IindexEntry) => i.member === false + ); for (const entry of beingKicked) { if (!entry.hasToBeExcluded) { return false; } } - return true + return true; } // BR_G103 @@ -1646,7 +1975,7 @@ export class Indexer { for (const ENTRY of sindex) { if (ENTRY.age > constants.TX_WINDOW) return false; } - return true + return true; } // BR_G87 @@ -1657,7 +1986,7 @@ export class Indexer { return false; } } - return true + return true; } // BR_G88 @@ -1668,7 +1997,7 @@ export class Indexer { return false; } } - return true + return true; } // BR_G89 @@ -1679,7 +2008,7 @@ export class Indexer { return false; } } - return true + return true; } // BR_G90 @@ -1690,65 +2019,102 @@ export class Indexer { return false; } } - return true + return true; } // BR_G91 - static async ruleIndexGenDividend(HEAD: DBHead, local_iindex: IindexEntry[], dal: FileDAL): Promise<SimpleUdEntryForWallet[]> { + static async ruleIndexGenDividend( + HEAD: DBHead, + local_iindex: IindexEntry[], + dal: FileDAL + ): Promise<SimpleUdEntryForWallet[]> { // Create the newcomers first, as they will produce a dividend too for (const newcomer of local_iindex) { - await dal.dividendDAL.createMember(newcomer.pub) + await dal.dividendDAL.createMember(newcomer.pub); } if (HEAD.new_dividend) { - return dal.updateDividend(HEAD.number, HEAD.new_dividend, HEAD.unitBase, local_iindex) + return dal.updateDividend( + HEAD.number, + HEAD.new_dividend, + HEAD.unitBase, + local_iindex + ); } - return [] + return []; } // BR_G106 - static async ruleIndexGarbageSmallAccounts(HEAD: DBHead, transactions: SindexEntry[], dividends: SimpleUdEntryForWallet[], dal:AccountsGarbagingDAL) { - let sindex: SimpleSindexEntryForWallet[] = transactions - sindex = sindex.concat(dividends) + static async ruleIndexGarbageSmallAccounts( + HEAD: DBHead, + transactions: SindexEntry[], + dividends: SimpleUdEntryForWallet[], + dal: AccountsGarbagingDAL + ) { + let sindex: SimpleSindexEntryForWallet[] = transactions; + sindex = sindex.concat(dividends); const garbages: SindexEntry[] = []; - const accounts = Object.keys(sindex.reduce((acc: { [k:string]: boolean }, src) => { - acc[src.conditions] = acc[src.conditions] || src.srcType === 'T' // We don't touch accounts that only received an UD - return acc; - }, {})); - const wallets: { [k:string]: Promise<DBWallet> } = accounts.reduce((map: { [k:string]: Promise<DBWallet> }, acc) => { - map[acc] = dal.getWallet(acc); - return map; - }, {}); + const accounts = Object.keys( + sindex.reduce((acc: { [k: string]: boolean }, src) => { + acc[src.conditions] = acc[src.conditions] || src.srcType === "T"; // We don't touch accounts that only received an UD + return acc; + }, {}) + ); + const wallets: { [k: string]: Promise<DBWallet> } = accounts.reduce( + (map: { [k: string]: Promise<DBWallet> }, acc) => { + map[acc] = dal.getWallet(acc); + return map; + }, + {} + ); for (const account of accounts) { - const localAccountEntries = Underscore.filter(sindex, src => src.conditions == account) + const localAccountEntries = Underscore.filter( + sindex, + (src) => src.conditions == account + ); const wallet = await wallets[account]; - const balance = wallet.balance - const variations = localAccountEntries.reduce((sum:number, src:SindexEntry) => { - if (src.op === 'CREATE') { - return sum + src.amount * Math.pow(10, src.base); - } else { - return sum - src.amount * Math.pow(10, src.base); - } - }, 0) + const balance = wallet.balance; + const variations = localAccountEntries.reduce( + (sum: number, src: SindexEntry) => { + if (src.op === "CREATE") { 
+ return sum + src.amount * Math.pow(10, src.base); + } else { + return sum - src.amount * Math.pow(10, src.base); + } + }, + 0 + ); if (balance + variations < 0) { - throw Error(DataErrors[DataErrors.NEGATIVE_BALANCE]) - } - else if (balance + variations < constants.ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT * Math.pow(10, HEAD.unitBase)) { + throw Error(DataErrors[DataErrors.NEGATIVE_BALANCE]); + } else if ( + balance + variations < + constants.ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT * + Math.pow(10, HEAD.unitBase) + ) { // console.log('GARBAGE ACCOUNT on B#%s %s! (has %s units left)', HEAD.number, account, balance + variations) - const globalAccountEntries = await dal.sindexDAL.getAvailableForConditions(account) + const globalAccountEntries = await dal.sindexDAL.getAvailableForConditions( + account + ); // localAccountEntries.forEach(e => console.log('local: %s %s %s', e.identifier, e.pos, e.amount)) // globalAccountEntries.forEach(e => console.log('global: %s %s %s', e.identifier, e.pos, e.amount)) for (const src of localAccountEntries.concat(globalAccountEntries)) { - const sourceBeingConsumed = Underscore.filter(sindex, entry => entry.op === 'UPDATE' && entry.identifier == src.identifier && entry.pos == src.pos).length > 0; + const sourceBeingConsumed = + Underscore.filter( + sindex, + (entry) => + entry.op === "UPDATE" && + entry.identifier == src.identifier && + entry.pos == src.pos + ).length > 0; if (!sourceBeingConsumed) { garbages.push({ - index: 'SINDEX', - op: 'UPDATE', - srcType: 'T', + index: "SINDEX", + op: "UPDATE", + srcType: "T", identifier: src.identifier, pos: src.pos, amount: src.amount, base: src.base, - written_on: [HEAD.number, HEAD.hash].join('-'), + written_on: [HEAD.number, HEAD.hash].join("-"), writtenOn: HEAD.number, written_time: HEAD.medianTime, conditions: src.conditions, @@ -1772,50 +2138,61 @@ export class Indexer { } // BR_G92 - static async ruleIndexGenCertificationExpiry(HEAD: DBHead, dal:FileDAL) { + static async ruleIndexGenCertificationExpiry(HEAD: DBHead, dal: FileDAL) { const expiries = []; - const certs = await dal.cindexDAL.findExpiresOnLteNotExpiredYet(HEAD.medianTime); + const certs = await dal.cindexDAL.findExpiresOnLteNotExpiredYet( + HEAD.medianTime + ); for (const CERT of certs) { expiries.push({ - op: 'UPDATE', + op: "UPDATE", issuer: CERT.issuer, receiver: CERT.receiver, created_on: CERT.created_on, - written_on: [HEAD.number, HEAD.hash].join('-'), + written_on: [HEAD.number, HEAD.hash].join("-"), writtenOn: HEAD.number, - expired_on: HEAD.medianTime + expired_on: HEAD.medianTime, }); } return expiries; } // BR_G93 - static async ruleIndexGenMembershipExpiry(HEAD: DBHead, dal:FileDAL) { - return (await dal.mindexDAL.findPubkeysThatShouldExpire(HEAD.medianTime)).map(MS => { + static async ruleIndexGenMembershipExpiry(HEAD: DBHead, dal: FileDAL) { + return ( + await dal.mindexDAL.findPubkeysThatShouldExpire(HEAD.medianTime) + ).map((MS) => { return { - op: 'UPDATE', + op: "UPDATE", pub: MS.pub, created_on: MS.created_on, - written_on: [HEAD.number, HEAD.hash].join('-'), + written_on: [HEAD.number, HEAD.hash].join("-"), writtenOn: HEAD.number, - expired_on: HEAD.medianTime - } - }) + expired_on: HEAD.medianTime, + }; + }); } // BR_G94 - static async ruleIndexGenExclusionByMembership(HEAD: DBHead, mindex: MindexEntry[], dal:FileDAL) { + static async ruleIndexGenExclusionByMembership( + HEAD: DBHead, + mindex: MindexEntry[], + dal: FileDAL + ) { const exclusions = []; - const memberships = Underscore.filter(mindex, entry => !!entry.expired_on) + 
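// BR_G106 condensed (an illustrative sketch, not part of the patch): the
// block's signed variation for an account is +amount*10^base per created
// source and -amount*10^base per consumed one; the account is garbage-
// collected when balance + variation falls below
// ACCOUNT_MINIMUM_CURRENT_BASED_AMOUNT * 10^unitBase. Standalone version:
function accountRemainder(
  balance: number,
  entries: { op: "CREATE" | "UPDATE"; amount: number; base: number }[]
): number {
  const variation = entries.reduce(
    (sum, src) =>
      src.op === "CREATE"
        ? sum + src.amount * Math.pow(10, src.base)
        : sum - src.amount * Math.pow(10, src.base),
    0
  );
  if (balance + variation < 0) throw Error("NEGATIVE_BALANCE");
  return balance + variation;
}
// accountRemainder(100, [{ op: "UPDATE", amount: 100, base: 0 },
//                        { op: "CREATE", amount: 5, base: 0 }]) == 5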
const memberships = Underscore.filter( + mindex, + (entry) => !!entry.expired_on + ); for (const MS of memberships) { const idty = await dal.iindexDAL.getFullFromPubkey(MS.pub); if (idty.member) { exclusions.push({ - op: 'UPDATE', + op: "UPDATE", pub: MS.pub, - written_on: [HEAD.number, HEAD.hash].join('-'), + written_on: [HEAD.number, HEAD.hash].join("-"), writtenOn: HEAD.number, - kick: true + kick: true, }); } } @@ -1823,24 +2200,50 @@ export class Indexer { } // BR_G95 - static async ruleIndexGenExclusionByCertificatons(HEAD: DBHead, cindex: CindexEntry[], iindex: IindexEntry[], conf: ConfDTO, dal:FileDAL) { + static async ruleIndexGenExclusionByCertificatons( + HEAD: DBHead, + cindex: CindexEntry[], + iindex: IindexEntry[], + conf: ConfDTO, + dal: FileDAL + ) { const exclusions: ExclusionByCert[] = []; - const expiredCerts = Underscore.filter(cindex, (c: CindexEntry) => c.expired_on > 0); + const expiredCerts = Underscore.filter( + cindex, + (c: CindexEntry) => c.expired_on > 0 + ); for (const CERT of expiredCerts) { - const just_expired = Underscore.filter(cindex, (c: CindexEntry) => c.receiver == CERT.receiver && c.expired_on > 0); - const just_received = Underscore.filter(cindex, (c: CindexEntry) => c.receiver == CERT.receiver && c.expired_on == 0); - const non_expired_global = await dal.cindexDAL.getValidLinksTo(CERT.receiver); - if ((count(non_expired_global) - count(just_expired) + count(just_received)) < conf.sigQty) { - const isInExcluded = Underscore.filter(iindex, (i: IindexEntry) => i.member === false && i.pub === CERT.receiver)[0] - const isInKicked = Underscore.filter(exclusions, e => e.pub === CERT.receiver)[0] - const idty = await dal.iindexDAL.getFullFromPubkey(CERT.receiver) + const just_expired = Underscore.filter( + cindex, + (c: CindexEntry) => c.receiver == CERT.receiver && c.expired_on > 0 + ); + const just_received = Underscore.filter( + cindex, + (c: CindexEntry) => c.receiver == CERT.receiver && c.expired_on == 0 + ); + const non_expired_global = await dal.cindexDAL.getValidLinksTo( + CERT.receiver + ); + if ( + count(non_expired_global) - count(just_expired) + count(just_received) < + conf.sigQty + ) { + const isInExcluded = Underscore.filter( + iindex, + (i: IindexEntry) => i.member === false && i.pub === CERT.receiver + )[0]; + const isInKicked = Underscore.filter( + exclusions, + (e) => e.pub === CERT.receiver + )[0]; + const idty = await dal.iindexDAL.getFullFromPubkey(CERT.receiver); if (!isInExcluded && !isInKicked && idty.member) { exclusions.push({ - op: 'UPDATE', + op: "UPDATE", pub: CERT.receiver, - written_on: [HEAD.number, HEAD.hash].join('-'), + written_on: [HEAD.number, HEAD.hash].join("-"), writtenOn: HEAD.number, - kick: true + kick: true, }); } } @@ -1849,19 +2252,23 @@ export class Indexer { } // BR_G96 - static async ruleIndexGenImplicitRevocation(HEAD: DBHead, dal:FileDAL) { + static async ruleIndexGenImplicitRevocation(HEAD: DBHead, dal: FileDAL) { const revocations = []; - const pending = await dal.mindexDAL.findRevokesOnLteAndRevokedOnIsNull(HEAD.medianTime) + const pending = await dal.mindexDAL.findRevokesOnLteAndRevokedOnIsNull( + HEAD.medianTime + ); for (const pub of pending) { - const REDUCED = (await dal.mindexDAL.getReducedMSForImplicitRevocation(pub)) as FullMindexEntry + const REDUCED = (await dal.mindexDAL.getReducedMSForImplicitRevocation( + pub + )) as FullMindexEntry; if (REDUCED.revokes_on <= HEAD.medianTime && !REDUCED.revoked_on) { revocations.push({ - op: 'UPDATE', + op: "UPDATE", pub: pub, created_on: 
REDUCED.created_on, - written_on: [HEAD.number, HEAD.hash].join('-'), + written_on: [HEAD.number, HEAD.hash].join("-"), writtenOn: HEAD.number, - revoked_on: [HEAD.number, HEAD.hash].join('-'), + revoked_on: [HEAD.number, HEAD.hash].join("-"), }); } } @@ -1869,20 +2276,29 @@ export class Indexer { } // BR_G104 - static async ruleIndexCorrectMembershipExpiryDate(HEAD: DBHead, mindex: MindexEntry[], dal:FileDAL) { + static async ruleIndexCorrectMembershipExpiryDate( + HEAD: DBHead, + mindex: MindexEntry[], + dal: FileDAL + ) { for (const MS of mindex) { - if (MS.type == 'JOIN' || MS.type == 'ACTIVE') { + if (MS.type == "JOIN" || MS.type == "ACTIVE") { let basedBlock = { medianTime: 0 }; if (HEAD.number == 0) { basedBlock = HEAD; } else { - basedBlock = MS.created_on_ref || await dal.getAbsoluteValidBlockInForkWindowByBlockstamp(MS.created_on) || basedBlock + basedBlock = + MS.created_on_ref || + (await dal.getAbsoluteValidBlockInForkWindowByBlockstamp( + MS.created_on + )) || + basedBlock; } if (MS.expires_on === null) { - MS.expires_on = 0 + MS.expires_on = 0; } if (MS.revokes_on === null) { - MS.revokes_on = 0 + MS.revokes_on = 0; } MS.expires_on += basedBlock.medianTime; MS.revokes_on += basedBlock.medianTime; @@ -1891,61 +2307,81 @@ export class Indexer { } // BR_G105 - static async ruleIndexCorrectCertificationExpiryDate(HEAD: DBHead, cindex: CindexEntry[], dal:FileDAL) { - for (const CERT of cindex.filter(c => c.op === 'CREATE')) { + static async ruleIndexCorrectCertificationExpiryDate( + HEAD: DBHead, + cindex: CindexEntry[], + dal: FileDAL + ) { + for (const CERT of cindex.filter((c) => c.op === "CREATE")) { let basedBlock = { medianTime: 0 }; if (HEAD.number == 0) { basedBlock = HEAD; } else { - basedBlock = CERT.created_on_ref || ((await dal.getTristampOf(CERT.created_on)) as DBBlock) // We are sure at this point in the rules + basedBlock = + CERT.created_on_ref || + ((await dal.getTristampOf(CERT.created_on)) as DBBlock); // We are sure at this point in the rules } CERT.expires_on += basedBlock.medianTime; } } static iindexCreate(index: IndexEntry[]): IindexEntry[] { - return Underscore.where(index, { index: constants.I_INDEX, op: constants.IDX_CREATE }) as IindexEntry[] + return Underscore.where(index, { + index: constants.I_INDEX, + op: constants.IDX_CREATE, + }) as IindexEntry[]; } static mindexCreate(index: IndexEntry[]): MindexEntry[] { - return Underscore.where(index, { index: constants.M_INDEX, op: constants.IDX_CREATE }) as MindexEntry[] + return Underscore.where(index, { + index: constants.M_INDEX, + op: constants.IDX_CREATE, + }) as MindexEntry[]; } static iindex(index: IndexEntry[]): IindexEntry[] { - return Underscore.where(index, { index: constants.I_INDEX }) as IindexEntry[] + return Underscore.where(index, { + index: constants.I_INDEX, + }) as IindexEntry[]; } static mindex(index: IndexEntry[]): MindexEntry[] { - return Underscore.where(index, { index: constants.M_INDEX }) as MindexEntry[] + return Underscore.where(index, { + index: constants.M_INDEX, + }) as MindexEntry[]; } static cindex(index: IndexEntry[]): CindexEntry[] { - return Underscore.where(index, { index: constants.C_INDEX }) as CindexEntry[] + return Underscore.where(index, { + index: constants.C_INDEX, + }) as CindexEntry[]; } static sindex(index: IndexEntry[]): SindexEntry[] { - return Underscore.where(index, { index: constants.S_INDEX }) as SindexEntry[] + return Underscore.where(index, { + index: constants.S_INDEX, + }) as SindexEntry[]; } static DUP_HELPERS = { - reduce, reduceOrNull, reduceBy: 
reduceBy, - getMaxBlockSize: (HEAD: DBHead) => Math.max(500, Math.ceil(1.1 * HEAD.avgBlockSize)), - checkPeopleAreNotOudistanced - } + getMaxBlockSize: (HEAD: DBHead) => + Math.max(500, Math.ceil(1.1 * HEAD.avgBlockSize)), + checkPeopleAreNotOudistanced, + }; } -function count<T>(range:T[]) { +function count<T>(range: T[]) { return range.length; } -function uniq(range:string[]) { - return Underscore.uniq(range) +function uniq(range: string[]) { + return Underscore.uniq(range); } -function average(values:number[]) { +function average(values: number[]) { // No values => 0 average if (!values.length) return 0; // Otherwise, real average @@ -1953,16 +2389,16 @@ function average(values:number[]) { return Math.floor(avg); } -function median(values:number[]) { +function median(values: number[]) { let med = 0; - values.sort((a, b) => a < b ? -1 : (a > b ? 1 : 0)); + values.sort((a, b) => (a < b ? -1 : a > b ? 1 : 0)); const nbValues = values.length; if (nbValues > 0) { if (nbValues % 2 === 0) { // Even number: the median is the average between the 2 central values, ceil rounded. const firstValue = values[nbValues / 2]; const secondValue = values[nbValues / 2 - 1]; - med = ((firstValue + secondValue) / 2); + med = (firstValue + secondValue) / 2; } else { med = values[(nbValues + 1) / 2 - 1]; } @@ -1975,52 +2411,66 @@ function number(theBlockstamp: string) { } function blockstamp(aNumber: number, aHash: string) { - return [aNumber, aHash].join('-'); + return [aNumber, aHash].join("-"); } -export function reduceOrNull<T>(records: T[]): T|null { +export function reduceOrNull<T>(records: T[]): T | null { if (records.length === 0) { - return null + return null; } - return reduce(records) + return reduce(records); } -export function reduceForDBTrimming<T extends { writtenOn: number }>(records: T[], belowNumber: number): T[] { +export function reduceForDBTrimming<T extends { writtenOn: number }>( + records: T[], + belowNumber: number +): T[] { if (records.length === 0) { - throw Error(DataErrors[DataErrors.INVALID_TRIMMABLE_DATA]) + throw Error(DataErrors[DataErrors.INVALID_TRIMMABLE_DATA]); } - const reducableRecords = records.filter(r => r.writtenOn < belowNumber) - const nonReducableRecords = records.filter(r => r.writtenOn >= belowNumber) - const reduced = reduce(reducableRecords) as T - return [reduced].concat(nonReducableRecords) + const reducableRecords = records.filter((r) => r.writtenOn < belowNumber); + const nonReducableRecords = records.filter((r) => r.writtenOn >= belowNumber); + const reduced = reduce(reducableRecords) as T; + return [reduced].concat(nonReducableRecords); } export function reduce<T>(records: T[]): T { - return records.reduce((obj:T, record) => { - const keys = Object.keys(record) as (keyof T)[] + return records.reduce((obj: T, record) => { + const keys = Object.keys(record) as (keyof T)[]; for (const k of keys) { if (record[k] !== undefined && record[k] !== null) { obj[k] = record[k]; } else if (record[k] === null && obj[k] === undefined) { // null overrides undefined - (obj[k] as any) = null + (obj[k] as any) = null; } } - return obj - }, <T>{}) + return obj; + }, <T>{}); } -export function reduceBy<T extends IndexEntry>(reducables: T[], properties: (keyof T)[]): T[] { - const reduced: { [k:string]: T[] } = reducables.reduce((map, entry) => { - const id = properties.map((prop) => entry[prop]).join('-') - map[id] = map[id] || [] - map[id].push(entry) - return map - }, <{ [k:string]: T[] }>{}) - return Underscore.values(reduced).map(value => Indexer.DUP_HELPERS.reduce(value)) 
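// reduce() semantics in short (worked example, not part of the patch): later
// records override earlier fields unless the value is undefined, and null
// only fills a field that is still undefined. For instance:
//   reduce([{ a: 1, b: 2 }, { a: 3, c: null }])  =>  { a: 3, b: 2, c: null }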
+export function reduceBy<T extends IndexEntry>( + reducables: T[], + properties: (keyof T)[] +): T[] { + const reduced: { [k: string]: T[] } = reducables.reduce((map, entry) => { + const id = properties.map((prop) => entry[prop]).join("-"); + map[id] = map[id] || []; + map[id].push(entry); + return map; + }, <{ [k: string]: T[] }>{}); + return Underscore.values(reduced).map((value) => + Indexer.DUP_HELPERS.reduce(value) + ); } -async function checkPeopleAreNotOudistanced (pubkeys: string[], newLinks: { [k:string]: string[] }, newcomers: string[], conf: ConfDTO, dal:FileDAL) { +async function checkPeopleAreNotOudistanced( + pubkeys: string[], + newLinks: { [k: string]: string[] }, + newcomers: string[], + conf: ConfDTO, + dal: FileDAL +) { let wotb = WotBuilder.fromWot(dal.wotb); let current = await dal.getCurrentBlockOrNull(); let membersCount = current ? current.membersCount : 0; @@ -2030,10 +2480,10 @@ async function checkPeopleAreNotOudistanced (pubkeys: string[], newLinks: { [k:s map[pubkey] = nodeID; wotb.setEnabled(false, nodeID); // These are not members yet return map; - }, <{ [k:string]: number }>{}); + }, <{ [k: string]: number }>{}); // Add temporarily the links to the WoT let tempLinks = []; - let toKeys = Underscore.keys(newLinks).map(String) + let toKeys = Underscore.keys(newLinks).map(String); for (const toKey of toKeys) { let toNode = await getNodeIDfromPubkey(nodesCache, toKey, dal); for (const fromKey of newLinks[toKey]) { @@ -2048,29 +2498,43 @@ async function checkPeopleAreNotOudistanced (pubkeys: string[], newLinks: { [k:s for (const pubkey of pubkeys) { let nodeID = await getNodeIDfromPubkey(nodesCache, pubkey, dal); const dSen = Math.ceil(Math.pow(membersCount, 1 / conf.stepMax)); - let isOutdistanced = wotb.isOutdistanced(nodeID, dSen, conf.stepMax, conf.xpercent); + let isOutdistanced = wotb.isOutdistanced( + nodeID, + dSen, + conf.stepMax, + conf.xpercent + ); if (isOutdistanced) { - error = Error('Joiner/Active is outdistanced from WoT'); + error = Error("Joiner/Active is outdistanced from WoT"); break; } } return error ? 
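// BR_G24's sentry threshold in isolation (an illustrative sketch, not part
// of the patch): dSen grows with the size of the web of trust; a candidate
// is outdistanced when fewer than xpercent of the sentries reach it within
// stepMax steps.
function sentriesThreshold(membersCount: number, stepMax: number): number {
  return Math.ceil(Math.pow(membersCount, 1 / stepMax));
}
// sentriesThreshold(1000, 5) == 4  (1000^(1/5) is about 3.98, rounded up)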
true : false; } -async function getNodeIDfromPubkey(nodesCache: { [k:string]: number }, pubkey: string, dal:FileDAL) { +async function getNodeIDfromPubkey( + nodesCache: { [k: string]: number }, + pubkey: string, + dal: FileDAL +) { let toNode = nodesCache[pubkey]; // Eventually cache the target nodeID if (toNode === null || toNode === undefined) { - let idty = await dal.getWrittenIdtyByPubkeyForWotbID(pubkey) - toNode = idty.wotb_id - nodesCache[pubkey] = toNode + let idty = await dal.getWrittenIdtyByPubkeyForWotbID(pubkey); + toNode = idty.wotb_id; + nodesCache[pubkey] = toNode; } return toNode; } -async function sigCheckRevoke(entry: MindexEntry, dal: FileDAL, currency: string) { +async function sigCheckRevoke( + entry: MindexEntry, + dal: FileDAL, + currency: string +) { try { - let pubkey = entry.pub, sig = entry.revocation || ""; + let pubkey = entry.pub, + sig = entry.revocation || ""; let idty = await dal.getWrittenIdtyByPubkeyForRevocationCheck(pubkey); if (!idty) { throw Error("A pubkey who was never a member cannot be revoked"); @@ -2084,7 +2548,7 @@ async function sigCheckRevoke(entry: MindexEntry, dal: FileDAL, currency: string uid: idty.uid, buid: idty.created_on, sig: idty.sig, - revocation: '' + revocation: "", }); let sigOK = verify(rawRevocation, sig, pubkey); if (!sigOK) { @@ -2096,41 +2560,53 @@ async function sigCheckRevoke(entry: MindexEntry, dal: FileDAL, currency: string } } - - -async function checkCertificationIsValid (block: BlockDTO, cert: CindexEntry, findIdtyFunc: (b:BlockDTO,to:string,dal:FileDAL)=>Promise<{ - pubkey:string - uid:string - buid:string - sig:string -}|null>, conf: ConfDTO, dal:FileDAL) { +async function checkCertificationIsValid( + block: BlockDTO, + cert: CindexEntry, + findIdtyFunc: ( + b: BlockDTO, + to: string, + dal: FileDAL + ) => Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null>, + conf: ConfDTO, + dal: FileDAL +) { if (block.number == 0 && cert.created_on != 0) { - throw Error('Number must be 0 for root block\'s certifications'); + throw Error("Number must be 0 for root block's certifications"); } else { try { - let basedBlock:Tristamp|null = { + let basedBlock: Tristamp | null = { number: 0, hash: constants.SPECIAL_HASH, - medianTime: 0 - } + medianTime: 0, + }; if (block.number != 0) { - basedBlock = await dal.getTristampOf(cert.created_on) + basedBlock = await dal.getTristampOf(cert.created_on); if (!basedBlock) { - throw Error('Certification based on an unexisting block') + throw Error("Certification based on an unexisting block"); } } - const idty = await findIdtyFunc(block, cert.receiver, dal) - let current = block.number == 0 ? null : await dal.getCurrentBlockOrNull(); + const idty = await findIdtyFunc(block, cert.receiver, dal); + let current = + block.number == 0 ? 
null : await dal.getCurrentBlockOrNull(); if (!idty) { - throw Error('Identity does not exist for certified'); - } - else if (current && current.medianTime > basedBlock.medianTime + conf.sigValidity) { - throw Error('Certification has expired'); - } - else if (cert.issuer == idty.pubkey) - throw Error('Rejected certification: certifying its own self-certification has no meaning'); + throw Error("Identity does not exist for certified"); + } else if ( + current && + current.medianTime > basedBlock.medianTime + conf.sigValidity + ) { + throw Error("Certification has expired"); + } else if (cert.issuer == idty.pubkey) + throw Error( + "Rejected certification: certifying its own self-certification has no meaning" + ); else { - const buid = [cert.created_on, basedBlock.hash].join('-'); + const buid = [cert.created_on, basedBlock.hash].join("-"); const raw = rawer.getOfficialCertification({ currency: conf.currency, idty_issuer: idty.pubkey, @@ -2139,11 +2615,11 @@ async function checkCertificationIsValid (block: BlockDTO, cert: CindexEntry, fi idty_sig: idty.sig, issuer: cert.issuer, buid: buid, - sig: '' - }) + sig: "", + }); const verified = verify(raw, cert.sig, cert.issuer); if (!verified) { - throw constants.ERRORS.WRONG_SIGNATURE_FOR_CERT + throw constants.ERRORS.WRONG_SIGNATURE_FOR_CERT; } return true; } @@ -2153,13 +2629,21 @@ async function checkCertificationIsValid (block: BlockDTO, cert: CindexEntry, fi } } -function txSourceUnlock(ENTRY:SindexEntry, source:{ conditions: string, written_time: number}, HEAD: DBHead) { +function txSourceUnlock( + ENTRY: SindexEntry, + source: { conditions: string; written_time: number }, + HEAD: DBHead +) { const tx = ENTRY.txObj; - const unlockParams:string[] = TransactionDTO.unlock2params(ENTRY.unlock || '') - const unlocksMetadata:UnlockMetadata = {} - const sigResult = TransactionDTO.fromJSONObject(tx).getTransactionSigResult(HEAD.version) + const unlockParams: string[] = TransactionDTO.unlock2params( + ENTRY.unlock || "" + ); + const unlocksMetadata: UnlockMetadata = {}; + const sigResult = TransactionDTO.fromJSONObject(tx).getTransactionSigResult( + HEAD.version + ); if (!source.conditions) { - return false // Unlock fail + return false; // Unlock fail } if (source.conditions.match(/CLTV/)) { unlocksMetadata.currentTime = HEAD.medianTime; @@ -2167,5 +2651,5 @@ function txSourceUnlock(ENTRY:SindexEntry, source:{ conditions: string, written_ if (source.conditions.match(/CSV/)) { unlocksMetadata.elapsedTime = HEAD.medianTime - source.written_time; } - return txunlock(source.conditions, unlockParams, sigResult, unlocksMetadata) + return txunlock(source.conditions, unlockParams, sigResult, unlocksMetadata); } diff --git a/app/lib/logger.ts b/app/lib/logger.ts index f4d4f35babb0ce7e12c6b0454283e256f2cc2eb4..7b8a99d31c57e93a9c6c8f8f369904d9a177edc2 100644 --- a/app/lib/logger.ts +++ b/app/lib/logger.ts @@ -11,29 +11,35 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
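The `app/lib/logger.ts` changes that begin here are the same mechanical Prettier pass. As review context for the `CallbackLogger` transport it touches, a hedged usage sketch follows; the callback shape `(level, msg, timestamp)` comes from `CallbackLogger.prototype.log` in the diff, while the import path is an assumption of the sketch.

```ts
import { NewLogger } from "./app/lib/logger"; // path assumed for the sketch

const logger = NewLogger();

// addCallbackLogs() attaches the custom winston transport once (singleton);
// the transport then invokes the callback as (level, msg, timestamp).
logger.addCallbackLogs((level: string, msg: string, timestamp: string) => {
  console.log(`[${timestamp}] ${level}: ${msg}`); // e.g. forward to a UI or test harness
});
```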
-import * as moment from "moment" -import {Directory} from "./system/directory" +import * as moment from "moment"; +import { Directory } from "./system/directory"; -const path = require('path'); -const winston = require('winston'); +const path = require("path"); +const winston = require("winston"); /*************** * CALLBACK LOGGER ***************/ -const util = require('util'); +const util = require("util"); -const CallbackLogger:any = winston.transports.CallbackLogger = function (options:any) { - - this.name = 'customLogger'; - this.level = options.level || 'info'; +const CallbackLogger: any = (winston.transports.CallbackLogger = function ( + options: any +) { + this.name = "customLogger"; + this.level = options.level || "info"; this.callback = options.callback; this.timestamp = options.timestamp; -}; +}); util.inherits(CallbackLogger, winston.Transport); -CallbackLogger.prototype.log = function (level:string, msg:string, meta:any, callback:any) { +CallbackLogger.prototype.log = function ( + level: string, + msg: string, + meta: any, + callback: any +) { this.callback(level, msg, this.timestamp()); callback(null, true); }; @@ -49,66 +55,66 @@ const customLevels = { info: 2, debug: 3, trace: 4, - query: 5 + query: 5, }, colors: { - error: 'red', - warn: 'yellow', - info: 'green', - debug: 'cyan', - trace: 'cyan', - query: 'grey' - } + error: "red", + warn: "yellow", + info: "green", + debug: "cyan", + trace: "cyan", + query: "grey", + }, }; // create the logger -const logger = new (winston.Logger)({ - level: 'trace', +const logger = new winston.Logger({ + level: "trace", levels: customLevels.levels, handleExceptions: false, colors: customLevels.colors, transports: [ // setup console logging - new (winston.transports.Console)({ - level: 'trace', + new winston.transports.Console({ + level: "trace", levels: customLevels.levels, handleExceptions: false, colorize: true, - timestamp: function() { + timestamp: function () { return moment().format(); - } - }) - ] + }, + }), + ], }); // Singletons let loggerAttached = false; -logger.addCallbackLogs = (callbackForLog:any) => { +logger.addCallbackLogs = (callbackForLog: any) => { if (!loggerAttached) { loggerAttached = true; logger.add(CallbackLogger, { callback: callbackForLog, - level: 'trace', + level: "trace", levels: customLevels.levels, handleExceptions: false, colorize: true, - timestamp: function() { + timestamp: function () { return moment().format(); - } + }, }); } }; // Singletons let loggerHomeAttached = false; -logger.addHomeLogs = (home:string, level:string) => { +logger.addHomeLogs = (home: string, level: string) => { if (!muted) { if (loggerHomeAttached) { logger.remove(winston.transports.File); } loggerHomeAttached = true; logger.add(winston.transports.File, { - level: level || 'info', + level: level || "info", levels: customLevels.levels, handleExceptions: false, colorize: true, @@ -117,11 +123,11 @@ logger.addHomeLogs = (home:string, level:string) => { maxFiles: 3, //zippedArchive: true, json: false, - filename: path.join(home, 'duniter.log'), + filename: path.join(home, "duniter.log"), timestamp: function () { return moment().format(); - } - }) + }, + }); } }; @@ -135,27 +141,27 @@ logger.mute = () => { logger.unmute = () => { if (muted) { - muted = false + muted = false; logger.add(winston.transports.Console, { - level: 'trace', + level: "trace", levels: customLevels.levels, handleExceptions: false, colorize: true, - timestamp: function() { + timestamp: function () { return moment().format(); - } - }) + }, + }); } -} +}; /** * 
Default logging path */ -logger.addHomeLogs(Directory.INSTANCE_HOME) +logger.addHomeLogs(Directory.INSTANCE_HOME); /** -* Convenience function to get logger directly -*/ -export function NewLogger(name?:string) { - return logger + * Convenience function to get logger directly + */ +export function NewLogger(name?: string) { + return logger; } diff --git a/app/lib/other_constants.ts b/app/lib/other_constants.ts index c5a913311d3f1ea73ac9b7db446b32caf16682cb..41fd5fd6884e41cb231977e62a581f20050ca9d8 100644 --- a/app/lib/other_constants.ts +++ b/app/lib/other_constants.ts @@ -12,19 +12,18 @@ // GNU Affero General Public License for more details. export const OtherConstants = { - MUTE_LOGS_DURING_UNIT_TESTS: false, SQL_TRACES: false, BC_EVENT: { - SWITCHED: 'switched', - HEAD_CHANGED: 'newHEAD', - RESOLUTION_DONE: 'resolution_done' + SWITCHED: "switched", + HEAD_CHANGED: "newHEAD", + RESOLUTION_DONE: "resolution_done", }, ENABLE_LOKI_MONITORING: false, ENABLE_SQL_MONITORING: false, ENABLE_MONITORING: false, TRACE_BALANCES: false, - TRACE_PARTICULAR_BALANCE: '', -} + TRACE_PARTICULAR_BALANCE: "", +}; diff --git a/app/lib/proxy.ts b/app/lib/proxy.ts index 0d670bdea4a399ffedff28d21e67054c5af363de..31b16a588e01c618f511fa71b9534c80938d3d5a 100644 --- a/app/lib/proxy.ts +++ b/app/lib/proxy.ts @@ -11,55 +11,76 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "./common-libs/constants" +import { CommonConstants } from "./common-libs/constants"; -const SocksProxyAgent = require('socks-proxy-agent'); +const SocksProxyAgent = require("socks-proxy-agent"); export class ProxiesConf { - public proxySocksAddress: string|undefined - public proxyTorAddress: string|undefined - public reachingClearEp: string - public forceTor: boolean + public proxySocksAddress: string | undefined; + public proxyTorAddress: string | undefined; + public reachingClearEp: string; + public forceTor: boolean; - constructor () { - this.proxySocksAddress = undefined - this.proxyTorAddress = undefined - this.reachingClearEp = 'clear' - this.forceTor = false + constructor() { + this.proxySocksAddress = undefined; + this.proxyTorAddress = undefined; + this.reachingClearEp = "clear"; + this.forceTor = false; } - static canReachClearEndpoint(proxiesConf: ProxiesConf|undefined):boolean { - return (proxiesConf === undefined || proxiesConf.reachingClearEp !== 'none') + static canReachClearEndpoint(proxiesConf: ProxiesConf | undefined): boolean { + return proxiesConf === undefined || proxiesConf.reachingClearEp !== "none"; } - static canReachTorEndpoint(proxiesConf: ProxiesConf|undefined):boolean { - return (proxiesConf !== undefined && (proxiesConf.forceTor || proxiesConf.proxyTorAddress !== undefined) ) + static canReachTorEndpoint(proxiesConf: ProxiesConf | undefined): boolean { + return ( + proxiesConf !== undefined && + (proxiesConf.forceTor || proxiesConf.proxyTorAddress !== undefined) + ); } - static httpProxy(url:string, proxiesConf: ProxiesConf|undefined):string|undefined { - return ProxiesConf.chooseProxyAgent(url, proxiesConf, CommonConstants.HOST_ONION_REGEX) + static httpProxy( + url: string, + proxiesConf: ProxiesConf | undefined + ): string | undefined { + return ProxiesConf.chooseProxyAgent( + url, + proxiesConf, + CommonConstants.HOST_ONION_REGEX + ); } - static wsProxy(address:string, proxiesConf: ProxiesConf|undefined):string|undefined { - return ProxiesConf.chooseProxyAgent(address, proxiesConf, 
CommonConstants.WS_FULL_ADDRESS_ONION_REGEX) + static wsProxy( + address: string, + proxiesConf: ProxiesConf | undefined + ): string | undefined { + return ProxiesConf.chooseProxyAgent( + address, + proxiesConf, + CommonConstants.WS_FULL_ADDRESS_ONION_REGEX + ); } - private static chooseProxyAgent(address:string, proxiesConf: ProxiesConf|undefined, onionRegex:RegExp):string|undefined { + private static chooseProxyAgent( + address: string, + proxiesConf: ProxiesConf | undefined, + onionRegex: RegExp + ): string | undefined { if (proxiesConf !== undefined) { if (address.match(onionRegex)) { if (ProxiesConf.canReachTorEndpoint(proxiesConf)) { - return proxiesConf.proxyTorAddress + return proxiesConf.proxyTorAddress; } } else { if (ProxiesConf.canReachClearEndpoint(proxiesConf)) { - if (proxiesConf.reachingClearEp == 'tor') { - return proxiesConf.proxyTorAddress + if (proxiesConf.reachingClearEp == "tor") { + return proxiesConf.proxyTorAddress; } else { - return proxiesConf.proxySocksAddress + return proxiesConf.proxySocksAddress; } } } } - return undefined + return undefined; } -} \ No newline at end of file +} diff --git a/app/lib/rules/global_rules.ts b/app/lib/rules/global_rules.ts index 3004a16494dd7243f101ea7363c222997ad7687c..dd6d1fa5e90c29ac15bab80ed30f1d3affcfb9f1 100644 --- a/app/lib/rules/global_rules.ts +++ b/app/lib/rules/global_rules.ts @@ -11,112 +11,144 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../dto/ConfDTO" -import {FileDAL} from "../dal/fileDAL" -import {DBBlock} from "../db/DBBlock" -import {TransactionDTO, TxSignatureResult} from "../dto/TransactionDTO" -import {BlockDTO} from "../dto/BlockDTO" -import {verify} from "duniteroxyde" -import {rawer, txunlock} from "../common-libs/index" -import {CommonConstants} from "../common-libs/constants" -import {IdentityDTO} from "../dto/IdentityDTO" -import {hashf} from "../common" -import {Indexer, SimpleTxInput} from "../indexer" -import {DBTx} from "../db/DBTx" -import {Tristamp} from "../common/Tristamp" -import {DataErrors} from "../common-libs/errors" +import { ConfDTO } from "../dto/ConfDTO"; +import { FileDAL } from "../dal/fileDAL"; +import { DBBlock } from "../db/DBBlock"; +import { TransactionDTO, TxSignatureResult } from "../dto/TransactionDTO"; +import { BlockDTO } from "../dto/BlockDTO"; +import { verify } from "duniteroxyde"; +import { rawer, txunlock } from "../common-libs/index"; +import { CommonConstants } from "../common-libs/constants"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { hashf } from "../common"; +import { Indexer, SimpleTxInput } from "../indexer"; +import { DBTx } from "../db/DBTx"; +import { Tristamp } from "../common/Tristamp"; +import { DataErrors } from "../common-libs/errors"; -const constants = CommonConstants +const constants = CommonConstants; // Empty logger by default let logger = { - debug: (...args:any[]) => {}, - warn: (...args:any[]) => {} -} + debug: (...args: any[]) => {}, + warn: (...args: any[]) => {}, +}; // TODO: all the global rules should be replaced by index rule someday export interface ParamEval { - successful:boolean - funcName:string - parameter:string + successful: boolean; + funcName: string; + parameter: string; } -export function evalParams(params:string[], conditions = '', sigResult:TxSignatureResult): ParamEval[] { - const res:ParamEval[] = [] - const issuers = sigResult.sigs.map(s => s.k) +export function evalParams( + params: string[], + conditions = 
"", + sigResult: TxSignatureResult +): ParamEval[] { + const res: ParamEval[] = []; + const issuers = sigResult.sigs.map((s) => s.k); for (const func of params) { if (func.match(/^SIG/)) { - const param = (func.match(/^SIG\((.*)\)$/) as string[])[1] - const index = parseInt(param) - const sigEntry = !isNaN(index) && index < issuers.length && sigResult.sigs[index] - const signatory:{ k:string, ok:boolean } = sigEntry || { k: '', ok: false } + const param = (func.match(/^SIG\((.*)\)$/) as string[])[1]; + const index = parseInt(param); + const sigEntry = + !isNaN(index) && index < issuers.length && sigResult.sigs[index]; + const signatory: { k: string; ok: boolean } = sigEntry || { + k: "", + ok: false, + }; res.push({ - funcName: 'SIG', + funcName: "SIG", parameter: signatory.k, - successful: signatory.ok - }) - } - else if (func.match(/^XHX/)) { - const password = (func.match(/^XHX\((.*)\)$/) as string[])[1] - const hash = hashf(password) + successful: signatory.ok, + }); + } else if (func.match(/^XHX/)) { + const password = (func.match(/^XHX\((.*)\)$/) as string[])[1]; + const hash = hashf(password); res.push({ - funcName: 'XHX', + funcName: "XHX", parameter: password, - successful: conditions.indexOf('XHX(' + hash + ')') !== -1 - }) + successful: conditions.indexOf("XHX(" + hash + ")") !== -1, + }); } } - return res + return res; } export const GLOBAL_RULES_FUNCTIONS = { - - checkIdentitiesAreWritable: async (block:{ identities:string[], version: number }, conf:ConfDTO, dal:FileDAL) => { + checkIdentitiesAreWritable: async ( + block: { identities: string[]; version: number }, + conf: ConfDTO, + dal: FileDAL + ) => { let current = await dal.getCurrentBlockOrNull(); for (const obj of block.identities) { let idty = IdentityDTO.fromInline(obj); - let found = await dal.getWrittenIdtyByUIDForExistence(idty.uid) + let found = await dal.getWrittenIdtyByUIDForExistence(idty.uid); if (found) { - throw Error('Identity already used'); + throw Error("Identity already used"); } // Because the window rule does not apply on initial certifications if (current && idty.buid != constants.SPECIAL_BLOCK) { // From DUP 0.5: we fully check the blockstamp - const basedBlock = await dal.getAbsoluteValidBlockInForkWindowByBlockstamp(idty.buid) || { medianTime: 0 } + const basedBlock = (await dal.getAbsoluteValidBlockInForkWindowByBlockstamp( + idty.buid + )) || { medianTime: 0 }; // Check if writable - let duration = current.medianTime - basedBlock.medianTime + let duration = current.medianTime - basedBlock.medianTime; if (duration > conf.idtyWindow) { - throw Error('Identity is too old and cannot be written'); + throw Error("Identity is too old and cannot be written"); } } } return true; }, - checkSourcesAvailability: async (block:{ version: number, transactions:TransactionDTO[], medianTime: number }, conf:ConfDTO, dal:FileDAL, findSourceTx:(txHash:string) => Promise<DBTx|null>) => { - const txs = block.transactions + checkSourcesAvailability: async ( + block: { + version: number; + transactions: TransactionDTO[]; + medianTime: number; + }, + conf: ConfDTO, + dal: FileDAL, + findSourceTx: (txHash: string) => Promise<DBTx | null> + ) => { + const txs = block.transactions; const current = await dal.getCurrentBlockOrNull(); for (const tx of txs) { - const inputs = tx.inputsAsObjects() - const outputs = tx.outputsAsObjects() - let unlocks:any = {}; + const inputs = tx.inputsAsObjects(); + const outputs = tx.outputsAsObjects(); + let unlocks: any = {}; let sumOfInputs = 0; - let maxOutputBase = current && 
current.unitbase || 0; + let maxOutputBase = (current && current.unitbase) || 0; for (const theUnlock of tx.unlocks) { - let sp = theUnlock.split(':'); + let sp = theUnlock.split(":"); let index = parseInt(sp[0]); unlocks[index] = sp[1]; } for (let k = 0, len2 = inputs.length; k < len2; k++) { let src = inputs[k]; - let dbSrc: SimpleTxInput|null = await dal.getSource(src.identifier, src.pos, src.type === 'D'); - logger.debug('Source %s:%s:%s:%s = %s', src.amount, src.base, src.identifier, src.pos, dbSrc && dbSrc.consumed); + let dbSrc: SimpleTxInput | null = await dal.getSource( + src.identifier, + src.pos, + src.type === "D" + ); + logger.debug( + "Source %s:%s:%s:%s = %s", + src.amount, + src.base, + src.identifier, + src.pos, + dbSrc && dbSrc.consumed + ); if (!dbSrc) { // For chained transactions which are checked on sandbox submission, we accept them if there is already // a previous transaction of the chain already recorded in the pool dbSrc = await (async () => { - let hypotheticSrc:any = null; + let hypotheticSrc: any = null; let targetTX = await findSourceTx(src.identifier); if (targetTX) { let outputStr = targetTX.outputs[src.pos]; @@ -127,21 +159,24 @@ export const GLOBAL_RULES_FUNCTIONS = { } } return hypotheticSrc; - })() + })(); } if (!dbSrc || dbSrc.consumed) { - logger.warn('Source ' + [src.type, src.identifier, src.pos].join(':') + ' is not available'); + logger.warn( + "Source " + + [src.type, src.identifier, src.pos].join(":") + + " is not available" + ); throw constants.ERRORS.SOURCE_ALREADY_CONSUMED; } sumOfInputs += dbSrc.amount * Math.pow(10, dbSrc.base); if (block.medianTime - dbSrc.written_time < tx.locktime) { throw constants.ERRORS.LOCKTIME_PREVENT; } - let unlockValues = unlocks[k] - let unlocksForCondition:string[] = (unlockValues || '').split(' ') - let unlocksMetadata:any = {}; + let unlockValues = unlocks[k]; + let unlocksForCondition: string[] = (unlockValues || "").split(" "); + let unlocksMetadata: any = {}; if (dbSrc.conditions) { - if (dbSrc.conditions.match(/CLTV/)) { unlocksMetadata.currentTime = block.medianTime; } @@ -150,97 +185,174 @@ export const GLOBAL_RULES_FUNCTIONS = { unlocksMetadata.elapsedTime = block.medianTime - dbSrc.written_time; } - const sigs = tx.getTransactionSigResult(block.version) + const sigs = tx.getTransactionSigResult(block.version); try { - if (!txunlock(dbSrc.conditions, unlocksForCondition, sigs, unlocksMetadata)) { - throw Error('Locked'); + if ( + !txunlock( + dbSrc.conditions, + unlocksForCondition, + sigs, + unlocksMetadata + ) + ) { + throw Error("Locked"); } } catch (e) { - logger.warn('Source ' + [src.amount, src.base, src.type, src.identifier, src.pos].join(':') + ' unlock fail'); + logger.warn( + "Source " + + [src.amount, src.base, src.type, src.identifier, src.pos].join( + ":" + ) + + " unlock fail" + ); throw constants.ERRORS.WRONG_UNLOCKER; } } else { - throw Error("Source with no conditions") + throw Error("Source with no conditions"); } } - let sumOfOutputs = outputs.reduce(function(p, output) { + let sumOfOutputs = outputs.reduce(function (p, output) { if (output.base > maxOutputBase) { throw constants.ERRORS.WRONG_OUTPUT_BASE; } return p + output.amount * Math.pow(10, output.base); }, 0); if (sumOfInputs !== sumOfOutputs) { - logger.warn('Inputs/Outputs != 1 (%s/%s)', sumOfInputs, sumOfOutputs); + logger.warn("Inputs/Outputs != 1 (%s/%s)", sumOfInputs, sumOfOutputs); throw constants.ERRORS.WRONG_AMOUNTS; } } return true; - } -} + }, +}; export const GLOBAL_RULES_HELPERS = { - // Functions used in an 
external context too - checkMembershipBlock: (ms:any, current:DBBlock|null, conf:ConfDTO, dal:FileDAL) => checkMSTarget(ms, current ? { number: current.number + 1} : { number: 0 }, conf, dal), + checkMembershipBlock: ( + ms: any, + current: DBBlock | null, + conf: ConfDTO, + dal: FileDAL + ) => + checkMSTarget( + ms, + current ? { number: current.number + 1 } : { number: 0 }, + conf, + dal + ), - checkCertificationIsValidInSandbox: (cert:any, current:BlockDTO, findIdtyFunc:any, conf:ConfDTO, dal:FileDAL) => { - return checkCertificationShouldBeValid(current ? current : { number: 0, currency: '' }, cert, findIdtyFunc, conf, dal) + checkCertificationIsValidInSandbox: ( + cert: any, + current: BlockDTO, + findIdtyFunc: any, + conf: ConfDTO, + dal: FileDAL + ) => { + return checkCertificationShouldBeValid( + current ? current : { number: 0, currency: "" }, + cert, + findIdtyFunc, + conf, + dal + ); }, - checkCertificationIsValidForBlock: (cert:any, block:{ number:number, currency:string }, findIdtyFunc:(b:{ number:number, currency:string }, pubkey:string, dal:FileDAL) => Promise<{ - pubkey:string - uid:string - buid:string - sig:string}|null>, conf:ConfDTO, dal:FileDAL) => { - return checkCertificationShouldBeValid(block, cert, findIdtyFunc, conf, dal) + checkCertificationIsValidForBlock: ( + cert: any, + block: { number: number; currency: string }, + findIdtyFunc: ( + b: { number: number; currency: string }, + pubkey: string, + dal: FileDAL + ) => Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null>, + conf: ConfDTO, + dal: FileDAL + ) => { + return checkCertificationShouldBeValid( + block, + cert, + findIdtyFunc, + conf, + dal + ); }, - isOver3Hops: async (member:any, newLinks:any, newcomers:string[], current:DBBlock|null, conf:ConfDTO, dal:FileDAL) => { + isOver3Hops: async ( + member: any, + newLinks: any, + newcomers: string[], + current: DBBlock | null, + conf: ConfDTO, + dal: FileDAL + ) => { if (!current) { return Promise.resolve(false); } try { - return Indexer.DUP_HELPERS.checkPeopleAreNotOudistanced([member], newLinks, newcomers, conf, dal); + return Indexer.DUP_HELPERS.checkPeopleAreNotOudistanced( + [member], + newLinks, + newcomers, + conf, + dal + ); } catch (e) { return true; } }, - checkExistsUserID: (uid:string, dal:FileDAL) => dal.getWrittenIdtyByUIDForExistence(uid), + checkExistsUserID: (uid: string, dal: FileDAL) => + dal.getWrittenIdtyByUIDForExistence(uid), - checkExistsPubkey: (pub:string, dal:FileDAL) => dal.getWrittenIdtyByPubkeyForExistence(pub), + checkExistsPubkey: (pub: string, dal: FileDAL) => + dal.getWrittenIdtyByPubkeyForExistence(pub), checkSingleTransaction: ( - tx:TransactionDTO, + tx: TransactionDTO, dubp_version: number, medianTime: number, - conf:ConfDTO, - dal:FileDAL, - findSourceTx:(txHash:string) => Promise<DBTx|null>) => GLOBAL_RULES_FUNCTIONS.checkSourcesAvailability( + conf: ConfDTO, + dal: FileDAL, + findSourceTx: (txHash: string) => Promise<DBTx | null> + ) => + GLOBAL_RULES_FUNCTIONS.checkSourcesAvailability( { version: dubp_version, transactions: [tx], - medianTime: medianTime + medianTime: medianTime, }, - conf, dal, findSourceTx + conf, + dal, + findSourceTx ), - checkTxBlockStamp: async (tx:TransactionDTO, dal:FileDAL) => { - const number = parseInt(tx.blockstamp.split('-')[0]) - const hash = tx.blockstamp.split('-')[1]; - const basedBlock = await dal.getAbsoluteValidBlockInForkWindow(number, hash) + checkTxBlockStamp: async (tx: TransactionDTO, dal: FileDAL) => { + const number = 
parseInt(tx.blockstamp.split("-")[0]); + const hash = tx.blockstamp.split("-")[1]; + const basedBlock = await dal.getAbsoluteValidBlockInForkWindow( + number, + hash + ); if (!basedBlock) { throw "Wrong blockstamp for transaction"; } // Valuates the blockstampTime field tx.blockstampTime = basedBlock.medianTime; const current = await dal.getCurrentBlockOrNull(); - if (current && current.medianTime > basedBlock.medianTime + constants.TX_WINDOW) { + if ( + current && + current.medianTime > basedBlock.medianTime + constants.TX_WINDOW + ) { throw DataErrors[DataErrors.TRANSACTION_WINDOW_IS_PASSED]; } - } -} + }, +}; /***************************** * @@ -248,70 +360,100 @@ export const GLOBAL_RULES_HELPERS = { * *****************************/ -async function checkMSTarget (ms:any, block:any, conf:ConfDTO, dal:FileDAL) { +async function checkMSTarget(ms: any, block: any, conf: ConfDTO, dal: FileDAL) { if (block.number == 0 && ms.number != 0) { - throw Error('Number must be 0 for root block\'s memberships'); - } - else if (block.number == 0 && ms.fpr != 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855') { - throw Error('Hash must be E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855 for root block\'s memberships'); - } - else if (block.number == 0) { + throw Error("Number must be 0 for root block's memberships"); + } else if ( + block.number == 0 && + ms.fpr != "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" + ) { + throw Error( + "Hash must be E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855 for root block's memberships" + ); + } else if (block.number == 0) { return null; // Valid for root block } else { - const basedBlock = await dal.getAbsoluteValidBlockInForkWindow(ms.number, ms.fpr) + const basedBlock = await dal.getAbsoluteValidBlockInForkWindow( + ms.number, + ms.fpr + ); if (!basedBlock) { - throw Error('Membership based on an unexisting block') + throw Error("Membership based on an unexisting block"); } let current = await dal.getCurrentBlockOrNull(); - if (current && current.medianTime > basedBlock.medianTime + conf.msValidity) { - throw Error('Membership has expired'); + if ( + current && + current.medianTime > basedBlock.medianTime + conf.msValidity + ) { + throw Error("Membership has expired"); } return basedBlock; } } -async function checkCertificationShouldBeValid (block:{ number:number, currency:string }, cert:any, findIdtyFunc:(b:{ number:number, currency:string }, pubkey:string, dal:FileDAL) => Promise<{ - pubkey:string - uid:string - buid:string - sig:string -}|null>, conf:ConfDTO, dal:FileDAL) { +async function checkCertificationShouldBeValid( + block: { number: number; currency: string }, + cert: any, + findIdtyFunc: ( + b: { number: number; currency: string }, + pubkey: string, + dal: FileDAL + ) => Promise<{ + pubkey: string; + uid: string; + buid: string; + sig: string; + } | null>, + conf: ConfDTO, + dal: FileDAL +) { if (block.number == 0 && cert.block_number != 0) { - throw Error('Number must be 0 for root block\'s certifications'); + throw Error("Number must be 0 for root block's certifications"); } else { - let basedBlock:Tristamp|null = { + let basedBlock: Tristamp | null = { number: 0, hash: constants.SPECIAL_HASH, - medianTime: 0 - } + medianTime: 0, + }; if (block.number != 0) { - basedBlock = await dal.getTristampOf(cert.block_number) + basedBlock = await dal.getTristampOf(cert.block_number); if (!basedBlock) { - throw Error('Certification based on an unexisting block'); + throw Error("Certification 
based on an unexisting block"); } try { - const issuer = await dal.getWrittenIdtyByPubkeyForIsMember(cert.from) + const issuer = await dal.getWrittenIdtyByPubkeyForIsMember(cert.from); if (!issuer || !issuer.member) { - throw Error('Issuer is not a member') + throw Error("Issuer is not a member"); } } catch (e) { - throw Error('Certifier must be a member') + throw Error("Certifier must be a member"); } } - let idty = await findIdtyFunc(block, cert.to, dal) + let idty = await findIdtyFunc(block, cert.to, dal); let current = block.number == 0 ? null : await dal.getCurrentBlockOrNull(); if (!idty) { - throw Error('Identity does not exist for certified'); - } - else if (current && current.medianTime > basedBlock.medianTime + conf.sigValidity) { - throw Error('Certification has expired'); - } - else if (cert.from == idty.pubkey) - throw Error('Rejected certification: certifying its own self-certification has no meaning'); + throw Error("Identity does not exist for certified"); + } else if ( + current && + current.medianTime > basedBlock.medianTime + conf.sigValidity + ) { + throw Error("Certification has expired"); + } else if (cert.from == idty.pubkey) + throw Error( + "Rejected certification: certifying its own self-certification has no meaning" + ); else { - const buid = [cert.block_number, basedBlock.hash].join('-'); - if (cert.block_hash && buid != [cert.block_number, cert.block_hash].join('-')) - throw Error('Certification based on an unexisting block buid. from ' + cert.from.substring(0,8) + ' to ' + idty.pubkey.substring(0,8)); + const buid = [cert.block_number, basedBlock.hash].join("-"); + if ( + cert.block_hash && + buid != [cert.block_number, cert.block_hash].join("-") + ) + throw Error( + "Certification based on an unexisting block buid. from " + + cert.from.substring(0, 8) + + " to " + + idty.pubkey.substring(0, 8) + ); const raw = rawer.getOfficialCertification({ currency: conf.currency, idty_issuer: idty.pubkey, @@ -320,17 +462,17 @@ async function checkCertificationShouldBeValid (block:{ number:number, currency: idty_sig: idty.sig, issuer: cert.from, buid: buid, - sig: '' - }) + sig: "", + }); const verified = verify(raw, cert.sig, cert.from); if (!verified) { - throw constants.ERRORS.WRONG_SIGNATURE_FOR_CERT + throw constants.ERRORS.WRONG_SIGNATURE_FOR_CERT; } return true; } } } -export function setLogger(newLogger:any) { - logger = newLogger +export function setLogger(newLogger: any) { + logger = newLogger; } diff --git a/app/lib/rules/helpers.ts b/app/lib/rules/helpers.ts index ee7d194d9279cb74b2f529c8477a06b460197306..d743f0c16e86c6b71d77ff074ffed65ece05daf5 100644 --- a/app/lib/rules/helpers.ts +++ b/app/lib/rules/helpers.ts @@ -11,12 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
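Before the imports of `app/lib/rules/helpers.ts`: a worked example for the `maxAcceleration()` helper reformatted just below. The parameter values are illustrative only, not the network's actual configuration.

```ts
// Illustrative values, not real network parameters:
const avgGenTime = 300; // seconds
const POW_DIFFICULTY_RANGE_RATIO = 1.5;
const medianTimeBlocks = 24;

const maxGenTime = Math.ceil(avgGenTime * POW_DIFFICULTY_RANGE_RATIO); // 450
const maxAccel = Math.ceil(maxGenTime * medianTimeBlocks); // 10800
// i.e. with these values, checkBlockTimes() would accept a block Time
// of up to MedianTime + 10800 seconds.
```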
-import {ConfDTO} from "../dto/ConfDTO" -import {CommonConstants} from "../common-libs/constants" +import { ConfDTO } from "../dto/ConfDTO"; +import { CommonConstants } from "../common-libs/constants"; -const constants = CommonConstants +const constants = CommonConstants; -export function maxAcceleration (conf:ConfDTO) { - let maxGenTime = Math.ceil(conf.avgGenTime * constants.POW_DIFFICULTY_RANGE_RATIO); +export function maxAcceleration(conf: ConfDTO) { + let maxGenTime = Math.ceil( + conf.avgGenTime * constants.POW_DIFFICULTY_RANGE_RATIO + ); return Math.ceil(maxGenTime * conf.medianTimeBlocks); } diff --git a/app/lib/rules/index.ts b/app/lib/rules/index.ts index a3091ce798abf53a5536042065a6a0d09ea2f581..c43f7ccc92b358aa0a30691d16e99db3490252e8 100644 --- a/app/lib/rules/index.ts +++ b/app/lib/rules/index.ts @@ -12,14 +12,13 @@ // GNU Affero General Public License for more details. "use strict"; -import {BlockDTO} from "../dto/BlockDTO" -import {ConfDTO} from "../dto/ConfDTO" -import {IndexEntry} from "../indexer" -import {LOCAL_RULES_FUNCTIONS} from "./local_rules" +import { BlockDTO } from "../dto/BlockDTO"; +import { ConfDTO } from "../dto/ConfDTO"; +import { IndexEntry } from "../indexer"; +import { LOCAL_RULES_FUNCTIONS } from "./local_rules"; export const ALIAS = { - - ALL_LOCAL: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + ALL_LOCAL: async (block: BlockDTO, conf: ConfDTO, index: IndexEntry[]) => { await LOCAL_RULES_FUNCTIONS.checkParameters(block); await LOCAL_RULES_FUNCTIONS.checkProofOfWork(block); await LOCAL_RULES_FUNCTIONS.checkInnerHash(block); @@ -29,27 +28,51 @@ export const ALIAS = { await LOCAL_RULES_FUNCTIONS.checkBlockSignature(block); await LOCAL_RULES_FUNCTIONS.checkBlockTimes(block, conf); await LOCAL_RULES_FUNCTIONS.checkIdentitiesSignature(block); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict( + block, + conf, + index + ); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkIdentitiesMatchJoin(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedAreExcluded(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipsSignature(block); await LOCAL_RULES_FUNCTIONS.checkPubkeyUnicity(block); - await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkCertificationUnicity(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkTxVersion(block); await LOCAL_RULES_FUNCTIONS.checkTxIssuers(block); await LOCAL_RULES_FUNCTIONS.checkTxSources(block); await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block); await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block); await LOCAL_RULES_FUNCTIONS.checkTxSignature(block); - await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth( + block, + conf, + index + ); }, - ALL_LOCAL_BUT_POW_AND_SIGNATURE: async 
(block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + ALL_LOCAL_BUT_POW_AND_SIGNATURE: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { await LOCAL_RULES_FUNCTIONS.checkParameters(block); await LOCAL_RULES_FUNCTIONS.checkInnerHash(block); await LOCAL_RULES_FUNCTIONS.checkPreviousHash(block); @@ -57,39 +80,70 @@ export const ALIAS = { await LOCAL_RULES_FUNCTIONS.checkUnitBase(block); await LOCAL_RULES_FUNCTIONS.checkBlockTimes(block, conf); await LOCAL_RULES_FUNCTIONS.checkIdentitiesSignature(block); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesUserIDConflict( + block, + conf, + index + ); + await LOCAL_RULES_FUNCTIONS.checkIdentitiesPubkeyConflict( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkIdentitiesMatchJoin(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedUnicity(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkRevokedAreExcluded(block, conf, index); await LOCAL_RULES_FUNCTIONS.checkMembershipsSignature(block); await LOCAL_RULES_FUNCTIONS.checkPubkeyUnicity(block); - await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationOneByIssuer( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkCertificationUnicity(block, conf, index); - await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded(block, conf, index); + await LOCAL_RULES_FUNCTIONS.checkCertificationIsntForLeaverOrExcluded( + block, + conf, + index + ); await LOCAL_RULES_FUNCTIONS.checkTxVersion(block); await LOCAL_RULES_FUNCTIONS.checkTxIssuers(block); await LOCAL_RULES_FUNCTIONS.checkTxSources(block); await LOCAL_RULES_FUNCTIONS.checkTxRecipients(block); await LOCAL_RULES_FUNCTIONS.checkTxAmounts(block); await LOCAL_RULES_FUNCTIONS.checkTxSignature(block); - await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth(block, conf, index); - } -} + await LOCAL_RULES_FUNCTIONS.checkMaxTransactionChainingDepth( + block, + conf, + index + ); + }, +}; export const CHECK = { ASYNC: { ALL_LOCAL: checkLocal(ALIAS.ALL_LOCAL), - ALL_LOCAL_BUT_POW: checkLocal(ALIAS.ALL_LOCAL_BUT_POW_AND_SIGNATURE) - } + ALL_LOCAL_BUT_POW: checkLocal(ALIAS.ALL_LOCAL_BUT_POW_AND_SIGNATURE), + }, }; -function checkLocal(contract:(block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => Promise<void>) { - return async (b:BlockDTO, conf:ConfDTO, index:IndexEntry[], done:any = undefined) => { +function checkLocal( + contract: ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => Promise<void> +) { + return async ( + b: BlockDTO, + conf: ConfDTO, + index: IndexEntry[], + done: any = undefined + ) => { try { - const block = BlockDTO.fromJSONObject(b) - await contract(block, conf, index) + const block = BlockDTO.fromJSONObject(b); + await contract(block, conf, index); done && done(); } catch (err) { if (done) return done(err); diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts index 933e6b394c4b0fdd7aec3a8f7415e6ca21c82557..81e52bee163b2f61729cb2cb30f97198ca7d7829 100644 --- a/app/lib/rules/local_rules.ts +++ b/app/lib/rules/local_rules.ts @@ -11,123 +11,161 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
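As the `local_rules.ts` hunk opens, one note on the `checkLocal()` wrapper just reformatted in `app/lib/rules/index.ts`: since `done` defaults to `undefined`, both promise and callback styles work. A hedged sketch (the block/conf/index values are placeholders; only the call shapes come from the diff):

```ts
import { CHECK } from "./app/lib/rules"; // path assumed for the sketch

async function validate(blockJSON: any, conf: any, index: any[]) {
  // Promise style: resolves, or rejects on the first failing local rule.
  await CHECK.ASYNC.ALL_LOCAL(blockJSON, conf, index);

  // Callback style: checkLocal() calls done(err) on failure, done() on success.
  CHECK.ASYNC.ALL_LOCAL_BUT_POW(blockJSON, conf, index, (err: any) => {
    if (err) console.error("local rule failed:", err);
  });
}
```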
-import {BlockDTO} from "../dto/BlockDTO" -import {ConfDTO} from "../dto/ConfDTO" -import {CindexEntry, IndexEntry, Indexer, MindexEntry, SindexEntry} from "../indexer" -import {BaseDTO, TransactionDTO} from "../dto/TransactionDTO" -import {DBBlock} from "../db/DBBlock" -import {verify} from "duniteroxyde" -import {hashf} from "../common" -import {CommonConstants} from "../common-libs/constants" -import {IdentityDTO} from "../dto/IdentityDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {Underscore} from "../common-libs/underscore" -import {FileDAL} from "../dal/fileDAL" - -const constants = CommonConstants -const maxAcceleration = require('./helpers').maxAcceleration - -const INVALID_G1_BLOCKS = new Set([15144, 31202, 85448, 87566, 90830, 109327, 189835, 199172, 221274, 253582]); -const INVALID_GT_BLOCKS = new Set([24316, 62067, 62551, 93288, 173118, 183706, 196196, 246027, 247211, 263207, - 307038, 328741, 335914, 377316, 395714, 396024, 407913, 422366, 496751]); +import { BlockDTO } from "../dto/BlockDTO"; +import { ConfDTO } from "../dto/ConfDTO"; +import { + CindexEntry, + IndexEntry, + Indexer, + MindexEntry, + SindexEntry, +} from "../indexer"; +import { BaseDTO, TransactionDTO } from "../dto/TransactionDTO"; +import { DBBlock } from "../db/DBBlock"; +import { verify } from "duniteroxyde"; +import { hashf } from "../common"; +import { CommonConstants } from "../common-libs/constants"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { Underscore } from "../common-libs/underscore"; +import { FileDAL } from "../dal/fileDAL"; + +const constants = CommonConstants; +const maxAcceleration = require("./helpers").maxAcceleration; + +const INVALID_G1_BLOCKS = new Set([ + 15144, + 31202, + 85448, + 87566, + 90830, + 109327, + 189835, + 199172, + 221274, + 253582, +]); +const INVALID_GT_BLOCKS = new Set([ + 24316, + 62067, + 62551, + 93288, + 173118, + 183706, + 196196, + 246027, + 247211, + 263207, + 307038, + 328741, + 335914, + 377316, + 395714, + 396024, + 407913, + 422366, + 496751, +]); export const LOCAL_RULES_FUNCTIONS = { - - checkParameters: async (block:BlockDTO) => { + checkParameters: async (block: BlockDTO) => { if (block.number == 0 && !block.parameters) { - throw Error('Parameters must be provided for root block'); - } - else if (block.number > 0 && block.parameters) { - throw Error('Parameters must not be provided for non-root block'); + throw Error("Parameters must be provided for root block"); + } else if (block.number > 0 && block.parameters) { + throw Error("Parameters must not be provided for non-root block"); } return true; }, - isProofOfWorkCorrect: (block:BlockDTO) => { + isProofOfWorkCorrect: (block: BlockDTO) => { let remainder = block.powMin % 16; let nb_zeros = (block.powMin - remainder) / 16; - const powRegexp = new RegExp('^0{' + nb_zeros + '}'); - return !!block.hash.match(powRegexp) + const powRegexp = new RegExp("^0{" + nb_zeros + "}"); + return !!block.hash.match(powRegexp); }, - checkProofOfWork: async (block:BlockDTO) => { + checkProofOfWork: async (block: BlockDTO) => { if (!LOCAL_RULES_FUNCTIONS.isProofOfWorkCorrect(block)) { - throw Error('Not a proof-of-work'); + throw Error("Not a proof-of-work"); } return true; }, - checkInnerHash: async (block:BlockDTO) => { + checkInnerHash: async (block: BlockDTO) => { let inner_hash = hashf(block.getRawInnerPart()).toUpperCase(); if (block.inner_hash != inner_hash) { - throw Error('Wrong inner hash'); + throw Error("Wrong inner hash"); 
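A quick worked example for `isProofOfWorkCorrect()` in the hunk above, since the zero-count arithmetic is terse (the `powMin` value is illustrative):

```ts
const powMin = 70; // illustrative difficulty
const remainder = powMin % 16; // 6
const nb_zeros = (powMin - remainder) / 16; // 4
const powRegexp = new RegExp("^0{" + nb_zeros + "}"); // /^0{4}/
// => a block hash passes only if it starts with four zeros, e.g. "0000A1B2…".
```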
} return true; }, - checkPreviousHash: async (block:BlockDTO) => { + checkPreviousHash: async (block: BlockDTO) => { if (block.number == 0 && block.previousHash) { - throw Error('PreviousHash must not be provided for root block'); - } - else if (block.number > 0 && !block.previousHash) { - throw Error('PreviousHash must be provided for non-root block'); + throw Error("PreviousHash must not be provided for root block"); + } else if (block.number > 0 && !block.previousHash) { + throw Error("PreviousHash must be provided for non-root block"); } return true; }, - checkPreviousIssuer: async (block:BlockDTO) => { + checkPreviousIssuer: async (block: BlockDTO) => { if (block.number == 0 && block.previousIssuer) - throw Error('PreviousIssuer must not be provided for root block'); + throw Error("PreviousIssuer must not be provided for root block"); else if (block.number > 0 && !block.previousIssuer) - throw Error('PreviousIssuer must be provided for non-root block'); + throw Error("PreviousIssuer must be provided for non-root block"); return true; }, - checkUnitBase: async (block:BlockDTO) => { + checkUnitBase: async (block: BlockDTO) => { if (block.number == 0 && block.unitbase != 0) { - throw Error('UnitBase must equal 0 for root block'); + throw Error("UnitBase must equal 0 for root block"); } return true; }, - checkBlockSignature: async (block:BlockDTO) => { + checkBlockSignature: async (block: BlockDTO) => { // Historically, Duniter used a buggy version of TweetNaCl (see #1390) // Starting with the v12 blocks, Duniter uses a fixed version of TweetNaCl. if (!verify(block.getSignedPart(), block.signature, block.issuer)) { if (block.version >= 12) { - throw Error('Block\'s signature must match'); + throw Error("Block's signature must match"); } // If DUBP < v12, block may have invalid signature else if (block.currency === constants.G1) { if (!INVALID_G1_BLOCKS.has(block.number)) { - throw Error('Block\'s signature must match'); + throw Error("Block's signature must match"); } - } - else if (block.currency === constants.GT) { + } else if (block.currency === constants.GT) { if (!INVALID_GT_BLOCKS.has(block.number)) { - throw Error('Block\'s signature must match'); + throw Error("Block's signature must match"); } } // Unknown currencies must have valid signature else { - throw Error('Block\'s signature must match'); + throw Error("Block's signature must match"); } } return true; }, - checkBlockTimes: async (block:BlockDTO, conf:ConfDTO) => { - const time = block.time - const medianTime = block.medianTime - if (block.number > 0 && (time < medianTime || time > medianTime + maxAcceleration(conf))) - throw Error('A block must have its Time between MedianTime and MedianTime + ' + maxAcceleration(conf)); + checkBlockTimes: async (block: BlockDTO, conf: ConfDTO) => { + const time = block.time; + const medianTime = block.medianTime; + if ( + block.number > 0 && + (time < medianTime || time > medianTime + maxAcceleration(conf)) + ) + throw Error( + "A block must have its Time between MedianTime and MedianTime + " + + maxAcceleration(conf) + ); else if (block.number == 0 && time != medianTime) - throw Error('Root block must have Time equal MedianTime'); + throw Error("Root block must have Time equal MedianTime"); return true; }, - checkIdentitiesSignature: async (block:BlockDTO) => { + checkIdentitiesSignature: async (block: BlockDTO) => { let i = 0; let wrongSig = false; while (!wrongSig && i < block.identities.length) { @@ -135,198 +173,260 @@ export const LOCAL_RULES_FUNCTIONS = { idty.currency = 
block.currency; wrongSig = !verify(idty.rawWithoutSig(), idty.sig, idty.pubkey); if (wrongSig) { - throw Error('Identity\'s signature must match'); + throw Error("Identity's signature must match"); } i++; } return true; }, - checkIdentitiesUserIDConflict: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkIdentitiesUserIDConflict: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const creates = Indexer.iindexCreate(index); - const uids = Underscore.chain(creates).pluck('uid').uniq().value(); + const uids = Underscore.chain(creates).pluck("uid").uniq().value(); if (creates.length !== uids.length) { - throw Error('Block must not contain twice same identity uid'); + throw Error("Block must not contain twice same identity uid"); } return true; }, - checkIdentitiesPubkeyConflict: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkIdentitiesPubkeyConflict: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const creates = Indexer.iindexCreate(index); - const pubkeys = Underscore.chain(creates).pluck('pub').uniq().value(); + const pubkeys = Underscore.chain(creates).pluck("pub").uniq().value(); if (creates.length !== pubkeys.length) { - throw Error('Block must not contain twice same identity pubkey'); + throw Error("Block must not contain twice same identity pubkey"); } return true; }, - checkIdentitiesMatchJoin: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkIdentitiesMatchJoin: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const icreates = Indexer.iindexCreate(index); const mcreates = Indexer.mindexCreate(index); for (const icreate of icreates) { const matching = Underscore.where(mcreates, { pub: icreate.pub }); if (matching.length == 0) { - throw Error('Each identity must match a newcomer line with same userid and certts'); + throw Error( + "Each identity must match a newcomer line with same userid and certts" + ); } } return true; }, - checkRevokedAreExcluded: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkRevokedAreExcluded: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const iindex = Indexer.iindex(index); const mindex = Indexer.mindex(index); const revocations = mindex - .filter((row:MindexEntry) => !!(row.op == constants.IDX_UPDATE && row.revoked_on !== null)) - .map(e => e.pub) + .filter( + (row: MindexEntry) => + !!(row.op == constants.IDX_UPDATE && row.revoked_on !== null) + ) + .map((e) => e.pub); for (const pub of revocations) { - const exclusions = Underscore.where(iindex, { op: constants.IDX_UPDATE, member: false, pub }) + const exclusions = Underscore.where(iindex, { + op: constants.IDX_UPDATE, + member: false, + pub, + }); if (exclusions.length == 0) { - throw Error('A revoked member must be excluded'); + throw Error("A revoked member must be excluded"); } } return true; }, - checkRevokedUnicity: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkRevokedUnicity: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { try { await LOCAL_RULES_FUNCTIONS.checkMembershipUnicity(block, conf, index); } catch (e) { - throw Error('A single revocation per member is allowed'); + throw Error("A single revocation per member is allowed"); } return true; }, - checkMembershipUnicity: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkMembershipUnicity: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const mindex = 
Indexer.mindex(index); - const pubkeys = Underscore.chain(mindex).pluck('pub').uniq().value(); + const pubkeys = Underscore.chain(mindex).pluck("pub").uniq().value(); if (pubkeys.length !== mindex.length) { - throw Error('Unicity constraint PUBLIC_KEY on MINDEX is not respected'); + throw Error("Unicity constraint PUBLIC_KEY on MINDEX is not respected"); } return true; }, - checkMembershipsSignature: async (block:BlockDTO) => { + checkMembershipsSignature: async (block: BlockDTO) => { let i = 0; - let wrongSig = false, ms; + let wrongSig = false, + ms; // Joiners while (!wrongSig && i < block.joiners.length) { - ms = MembershipDTO.fromInline(block.joiners[i], 'IN', block.currency); + ms = MembershipDTO.fromInline(block.joiners[i], "IN", block.currency); wrongSig = !checkSingleMembershipSignature(ms); i++; } // Actives i = 0; while (!wrongSig && i < block.actives.length) { - ms = MembershipDTO.fromInline(block.actives[i], 'IN', block.currency); + ms = MembershipDTO.fromInline(block.actives[i], "IN", block.currency); wrongSig = !checkSingleMembershipSignature(ms); i++; } // Leavers i = 0; while (!wrongSig && i < block.leavers.length) { - ms = MembershipDTO.fromInline(block.leavers[i], 'OUT', block.currency); + ms = MembershipDTO.fromInline(block.leavers[i], "OUT", block.currency); wrongSig = !checkSingleMembershipSignature(ms); i++; } if (wrongSig) { - throw Error('Membership\'s signature must match'); + throw Error("Membership's signature must match"); } return true; }, - checkPubkeyUnicity: async (block:BlockDTO) => { + checkPubkeyUnicity: async (block: BlockDTO) => { const pubkeys = []; let conflict = false; let pubk; // Joiners let i = 0; while (!conflict && i < block.joiners.length) { - pubk = block.joiners[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.joiners[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } // Actives i = 0; while (!conflict && i < block.actives.length) { - pubk = block.actives[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.actives[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } // Leavers i = 0; while (!conflict && i < block.leavers.length) { - pubk = block.leavers[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.leavers[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } // Excluded i = 0; while (!conflict && i < block.excluded.length) { - pubk = block.excluded[i].split(':')[0]; - conflict = !!(~pubkeys.indexOf(pubk)) + pubk = block.excluded[i].split(":")[0]; + conflict = !!~pubkeys.indexOf(pubk); pubkeys.push(pubk); i++; } if (conflict) { - throw Error('Block cannot contain a same pubkey more than once in joiners, actives, leavers and excluded'); + throw Error( + "Block cannot contain a same pubkey more than once in joiners, actives, leavers and excluded" + ); } return true; }, - checkCertificationOneByIssuer: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkCertificationOneByIssuer: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { if (block.number > 0) { const cindex = Indexer.cindex(index); - const certFromA = Underscore.uniq(cindex.map((row:CindexEntry) => row.issuer)); + const certFromA = Underscore.uniq( + cindex.map((row: CindexEntry) => row.issuer) + ); if (certFromA.length !== cindex.length) { - throw Error('Block cannot contain two certifications from same issuer'); + throw Error("Block cannot contain two certifications from same 
issuer"); } } return true; }, - checkCertificationUnicity: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkCertificationUnicity: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const cindex = Indexer.cindex(index); - const certAtoB = Underscore.uniq(cindex.map((row:CindexEntry) => row.issuer + row.receiver)); + const certAtoB = Underscore.uniq( + cindex.map((row: CindexEntry) => row.issuer + row.receiver) + ); if (certAtoB.length !== cindex.length) { - throw Error('Block cannot contain identical certifications (A -> B)'); + throw Error("Block cannot contain identical certifications (A -> B)"); } return true; }, - checkCertificationIsntForLeaverOrExcluded: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { + checkCertificationIsntForLeaverOrExcluded: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { const cindex = Indexer.cindex(index); const iindex = Indexer.iindex(index); const mindex = Indexer.mindex(index); - const certified = cindex.map((row:CindexEntry) => row.receiver); + const certified = cindex.map((row: CindexEntry) => row.receiver); for (const pub of certified) { - const exclusions = Underscore.where(iindex, { op: constants.IDX_UPDATE, member: false, pub: pub }) - const leavers = Underscore.where(mindex, { op: constants.IDX_UPDATE, leaving: true, pub: pub }) + const exclusions = Underscore.where(iindex, { + op: constants.IDX_UPDATE, + member: false, + pub: pub, + }); + const leavers = Underscore.where(mindex, { + op: constants.IDX_UPDATE, + leaving: true, + pub: pub, + }); if (exclusions.length > 0 || leavers.length > 0) { - throw Error('Block cannot contain certifications concerning leavers or excluded members'); + throw Error( + "Block cannot contain certifications concerning leavers or excluded members" + ); } } return true; }, - checkTxVersion: async (block:BlockDTO) => { - const txs = block.transactions + checkTxVersion: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (tx.version != 10) { - throw Error('A transaction must have the version 10'); + throw Error("A transaction must have the version 10"); } } return true; }, - checkTxLen: async (block:BlockDTO) => { - const txs = block.transactions + checkTxLen: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { - const txLen = TransactionDTO.fromJSONObject(tx).getLen() + const txLen = TransactionDTO.fromJSONObject(tx).getLen(); if (txLen > constants.MAXIMUM_LEN_OF_COMPACT_TX) { throw constants.ERRORS.A_TRANSACTION_HAS_A_MAX_SIZE; } @@ -334,9 +434,12 @@ export const LOCAL_RULES_FUNCTIONS = { // Check rule against each output of each transaction for (const tx of txs) { for (const output of tx.outputs) { - const out = typeof output === 'string' ? output : TransactionDTO.outputObj2Str(output) + const out = + typeof output === "string" + ? 
output + : TransactionDTO.outputObj2Str(output); if (out.length > constants.MAXIMUM_LEN_OF_OUTPUT) { - throw constants.ERRORS.MAXIMUM_LEN_OF_OUTPUT + throw constants.ERRORS.MAXIMUM_LEN_OF_OUTPUT; } } } @@ -344,61 +447,70 @@ export const LOCAL_RULES_FUNCTIONS = { for (const tx of txs) { for (const unlock of tx.unlocks) { if (unlock.length > constants.MAXIMUM_LEN_OF_UNLOCK) { - throw constants.ERRORS.MAXIMUM_LEN_OF_UNLOCK + throw constants.ERRORS.MAXIMUM_LEN_OF_UNLOCK; } } } return true; }, - checkTxIssuers: async (block:BlockDTO) => { - const txs = block.transactions + checkTxIssuers: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (tx.issuers.length == 0) { - throw Error('A transaction must have at least 1 issuer'); + throw Error("A transaction must have at least 1 issuer"); } } return true; }, - checkTxSources: async (block:BlockDTO) => { - const dto = BlockDTO.fromJSONObject(block) + checkTxSources: async (block: BlockDTO) => { + const dto = BlockDTO.fromJSONObject(block); for (const tx of dto.transactions) { if (!tx.inputs || tx.inputs.length == 0) { - throw Error('A transaction must have at least 1 source'); + throw Error("A transaction must have at least 1 source"); } } const sindex = Indexer.localSIndex(dto); - const inputs = Underscore.filter(sindex, (row:SindexEntry) => row.op == constants.IDX_UPDATE).map((row:SindexEntry) => [row.op, row.identifier, row.pos].join('-')); + const inputs = Underscore.filter( + sindex, + (row: SindexEntry) => row.op == constants.IDX_UPDATE + ).map((row: SindexEntry) => [row.op, row.identifier, row.pos].join("-")); if (inputs.length !== Underscore.uniq(inputs).length) { - throw Error('It cannot exist 2 identical sources for transactions inside a given block'); - } - const outputs = Underscore.filter(sindex, (row:SindexEntry) => row.op == constants.IDX_CREATE).map((row:SindexEntry) => [row.op, row.identifier, row.pos].join('-')); + throw Error( + "It cannot exist 2 identical sources for transactions inside a given block" + ); + } + const outputs = Underscore.filter( + sindex, + (row: SindexEntry) => row.op == constants.IDX_CREATE + ).map((row: SindexEntry) => [row.op, row.identifier, row.pos].join("-")); if (outputs.length !== Underscore.uniq(outputs).length) { - throw Error('It cannot exist 2 identical sources for transactions inside a given block'); + throw Error( + "It cannot exist 2 identical sources for transactions inside a given block" + ); } return true; }, - checkTxAmounts: async (block:BlockDTO) => { + checkTxAmounts: async (block: BlockDTO) => { for (const tx of block.transactions) { LOCAL_RULES_HELPERS.checkTxAmountsValidity(tx); } }, - checkTxRecipients: async (block:BlockDTO) => { - const txs = block.transactions + checkTxRecipients: async (block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (!tx.outputs || tx.outputs.length == 0) { - throw Error('A transaction must have at least 1 recipient'); - } - else { + throw Error("A transaction must have at least 1 recipient"); + } else { // Cannot have empty output condition for (const output of tx.outputsAsObjects()) { if (!output.conditions.match(/(SIG|XHX)/)) { - throw Error('Empty conditions are forbidden'); + throw Error("Empty conditions are forbidden"); } } } @@ -406,70 +518,113 @@ export const LOCAL_RULES_FUNCTIONS = { return true; }, - checkTxSignature: async (block:BlockDTO) => { - const txs = block.transactions + checkTxSignature: async 
(block: BlockDTO) => { + const txs = block.transactions; // Check rule against each transaction for (const tx of txs) { if (!tx.checkSignatures(block.version)) { - throw Error('Signature from a transaction must match') + throw Error("Signature from a transaction must match"); } } return true; }, - checkMaxTransactionChainingDepth: async (block:BlockDTO, conf:ConfDTO, index:IndexEntry[]) => { - const sindex = Indexer.sindex(index) - const max = getMaxTransactionDepth(sindex) + checkMaxTransactionChainingDepth: async ( + block: BlockDTO, + conf: ConfDTO, + index: IndexEntry[] + ) => { + const sindex = Indexer.sindex(index); + const max = getMaxTransactionDepth(sindex); // - const allowedMax = block.medianTime > CommonConstants.BLOCK_TX_CHAINING_ACTIVATION_MT ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH : 0 + const allowedMax = + block.medianTime > CommonConstants.BLOCK_TX_CHAINING_ACTIVATION_MT + ? CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH + : 0; if (max > allowedMax) { - throw "The maximum transaction chaining length per block is " + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH + throw ( + "The maximum transaction chaining length per block is " + + CommonConstants.BLOCK_MAX_TX_CHAINING_DEPTH + ); } - return true - } -} + return true; + }, +}; export interface SindexShortEntry { - op:string, - identifier:string, - pos:number, - tx:string|null + op: string; + identifier: string; + pos: number; + tx: string | null; } -function getMaxTransactionDepth(sindex:SindexShortEntry[]) { - const ids = Underscore.uniq(Underscore.pluck(sindex, 'tx')) as string[] // We are sure because at this moment no UD is in the sources - let maxTxChainingDepth = 0 +function getMaxTransactionDepth(sindex: SindexShortEntry[]) { + const ids = Underscore.uniq(Underscore.pluck(sindex, "tx")) as string[]; // We are sure because at this moment no UD is in the sources + let maxTxChainingDepth = 0; for (let id of ids) { - maxTxChainingDepth = Math.max(maxTxChainingDepth, getTransactionDepth(id, sindex, 0)) + maxTxChainingDepth = Math.max( + maxTxChainingDepth, + getTransactionDepth(id, sindex, 0) + ); } - return maxTxChainingDepth + return maxTxChainingDepth; } -function getTransactionDepth(txHash:string, sindex:SindexShortEntry[], localDepth = 0) { - const inputs = Underscore.filter(sindex, (s:SindexShortEntry) => s.op === 'UPDATE' && s.tx === txHash) - let depth = localDepth +function getTransactionDepth( + txHash: string, + sindex: SindexShortEntry[], + localDepth = 0 +) { + const inputs = Underscore.filter( + sindex, + (s: SindexShortEntry) => s.op === "UPDATE" && s.tx === txHash + ); + let depth = localDepth; for (let input of inputs) { - const consumedOutput = Underscore.findWhere(sindex, { op: 'CREATE', identifier: input.identifier, pos: input.pos }) + const consumedOutput = Underscore.findWhere(sindex, { + op: "CREATE", + identifier: input.identifier, + pos: input.pos, + }); if (consumedOutput) { if (localDepth < 5) { // Cast: we are sure because at this moment no UD is in the sources - const subTxDepth = getTransactionDepth(consumedOutput.tx as string, sindex, localDepth + 1) - depth = Math.max(depth, subTxDepth) + const subTxDepth = getTransactionDepth( + consumedOutput.tx as string, + sindex, + localDepth + 1 + ); + depth = Math.max(depth, subTxDepth); } else { - depth++ + depth++; } } } - return depth + return depth; } -function checkSingleMembershipSignature(ms:any) { +function checkSingleMembershipSignature(ms: any) { return verify(ms.getRaw(), ms.signature, ms.issuer); } -function 
checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, medianTime: number, options?:{ dontCareAboutChaining?:boolean }){ - const block:any = { transactions, identities: [], joiners: [], actives: [], leavers: [], revoked: [], excluded: [], certifications: [], medianTime }; - const index = Indexer.localIndex(block, conf) +function checkBunchOfTransactions( + transactions: TransactionDTO[], + conf: ConfDTO, + medianTime: number, + options?: { dontCareAboutChaining?: boolean } +) { + const block: any = { + transactions, + identities: [], + joiners: [], + actives: [], + leavers: [], + revoked: [], + excluded: [], + certifications: [], + medianTime, + }; + const index = Indexer.localIndex(block, conf); return (async () => { let local_rule = LOCAL_RULES_FUNCTIONS; await local_rule.checkTxLen(block); @@ -481,12 +636,11 @@ function checkBunchOfTransactions(transactions:TransactionDTO[], conf:ConfDTO, m if (!options || !options.dontCareAboutChaining) { await local_rule.checkMaxTransactionChainingDepth(block, conf, index); } - })() + })(); } export const LOCAL_RULES_HELPERS = { - - maxAcceleration: (conf:ConfDTO) => maxAcceleration(conf), + maxAcceleration: (conf: ConfDTO) => maxAcceleration(conf), checkSingleMembershipSignature: checkSingleMembershipSignature, @@ -496,16 +650,19 @@ export const LOCAL_RULES_HELPERS = { getMaxTransactionDepth, - checkSingleTransactionLocally: (tx:any, conf:ConfDTO) => checkBunchOfTransactions([tx], conf, 0), + checkSingleTransactionLocally: (tx: any, conf: ConfDTO) => + checkBunchOfTransactions([tx], conf, 0), - checkTxAmountsValidity: (tx:TransactionDTO) => { - const inputs = tx.inputsAsObjects() - const outputs = tx.outputsAsObjects() + checkTxAmountsValidity: (tx: TransactionDTO) => { + const inputs = tx.inputsAsObjects(); + const outputs = tx.outputsAsObjects(); // Rule of money conservation - const commonBase:number = (inputs as BaseDTO[]).concat(outputs).reduce((min:number, input) => { - if (min === null) return input.base; - return Math.min(min, input.base) - }, 0) + const commonBase: number = (inputs as BaseDTO[]) + .concat(outputs) + .reduce((min: number, input) => { + if (min === null) return input.base; + return Math.min(min, input.base); + }, 0); const inputSumCommonBase = inputs.reduce((sum, input) => { return sum + input.amount * Math.pow(10, input.base - commonBase); }, 0); @@ -517,10 +674,10 @@ export const LOCAL_RULES_HELPERS = { } // Rule of unit base transformation const maxOutputBase = outputs.reduce((max, output) => { - return Math.max(max, output.base) - }, 0) + return Math.max(max, output.base); + }, 0); // Compute deltas - const deltas:any = {}; + const deltas: any = {}; for (let i = commonBase; i <= maxOutputBase; i++) { const inputBaseSum = inputs.reduce((sum, input) => { if (input.base == i) { @@ -548,29 +705,42 @@ export const LOCAL_RULES_HELPERS = { } }, - getMaxPossibleVersionNumber: async (current:DBBlock|null, dal: FileDAL) => { + getMaxPossibleVersionNumber: async ( + current: DBBlock | null, + dal: FileDAL + ) => { // Looking at current blockchain, find what is the next maximum version we can produce return !current + ? // 1. We use legacy version + constants.BLOCK_GENESIS_VERSION + : (async () => { + // 2. 
If we can, we go to the next version + const blocksInFrame = ( + await dal.getBlocksBetween( + current.number - current.issuersFrame + 1, + current.number + ) + ).sort((b1, b2) => b2.number - b1.number); + const uniqIssuersInFrame = Underscore.uniq( + blocksInFrame.map((b) => b.issuer) + ); + const lastNonceOfEachIssuer = uniqIssuersInFrame.map((issuer) => + String(blocksInFrame.filter((b) => b.issuer === issuer)[0].nonce) + ); + const nbNoncesWithNextVersionCode = lastNonceOfEachIssuer.filter( + (nonce) => nonce.substr(-11, 3) === "999" + ).length; + + // More than 70% of the computing network converted? Let's go to next version. + let propIssuersReadyToJump = + nbNoncesWithNextVersionCode / uniqIssuersInFrame.length; + if (propIssuersReadyToJump > 0.7) { + return constants.DUBP_NEXT_VERSION; + } - // 1. We use legacy version - ? constants.BLOCK_GENESIS_VERSION : (async () => { - - // 2. If we can, we go to the next version - const blocksInFrame = (await dal.getBlocksBetween(current.number - current.issuersFrame + 1, current.number)) - .sort((b1, b2) => b2.number - b1.number) - const uniqIssuersInFrame = Underscore.uniq(blocksInFrame.map(b => b.issuer)) - const lastNonceOfEachIssuer = uniqIssuersInFrame.map(issuer => String(blocksInFrame.filter(b => b.issuer === issuer)[0].nonce)) - const nbNoncesWithNextVersionCode = lastNonceOfEachIssuer.filter(nonce => nonce.substr(-11, 3) === '999').length - - // More than 70% of the computing network converted? Let's go to next version. - let propIssuersReadyToJump = nbNoncesWithNextVersionCode / uniqIssuersInFrame.length; - if (propIssuersReadyToJump > 0.7) { - return constants.DUBP_NEXT_VERSION - } - - // Otherwise, we stay on same version - return current.version - })() - } -} + // Otherwise, we stay on same version + return current.version; + })(); + }, +}; diff --git a/app/lib/streams/multicaster.ts b/app/lib/streams/multicaster.ts index cf2cdbf3fc9eb7b2c2e13cd108c14b64c2e03290..5afd88cd1e68267981d9c26c5bd6bcc46e1f48da 100644 --- a/app/lib/streams/multicaster.ts +++ b/app/lib/streams/multicaster.ts @@ -11,233 +11,284 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
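Review note on the local rules file above: `getTransactionDepth()` is the subtlest logic touched by this reformat, so here is a tiny standalone fixture to sanity-check that the behaviour is unchanged. This is a sketch, not project code: the `AAA`/`BBB` hashes are invented, and `Underscore.filter`/`findWhere` are re-expressed with plain `Array` methods.

```ts
// Fixture: tx "BBB" spends an output created by tx "AAA" inside the
// same block, so "BBB" sits at chaining depth 1.
interface SindexShortEntry {
  op: string;
  identifier: string;
  pos: number;
  tx: string | null;
}

function depthOf(
  txHash: string,
  sindex: SindexShortEntry[],
  localDepth = 0
): number {
  // Inputs consumed by txHash (same filter as the original rule)
  const inputs = sindex.filter((s) => s.op === "UPDATE" && s.tx === txHash);
  let depth = localDepth;
  for (const input of inputs) {
    // Was that input created by another tx of this block?
    const consumed = sindex.find(
      (s) =>
        s.op === "CREATE" &&
        s.identifier === input.identifier &&
        s.pos === input.pos
    );
    if (consumed) {
      depth =
        localDepth < 5
          ? Math.max(
              depth,
              depthOf(consumed.tx as string, sindex, localDepth + 1)
            )
          : depth + 1;
    }
  }
  return depth;
}

const sindex: SindexShortEntry[] = [
  { op: "CREATE", identifier: "AAA", pos: 0, tx: "AAA" }, // output of AAA
  { op: "UPDATE", identifier: "AAA", pos: 0, tx: "BBB" }, // spent by BBB
];
console.log(depthOf("BBB", sindex)); // -> 1
```

A block whose transactions all spend block-external sources reports depth 0, which is why `checkMaxTransactionChainingDepth` only trips once intra-block chaining exceeds the allowed maximum.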
-import {ConfDTO} from "../dto/ConfDTO" -import * as stream from "stream" -import {BlockDTO} from "../dto/BlockDTO" -import {RevocationDTO} from "../dto/RevocationDTO" -import {IdentityDTO} from "../dto/IdentityDTO" -import {CertificationDTO} from "../dto/CertificationDTO" -import {MembershipDTO} from "../dto/MembershipDTO" -import {TransactionDTO} from "../dto/TransactionDTO" -import {PeerDTO} from "../dto/PeerDTO" -import {CommonConstants} from "../common-libs/constants" -import {DBPeer} from "../db/DBPeer" - -const request = require('request'); -const constants = require('../../lib/constants'); -const logger = require('../logger').NewLogger('multicaster'); +import { ConfDTO } from "../dto/ConfDTO"; +import * as stream from "stream"; +import { BlockDTO } from "../dto/BlockDTO"; +import { RevocationDTO } from "../dto/RevocationDTO"; +import { IdentityDTO } from "../dto/IdentityDTO"; +import { CertificationDTO } from "../dto/CertificationDTO"; +import { MembershipDTO } from "../dto/MembershipDTO"; +import { TransactionDTO } from "../dto/TransactionDTO"; +import { PeerDTO } from "../dto/PeerDTO"; +import { CommonConstants } from "../common-libs/constants"; +import { DBPeer } from "../db/DBPeer"; + +const request = require("request"); +const constants = require("../../lib/constants"); +const logger = require("../logger").NewLogger("multicaster"); const WITH_ISOLATION = true; export class Multicaster extends stream.Transform { + constructor( + private conf: ConfDTO | null = null, + private timeout: number = 0 + ) { + super({ objectMode: true }); - constructor(private conf:ConfDTO|null = null, private timeout:number = 0) { - - super({ objectMode: true }) - - this.on('identity', (data:any, peers:DBPeer[]) => this.idtyForward(data, peers)) - this.on('cert', (data:any, peers:DBPeer[]) => this.certForward(data, peers)) - this.on('revocation', (data:any, peers:DBPeer[]) => this.revocationForward(data, peers)) - this.on('block', (data:any, peers:DBPeer[]) => this.blockForward(data, peers)) - this.on('transaction', (data:any, peers:DBPeer[]) => this.txForward(data, peers)) - this.on('peer', (data:any, peers:DBPeer[]) => this.peerForward(data, peers)) - this.on('membership', (data:any, peers:DBPeer[]) => this.msForward(data, peers)) + this.on("identity", (data: any, peers: DBPeer[]) => + this.idtyForward(data, peers) + ); + this.on("cert", (data: any, peers: DBPeer[]) => + this.certForward(data, peers) + ); + this.on("revocation", (data: any, peers: DBPeer[]) => + this.revocationForward(data, peers) + ); + this.on("block", (data: any, peers: DBPeer[]) => + this.blockForward(data, peers) + ); + this.on("transaction", (data: any, peers: DBPeer[]) => + this.txForward(data, peers) + ); + this.on("peer", (data: any, peers: DBPeer[]) => + this.peerForward(data, peers) + ); + this.on("membership", (data: any, peers: DBPeer[]) => + this.msForward(data, peers) + ); } - async blockForward(doc:any, peers:DBPeer[]) { + async blockForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (b:any) => BlockDTO.fromJSONObject(b), - type: 'Block', - uri: '/blockchain/block', - getObj: (block:any) => { + transform: (b: any) => BlockDTO.fromJSONObject(b), + type: "Block", + uri: "/blockchain/block", + getObj: (block: any) => { return { - "block": block.getRawSigned() + block: block.getRawSigned(), }; }, - getDocID: (block:any) => 'block#' + block.number - })(doc, peers) + getDocID: (block: any) => "block#" + block.number, + })(doc, peers); } - async idtyForward(doc:any, peers:DBPeer[]) { + async 
idtyForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => IdentityDTO.fromJSONObject(obj), - type: 'Identity', - uri: '/wot/add', - getObj: (idty:IdentityDTO) => { + transform: (obj: any) => IdentityDTO.fromJSONObject(obj), + type: "Identity", + uri: "/wot/add", + getObj: (idty: IdentityDTO) => { return { - "identity": idty.getRawSigned() + identity: idty.getRawSigned(), }; }, - getDocID: (idty:any) => 'with ' + (idty.certs || []).length + ' certs' - })(doc, peers) + getDocID: (idty: any) => "with " + (idty.certs || []).length + " certs", + })(doc, peers); } - async certForward(doc:any, peers:DBPeer[]) { + async certForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => CertificationDTO.fromJSONObject(obj), - type: 'Cert', - uri: '/wot/certify', - getObj: (cert:CertificationDTO) => { + transform: (obj: any) => CertificationDTO.fromJSONObject(obj), + type: "Cert", + uri: "/wot/certify", + getObj: (cert: CertificationDTO) => { return { - "cert": cert.getRawSigned() + cert: cert.getRawSigned(), }; }, - getDocID: (idty:any) => 'with ' + (idty.certs || []).length + ' certs' - })(doc, peers) + getDocID: (idty: any) => "with " + (idty.certs || []).length + " certs", + })(doc, peers); } - async revocationForward(doc:any, peers:DBPeer[]) { + async revocationForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (json:any) => RevocationDTO.fromJSONObject(json), - type: 'Revocation', - uri: '/wot/revoke', - getObj: (revocation:RevocationDTO) => { + transform: (json: any) => RevocationDTO.fromJSONObject(json), + type: "Revocation", + uri: "/wot/revoke", + getObj: (revocation: RevocationDTO) => { return { - "revocation": revocation.getRaw() + revocation: revocation.getRaw(), }; - } - })(doc, peers) + }, + })(doc, peers); } - async txForward(doc:any, peers:DBPeer[]) { + async txForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => TransactionDTO.fromJSONObject(obj), - type: 'Transaction', - uri: '/tx/process', - getObj: (transaction:TransactionDTO) => { + transform: (obj: any) => TransactionDTO.fromJSONObject(obj), + type: "Transaction", + uri: "/tx/process", + getObj: (transaction: TransactionDTO) => { return { - "transaction": transaction.getRaw(), - "signature": transaction.signature + transaction: transaction.getRaw(), + signature: transaction.signature, }; - } - })(doc, peers) + }, + })(doc, peers); } - async peerForward(doc:any, peers:DBPeer[]) { + async peerForward(doc: any, peers: DBPeer[]) { return this.forward({ - type: 'Peer', - uri: '/network/peering/peers', - transform: (obj:any) => PeerDTO.fromJSONObject(obj), - getObj: (peering:PeerDTO) => { + type: "Peer", + uri: "/network/peering/peers", + transform: (obj: any) => PeerDTO.fromJSONObject(obj), + getObj: (peering: PeerDTO) => { return { - peer: peering.getRawSigned() + peer: peering.getRawSigned(), }; }, - getDocID: (doc:PeerDTO) => doc.keyID() + '#' + doc.blockNumber(), + getDocID: (doc: PeerDTO) => doc.keyID() + "#" + doc.blockNumber(), withIsolation: WITH_ISOLATION, - onError: (resJSON:{ - peer: { - block:string, - endpoints:string[] + onError: ( + resJSON: { + peer: { + block: string; + endpoints: string[]; + }; + ucode?: number; + message?: string; }, - ucode?:number, - message?:string - }, peering:any, to:any) => { - if (resJSON.ucode !== undefined && resJSON.ucode !== CommonConstants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode) { - if (resJSON.ucode == CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode || 
resJSON.ucode == constants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode) { - return Promise.resolve() + peering: any, + to: any + ) => { + if ( + resJSON.ucode !== undefined && + resJSON.ucode !== + CommonConstants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode + ) { + if ( + resJSON.ucode == + CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode || + resJSON.ucode == + constants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode + ) { + return Promise.resolve(); } else { - throw Error(resJSON.message) + throw Error(resJSON.message); } } else { // Handle possibly outdated peering document - const sentPeer = PeerDTO.fromJSONObject(peering) - if (PeerDTO.blockNumber(resJSON.peer.block) > sentPeer.blockNumber()) { + const sentPeer = PeerDTO.fromJSONObject(peering); + if ( + PeerDTO.blockNumber(resJSON.peer.block) > sentPeer.blockNumber() + ) { this.push({ outdated: true, peer: resJSON.peer }); - logger.warn('Outdated peer document (%s) sent to %s', sentPeer.keyID() + '#' + sentPeer.blockNumber(), to); + logger.warn( + "Outdated peer document (%s) sent to %s", + sentPeer.keyID() + "#" + sentPeer.blockNumber(), + to + ); } - return Promise.resolve() + return Promise.resolve(); } - } - })(doc, peers) + }, + })(doc, peers); } - async msForward(doc:any, peers:DBPeer[]) { + async msForward(doc: any, peers: DBPeer[]) { return this.forward({ - transform: (obj:any) => MembershipDTO.fromJSONObject(obj), - type: 'Membership', - uri: '/blockchain/membership', - getObj: (membership:MembershipDTO) => { + transform: (obj: any) => MembershipDTO.fromJSONObject(obj), + type: "Membership", + uri: "/blockchain/membership", + getObj: (membership: MembershipDTO) => { return { - "membership": membership.getRaw(), - "signature": membership.signature + membership: membership.getRaw(), + signature: membership.signature, }; - } - })(doc, peers) + }, + })(doc, peers); } - _write(obj:any, enc:any, done:any) { - this.emit(obj.type, obj.obj, obj.peers) - done() + _write(obj: any, enc: any, done: any) { + this.emit(obj.type, obj.obj, obj.peers); + done(); } - sendBlock(toPeer:any, block:any) { - return this.blockForward(block, [toPeer]) + sendBlock(toPeer: any, block: any) { + return this.blockForward(block, [toPeer]); } - sendPeering(toPeer:any, peer:any) { - return this.peerForward(peer, [toPeer]) + sendPeering(toPeer: any, peer: any) { + return this.peerForward(peer, [toPeer]); } - forward(params:any) { - return async (doc:any, peers:DBPeer[]) => { + forward(params: any) { + return async (doc: any, peers: DBPeer[]) => { try { - if(!params.withIsolation || !(this.conf && this.conf.isolate)) { + if (!params.withIsolation || !(this.conf && this.conf.isolate)) { let theDoc = params.transform ? 
params.transform(doc) : doc; if (params.getDocID) { - logger.info('POST %s %s to %s peers', params.type, params.getDocID(theDoc), peers.length) + logger.info( + "POST %s %s to %s peers", + params.type, + params.getDocID(theDoc), + peers.length + ); } else { - logger.info('POST %s to %s peers', params.type, peers.length); + logger.info("POST %s to %s peers", params.type, peers.length); } // Parallel treatment for superfast propagation - await Promise.all(peers.map(async (p) => { - let peer = PeerDTO.fromJSONObject(p) - const namedURL = peer.getNamedURL(); - try { - await this.post(peer, params.uri, params.getObj(theDoc)) - } catch (e) { - if (params.onError) { - try { - const json = JSON.parse(e.body); - await params.onError(json, doc, namedURL) - } catch (ex) { - logger.warn('Could not reach %s, reason: %s', namedURL, (ex && ex.message || ex)) + await Promise.all( + peers.map(async (p) => { + let peer = PeerDTO.fromJSONObject(p); + const namedURL = peer.getNamedURL(); + try { + await this.post(peer, params.uri, params.getObj(theDoc)); + } catch (e) { + if (params.onError) { + try { + const json = JSON.parse(e.body); + await params.onError(json, doc, namedURL); + } catch (ex) { + logger.warn( + "Could not reach %s, reason: %s", + namedURL, + (ex && ex.message) || ex + ); + } } } - } - })) + }) + ); } else { - logger.debug('[ISOLATE] Prevent --> new Peer to be sent to %s peer(s)', peers.length); + logger.debug( + "[ISOLATE] Prevent --> new Peer to be sent to %s peer(s)", + peers.length + ); } } catch (err) { logger.error(err); } - } + }; } - post(peer:any, uri:string, data:any) { + post(peer: any, uri: string, data: any) { if (!peer.isReachable()) { return Promise.resolve(); } return new Promise((resolve, reject) => { - const postReq = request.post({ - "uri": protocol(peer.getPort()) + '://' + peer.getURL() + uri, - "timeout": this.timeout || constants.NETWORK.DEFAULT_TIMEOUT - }, (err:any, res:any) => { - if (err) { - this.push({ unreachable: true, peer: { pubkey: peer.pubkey }}); - logger.warn(err.message || err); - } - if (res && res.statusCode != 200) { - return reject(res); + const postReq = request.post( + { + uri: protocol(peer.getPort()) + "://" + peer.getURL() + uri, + timeout: this.timeout || constants.NETWORK.DEFAULT_TIMEOUT, + }, + (err: any, res: any) => { + if (err) { + this.push({ unreachable: true, peer: { pubkey: peer.pubkey } }); + logger.warn(err.message || err); + } + if (res && res.statusCode != 200) { + return reject(res); + } + resolve(res); } - resolve(res); - }) + ); postReq.form(data); }); } } -function protocol(port:number) { - return port == 443 ? 'https' : 'http'; +function protocol(port: number) { + return port == 443 ? "https" : "http"; } diff --git a/app/lib/streams/router.ts b/app/lib/streams/router.ts index d5a88d46b883d934588cf2a138466ff11c488df6..c6065c81b350075d2d4c0de1a847248ecbba630f 100644 --- a/app/lib/streams/router.ts +++ b/app/lib/streams/router.ts @@ -11,116 +11,153 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
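Review note on the multicaster above: every `*Forward()` method is the same `forward()` factory fed with document-specific plumbing (URI, payload builder, optional transform). A stripped-down sketch of that shape, for readers skimming the reformat; the names and the console-logging `post` stub are illustrative, not the real API:

```ts
type ForwardParams<T> = {
  type: string;
  uri: string;
  transform?: (raw: any) => T;
  getObj: (doc: T) => object;
};

function makeForwarder(
  post: (peer: string, uri: string, data: object) => Promise<void>
) {
  return <T>(params: ForwardParams<T>) =>
    async (doc: any, peers: string[]) => {
      const theDoc: T = params.transform ? params.transform(doc) : doc;
      // Parallel fan-out, swallowing per-peer failures roughly as
      // forward() does via its try/catch around this.post()
      await Promise.all(
        peers.map((p) =>
          post(p, params.uri, params.getObj(theDoc)).catch(() => undefined)
        )
      );
    };
}

// Usage sketch: broadcast a block-ish payload to two (fake) peers.
const forward = makeForwarder(async (peer, uri, data) => {
  console.log("POST %s%s %j", peer, uri, data);
});
forward({
  type: "Block",
  uri: "/blockchain/block",
  getObj: (b: { raw: string }) => ({ block: b.raw }),
})({ raw: "..." }, ["http://peer1", "http://peer2"]);
```

The one outlier is `peerForward`, which additionally wires `withIsolation` and an `onError` handler to deal with outdated or already-known peering documents.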
-import * as stream from "stream" -import {PeeringService} from "../../service/PeeringService" -import {FileDAL} from "../dal/fileDAL" -import {PeerDTO} from "../dto/PeerDTO" -import {DBPeer} from "../db/DBPeer" +import * as stream from "stream"; +import { PeeringService } from "../../service/PeeringService"; +import { FileDAL } from "../dal/fileDAL"; +import { PeerDTO } from "../dto/PeerDTO"; +import { DBPeer } from "../db/DBPeer"; -const constants = require('../constants'); +const constants = require("../constants"); export class RouterStream extends stream.Transform { + logger: any; + active = true; - logger:any - active = true + constructor(private peeringService: PeeringService, private dal: FileDAL) { + super({ objectMode: true }); - constructor(private peeringService:PeeringService, private dal:FileDAL) { - super({ objectMode: true }) - - this.logger = require('../logger').NewLogger('router') + this.logger = require("../logger").NewLogger("router"); } - - setConfDAL(theDAL:FileDAL) { - this.dal = theDAL + + setConfDAL(theDAL: FileDAL) { + this.dal = theDAL; } - setActive(shouldBeActive:boolean) { - this.active = shouldBeActive + setActive(shouldBeActive: boolean) { + this.active = shouldBeActive; } - async _write(obj:any, enc:any, done:any) { + async _write(obj: any, enc: any, done: any) { try { if (obj.joiners) { - await this.route('block', obj, () => this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)()); - } - else if (obj.revocation) { - await this.route('revocation', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.pubkey && obj.uid) { - await this.route('identity', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.idty_uid) { - await this.route('cert', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.userid) { - await this.route('membership', obj, () => this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)()); - } - else if (obj.inputs) { - await this.route('transaction', obj, () => this.getRandomInUPPeers(obj.issuers.indexOf(this.peeringService.pubkey) !== -1)()); - } - else if (obj.endpoints) { - await this.route('peer', obj, () => this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)()); - } - else if (obj.from && obj.from == this.peeringService.pubkey) { + await this.route("block", obj, () => + this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)() + ); + } else if (obj.revocation) { + await this.route("revocation", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.pubkey && obj.uid) { + await this.route("identity", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.idty_uid) { + await this.route("cert", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.userid) { + await this.route("membership", obj, () => + this.getRandomInUPPeers(obj.issuer === this.peeringService.pubkey)() + ); + } else if (obj.inputs) { + await this.route("transaction", obj, () => + this.getRandomInUPPeers( + obj.issuers.indexOf(this.peeringService.pubkey) !== -1 + )() + ); + } else if (obj.endpoints) { + await this.route("peer", obj, () => + this.getRandomInUPPeers(obj.pubkey === this.peeringService.pubkey)() + ); + } else if (obj.from && obj.from == this.peeringService.pubkey) { // Route ONLY status emitted by this node - await 
this.route('status', obj, () => this.getTargeted(obj.to || obj.idty_issuer)()); - } - else if (obj.unreachable) { + await this.route("status", obj, () => + this.getTargeted(obj.to || obj.idty_issuer)() + ); + } else if (obj.unreachable) { await this.dal.setPeerDown(obj.peer.pubkey); - this.logger.info("Peer %s unreachable: now considered as DOWN.", obj.peer.pubkey); - } - else if (obj.outdated) { + this.logger.info( + "Peer %s unreachable: now considered as DOWN.", + obj.peer.pubkey + ); + } else if (obj.outdated) { await this.peeringService.handleNewerPeer(obj.peer); } } catch (e) { - if (e && e.uerr && e.uerr.ucode == constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode) { - this.logger.info('Newer peer document available on the network for local node'); + if ( + e && + e.uerr && + e.uerr.ucode == + constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE.uerr.ucode + ) { + this.logger.info( + "Newer peer document available on the network for local node" + ); } else { - this.logger.error("Routing error: %s", e && (e.stack || e.message || (e.uerr && e.uerr.message) || e)); + this.logger.error( + "Routing error: %s", + e && (e.stack || e.message || (e.uerr && e.uerr.message) || e) + ); } } done && done(); } - private async route(type:string, obj:any, getPeersFunc:any) { + private async route(type: string, obj: any, getPeersFunc: any) { if (!this.active) return; const peers = await getPeersFunc(); this.push({ - 'type': type, - 'obj': obj, - 'peers': (peers || []).map((p:any) => PeerDTO.fromJSONObject(p)) - }) + type: type, + obj: obj, + peers: (peers || []).map((p: any) => PeerDTO.fromJSONObject(p)), + }); } - private getRandomInUPPeers (isSelfDocument:boolean): () => Promise<any> { + private getRandomInUPPeers(isSelfDocument: boolean): () => Promise<any> { return this.getValidUpPeers([this.peeringService.pubkey], isSelfDocument); } - private getValidUpPeers (without:any, isSelfDocument:boolean) { + private getValidUpPeers(without: any, isSelfDocument: boolean) { return async () => { - let members:DBPeer[] = []; - let nonmembers:DBPeer[] = []; + let members: DBPeer[] = []; + let nonmembers: DBPeer[] = []; let peers = await this.dal.getRandomlyUPsWithout(without); // Peers with status UP for (const p of peers) { let isMember = await this.dal.isMember(p.pubkey); isMember ? members.push(p) : nonmembers.push(p); } - members = RouterStream.chooseXin(members, isSelfDocument ? constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS : constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO); - nonmembers = RouterStream.chooseXin(nonmembers, isSelfDocument ? constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS : constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO); - let mainRoutes:any = members.map((p:any) => (p.member = true) && p).concat(nonmembers); + members = RouterStream.chooseXin( + members, + isSelfDocument + ? constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS + : constants.NETWORK.MAX_MEMBERS_TO_FORWARD_TO + ); + nonmembers = RouterStream.chooseXin( + nonmembers, + isSelfDocument + ? 
constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS + : constants.NETWORK.MAX_NON_MEMBERS_TO_FORWARD_TO + ); + let mainRoutes: any = members + .map((p: any) => (p.member = true) && p) + .concat(nonmembers); let mirrors = await this.peeringService.mirrorBMAEndpoints(); - const peersToRoute:DBPeer[] = mainRoutes.concat(mirrors.map((mep, index) => { return { - pubkey: 'M' + index + '_' + this.peeringService.pubkey, - endpoints: [mep] - }})); - return peersToRoute.map(p => PeerDTO.fromJSONObject(p)) - } + const peersToRoute: DBPeer[] = mainRoutes.concat( + mirrors.map((mep, index) => { + return { + pubkey: "M" + index + "_" + this.peeringService.pubkey, + endpoints: [mep], + }; + }) + ); + return peersToRoute.map((p) => PeerDTO.fromJSONObject(p)); + }; } /** - * Get the peer targeted by `to` argument, this node excluded (for not to loop on self). - */ - private getTargeted(to:string) { + * Get the peer targeted by `to` argument, this node excluded (for not to loop on self). + */ + private getTargeted(to: string) { return async () => { if (to == this.peeringService.pubkey) { return []; @@ -130,11 +167,14 @@ export class RouterStream extends stream.Transform { }; } - static chooseXin(peers:DBPeer[], max:number) { - const chosen:DBPeer[] = []; + static chooseXin(peers: DBPeer[], max: number) { + const chosen: DBPeer[] = []; const nbPeers = peers.length; for (let i = 0; i < Math.min(nbPeers, max); i++) { - const randIndex = Math.max(Math.floor(Math.random() * 10) - (10 - nbPeers) - i, 0); + const randIndex = Math.max( + Math.floor(Math.random() * 10) - (10 - nbPeers) - i, + 0 + ); chosen.push(peers[randIndex]); peers.splice(randIndex, 1); } diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts index 742055668bd5131a668185b056fa90d4503b0d46..3a5e7c9ba3bfc0934e1cbe556ea34db5feed5f68 100644 --- a/app/lib/system/directory.ts +++ b/app/lib/system/directory.ts @@ -11,164 +11,187 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as path from "path" -import * as fs from 'fs' -import {SQLiteDriver} from "../dal/drivers/SQLiteDriver" -import {CFSCore} from "../dal/fileDALs/CFSCore" -import {Wot, WotBuilder} from "duniteroxyde" -import {FileDALParams} from "../dal/fileDAL" -import {cliprogram} from "../common-libs/programOptions" -import {LevelDBDriver} from "../dal/drivers/LevelDBDriver" -import {LevelUp} from 'levelup' -import {AbstractLevelDOWN} from 'abstract-leveldown' - -const opts = cliprogram -const qfs = require('q-io/fs'); +import * as path from "path"; +import * as fs from "fs"; +import { SQLiteDriver } from "../dal/drivers/SQLiteDriver"; +import { CFSCore } from "../dal/fileDALs/CFSCore"; +import { Wot, WotBuilder } from "duniteroxyde"; +import { FileDALParams } from "../dal/fileDAL"; +import { cliprogram } from "../common-libs/programOptions"; +import { LevelDBDriver } from "../dal/drivers/LevelDBDriver"; +import { LevelUp } from "levelup"; +import { AbstractLevelDOWN } from "abstract-leveldown"; + +const opts = cliprogram; +const qfs = require("q-io/fs"); const DEFAULT_DOMAIN = "duniter_default"; -const DEFAULT_HOME = (process.platform == 'win32' ? process.env.USERPROFILE : process.env.HOME) + '/.config/duniter/'; +const DEFAULT_HOME = + (process.platform == "win32" ? 
process.env.USERPROFILE : process.env.HOME) + + "/.config/duniter/"; -const getLogsPath = (profile:string|undefined, directory:string|null = null) => path.join(getHomePath(profile, directory), 'duniter.log'); +const getLogsPath = ( + profile: string | undefined, + directory: string | null = null +) => path.join(getHomePath(profile, directory), "duniter.log"); -const getHomePath = (profile:string|null|undefined, directory:string|null = null) => path.normalize(getUserHome(directory) + '/') + getDomain(profile); +const getHomePath = ( + profile: string | null | undefined, + directory: string | null = null +) => path.normalize(getUserHome(directory) + "/") + getDomain(profile); -const getUserHome = (directory:string|null = null) => (directory || DEFAULT_HOME); +const getUserHome = (directory: string | null = null) => + directory || DEFAULT_HOME; -const getDomain = (profile:string|null = null) => (profile || DEFAULT_DOMAIN); +const getDomain = (profile: string | null = null) => profile || DEFAULT_DOMAIN; export interface FileSystem { - isMemoryOnly(): boolean - fsExists(file:string): Promise<boolean> - fsReadFile(file:string): Promise<string> - fsUnlink(file:string): Promise<boolean> - fsList(dir:string): Promise<string[]> - fsWrite(file:string, content:string): Promise<void> - fsMakeDirectory(dir:string): Promise<void> - fsRemoveTree(dir:string): Promise<void> - fsStreamTo(file: string, iterator: IterableIterator<string>): Promise<void> + isMemoryOnly(): boolean; + fsExists(file: string): Promise<boolean>; + fsReadFile(file: string): Promise<string>; + fsUnlink(file: string): Promise<boolean>; + fsList(dir: string): Promise<string[]>; + fsWrite(file: string, content: string): Promise<void>; + fsMakeDirectory(dir: string): Promise<void>; + fsRemoveTree(dir: string): Promise<void>; + fsStreamTo(file: string, iterator: IterableIterator<string>): Promise<void>; } class QioFileSystem implements FileSystem { - - constructor(private qio:any, private isMemory:boolean = false) {} + constructor(private qio: any, private isMemory: boolean = false) {} isMemoryOnly() { - return this.isMemory + return this.isMemory; } - async fsExists(file:string) { - return this.qio.exists(file) + async fsExists(file: string) { + return this.qio.exists(file); } - async fsReadFile(file:string) { - return this.qio.read(file) + async fsReadFile(file: string) { + return this.qio.read(file); } - async fsUnlink(file:string) { - return this.qio.remove(file) + async fsUnlink(file: string) { + return this.qio.remove(file); } async fsList(dir: string): Promise<string[]> { if (!(await this.qio.exists(dir))) { - return [] + return []; } - return this.qio.list(dir) + return this.qio.list(dir); } fsWrite(file: string, content: string): Promise<void> { - return this.qio.write(file, content) + return this.qio.write(file, content); } - async fsStreamTo(file: string, iterator: IterableIterator<string>): Promise<void> { + async fsStreamTo( + file: string, + iterator: IterableIterator<string> + ): Promise<void> { if (this.isMemory) { for (const line of iterator) { - await this.qio.append(file, line) + await this.qio.append(file, line); } } else { // Use NodeJS streams for faster writing - let wstream = fs.createWriteStream(file) + let wstream = fs.createWriteStream(file); await new Promise(async (res, rej) => { // When done, return - wstream.on('close', (err:any) => { - if (err) return rej(err) - res() - }) + wstream.on("close", (err: any) => { + if (err) return rej(err); + res(); + }); // Write each line for (const line of iterator) { - 
wstream.write(line + "\n") + wstream.write(line + "\n"); } // End the writing - wstream.end() - }) + wstream.end(); + }); } } fsMakeDirectory(dir: string): Promise<void> { - return this.qio.makeTree(dir) + return this.qio.makeTree(dir); } async fsRemoveTree(dir: string): Promise<void> { - return this.qio.removeTree(dir) + return this.qio.removeTree(dir); } } export const RealFS = (): FileSystem => { - return new QioFileSystem(qfs) -} + return new QioFileSystem(qfs); +}; -export const MemFS = (initialTree:{ [folder:string]: { [file:string]: string }} = {}): FileSystem => { - return new QioFileSystem(require('q-io/fs-mock')(initialTree), true) -} +export const MemFS = ( + initialTree: { [folder: string]: { [file: string]: string } } = {} +): FileSystem => { + return new QioFileSystem(require("q-io/fs-mock")(initialTree), true); +}; export const Directory = { - - DATA_FILES: ['mindex.db', 'c_mindex.db', 'iindex.db', 'cindex.db', 'sindex.db', 'wallet.db', 'dividend.db', 'txs.db', 'peers.db'], - WW_FILES: ['wotwizard-export_0.db','wotwizard-export.db'], - DATA_DIRS: ['data'], + DATA_FILES: [ + "mindex.db", + "c_mindex.db", + "iindex.db", + "cindex.db", + "sindex.db", + "wallet.db", + "dividend.db", + "txs.db", + "peers.db", + ], + WW_FILES: ["wotwizard-export_0.db", "wotwizard-export.db"], + DATA_DIRS: ["data"], INSTANCE_NAME: getDomain(opts.mdb), INSTANCE_HOME: getHomePath(opts.mdb, opts.home), - GET_FILE_PATH: (fileSubPath: string, home = '') => path.join(home || Directory.INSTANCE_HOME, fileSubPath), + GET_FILE_PATH: (fileSubPath: string, home = "") => + path.join(home || Directory.INSTANCE_HOME, fileSubPath), INSTANCE_HOMELOG_FILE: getLogsPath(opts.mdb, opts.home), - DUNITER_DB_NAME: 'duniter', - LOKI_DB_DIR: 'loki', - DATA_DIR: 'data', - OLD_WOTB_FILE: 'wotb.bin', - NEW_WOTB_FILE: 'wotb.bin.gz', + DUNITER_DB_NAME: "duniter", + LOKI_DB_DIR: "loki", + DATA_DIR: "data", + OLD_WOTB_FILE: "wotb.bin", + NEW_WOTB_FILE: "wotb.bin.gz", + getHome: (profile: string | null = null, directory: string | null = null) => + getHomePath(profile, directory), - getHome: (profile:string|null = null, directory:string|null = null) => getHomePath(profile, directory), - - getHomeDB: async (isMemory:boolean, dbName: string, home = '') => { + getHomeDB: async (isMemory: boolean, dbName: string, home = "") => { // Memory if (isMemory) { - return new SQLiteDriver(':memory:') + return new SQLiteDriver(":memory:"); } // Or file - const sqlitePath = Directory.GET_FILE_PATH(dbName, home) - return new SQLiteDriver(sqlitePath) + const sqlitePath = Directory.GET_FILE_PATH(dbName, home); + return new SQLiteDriver(sqlitePath); }, - getHomeLevelDB: async (isMemory:boolean, dbName: string, home = '') => { + getHomeLevelDB: async (isMemory: boolean, dbName: string, home = "") => { // Memory if (isMemory) { - return LevelDBDriver.newMemoryInstance() + return LevelDBDriver.newMemoryInstance(); } // Or file - const levelDbRootPath = path.join(home, 'data', 'leveldb') - await RealFS().fsMakeDirectory(levelDbRootPath) - const levelDBPath = path.join(levelDbRootPath, dbName) - return LevelDBDriver.newFileInstance(levelDBPath) + const levelDbRootPath = path.join(home, "data", "leveldb"); + await RealFS().fsMakeDirectory(levelDbRootPath); + const levelDBPath = path.join(levelDbRootPath, dbName); + return LevelDBDriver.newFileInstance(levelDBPath); }, - getHomeFS: async (isMemory:boolean, theHome:string, makeTree = true) => { - const home = theHome || Directory.getHome() + getHomeFS: async (isMemory: boolean, theHome: string, makeTree = 
true) => { + const home = theHome || Directory.getHome(); const params = { home: home, - fs: isMemory ? MemFS() : RealFS() - } + fs: isMemory ? MemFS() : RealFS(), + }; if (makeTree) { - await params.fs.fsMakeDirectory(home) + await params.fs.fsMakeDirectory(home); } return params; }, @@ -176,7 +199,7 @@ export const Directory = { getWotbFilePathSync: (home: string): string => { let datas_dir = path.join(home, Directory.DATA_DIR); let wotbFilePath = path.join(datas_dir, Directory.NEW_WOTB_FILE); - let existsFile = fs.existsSync(wotbFilePath) + let existsFile = fs.existsSync(wotbFilePath); if (!existsFile) { wotbFilePath = path.join(home, Directory.OLD_WOTB_FILE); } @@ -186,43 +209,43 @@ export const Directory = { getWotbFilePath: async (home: string): Promise<string> => { let datas_dir = path.join(home, Directory.DATA_DIR); let wotbFilePath = path.join(datas_dir, Directory.NEW_WOTB_FILE); - let existsFile = qfs.exists(wotbFilePath) + let existsFile = qfs.exists(wotbFilePath); if (!existsFile) { wotbFilePath = path.join(home, Directory.OLD_WOTB_FILE); } return wotbFilePath; }, - getHomeParams: async (isMemory:boolean, theHome:string): Promise<FileDALParams> => { - const params = await Directory.getHomeFS(isMemory, theHome) + getHomeParams: async ( + isMemory: boolean, + theHome: string + ): Promise<FileDALParams> => { + const params = await Directory.getHomeFS(isMemory, theHome); const home = params.home; let dbf: () => SQLiteDriver; let wotbf: () => Wot; if (isMemory) { - // Memory DB - dbf = () => new SQLiteDriver(':memory:'); - wotbf = () => new Wot(100) - + dbf = () => new SQLiteDriver(":memory:"); + wotbf = () => new Wot(100); } else { - // File DB - const sqlitePath = path.join(home, Directory.DUNITER_DB_NAME + '.db'); + const sqlitePath = path.join(home, Directory.DUNITER_DB_NAME + ".db"); dbf = () => new SQLiteDriver(sqlitePath); let wotbFilePath = await Directory.getWotbFilePath(home); - wotbf = () => WotBuilder.fromFile(wotbFilePath) + wotbf = () => WotBuilder.fromFile(wotbFilePath); } return { home: params.home, fs: params.fs, dbf, - wotbf - } + wotbf, + }; }, - createHomeIfNotExists: async (fileSystem:any, theHome:string) => { + createHomeIfNotExists: async (fileSystem: any, theHome: string) => { const fsHandler = new CFSCore(theHome, fileSystem); - return fsHandler.makeTree(''); - } -} + return fsHandler.makeTree(""); + }, +}; diff --git a/app/lib/wizard.ts b/app/lib/wizard.ts index dbf9bf45859f79d61ac3c88b8060d7ab75adda93..1c38b02e1204e80150adb079fb7314881c94f926 100644 --- a/app/lib/wizard.ts +++ b/app/lib/wizard.ts @@ -11,105 +11,225 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
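One behavioural remark on directory.ts above, worth flagging because this diff is otherwise formatting-only: in the async `getWotbFilePath`, `qfs.exists(wotbFilePath)` is assigned without `await`. q-io/fs is promise-based, so `existsFile` is a (truthy) Promise and the fallback to `OLD_WOTB_FILE` looks unreachable, unlike the sync variant which correctly uses `fs.existsSync`. If that reading is right, a minimal fix would be:

```ts
getWotbFilePath: async (home: string): Promise<string> => {
  let datas_dir = path.join(home, Directory.DATA_DIR);
  let wotbFilePath = path.join(datas_dir, Directory.NEW_WOTB_FILE);
  // await, so the legacy-file fallback below can actually trigger
  let existsFile = await qfs.exists(wotbFilePath);
  if (!existsFile) {
    wotbFilePath = path.join(home, Directory.OLD_WOTB_FILE);
  }
  return wotbFilePath;
},
```

This appears to pre-date the prettier pass, so fixing it is arguably out of scope here, but it seems worth a follow-up.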
-import {ConfDTO} from "./dto/ConfDTO" +import { ConfDTO } from "./dto/ConfDTO"; -const constants = require('./constants'); -const async = require('async'); -const inquirer = require('inquirer'); -const logger = require('./logger').NewLogger('wizard'); +const constants = require("./constants"); +const async = require("async"); +const inquirer = require("inquirer"); +const logger = require("./logger").NewLogger("wizard"); export class Wizard { - - static configPoW(conf:ConfDTO) { - return doTasks(['pow'], conf) + static configPoW(conf: ConfDTO) { + return doTasks(["pow"], conf); } - static configCurrency(conf:ConfDTO) { - return doTasks(['currency'], conf) + static configCurrency(conf: ConfDTO) { + return doTasks(["currency"], conf); } - static configUCP(conf:ConfDTO) { - return doTasks(['parameters'], conf) + static configUCP(conf: ConfDTO) { + return doTasks(["parameters"], conf); } } -function doTasks (todos:string[], conf:ConfDTO) { +function doTasks(todos: string[], conf: ConfDTO) { return new Promise((res, rej) => { - async.forEachSeries(todos, function(task:any, callback:any){ - tasks[task] && tasks[task](conf, callback); - }, (err:any) => { - if (err) return rej(err) - return res() - }); - }) + async.forEachSeries( + todos, + function (task: any, callback: any) { + tasks[task] && tasks[task](conf, callback); + }, + (err: any) => { + if (err) return rej(err); + return res(); + } + ); + }); } -const tasks:any = { - - currency: async function (conf:ConfDTO, done:any) { - const answers = await inquirer.prompt([{ +const tasks: any = { + currency: async function (conf: ConfDTO, done: any) { + const answers = await inquirer.prompt([ + { type: "input", name: "currency", message: "Currency name", default: conf.currency, - validate: function (input:string) { + validate: function (input: string) { return input.match(/^[a-zA-Z0-9-_ ]+$/) ? 
true : false; - } - }]) - conf.currency = answers.currency - done() + }, + }, + ]); + conf.currency = answers.currency; + done(); }, - parameters: function (conf:ConfDTO, done:any) { - async.waterfall([ - async.apply(simpleFloat, "Universal Dividend %growth", "c", conf), - async.apply(simpleInteger, "Universal Dividend period (in seconds)", "dt", conf), - async.apply(simpleInteger, "First Universal Dividend (UD[0]) amount", "ud0", conf), - async.apply(simpleInteger, "Delay between 2 certifications of a same issuer", "sigPeriod", conf), - async.apply(simpleInteger, "Maximum stock of valid certifications per member", "sigStock", conf), - async.apply(simpleInteger, "Maximum age of a non-written certification", "sigWindow", conf), - async.apply(simpleInteger, "Certification validity duration", "sigValidity", conf), - async.apply(simpleInteger, "Number of valid certifications required to be a member", "sigQty", conf), - async.apply(simpleInteger, "Maximum age of a non-written identity", "idtyWindow", conf), - async.apply(simpleInteger, "Maximum age of a non-written membership", "msWindow", conf), - async.apply(simpleFloat, "Percentage of sentries to be reached to match WoT distance rule", "xpercent", conf), - async.apply(simpleInteger, "Membership validity duration", "msValidity", conf), - async.apply(simpleInteger, "Number of blocks on which is computed median time", "medianTimeBlocks", conf), - async.apply(simpleInteger, "The average time for writing 1 block (wished time)", "avgGenTime", conf), - async.apply(simpleInteger, "Frequency, in number of blocks, to wait for changing common difficulty", "dtDiffEval", conf), - async.apply(simpleFloat, "Weight in percent for previous issuers", "percentRot", conf) - ], done); + parameters: function (conf: ConfDTO, done: any) { + async.waterfall( + [ + async.apply(simpleFloat, "Universal Dividend %growth", "c", conf), + async.apply( + simpleInteger, + "Universal Dividend period (in seconds)", + "dt", + conf + ), + async.apply( + simpleInteger, + "First Universal Dividend (UD[0]) amount", + "ud0", + conf + ), + async.apply( + simpleInteger, + "Delay between 2 certifications of a same issuer", + "sigPeriod", + conf + ), + async.apply( + simpleInteger, + "Maximum stock of valid certifications per member", + "sigStock", + conf + ), + async.apply( + simpleInteger, + "Maximum age of a non-written certification", + "sigWindow", + conf + ), + async.apply( + simpleInteger, + "Certification validity duration", + "sigValidity", + conf + ), + async.apply( + simpleInteger, + "Number of valid certifications required to be a member", + "sigQty", + conf + ), + async.apply( + simpleInteger, + "Maximum age of a non-written identity", + "idtyWindow", + conf + ), + async.apply( + simpleInteger, + "Maximum age of a non-written membership", + "msWindow", + conf + ), + async.apply( + simpleFloat, + "Percentage of sentries to be reached to match WoT distance rule", + "xpercent", + conf + ), + async.apply( + simpleInteger, + "Membership validity duration", + "msValidity", + conf + ), + async.apply( + simpleInteger, + "Number of blocks on which is computed median time", + "medianTimeBlocks", + conf + ), + async.apply( + simpleInteger, + "The average time for writing 1 block (wished time)", + "avgGenTime", + conf + ), + async.apply( + simpleInteger, + "Frequency, in number of blocks, to wait for changing common difficulty", + "dtDiffEval", + conf + ), + async.apply( + simpleFloat, + "Weight in percent for previous issuers", + "percentRot", + conf + ), + ], + done + ); }, - pow: 
function (conf:ConfDTO, done:any) { - async.waterfall([ - function (next:any){ - simpleInteger("Start computation of a new block if none received since (seconds)", "powDelay", conf, next); - } - ], done); - } + pow: function (conf: ConfDTO, done: any) { + async.waterfall( + [ + function (next: any) { + simpleInteger( + "Start computation of a new block if none received since (seconds)", + "powDelay", + conf, + next + ); + }, + ], + done + ); + }, }; -async function simpleValue (question:string, property:string, defaultValue:any, conf:any, validation:any, done:any) { - const answers = await inquirer.prompt([{ +async function simpleValue( + question: string, + property: string, + defaultValue: any, + conf: any, + validation: any, + done: any +) { + const answers = await inquirer.prompt([ + { type: "input", name: property, message: question, default: conf[property], - validate: validation - }]) - conf[property] = answers[property] - done() + validate: validation, + }, + ]); + conf[property] = answers[property]; + done(); } -function simpleInteger (question:string, property:string, conf:any, done:any) { - simpleValue(question, property, conf[property], conf, function (input:string) { - return input && input.toString().match(/^[0-9]+$/) ? true : false; - }, done); +function simpleInteger( + question: string, + property: string, + conf: any, + done: any +) { + simpleValue( + question, + property, + conf[property], + conf, + function (input: string) { + return input && input.toString().match(/^[0-9]+$/) ? true : false; + }, + done + ); } -function simpleFloat (question:string, property:string, conf:any, done:any) { - simpleValue(question, property, conf[property], conf, function (input:string) { - return input && input.toString().match(/^[0-9]+(\.[0-9]+)?$/) ? true : false; - }, done); +function simpleFloat(question: string, property: string, conf: any, done: any) { + simpleValue( + question, + property, + conf[property], + conf, + function (input: string) { + return input && input.toString().match(/^[0-9]+(\.[0-9]+)?$/) + ? 
true + : false; + }, + done + ); } diff --git a/app/modules/DuniterModule.ts b/app/modules/DuniterModule.ts index 0dbd318ab292225fddbc2565d9331a7b70e53c76..47ef0d4309fe8b163406f35b0b2c12728b869c26 100644 --- a/app/modules/DuniterModule.ts +++ b/app/modules/DuniterModule.ts @@ -1,53 +1,105 @@ -import {Server} from "../../server" -import {ConfDTO} from "../lib/dto/ConfDTO" -import {ProgramOptions} from "../lib/common-libs/programOptions" -import {ConfDAL} from "../lib/dal/fileDALs/ConfDAL" -import {DuniterService, ReadableDuniterService, Stack, TransformableDuniterService} from "../../index" +import { Server } from "../../server"; +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { ProgramOptions } from "../lib/common-libs/programOptions"; +import { ConfDAL } from "../lib/dal/fileDALs/ConfDAL"; +import { + DuniterService, + ReadableDuniterService, + Stack, + TransformableDuniterService, +} from "../../index"; export interface DuniterModule { - name: string + name: string; required: { - duniter: DuniterDependency - } + duniter: DuniterDependency; + }; } export interface DuniterDependency { - cliOptions?: CliOption[] - cli?: CliCommand[] + cliOptions?: CliOption[]; + cli?: CliCommand[]; config?: { - onLoading: (conf: ConfDTO, program: ProgramOptions, logger:any, confDAL: ConfDAL) => void - beforeSave: (conf: ConfDTO, program: ProgramOptions, logger:any, confDAL: ConfDAL) => void - } + onLoading: ( + conf: ConfDTO, + program: ProgramOptions, + logger: any, + confDAL: ConfDAL + ) => void; + beforeSave: ( + conf: ConfDTO, + program: ProgramOptions, + logger: any, + confDAL: ConfDAL + ) => void; + }; onReset?: { - data?: (conf: ConfDTO, program: ProgramOptions, logger:any, confDAL: ConfDAL) => void - config?: (conf: ConfDTO, program: ProgramOptions, logger:any, confDAL: ConfDAL) => void - } + data?: ( + conf: ConfDTO, + program: ProgramOptions, + logger: any, + confDAL: ConfDAL + ) => void; + config?: ( + conf: ConfDTO, + program: ProgramOptions, + logger: any, + confDAL: ConfDAL + ) => void; + }; wizard?: { - [k: string]: (conf: ConfDTO, program: ProgramOptions, logger:any) => Promise<void> - } + [k: string]: ( + conf: ConfDTO, + program: ProgramOptions, + logger: any + ) => Promise<void>; + }; service?: { - input?: (server: Server, conf: ConfDTO, logger:any) => ReadableDuniterService - process?: (server: Server, conf: ConfDTO, logger:any) => TransformableDuniterService - output?: (server: Server, conf: ConfDTO, logger:any) => TransformableDuniterService - neutral?: (server: Server, conf: ConfDTO, logger:any) => DuniterService - } + input?: ( + server: Server, + conf: ConfDTO, + logger: any + ) => ReadableDuniterService; + process?: ( + server: Server, + conf: ConfDTO, + logger: any + ) => TransformableDuniterService; + output?: ( + server: Server, + conf: ConfDTO, + logger: any + ) => TransformableDuniterService; + neutral?: (server: Server, conf: ConfDTO, logger: any) => DuniterService; + }; } export interface CliOption { - value: string - desc: string - parser?: (parameter: string) => any + value: string; + desc: string; + parser?: (parameter: string) => any; } export interface CliCommand { - name: string - desc: string - logs?: boolean - preventIfRunning?: boolean - onConfiguredExecute?: (server: Server, conf: ConfDTO, program: ProgramOptions, params: string[], wizardTasks: any, stack: Stack) => Promise<any> - onDatabaseExecute?: (server: Server, conf: ConfDTO, program: ProgramOptions, params: string[], - startServices: () => Promise<void>, - stopServices: () => Promise<void>, - stack: 
Stack
-  ) => Promise<void>
+  name: string;
+  desc: string;
+  logs?: boolean;
+  preventIfRunning?: boolean;
+  onConfiguredExecute?: (
+    server: Server,
+    conf: ConfDTO,
+    program: ProgramOptions,
+    params: string[],
+    wizardTasks: any,
+    stack: Stack
+  ) => Promise<any>;
+  onDatabaseExecute?: (
+    server: Server,
+    conf: ConfDTO,
+    program: ProgramOptions,
+    params: string[],
+    startServices: () => Promise<void>,
+    stopServices: () => Promise<void>,
+    stack: Stack
+  ) => Promise<void>;
 }
diff --git a/app/modules/bma/index.ts b/app/modules/bma/index.ts
index 8d088641efd7e22cbb5bcfcdbed0903acefda81a..9bd2a3c05c8bd2c271094221db8b74ee0423de85 100644
--- a/app/modules/bma/index.ts
+++ b/app/modules/bma/index.ts
@@ -11,94 +11,121 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {NetworkConfDTO} from "../../lib/dto/ConfDTO"
-import {Server} from "../../../server"
-import * as stream from "stream"
-import {BmaApi, Network, NetworkInterface} from "./lib/network"
-import {Upnp, UpnpApi} from "./lib/upnp"
-import {BMAConstants} from "./lib/constants"
-import {BMALimitation} from "./lib/limiter"
-import {PeerDTO} from "../../lib/dto/PeerDTO"
-import {Underscore} from "../../lib/common-libs/underscore"
-import {bma} from "./lib/bma"
-
-const Q = require('q');
-const rp = require('request-promise');
-const async = require('async');
-const dtos = require('./lib/dtos')
-const inquirer = require('inquirer');
+import { NetworkConfDTO } from "../../lib/dto/ConfDTO";
+import { Server } from "../../../server";
+import * as stream from "stream";
+import { BmaApi, Network, NetworkInterface } from "./lib/network";
+import { Upnp, UpnpApi } from "./lib/upnp";
+import { BMAConstants } from "./lib/constants";
+import { BMALimitation } from "./lib/limiter";
+import { PeerDTO } from "../../lib/dto/PeerDTO";
+import { Underscore } from "../../lib/common-libs/underscore";
+import { bma } from "./lib/bma";
+
+const Q = require("q");
+const rp = require("request-promise");
+const async = require("async");
+const dtos = require("./lib/dtos");
+const inquirer = require("inquirer");
 
 let networkWizardDone = false;
 
 export const BmaDependency = {
   duniter: {
-    cliOptions: [
-      { value: '--upnp', desc: 'Use UPnP to open remote port.' },
-      { value: '--noupnp', desc: 'Do not use UPnP to open remote port.' },
-      { value: '--bma', desc: 'Enables BMA API and its crawlers.' },
-      { value: '--nobma', desc: 'Disables BMA API and its crawlers.' },
-      { value: '--bma-with-crawler', desc: 'Enables BMA Crawler.' },
-      { value: '--bma-without-crawler', desc: 'Disable BMA Crawler.' },
-      { value: '-p, --port <port>', desc: 'Port to listen for requests', parser: (val:string) => parseInt(val) },
-      { value: '--ipv4 <address>', desc: 'IPv4 interface to listen for requests' },
-      { value: '--ipv6 <address>', desc: 'IPv6 interface to listen for requests' },
-      { value: '--remoteh <host>', desc: 'Remote interface others may use to contact this node' },
-      { value: '--remote4 <host>', desc: 'Remote interface for IPv4 access' },
-      { value: '--remote6 <host>', desc: 'Remote interface for IPv6 access' },
-      { value: '--remotep <port>', desc: 'Remote port others may use to contact this node' },
+    cliOptions: [
+      { value: "--upnp", desc: "Use UPnP to open remote port." },
+      { value: "--noupnp", desc: "Do not use UPnP to open remote port." },
+      { value: "--bma", desc: "Enables BMA API and its crawlers." },
+      { value: "--nobma", desc: "Disables BMA API and its crawlers." },
+      { value: "--bma-with-crawler", desc: "Enables BMA Crawler." },
+      { value: "--bma-without-crawler", desc: "Disables BMA Crawler." },
+      {
+        value: "-p, --port <port>",
+        desc: "Port to listen for requests",
+        parser: (val: string) => parseInt(val),
+      },
+      {
+        value: "--ipv4 <address>",
+        desc: "IPv4 interface to listen for requests",
+      },
+      {
+        value: "--ipv6 <address>",
+        desc: "IPv6 interface to listen for requests",
+      },
+      {
+        value: "--remoteh <host>",
+        desc: "Remote interface others may use to contact this node",
+      },
+      { value: "--remote4 <host>", desc: "Remote interface for IPv4 access" },
+      { value: "--remote6 <host>", desc: "Remote interface for IPv6 access" },
+      {
+        value: "--remotep <port>",
+        desc: "Remote port others may use to contact this node",
+      },
     ],
     wizard: {
-
-      'network': async (conf:NetworkConfDTO, program:any, logger:any) => {
-        await Q.nbind(networkConfiguration, null, conf, logger)()
-        conf.nobma = false
+      network: async (conf: NetworkConfDTO, program: any, logger: any) => {
+        await Q.nbind(networkConfiguration, null, conf, logger)();
+        conf.nobma = false;
         networkWizardDone = true;
       },
-      'network-reconfigure': async (conf:NetworkConfDTO, program:any, logger:any) => {
+      "network-reconfigure": async (
+        conf: NetworkConfDTO,
+        program: any,
+        logger: any
+      ) => {
         if (!networkWizardDone) {
           // This step can only be launched alone
-          await Q.nbind(networkReconfiguration, null)(conf, program.autoconf, logger, program.noupnp);
+          await Q.nbind(networkReconfiguration, null)(
+            conf,
+            program.autoconf,
+            logger,
+            program.noupnp
+          );
         }
-      }
+      },
    },
 
    config: {
-
-      onLoading: async (conf:NetworkConfDTO, program:any, logger:any) => {
-
+      onLoading: async (conf: NetworkConfDTO, program: any, logger: any) => {
        // If the usage of BMA hasn't been defined yet
        if (conf.nobma === undefined) {
          // Do we have an existing BMA conf?
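+          // If any such setting was saved previously, keep BMA enabled (nobma = false); otherwise BMA defaults to disabled.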
-          if (conf.port !== undefined
-            || conf.ipv4 !== undefined
-            || conf.ipv6 !== undefined
-            || conf.remoteport !== undefined
-            || conf.remotehost !== undefined
-            || conf.remoteipv4 !== undefined
-            || conf.remoteipv6 !== undefined) {
-            conf.nobma = false
+          if (
+            conf.port !== undefined ||
+            conf.ipv4 !== undefined ||
+            conf.ipv6 !== undefined ||
+            conf.remoteport !== undefined ||
+            conf.remotehost !== undefined ||
+            conf.remoteipv4 !== undefined ||
+            conf.remoteipv6 !== undefined
+          ) {
+            conf.nobma = false;
          } else {
-            conf.nobma = true
+            conf.nobma = true;
          }
        }
 
        // If bmaWithCrawler hasn't been defined yet
-        if (conf.bmaWithCrawler === undefined) { conf.bmaWithCrawler = false }
+        if (conf.bmaWithCrawler === undefined) {
+          conf.bmaWithCrawler = false;
+        }
 
-        if (program.port !== undefined) conf.port = parseInt(program.port)
+        if (program.port !== undefined) conf.port = parseInt(program.port);
        if (program.ipv4 !== undefined) conf.ipv4 = program.ipv4;
        if (program.ipv6 !== undefined) conf.ipv6 = program.ipv6;
        if (program.remoteh !== undefined) conf.remotehost = program.remoteh;
        if (program.remote4 !== undefined) conf.remoteipv4 = program.remote4;
        if (program.remote6 !== undefined) conf.remoteipv6 = program.remote6;
-        if (program.remotep !== undefined) conf.remoteport = parseInt(program.remotep)
-        if (program.bma !== undefined) conf.nobma = false
-        if (program.nobma !== undefined) conf.nobma = true
-        if (program.bmaWithCrawler !== undefined) conf.bmaWithCrawler = true
-        if (program.bmaWithoutCrawler !== undefined) conf.bmaWithCrawler = false
+        if (program.remotep !== undefined)
+          conf.remoteport = parseInt(program.remotep);
+        if (program.bma !== undefined) conf.nobma = false;
+        if (program.nobma !== undefined) conf.nobma = true;
+        if (program.bmaWithCrawler !== undefined) conf.bmaWithCrawler = true;
+        if (program.bmaWithoutCrawler !== undefined)
+          conf.bmaWithCrawler = false;
 
        if (!conf.ipv4) delete conf.ipv4;
        if (!conf.ipv6) delete conf.ipv6;
@@ -110,7 +137,10 @@ export const BmaDependency = {
          conf.remoteipv6 = conf.ipv6;
        }
        // Fix #807: default remoteipv4: same as local ipv4 if remoteipv4 is not defined AND no DNS nor IPv6
-        if (conf.ipv4 && !(conf.remoteipv4 || conf.remotehost || conf.remoteipv6)) {
+        if (
+          conf.ipv4 &&
+          !(conf.remoteipv4 || conf.remotehost || conf.remoteipv6)
+        ) {
          conf.remoteipv4 = conf.ipv4;
        }
        if (!conf.remoteport && conf.port) {
@@ -119,7 +149,12 @@ export const BmaDependency = {
 
        // Network autoconf
        if (program.autoconf) {
-          await Q.nbind(networkReconfiguration, null)(conf, true, logger, program.noupnp);
+          await Q.nbind(networkReconfiguration, null)(
+            conf,
+            true,
+            logger,
+            program.noupnp
+          );
        }
 
        // Default value
@@ -127,7 +162,7 @@ export const BmaDependency = {
          conf.upnp = true; // Defaults to true
        }
        if (!conf.dos) {
-          conf.dos = { whitelist: ['127.0.0.1'] };
+          conf.dos = { whitelist: ["127.0.0.1"] };
          conf.dos.maxcount = 50;
          conf.dos.burst = 20;
          conf.dos.limit = conf.dos.burst * 2;
@@ -135,7 +170,7 @@ export const BmaDependency = {
          conf.dos.checkinterval = 1;
          conf.dos.trustProxy = true;
          conf.dos.includeUserAgent = true;
-          conf.dos.errormessage = 'Error';
+          conf.dos.errormessage = "Error";
          conf.dos.testmode = false;
          conf.dos.silent = false;
          conf.dos.silentStart = false;
@@ -151,82 +186,98 @@ export const BmaDependency = {
      }
    },
 
-    beforeSave: async (conf:NetworkConfDTO, program:any) => {
+    beforeSave: async (conf: NetworkConfDTO, program: any) => {
      if (!conf.ipv4) delete conf.ipv4;
      if (!conf.ipv6) delete conf.ipv6;
      if (!conf.remoteipv4) delete conf.remoteipv4;
      if (!conf.remoteipv6) delete conf.remoteipv6;
      conf.dos.whitelist = Underscore.uniq(conf.dos.whitelist);
-    }
+    },
  },
 
  service: {
-    input: (server:Server, conf:NetworkConfDTO, logger:any) => {
+    input: (server: Server, conf: NetworkConfDTO, logger: any) => {
      // Configuration errors
      if (!conf.nobma) {
-        if(!conf.ipv4 && !conf.ipv6){
-          throw new Error("BMA: no interface to listen to. Provide ipv4/ipv6 interface or deactivate BMA");
+        if (!conf.ipv4 && !conf.ipv6) {
+          throw new Error(
+            "BMA: no interface to listen to. Provide ipv4/ipv6 interface or deactivate BMA"
+          );
        }
-        if(!conf.remoteipv4 && !conf.remoteipv6 && !conf.remotehost){
-          throw new Error('BMA: no interface for remote contact.');
+        if (!conf.remoteipv4 && !conf.remoteipv6 && !conf.remotehost) {
+          throw new Error("BMA: no interface for remote contact.");
        }
        if (!conf.remoteport) {
-          throw new Error('BMA: no port for remote contact.');
+          throw new Error("BMA: no port for remote contact.");
        }
      }
      if (!conf.nobma) {
-        server.addEndpointsDefinitions(() => Promise.resolve(getEndpoint(conf)))
-        server.addWrongEndpointFilter((endpoints:string[]) => getWrongEndpoints(endpoints, server.conf.pair.pub))
+        server.addEndpointsDefinitions(() =>
+          Promise.resolve(getEndpoint(conf))
+        );
+        server.addWrongEndpointFilter((endpoints: string[]) =>
+          getWrongEndpoints(endpoints, server.conf.pair.pub)
+        );
      }
-      return new BMAPI(server, conf, logger)
-    }
+      return new BMAPI(server, conf, logger);
+    },
  },
 
  methods: {
    noLimit: () => BMALimitation.noLimit(),
-    bma: async (server: Server, interfaces: (NetworkInterface[] | null) = null, httpLogs = false, logger?: any) => bma(server, interfaces, httpLogs, logger),
+    bma: async (
+      server: Server,
+      interfaces: NetworkInterface[] | null = null,
+      httpLogs = false,
+      logger?: any
+    ) => bma(server, interfaces, httpLogs, logger),
    dtos,
-    getMainEndpoint: (conf:NetworkConfDTO) => Promise.resolve(getEndpoint(conf))
-  }
-}
-
-async function getWrongEndpoints(endpoints:string[], selfPubkey:string) {
-  const wrongs:string[] = []
-  await Promise.all(endpoints.map(async (theEndpoint:string) => {
-    let remote = PeerDTO.endpoint2host(theEndpoint)
-    try {
-      // We test only BMA APIs, because other may exist and we cannot judge against them
-      if (theEndpoint.startsWith('BASIC_MERKLED_API')) {
-        let answer = await rp('http://' + remote + '/network/peering', { json: true });
-        if (!answer || answer.pubkey != selfPubkey) {
-          throw Error("Not same pubkey as local instance");
+    getMainEndpoint: (conf: NetworkConfDTO) =>
+      Promise.resolve(getEndpoint(conf)),
+  },
+},
+};
+
+async function getWrongEndpoints(endpoints: string[], selfPubkey: string) {
+  const wrongs: string[] = [];
+  await Promise.all(
+    endpoints.map(async (theEndpoint: string) => {
+      let remote = PeerDTO.endpoint2host(theEndpoint);
+      try {
+        // We test only BMA APIs, because others may exist and we cannot judge against them
+        if (theEndpoint.startsWith("BASIC_MERKLED_API")) {
+          let answer = await rp("http://" + remote + "/network/peering", {
+            json: true,
+          });
+          if (!answer || answer.pubkey != selfPubkey) {
+            throw Error("Not same pubkey as local instance");
+          }
        }
+      } catch (e) {
+        wrongs.push(theEndpoint);
      }
-    } catch (e) {
-      wrongs.push(theEndpoint)
-    }
-  }))
-  return wrongs
+    })
+  );
+  return wrongs;
 }
 
 export class BMAPI extends stream.Transform {
-  // Public http interface
-  private bmapi:BmaApi
-  private upnpAPI:UpnpApi
+  // Public http interface
+  private bmapi: BmaApi;
+  private upnpAPI: UpnpApi;
 
  constructor(
-    private server:Server,
-    private conf:NetworkConfDTO,
-    private logger:any) {
-
super({ objectMode: true }) + private server: Server, + private conf: NetworkConfDTO, + private logger: any + ) { + super({ objectMode: true }); } startService = async () => { if (this.conf.nobma) { // Disable BMA - return Promise.resolve() + return Promise.resolve(); } this.bmapi = await bma(this.server, null, this.conf.httplogs, this.logger); await this.bmapi.openConnections(); @@ -239,7 +290,12 @@ export class BMAPI extends stream.Transform { } if (this.server.conf.upnp) { try { - this.upnpAPI = await Upnp(this.server.conf.port, this.server.conf.remoteport, this.logger, this.server.conf); + this.upnpAPI = await Upnp( + this.server.conf.port, + this.server.conf.remoteport, + this.logger, + this.server.conf + ); this.upnpAPI.startRegular(); const gateway = await this.upnpAPI.findGateway(); if (gateway) { @@ -251,12 +307,12 @@ export class BMAPI extends stream.Transform { this.logger.warn(e); } } - } + }; stopService = async () => { if (this.conf.nobma) { // Disable BMA - return Promise.resolve() + return Promise.resolve(); } if (this.bmapi) { await this.bmapi.closeConnections(); @@ -264,117 +320,154 @@ export class BMAPI extends stream.Transform { if (this.upnpAPI) { this.upnpAPI.stopRegular(); } - } + }; } -function getEndpoint(theConf:NetworkConfDTO) { - let endpoint = 'BASIC_MERKLED_API'; +function getEndpoint(theConf: NetworkConfDTO) { + let endpoint = "BASIC_MERKLED_API"; if (theConf.remoteport && theConf.remoteport == 443) { - endpoint = 'BMAS'; + endpoint = "BMAS"; } if (theConf.remotehost) { if (theConf.remotehost.match(BMAConstants.HOST_ONION_REGEX)) { - endpoint = 'BMATOR'; + endpoint = "BMATOR"; } - endpoint += ' ' + theConf.remotehost; + endpoint += " " + theConf.remotehost; } if (theConf.remoteipv4) { - endpoint += ' ' + theConf.remoteipv4; + endpoint += " " + theConf.remoteipv4; } if (theConf.remoteipv6) { - endpoint += ' ' + theConf.remoteipv6; + endpoint += " " + theConf.remoteipv6; } if (theConf.remoteport) { - endpoint += ' ' + theConf.remoteport; + endpoint += " " + theConf.remoteport; } return endpoint; } -export function networkReconfiguration(conf:NetworkConfDTO, autoconf:boolean, logger:any, noupnp:boolean, done:any) { - async.waterfall([ - upnpResolve.bind(null, noupnp, logger), - function(upnpSuccess:boolean, upnpConf:NetworkConfDTO, next:any) { - - // Default values - conf.port = conf.port || BMAConstants.DEFAULT_PORT; - conf.remoteport = conf.remoteport || BMAConstants.DEFAULT_PORT; - - const localOperations = getLocalNetworkOperations(conf, autoconf); - const remoteOpertions = getRemoteNetworkOperations(conf, upnpConf.remoteipv4); - const dnsOperations = getHostnameOperations(conf, logger, autoconf); - const useUPnPOperations = getUseUPnPOperations(conf, logger, autoconf); - - if (upnpSuccess) { - Underscore.extend(conf, upnpConf) - const local = [conf.ipv4, conf.port].join(':'); - const remote = [conf.remoteipv4, conf.remoteport].join(':'); - if (autoconf) { - conf.ipv6 = conf.remoteipv6 = Network.getBestLocalIPv6(); - logger.info('IPv6: %s', conf.ipv6 || ""); - logger.info('Local IPv4: %s', local); - logger.info('Remote IPv4: %s', remote); - // Use proposed local + remote with UPnP binding - return async.waterfall(useUPnPOperations - .concat(dnsOperations), next); - } - choose("UPnP is available: duniter will be bound: \n from " + local + "\n to " + remote + "\nKeep this configuration?", true, - function () { - // Yes: not network changes +export function networkReconfiguration( + conf: NetworkConfDTO, + autoconf: boolean, + logger: any, + noupnp: boolean, + 
done: any
+) {
+  async.waterfall(
+    [
+      upnpResolve.bind(null, noupnp, logger),
+      function (upnpSuccess: boolean, upnpConf: NetworkConfDTO, next: any) {
+        // Default values
+        conf.port = conf.port || BMAConstants.DEFAULT_PORT;
+        conf.remoteport = conf.remoteport || BMAConstants.DEFAULT_PORT;
+
+        const localOperations = getLocalNetworkOperations(conf, autoconf);
+        const remoteOpertions = getRemoteNetworkOperations(
+          conf,
+          upnpConf.remoteipv4
+        );
+        const dnsOperations = getHostnameOperations(conf, logger, autoconf);
+        const useUPnPOperations = getUseUPnPOperations(conf, logger, autoconf);
+
+        if (upnpSuccess) {
+          Underscore.extend(conf, upnpConf);
+          const local = [conf.ipv4, conf.port].join(":");
+          const remote = [conf.remoteipv4, conf.remoteport].join(":");
+          if (autoconf) {
            conf.ipv6 = conf.remoteipv6 = Network.getBestLocalIPv6();
-            async.waterfall(useUPnPOperations
-              .concat(dnsOperations), next);
-          },
-          function () {
-            // No: want to change
-            async.waterfall(
-              localOperations
-                .concat(remoteOpertions)
-                .concat(useUPnPOperations)
-                .concat(dnsOperations), next);
-          });
-      } else {
-        conf.upnp = false;
-        if (autoconf) {
-          // Yes: local configuration = remote configuration
-          return async.waterfall(
-            localOperations
-              .concat(getHostnameOperations(conf, logger, autoconf))
-              .concat([function (confDone:any) {
-                conf.remoteipv4 = conf.ipv4;
-                conf.remoteipv6 = conf.ipv6;
-                conf.remoteport = conf.port;
-                logger.info('Local & Remote IPv4: %s', [conf.ipv4, conf.port].join(':'));
-                logger.info('Local & Remote IPv6: %s', [conf.ipv6, conf.port].join(':'));
-                confDone();
-              }]), next);
-        }
-        choose("UPnP is *not* available: is this a public server (like a VPS)?", true,
-          function () {
+            logger.info("IPv6: %s", conf.ipv6 || "");
+            logger.info("Local IPv4: %s", local);
+            logger.info("Remote IPv4: %s", remote);
+            // Use proposed local + remote with UPnP binding
+            return async.waterfall(
+              useUPnPOperations.concat(dnsOperations),
+              next
+            );
+          }
+          choose(
+            "UPnP is available: duniter will be bound: \n from " +
+              local +
+              "\n to " +
+              remote +
+              "\nKeep this configuration?",
+            true,
+            function () {
+              // Yes: no network changes
+              conf.ipv6 = conf.remoteipv6 = Network.getBestLocalIPv6();
+              async.waterfall(useUPnPOperations.concat(dnsOperations), next);
+            },
+            function () {
+              // No: want to change
+              async.waterfall(
+                localOperations
+                  .concat(remoteOpertions)
+                  .concat(useUPnPOperations)
+                  .concat(dnsOperations),
+                next
+              );
+            }
+          );
+        } else {
+          conf.upnp = false;
+          if (autoconf) {
            // Yes: local configuration = remote configuration
-            async.waterfall(
-              localOperations
-                .concat(getHostnameOperations(conf, logger))
-                .concat([function(confDone:any) {
-                  conf.remoteipv4 = conf.ipv4;
-                  conf.remoteipv6 = conf.ipv6;
-                  conf.remoteport = conf.port;
-                  confDone();
-                }]), next);
-          },
-          function () {
-            // No: must give all details
-            async.waterfall(
+            return async.waterfall(
              localOperations
-                .concat(remoteOpertions)
-                .concat(dnsOperations), next);
-          });
-      }
-    }
-  ], done);
+              .concat(getHostnameOperations(conf, logger, autoconf))
+              .concat([
+                function (confDone: any) {
+                  conf.remoteipv4 = conf.ipv4;
+                  conf.remoteipv6 = conf.ipv6;
+                  conf.remoteport = conf.port;
+                  logger.info(
+                    "Local & Remote IPv4: %s",
+                    [conf.ipv4, conf.port].join(":")
+                  );
+                  logger.info(
+                    "Local & Remote IPv6: %s",
+                    [conf.ipv6, conf.port].join(":")
+                  );
+                  confDone();
+                },
+              ]),
+              next
+            );
+          }
+          choose(
+            "UPnP is *not* available: is this a public server (like a VPS)?",
+            true,
+            function () {
+              // Yes: local configuration = remote 
configuration + async.waterfall( + localOperations + .concat(getHostnameOperations(conf, logger)) + .concat([ + function (confDone: any) { + conf.remoteipv4 = conf.ipv4; + conf.remoteipv6 = conf.ipv6; + conf.remoteport = conf.port; + confDone(); + }, + ]), + next + ); + }, + function () { + // No: must give all details + async.waterfall( + localOperations.concat(remoteOpertions).concat(dnsOperations), + next + ); + } + ); + } + }, + ], + done + ); } - -async function upnpResolve(noupnp:boolean, logger:any, done:any) { +async function upnpResolve(noupnp: boolean, logger: any, done: any) { try { let conf = await Network.upnpConf(noupnp, logger); done(null, true, conf); @@ -383,35 +476,44 @@ async function upnpResolve(noupnp:boolean, logger:any, done:any) { } } -function networkConfiguration(conf:NetworkConfDTO, logger:any, done:any) { - async.waterfall([ - upnpResolve.bind(null, !conf.upnp, logger), - function(upnpSuccess:boolean, upnpConf:NetworkConfDTO, next:any) { - - let operations = getLocalNetworkOperations(conf) - .concat(getRemoteNetworkOperations(conf, upnpConf.remoteipv4)); - - if (upnpSuccess) { - operations = operations.concat(getUseUPnPOperations(conf, logger)); - } +function networkConfiguration(conf: NetworkConfDTO, logger: any, done: any) { + async.waterfall( + [ + upnpResolve.bind(null, !conf.upnp, logger), + function (upnpSuccess: boolean, upnpConf: NetworkConfDTO, next: any) { + let operations = getLocalNetworkOperations(conf).concat( + getRemoteNetworkOperations(conf, upnpConf.remoteipv4) + ); + + if (upnpSuccess) { + operations = operations.concat(getUseUPnPOperations(conf, logger)); + } - async.waterfall(operations.concat(getHostnameOperations(conf, logger, false)), next); - } - ], done); + async.waterfall( + operations.concat(getHostnameOperations(conf, logger, false)), + next + ); + }, + ], + done + ); } -function getLocalNetworkOperations(conf:NetworkConfDTO, autoconf:boolean = false) { +function getLocalNetworkOperations( + conf: NetworkConfDTO, + autoconf: boolean = false +) { return [ - function (next:any){ + function (next: any) { const osInterfaces = Network.listInterfaces(); const interfaces = [{ name: "None", value: null }]; - osInterfaces.forEach(function(netInterface:any){ + osInterfaces.forEach(function (netInterface: any) { const addresses = netInterface.addresses; - const filtered = Underscore.where(addresses, {family: 'IPv4'}); - filtered.forEach(function(addr:any){ + const filtered = Underscore.where(addresses, { family: "IPv4" }); + filtered.forEach(function (addr: any) { interfaces.push({ - name: [netInterface.name, addr.address].join(' '), - value: addr.address + name: [netInterface.name, addr.address].join(" "), + value: addr.address, }); }); }); @@ -419,40 +521,43 @@ function getLocalNetworkOperations(conf:NetworkConfDTO, autoconf:boolean = false conf.ipv4 = Network.getBestLocalIPv4(); return next(); } - inquirer.prompt([{ - type: "list", - name: "ipv4", - message: "IPv4 interface", - default: conf.ipv4, - choices: interfaces - }]).then((answers:any) => { - conf.ipv4 = answers.ipv4; - next(); - }); + inquirer + .prompt([ + { + type: "list", + name: "ipv4", + message: "IPv4 interface", + default: conf.ipv4, + choices: interfaces, + }, + ]) + .then((answers: any) => { + conf.ipv4 = answers.ipv4; + next(); + }); }, - function (next:any){ + function (next: any) { const osInterfaces = Network.listInterfaces(); - const interfaces:any = [{ name: "None", value: null }]; - osInterfaces.forEach(function(netInterface:any){ + const interfaces: any = [{ name: 
"None", value: null }]; + osInterfaces.forEach(function (netInterface: any) { const addresses = netInterface.addresses; - const filtered = Underscore.where(addresses, { family: 'IPv6' }); - filtered.forEach(function(addr:any){ - let address = addr.address - if (addr.scopeid) - address += "%" + netInterface.name + const filtered = Underscore.where(addresses, { family: "IPv6" }); + filtered.forEach(function (addr: any) { + let address = addr.address; + if (addr.scopeid) address += "%" + netInterface.name; let nameSuffix = ""; if (addr.scopeid == 0 && !addr.internal) { nameSuffix = " (Global)"; } interfaces.push({ - name: [netInterface.name, address, nameSuffix].join(' '), + name: [netInterface.name, address, nameSuffix].join(" "), internal: addr.internal, scopeid: addr.scopeid, - value: address + value: address, }); }); }); - interfaces.sort((addr1:any, addr2:any) => { + interfaces.sort((addr1: any, addr2: any) => { if (addr1.value === null) return -1; if (addr1.internal && !addr2.internal) return 1; if (addr1.scopeid && !addr2.scopeid) return 1; @@ -464,41 +569,50 @@ function getLocalNetworkOperations(conf:NetworkConfDTO, autoconf:boolean = false if (autoconf) { return next(); } - inquirer.prompt([{ - type: "list", - name: "ipv6", - message: "IPv6 interface", - default: conf.ipv6, - choices: interfaces - }]).then((answers:any) => { - conf.ipv6 = conf.remoteipv6 = answers.ipv6; - next(); - }); + inquirer + .prompt([ + { + type: "list", + name: "ipv6", + message: "IPv6 interface", + default: conf.ipv6, + choices: interfaces, + }, + ]) + .then((answers: any) => { + conf.ipv6 = conf.remoteipv6 = answers.ipv6; + next(); + }); }, - autoconf ? (done:any) => { - conf.port = Network.getRandomPort(conf); - done(); - } : async.apply(simpleInteger, "Port", "port", conf) + autoconf + ? 
(done: any) => { + conf.port = Network.getRandomPort(conf); + done(); + } + : async.apply(simpleInteger, "Port", "port", conf), ]; } -function getRemoteNetworkOperations(conf:NetworkConfDTO, remoteipv4:string|null) { +function getRemoteNetworkOperations( + conf: NetworkConfDTO, + remoteipv4: string | null +) { return [ - function (next:any){ + function (next: any) { if (!conf.ipv4) { conf.remoteipv4 = null; return next(null, {}); } - const choices:any = [{ name: "None", value: null }]; + const choices: any = [{ name: "None", value: null }]; // Local interfaces const osInterfaces = Network.listInterfaces(); - osInterfaces.forEach(function(netInterface:any){ + osInterfaces.forEach(function (netInterface: any) { const addresses = netInterface.addresses; - const filtered = Underscore.where(addresses, {family: 'IPv4'}); - filtered.forEach(function(addr:any){ + const filtered = Underscore.where(addresses, { family: "IPv4" }); + filtered.forEach(function (addr: any) { choices.push({ - name: [netInterface.name, addr.address].join(' '), - value: addr.address + name: [netInterface.name, addr.address].join(" "), + value: addr.address, }); }); }); @@ -509,38 +623,55 @@ function getRemoteNetworkOperations(conf:NetworkConfDTO, remoteipv4:string|null) choices.push({ name: remoteipv4, value: remoteipv4 }); } choices.push({ name: "Enter new one", value: "new" }); - inquirer.prompt([{ - type: "list", - name: "remoteipv4", - message: "Remote IPv4", - default: conf.remoteipv4 || conf.ipv4 || null, - choices: choices, - validate: function (input:any) { - return !!(input && input.toString().match(BMAConstants.IPV4_REGEXP)); - } - }]).then((answers:any) => { - if (answers.remoteipv4 == "new") { - inquirer.prompt([{ - type: "input", + inquirer + .prompt([ + { + type: "list", name: "remoteipv4", message: "Remote IPv4", - default: conf.remoteipv4 || conf.ipv4, - validate: function (input:any) { - return !!(input && input.toString().match(BMAConstants.IPV4_REGEXP)); - } - }]).then((answers:any) => next(null, answers)); - } else { - next(null, answers); - } - }); + default: conf.remoteipv4 || conf.ipv4 || null, + choices: choices, + validate: function (input: any) { + return !!( + input && input.toString().match(BMAConstants.IPV4_REGEXP) + ); + }, + }, + ]) + .then((answers: any) => { + if (answers.remoteipv4 == "new") { + inquirer + .prompt([ + { + type: "input", + name: "remoteipv4", + message: "Remote IPv4", + default: conf.remoteipv4 || conf.ipv4, + validate: function (input: any) { + return !!( + input && input.toString().match(BMAConstants.IPV4_REGEXP) + ); + }, + }, + ]) + .then((answers: any) => next(null, answers)); + } else { + next(null, answers); + } + }); }, - async function (answers:any, next:any){ + async function (answers: any, next: any) { conf.remoteipv4 = answers.remoteipv4; try { if (conf.remoteipv4 || conf.remotehost) { await new Promise((resolve, reject) => { - const getPort = async.apply(simpleInteger, "Remote port", "remoteport", conf); - getPort((err:any) => { + const getPort = async.apply( + simpleInteger, + "Remote port", + "remoteport", + conf + ); + getPort((err: any) => { if (err) return reject(err); resolve(); }); @@ -552,81 +683,130 @@ function getRemoteNetworkOperations(conf:NetworkConfDTO, remoteipv4:string|null) } catch (e) { next(e); } - } + }, ]; } -function getHostnameOperations(conf:NetworkConfDTO, logger:any, autoconf = false) { - return [function(next:any) { - if (!conf.ipv4) { - conf.remotehost = null; - return next(); - } - if (autoconf) { - logger.info('DNS: %s', 
conf.remotehost || 'No');
-      return next();
-    }
-    choose("Does this server has a DNS name?", !!conf.remotehost,
-      function() {
-        // Yes
-        simpleValue("DNS name:", "remotehost", "", conf, function(){ return true; }, next);
-      },
-      function() {
+function getHostnameOperations(
+  conf: NetworkConfDTO,
+  logger: any,
+  autoconf = false
+) {
+  return [
+    function (next: any) {
+      if (!conf.ipv4) {
        conf.remotehost = null;
-        next();
-      });
-  }];
+        return next();
+      }
+      if (autoconf) {
+        logger.info("DNS: %s", conf.remotehost || "No");
+        return next();
+      }
+      choose(
+        "Does this server have a DNS name?",
+        !!conf.remotehost,
+        function () {
+          // Yes
+          simpleValue(
+            "DNS name:",
+            "remotehost",
+            "",
+            conf,
+            function () {
+              return true;
+            },
+            next
+          );
+        },
+        function () {
+          conf.remotehost = null;
+          next();
+        }
+      );
+    },
+  ];
 }
 
-function getUseUPnPOperations(conf:NetworkConfDTO, logger:any, autoconf:boolean = false) {
-  return [function(next:any) {
-    if (!conf.ipv4) {
-      conf.upnp = false;
-      return next();
-    }
-    if (autoconf) {
-      logger.info('UPnP: %s', 'Yes');
-      conf.upnp = true;
-      return next();
-    }
-    choose("UPnP is available: use automatic port mapping? (easier)", conf.upnp,
-      function() {
-        conf.upnp = true;
-        next();
-      },
-      function() {
+function getUseUPnPOperations(
+  conf: NetworkConfDTO,
+  logger: any,
+  autoconf: boolean = false
+) {
+  return [
+    function (next: any) {
+      if (!conf.ipv4) {
        conf.upnp = false;
-        next();
-      });
-  }];
+        return next();
+      }
+      if (autoconf) {
+        logger.info("UPnP: %s", "Yes");
+        conf.upnp = true;
+        return next();
+      }
+      choose(
+        "UPnP is available: use automatic port mapping? (easier)",
+        conf.upnp,
+        function () {
+          conf.upnp = true;
+          next();
+        },
+        function () {
+          conf.upnp = false;
+          next();
+        }
+      );
+    },
+  ];
 }
 
-function choose (question:string, defaultValue:any, ifOK:any, ifNotOK:any) {
-  inquirer.prompt([{
-    type: "confirm",
-    name: "q",
-    message: question,
-    default: defaultValue
-  }]).then((answer:any) => {
-    answer.q ? ifOK() : ifNotOK();
-  });
+function choose(question: string, defaultValue: any, ifOK: any, ifNotOK: any) {
+  inquirer
+    .prompt([
+      {
+        type: "confirm",
+        name: "q",
+        message: question,
+        default: defaultValue,
+      },
+    ])
+    .then((answer: any) => {
+      answer.q ? ifOK() : ifNotOK();
+    });
 }
 
-function simpleValue (question:string, property:any, defaultValue:any, conf:any, validation:any, done:any) {
-  inquirer.prompt([{
-    type: "input",
-    name: property,
-    message: question,
-    default: conf[property],
-    validate: validation
-  }]).then((answers:any) => {
-    conf[property] = answers[property];
-    done();
-  });
+function simpleValue(
+  question: string,
+  property: any,
+  defaultValue: any,
+  conf: any,
+  validation: any,
+  done: any
+) {
+  inquirer
+    .prompt([
+      {
+        type: "input",
+        name: property,
+        message: question,
+        default: conf[property],
+        validate: validation,
+      },
+    ])
+    .then((answers: any) => {
+      conf[property] = answers[property];
+      done();
+    });
 }
 
-function simpleInteger (question:string, property:any, conf:any, done:any) {
-  simpleValue(question, property, conf[property], conf, function (input:any) {
-    return input && input.toString().match(/^[0-9]+$/) ? true : false;
-  }, done);
+function simpleInteger(question: string, property: any, conf: any, done: any) {
+  simpleValue(
+    question,
+    property,
+    conf[property],
+    conf,
+    function (input: any) {
+      return input && input.toString().match(/^[0-9]+$/) ? 
true : false; + }, + done + ); } diff --git a/app/modules/bma/lib/bma.ts b/app/modules/bma/lib/bma.ts index 44400889cc0def95713e2b8d0cfb3ceaa240beeb..013509fd11e51437023fb99e5d5b9201bc0b8b4d 100644 --- a/app/modules/bma/lib/bma.ts +++ b/app/modules/bma/lib/bma.ts @@ -11,201 +11,427 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {Server} from "../../../../server" -import {BmaApi, Network, NetworkInterface} from "./network" -import {block2HttpBlock, HttpPeer} from "./dtos" -import {BMALimitation} from "./limiter" -import {BlockchainBinding} from "./controllers/blockchain" -import {NodeBinding} from "./controllers/node" -import {NetworkBinding} from "./controllers/network" -import {WOTBinding} from "./controllers/wot" -import {TransactionBinding} from "./controllers/transactions" -import {UDBinding} from "./controllers/uds" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {OtherConstants} from "../../../lib/other_constants" -import {WebSocketServer} from "../../../lib/common-libs/websocket" +import { Server } from "../../../../server"; +import { BmaApi, Network, NetworkInterface } from "./network"; +import { block2HttpBlock, HttpPeer } from "./dtos"; +import { BMALimitation } from "./limiter"; +import { BlockchainBinding } from "./controllers/blockchain"; +import { NodeBinding } from "./controllers/node"; +import { NetworkBinding } from "./controllers/network"; +import { WOTBinding } from "./controllers/wot"; +import { TransactionBinding } from "./controllers/transactions"; +import { UDBinding } from "./controllers/uds"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { OtherConstants } from "../../../lib/other_constants"; +import { WebSocketServer } from "../../../lib/common-libs/websocket"; -const es = require('event-stream'); - -export const bma = function(server:Server, interfaces:NetworkInterface[]|null, httpLogs:boolean, logger:any): Promise<BmaApi> { +const es = require("event-stream"); +export const bma = function ( + server: Server, + interfaces: NetworkInterface[] | null, + httpLogs: boolean, + logger: any +): Promise<BmaApi> { if (!interfaces) { interfaces = []; if (server.conf) { if (server.conf.ipv4) { - interfaces = [{ - ip: server.conf.ipv4, - port: server.conf.port - }]; + interfaces = [ + { + ip: server.conf.ipv4, + port: server.conf.port, + }, + ]; } if (server.conf.ipv6) { interfaces.push({ ip: server.conf.ipv6, - port: (server.conf.remoteport || server.conf.port) // We try to get the best one + port: server.conf.remoteport || server.conf.port, // We try to get the best one }); } } } - return Network.createServersAndListen('BMA server', server, interfaces, httpLogs, logger, null, (app:any, httpMethods:any) => { - - const node = new NodeBinding(server); - const blockchain = new BlockchainBinding(server) - const net = new NetworkBinding(server) - const wot = new WOTBinding(server) - const transactions = new TransactionBinding(server) - const dividend = new UDBinding(server) - httpMethods.httpGET( '/', (req:any) => node.summary(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/node/summary', (req:any) => node.summary(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/node/sandboxes', (req:any) => node.sandboxes(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/parameters', (req:any) => blockchain.parameters(), 
BMALimitation.limitAsHighUsage()); - httpMethods.httpPOST( '/blockchain/membership', (req:any) => blockchain.parseMembership(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/memberships/:search', (req:any) => blockchain.memberships(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpPOST( '/blockchain/block', (req:any) => blockchain.parseBlock(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/block/:number', (req:any) => blockchain.promoted(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/blocks/:count/:from', (req:any) => blockchain.blocks(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/milestones', (req:any) => blockchain.milestones(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/milestones/:page', (req:any) => blockchain.milestones(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/current', (req:any) => blockchain.current(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/hardship/:search', (req:any) => blockchain.hardship(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/difficulties', (req:any) => blockchain.difficulties(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/newcomers', (req:any) => blockchain.with.newcomers(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/certs', (req:any) => blockchain.with.certs(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/joiners', (req:any) => blockchain.with.joiners(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/actives', (req:any) => blockchain.with.actives(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/leavers', (req:any) => blockchain.with.leavers(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/excluded', (req:any) => blockchain.with.excluded(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/revoked', (req:any) => blockchain.with.revoked(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/ud', (req:any) => blockchain.with.ud(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/with/tx', (req:any) => blockchain.with.tx(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/blockchain/branches', (req:any) => blockchain.branches(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/network/peering', (req:any) => net.peer(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/network/peering/peers', (req:any) => net.peersGet(req), BMALimitation.limitAsVeryHighUsage()); - httpMethods.httpPOST( '/network/peering/peers', (req:any) => net.peersPost(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/network/peers', (req:any) => net.peers(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/network/ws2p/info', (req:any) => net.ws2pInfo(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/network/ws2p/heads', (req:any) => net.ws2pHeads(), BMALimitation.limitAsHighUsage()); - httpMethods.httpPOST( '/wot/add', (req:any) => wot.add(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpPOST( '/wot/certify', (req:any) => wot.certify(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpPOST( '/wot/revoke', (req:any) => wot.revoke(req), BMALimitation.limitAsHighUsage()); - 
httpMethods.httpGET( '/wot/lookup/:search', (req:any) => wot.lookup(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/wot/members', (req:any) => wot.members(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/wot/pending', (req:any) => wot.pendingMemberships(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/wot/requirements/:search', (req:any) => wot.requirements(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/wot/requirements-of-pending/:minsig', (req:any) => wot.requirementsOfPending(req), BMALimitation.limitAsLowUsage()); - httpMethods.httpGET( '/wot/certifiers-of/:search', (req:any) => wot.certifiersOf(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/wot/certified-by/:search', (req:any) => wot.certifiedBy(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/wot/identity-of/:search', (req:any) => wot.identityOf(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpPOST( '/tx/process', (req:any) => transactions.parseTransaction(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/hash/:hash', (req:any) => transactions.getByHash(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/sources/:pubkey', (req:any) => transactions.getSources(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/history/:pubkey', (req:any) => transactions.getHistory(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/history/:pubkey/blocks/:from/:to', (req:any) => transactions.getHistoryBetweenBlocks(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/history/:pubkey/times/:from/:to', (req:any) => transactions.getHistoryBetweenTimes(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/history/:pubkey/pending', (req:any) => transactions.getPendingForPubkey(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/tx/pending', (req:any) => transactions.getPending(), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/ud/history/:pubkey', (req:any) => dividend.getHistory(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/ud/history/:pubkey/blocks/:from/:to', (req:any) => dividend.getHistoryBetweenBlocks(req), BMALimitation.limitAsHighUsage()); - httpMethods.httpGET( '/ud/history/:pubkey/times/:from/:to', (req:any) => dividend.getHistoryBetweenTimes(req), BMALimitation.limitAsHighUsage()); + return Network.createServersAndListen( + "BMA server", + server, + interfaces, + httpLogs, + logger, + null, + (app: any, httpMethods: any) => { + const node = new NodeBinding(server); + const blockchain = new BlockchainBinding(server); + const net = new NetworkBinding(server); + const wot = new WOTBinding(server); + const transactions = new TransactionBinding(server); + const dividend = new UDBinding(server); + httpMethods.httpGET( + "/", + (req: any) => node.summary(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/node/summary", + (req: any) => node.summary(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/node/sandboxes", + (req: any) => node.sandboxes(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/parameters", + (req: any) => blockchain.parameters(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpPOST( + "/blockchain/membership", + (req: any) => blockchain.parseMembership(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/memberships/:search", + (req: any) => blockchain.memberships(req), + 
BMALimitation.limitAsHighUsage() + ); + httpMethods.httpPOST( + "/blockchain/block", + (req: any) => blockchain.parseBlock(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/block/:number", + (req: any) => blockchain.promoted(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/blocks/:count/:from", + (req: any) => blockchain.blocks(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/milestones", + (req: any) => blockchain.milestones(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/milestones/:page", + (req: any) => blockchain.milestones(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/current", + (req: any) => blockchain.current(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/hardship/:search", + (req: any) => blockchain.hardship(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/difficulties", + (req: any) => blockchain.difficulties(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/newcomers", + (req: any) => blockchain.with.newcomers(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/certs", + (req: any) => blockchain.with.certs(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/joiners", + (req: any) => blockchain.with.joiners(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/actives", + (req: any) => blockchain.with.actives(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/leavers", + (req: any) => blockchain.with.leavers(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/excluded", + (req: any) => blockchain.with.excluded(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/revoked", + (req: any) => blockchain.with.revoked(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/ud", + (req: any) => blockchain.with.ud(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/with/tx", + (req: any) => blockchain.with.tx(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/blockchain/branches", + (req: any) => blockchain.branches(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/network/peering", + (req: any) => net.peer(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/network/peering/peers", + (req: any) => net.peersGet(req), + BMALimitation.limitAsVeryHighUsage() + ); + httpMethods.httpPOST( + "/network/peering/peers", + (req: any) => net.peersPost(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/network/peers", + (req: any) => net.peers(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/network/ws2p/info", + (req: any) => net.ws2pInfo(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/network/ws2p/heads", + (req: any) => net.ws2pHeads(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpPOST( + "/wot/add", + (req: any) => wot.add(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpPOST( + "/wot/certify", + (req: any) => wot.certify(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpPOST( + "/wot/revoke", + (req: any) => wot.revoke(req), + BMALimitation.limitAsHighUsage() + ); + 
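+      // Web-of-trust read endpoints: identity lookups, members, pending memberships, requirements and certifications.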
httpMethods.httpGET( + "/wot/lookup/:search", + (req: any) => wot.lookup(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/wot/members", + (req: any) => wot.members(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/wot/pending", + (req: any) => wot.pendingMemberships(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/wot/requirements/:search", + (req: any) => wot.requirements(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/wot/requirements-of-pending/:minsig", + (req: any) => wot.requirementsOfPending(req), + BMALimitation.limitAsLowUsage() + ); + httpMethods.httpGET( + "/wot/certifiers-of/:search", + (req: any) => wot.certifiersOf(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/wot/certified-by/:search", + (req: any) => wot.certifiedBy(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/wot/identity-of/:search", + (req: any) => wot.identityOf(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpPOST( + "/tx/process", + (req: any) => transactions.parseTransaction(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/hash/:hash", + (req: any) => transactions.getByHash(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/sources/:pubkey", + (req: any) => transactions.getSources(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/history/:pubkey", + (req: any) => transactions.getHistory(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/history/:pubkey/blocks/:from/:to", + (req: any) => transactions.getHistoryBetweenBlocks(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/history/:pubkey/times/:from/:to", + (req: any) => transactions.getHistoryBetweenTimes(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/history/:pubkey/pending", + (req: any) => transactions.getPendingForPubkey(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/tx/pending", + (req: any) => transactions.getPending(), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/ud/history/:pubkey", + (req: any) => dividend.getHistory(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/ud/history/:pubkey/blocks/:from/:to", + (req: any) => dividend.getHistoryBetweenBlocks(req), + BMALimitation.limitAsHighUsage() + ); + httpMethods.httpGET( + "/ud/history/:pubkey/times/:from/:to", + (req: any) => dividend.getHistoryBetweenTimes(req), + BMALimitation.limitAsHighUsage() + ); + }, + (httpServer: any) => { + let currentBlock: any = {}; + let wssBlock = new WebSocketServer({ + server: httpServer, + path: "/ws/block", + }); + let wssPeer = new WebSocketServer({ + server: httpServer, + path: "/ws/peer", + }); + let wssHeads = new WebSocketServer({ + server: httpServer, + path: "/ws/heads", + }); - }, (httpServer:any) => { + const errorHandler = function (error: any) { + logger && logger.error("Error on WS Server"); + logger && logger.error(error); + }; - let currentBlock:any = {}; - let wssBlock = new WebSocketServer({ - server: httpServer, - path: '/ws/block' - }); - let wssPeer = new WebSocketServer({ - server: httpServer, - path: '/ws/peer' - }); - let wssHeads = new WebSocketServer({ - server: httpServer, - path: '/ws/heads' - }); - - const errorHandler = function (error:any) { - logger && logger.error('Error on WS Server'); - logger && logger.error(error); - } - - wssBlock.on('error', errorHandler); 
- wssPeer.on('error', errorHandler); - wssHeads.on('error', errorHandler); - - wssBlock.on('connection', async function connection(ws:any) { - try { - currentBlock = await server.dal.getCurrentBlockOrNull(); - if (currentBlock) { - const blockDTO:BlockDTO = BlockDTO.fromJSONObject(currentBlock) - ws.send(JSON.stringify(block2HttpBlock(blockDTO))) - } - } catch (e) { - logger.error(e); - } - }); + wssBlock.on("error", errorHandler); + wssPeer.on("error", errorHandler); + wssHeads.on("error", errorHandler); - wssHeads.on('connection', async (ws:any) => { - if (server.ws2pCluster) { + wssBlock.on("connection", async function connection(ws: any) { try { - ws.send(JSON.stringify(await server.ws2pCluster.getKnownHeads())) + currentBlock = await server.dal.getCurrentBlockOrNull(); + if (currentBlock) { + const blockDTO: BlockDTO = BlockDTO.fromJSONObject(currentBlock); + ws.send(JSON.stringify(block2HttpBlock(blockDTO))); + } } catch (e) { logger.error(e); } - } - }) - const wssHeadsBroadcast = (data:any) => wssHeads.clients.forEach((client:any) => client.send(data)); + }); - const wssBlockBroadcast = (data:any) => wssBlock.clients.forEach((client:any) => { - try { - client.send(data); - } catch (e) { - logger && logger.error('error on ws: %s', e); - } - }); + wssHeads.on("connection", async (ws: any) => { + if (server.ws2pCluster) { + try { + ws.send(JSON.stringify(await server.ws2pCluster.getKnownHeads())); + } catch (e) { + logger.error(e); + } + } + }); + const wssHeadsBroadcast = (data: any) => + wssHeads.clients.forEach((client: any) => client.send(data)); - const wssPeerBroadcast = (data:any) => wssPeer.clients.forEach((client:any) => client.send(data)); + const wssBlockBroadcast = (data: any) => + wssBlock.clients.forEach((client: any) => { + try { + client.send(data); + } catch (e) { + logger && logger.error("error on ws: %s", e); + } + }); + + const wssPeerBroadcast = (data: any) => + wssPeer.clients.forEach((client: any) => client.send(data)); - // Forward current HEAD change - server - .on('bcEvent', (e) => { - if (e.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || e.bcEvent === OtherConstants.BC_EVENT.SWITCHED) { + // Forward current HEAD change + server.on("bcEvent", (e) => { + if ( + e.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || + e.bcEvent === OtherConstants.BC_EVENT.SWITCHED + ) { try { // Broadcast block currentBlock = e.block; - const blockDTO:BlockDTO = BlockDTO.fromJSONObject(currentBlock) - wssBlockBroadcast(JSON.stringify(block2HttpBlock(blockDTO))) + const blockDTO: BlockDTO = BlockDTO.fromJSONObject(currentBlock); + wssBlockBroadcast(JSON.stringify(block2HttpBlock(blockDTO))); } catch (e) { - logger && logger.error('error on ws mapSync:', e); + logger && logger.error("error on ws mapSync:", e); } } - }) - // Forward peers documents - server - .pipe(es.mapSync(function(data:any) { - try { - // Broadcast peer - if (data.endpoints) { - const peerDTO = PeerDTO.fromJSONObject(data) - const peerResult:HttpPeer = { - version: peerDTO.version, - currency: peerDTO.currency, - pubkey: peerDTO.pubkey, - block: peerDTO.blockstamp, - endpoints: peerDTO.endpoints, - signature: peerDTO.signature, - raw: peerDTO.getRaw() + }); + // Forward peers documents + server.pipe( + es.mapSync(function (data: any) { + try { + // Broadcast peer + if (data.endpoints) { + const peerDTO = PeerDTO.fromJSONObject(data); + const peerResult: HttpPeer = { + version: peerDTO.version, + currency: peerDTO.currency, + pubkey: peerDTO.pubkey, + block: peerDTO.blockstamp, + endpoints: 
peerDTO.endpoints,
+                signature: peerDTO.signature,
+                raw: peerDTO.getRaw(),
+              };
+              wssPeerBroadcast(JSON.stringify(peerResult));
            }
-            // Broadcast heads
-            else if (data.ws2p === 'heads' && data.added.length) {
-              wssHeadsBroadcast(JSON.stringify(data.added));
+            // Broadcast heads
+            else if (data.ws2p === "heads" && data.added.length) {
+              wssHeadsBroadcast(JSON.stringify(data.added));
+            }
+          } catch (e) {
+            logger && logger.error("error on ws mapSync:", e);
          }
-        } catch (e) {
-          logger && logger.error('error on ws mapSync:', e);
-        }
-      }));
-  });
+        })
+      );
+    }
+  );
 };
diff --git a/app/modules/bma/lib/constants.ts b/app/modules/bma/lib/constants.ts
index 4ba022f2920eed2a23a4f16948bc6b2e7b907ee3..adb86d061c9dea7a88054b59092e4058264be526 100644
--- a/app/modules/bma/lib/constants.ts
+++ b/app/modules/bma/lib/constants.ts
@@ -11,9 +11,8 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {CommonConstants} from "../../../lib/common-libs/constants"
+import { CommonConstants } from "../../../lib/common-libs/constants";
 
 export const BMAConstants = {
-
  BMA_PORTS_START: 10901,
  BMA_PORTS_END: 10999,
 
@@ -28,33 +27,102 @@ export const BMAConstants = {
  SHA256_HASH: /^[A-F0-9]{64}$/,
 
  ERRORS: {
-    // Technical errors
-    UNKNOWN: { httpCode: 500, uerr: { ucode: 1001, message: "An unknown error occured" }},
-    UNHANDLED: { httpCode: 500, uerr: { ucode: 1002, message: "An unhandled error occured" }},
-    HTTP_LIMITATION: { httpCode: 503, uerr: { ucode: 1006, message: "This URI has reached its maximum usage quota. Please retry later." }},
-    HTTP_PARAM_PUBKEY_REQUIRED: { httpCode: 400, uerr: { ucode: 1101, message: "Parameter `pubkey` is required" }},
-    HTTP_PARAM_IDENTITY_REQUIRED: { httpCode: 400, uerr: { ucode: 1102, message: "Parameter `identity` is required" }},
-    HTTP_PARAM_PEER_REQUIRED: { httpCode: 400, uerr: { ucode: 1103, message: "Requires a peer" }},
-    HTTP_PARAM_BLOCK_REQUIRED: { httpCode: 400, uerr: { ucode: 1104, message: "Requires a block" }},
-    HTTP_PARAM_MEMBERSHIP_REQUIRED: { httpCode: 400, uerr: { ucode: 1105, message: "Requires a membership" }},
-    HTTP_PARAM_TX_REQUIRED: { httpCode: 400, uerr: { ucode: 1106, message: "Requires a transaction" }},
-    HTTP_PARAM_SIG_REQUIRED: { httpCode: 400, uerr: { ucode: 1107, message: "Parameter `sig` is required" }},
-    HTTP_PARAM_CERT_REQUIRED: { httpCode: 400, uerr: { ucode: 1108, message: "Parameter `cert` is required" }},
-    HTTP_PARAM_REVOCATION_REQUIRED: { httpCode: 400, uerr: { ucode: 1109, message: "Parameter `revocation` is required" }},
-    HTTP_PARAM_CONF_REQUIRED: { httpCode: 400, uerr: { ucode: 1110, message: "Parameter `conf` is required" }},
-    HTTP_PARAM_CPU_REQUIRED: { httpCode: 400, uerr: { ucode: 1111, message: "Parameter `cpu` is required" }},
+    // Technical errors
+    UNKNOWN: {
+      httpCode: 500,
+      uerr: { ucode: 1001, message: "An unknown error occurred" },
+    },
+    UNHANDLED: {
+      httpCode: 500,
+      uerr: { ucode: 1002, message: "An unhandled error occurred" },
+    },
+    HTTP_LIMITATION: {
+      httpCode: 503,
+      uerr: {
+        ucode: 1006,
+        message:
+          "This URI has reached its maximum usage quota. 
Please retry later.", + }, + }, + HTTP_PARAM_PUBKEY_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1101, message: "Parameter `pubkey` is required" }, + }, + HTTP_PARAM_IDENTITY_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1102, message: "Parameter `identity` is required" }, + }, + HTTP_PARAM_PEER_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1103, message: "Requires a peer" }, + }, + HTTP_PARAM_BLOCK_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1104, message: "Requires a block" }, + }, + HTTP_PARAM_MEMBERSHIP_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1105, message: "Requires a membership" }, + }, + HTTP_PARAM_TX_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1106, message: "Requires a transaction" }, + }, + HTTP_PARAM_SIG_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1107, message: "Parameter `sig` is required" }, + }, + HTTP_PARAM_CERT_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1108, message: "Parameter `cert` is required" }, + }, + HTTP_PARAM_REVOCATION_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1109, message: "Parameter `revocation` is required" }, + }, + HTTP_PARAM_CONF_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1110, message: "Parameter `conf` is required" }, + }, + HTTP_PARAM_CPU_REQUIRED: { + httpCode: 400, + uerr: { ucode: 1111, message: "Parameter `cpu` is required" }, + }, // Business errors - NO_MATCHING_IDENTITY: { httpCode: 404, uerr: { ucode: 2001, message: "No matching identity" }}, - SELF_PEER_NOT_FOUND: { httpCode: 404, uerr: { ucode: 2005, message: "Self peering was not found" }}, - NOT_A_MEMBER: { httpCode: 400, uerr: { ucode: 2009, message: "Not a member" }}, - NO_CURRENT_BLOCK: { httpCode: 404, uerr: { ucode: 2010, message: "No current block" }}, - PEER_NOT_FOUND: { httpCode: 404, uerr: { ucode: 2012, message: "Peer not found" }}, - NO_IDTY_MATCHING_PUB_OR_UID: { httpCode: 404, uerr: { ucode: 2021, message: "No identity matching this pubkey or uid" }}, - TX_NOT_FOUND: { httpCode: 400, uerr: { ucode: 2034, message: 'Transaction not found' }}, - INCORRECT_PAGE_NUMBER: { httpCode: 400, uerr: { ucode: 2035, message: 'Incorrect page number' }} + NO_MATCHING_IDENTITY: { + httpCode: 404, + uerr: { ucode: 2001, message: "No matching identity" }, + }, + SELF_PEER_NOT_FOUND: { + httpCode: 404, + uerr: { ucode: 2005, message: "Self peering was not found" }, + }, + NOT_A_MEMBER: { + httpCode: 400, + uerr: { ucode: 2009, message: "Not a member" }, + }, + NO_CURRENT_BLOCK: { + httpCode: 404, + uerr: { ucode: 2010, message: "No current block" }, + }, + PEER_NOT_FOUND: { + httpCode: 404, + uerr: { ucode: 2012, message: "Peer not found" }, + }, + NO_IDTY_MATCHING_PUB_OR_UID: { + httpCode: 404, + uerr: { ucode: 2021, message: "No identity matching this pubkey or uid" }, + }, + TX_NOT_FOUND: { + httpCode: 400, + uerr: { ucode: 2034, message: "Transaction not found" }, + }, + INCORRECT_PAGE_NUMBER: { + httpCode: 400, + uerr: { ucode: 2035, message: "Incorrect page number" }, + }, // New errors: range 3000-4000 - } -} \ No newline at end of file + }, +}; diff --git a/app/modules/bma/lib/controllers/AbstractController.ts b/app/modules/bma/lib/controllers/AbstractController.ts index c9b8d9a37fb2575a5ea17e296232a82f6361d140..755e32db4c3761602405f2088e663965c5b05548 100644 --- a/app/modules/bma/lib/controllers/AbstractController.ts +++ b/app/modules/bma/lib/controllers/AbstractController.ts @@ -11,57 +11,65 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
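+// Base class shared by the BMA HTTP controllers: exposes the server's services and a common helper to push raw documents to the node.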
-import {Server} from "../../../../../server" -import {dos2unix} from "../../../../lib/common-libs/dos2unix" -import {CommonConstants} from "../../../../lib/common-libs/constants" -import {BlockchainService} from "../../../../service/BlockchainService" -import {IdentityService} from "../../../../service/IdentityService" -import {PeeringService} from "../../../../service/PeeringService" -import {ConfDTO} from "../../../../lib/dto/ConfDTO" +import { Server } from "../../../../../server"; +import { dos2unix } from "../../../../lib/common-libs/dos2unix"; +import { CommonConstants } from "../../../../lib/common-libs/constants"; +import { BlockchainService } from "../../../../service/BlockchainService"; +import { IdentityService } from "../../../../service/IdentityService"; +import { PeeringService } from "../../../../service/PeeringService"; +import { ConfDTO } from "../../../../lib/dto/ConfDTO"; export abstract class AbstractController { - - constructor(protected server:Server) { - } + constructor(protected server: Server) {} get conf(): ConfDTO { - return this.server.conf + return this.server.conf; } get logger() { - return this.server.logger + return this.server.logger; } get BlockchainService(): BlockchainService { - return this.server.BlockchainService + return this.server.BlockchainService; } get IdentityService(): IdentityService { - return this.server.IdentityService + return this.server.IdentityService; } get PeeringService(): PeeringService { - return this.server.PeeringService + return this.server.PeeringService; } get MerkleService() { - return this.server.MerkleService + return this.server.MerkleService; } - async pushEntity<T>(req:any, rawer:(req:any)=>string, task:(raw:string) => Promise<T>): Promise<T> { + async pushEntity<T>( + req: any, + rawer: (req: any) => string, + task: (raw: string) => Promise<T> + ): Promise<T> { let rawDocument = rawer(req); rawDocument = dos2unix(rawDocument); try { - return await task(rawDocument) + return await task(rawDocument); } catch (e) { - const event = CommonConstants.DocumentError - this.server.emit(event, e) - if (e !== "Block already known" && (!e || !e.uerr || ( - e.uerr.ucode !== CommonConstants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode - && e.uerr.ucode !== CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode))) { - this.logger.error(e) + const event = CommonConstants.DocumentError; + this.server.emit(event, e); + if ( + e !== "Block already known" && + (!e || + !e.uerr || + (e.uerr.ucode !== + CommonConstants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode && + e.uerr.ucode !== + CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode)) + ) { + this.logger.error(e); } - throw e + throw e; } } -} \ No newline at end of file +} diff --git a/app/modules/bma/lib/controllers/blockchain.ts b/app/modules/bma/lib/controllers/blockchain.ts index 9268c7528a4b833dca603c410849b5237a79bbb6..36f7443b2ba16a9db33f5b7c2ae9cccd6190af85 100644 --- a/app/modules/bma/lib/controllers/blockchain.ts +++ b/app/modules/bma/lib/controllers/blockchain.ts @@ -11,11 +11,11 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
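+// Controller behind the /blockchain/* BMA routes: blocks, parameters, memberships and per-block statistics.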
-import {Server} from "../../../../../server" -import {AbstractController} from "./AbstractController" -import {ParametersService} from "../parameters" -import {BMAConstants} from "../constants" -import {MembershipDTO} from "../../../../lib/dto/MembershipDTO" +import { Server } from "../../../../../server"; +import { AbstractController } from "./AbstractController"; +import { ParametersService } from "../parameters"; +import { BMAConstants } from "../constants"; +import { MembershipDTO } from "../../../../lib/dto/MembershipDTO"; import { block2HttpBlock, HttpBlock, @@ -26,38 +26,38 @@ import { HttpMemberships, HttpMilestonePage, HttpParameters, - HttpStat -} from "../dtos" -import {TransactionDTO} from "../../../../lib/dto/TransactionDTO" -import {DataErrors} from "../../../../lib/common-libs/errors" -import {Underscore} from "../../../../lib/common-libs/underscore" -import * as toJson from "../tojson" -import {StatName} from "../../../../lib/dal/fileDAL" + HttpStat, +} from "../dtos"; +import { TransactionDTO } from "../../../../lib/dto/TransactionDTO"; +import { DataErrors } from "../../../../lib/common-libs/errors"; +import { Underscore } from "../../../../lib/common-libs/underscore"; +import * as toJson from "../tojson"; +import { StatName } from "../../../../lib/dal/fileDAL"; -const http2raw = require('../http2raw'); +const http2raw = require("../http2raw"); export class BlockchainBinding extends AbstractController { + with: any; - with:any - - constructor(server:Server) { - super(server) + constructor(server: Server) { + super(server); this.with = { - - newcomers: this.getStat('newcomers'), - certs: this.getStat('certs'), - joiners: this.getStat('joiners'), - actives: this.getStat('actives'), - leavers: this.getStat('leavers'), - revoked: this.getStat('revoked'), - excluded: this.getStat('excluded'), - ud: this.getStat('ud'), - tx: this.getStat('tx') - } + newcomers: this.getStat("newcomers"), + certs: this.getStat("certs"), + joiners: this.getStat("joiners"), + actives: this.getStat("actives"), + leavers: this.getStat("leavers"), + revoked: this.getStat("revoked"), + excluded: this.getStat("excluded"), + ud: this.getStat("ud"), + tx: this.getStat("tx"), + }; } - async parseMembership(req:any): Promise<HttpMembership> { - const res = await this.pushEntity(req, http2raw.membership, (raw:string) => this.server.writeRawMembership(raw)) + async parseMembership(req: any): Promise<HttpMembership> { + const res = await this.pushEntity(req, http2raw.membership, (raw: string) => + this.server.writeRawMembership(raw) + ); return { signature: res.signature, membership: { @@ -67,65 +67,67 @@ export class BlockchainBinding extends AbstractController { membership: res.membership, date: res.date || 0, sigDate: res.sigDate || 0, - raw: res.getRaw() - } - } + raw: res.getRaw(), + }, + }; } - async parseBlock(req:any): Promise<HttpBlock> { - const res = await this.pushEntity(req, http2raw.block, (raw:string) => this.server.writeRawBlock(raw)) - return block2HttpBlock(res) + async parseBlock(req: any): Promise<HttpBlock> { + const res = await this.pushEntity(req, http2raw.block, (raw: string) => + this.server.writeRawBlock(raw) + ); + return block2HttpBlock(res); } parameters = async (): Promise<HttpParameters> => { - const params = await this.server.dal.getParameters() + const params = await this.server.dal.getParameters(); return { - "currency": params.currency, - "c": params.c, - "dt": params.dt, - "ud0": params.ud0, - "sigPeriod": params.sigPeriod, - "sigStock": params.sigStock, - "sigWindow": 
params.sigWindow, - "sigValidity": params.sigValidity, - "sigQty": params.sigQty, - "sigReplay": params.sigReplay, - "idtyWindow": params.idtyWindow, - "msWindow": params.msWindow, - "msPeriod": params.msPeriod, - "xpercent": params.xpercent, - "msValidity": params.msValidity, - "stepMax": params.stepMax, - "medianTimeBlocks": params.medianTimeBlocks, - "avgGenTime": params.avgGenTime, - "dtDiffEval": params.dtDiffEval, - "percentRot": params.percentRot, - "udTime0": params.udTime0, - "udReevalTime0": params.udReevalTime0, - "dtReeval": params.dtReeval - } - } + currency: params.currency, + c: params.c, + dt: params.dt, + ud0: params.ud0, + sigPeriod: params.sigPeriod, + sigStock: params.sigStock, + sigWindow: params.sigWindow, + sigValidity: params.sigValidity, + sigQty: params.sigQty, + sigReplay: params.sigReplay, + idtyWindow: params.idtyWindow, + msWindow: params.msWindow, + msPeriod: params.msPeriod, + xpercent: params.xpercent, + msValidity: params.msValidity, + stepMax: params.stepMax, + medianTimeBlocks: params.medianTimeBlocks, + avgGenTime: params.avgGenTime, + dtDiffEval: params.dtDiffEval, + percentRot: params.percentRot, + udTime0: params.udTime0, + udReevalTime0: params.udReevalTime0, + dtReeval: params.dtReeval, + }; + }; private getStat(statName: StatName): () => Promise<HttpStat> { return async () => { let stat = await this.server.dal.getStat(statName); return { result: toJson.stat(stat) }; - } + }; } - async promoted(req:any): Promise<HttpBlock> { + async promoted(req: any): Promise<HttpBlock> { const number = await ParametersService.getNumberP(req); const promoted = await this.BlockchainService.promoted(number); return toJson.block(promoted); } - async blocks(req:any): Promise<HttpBlock[]> { + async blocks(req: any): Promise<HttpBlock[]> { const params = ParametersService.getCountAndFrom(req); const count = parseInt(params.count); const from = parseInt(params.from); let blocks: any[] = await this.BlockchainService.blocksBetween(from, count); - blocks = blocks.map((b:any) => toJson.block(b)); - return blocks.map(b => ({ + blocks = blocks.map((b: any) => toJson.block(b)); + return blocks.map((b) => ({ version: b.version, currency: b.currency, number: b.number, @@ -151,7 +153,7 @@ export class BlockchainBinding extends AbstractController { leavers: b.leavers, revoked: b.revoked, excluded: b.excluded, - transactions: b.transactions.map((t:TransactionDTO) => ({ + transactions: b.transactions.map((t: TransactionDTO) => ({ version: t.version, currency: t.currency, comment: t.comment, @@ -169,12 +171,12 @@ export class BlockchainBinding extends AbstractController { inner_hash: b.inner_hash, signature: b.signature, raw: b.raw, - })) + })); } async milestones(req: any): Promise<HttpMilestonePage> { - const page = ParametersService.getPage(req) - return this.server.milestones(page) + const page = ParametersService.getPage(req); + return this.server.milestones(page); } async current(): Promise<HttpBlock> { @@ -183,10 +185,12 @@ export class BlockchainBinding extends AbstractController { return toJson.block(current); } - async hardship(req:any): Promise<HttpHardship> { + async hardship(req: any): Promise<HttpHardship> { let nextBlockNumber = 0; const search = await ParametersService.getSearchP(req); - const idty = await this.server.dal.getWrittenIdtyByPubkeyOrUidForIsMemberAndPubkey(search); + const idty = await this.server.dal.getWrittenIdtyByPubkeyOrUidForIsMemberAndPubkey( + search + ); if (!idty) { throw BMAConstants.ERRORS.NO_MATCHING_IDENTITY; } @@ -197,43 +201,52 @@ export 
class BlockchainBinding extends AbstractController { if (current) { nextBlockNumber = current ? current.number + 1 : 0; } - const difficulty = await this.server.getBcContext().getIssuerPersonalizedDifficulty(idty.pub); + const difficulty = await this.server + .getBcContext() + .getIssuerPersonalizedDifficulty(idty.pub); return { - "block": nextBlockNumber, - "level": difficulty + block: nextBlockNumber, + level: difficulty, }; } async difficulties(): Promise<HttpDifficulties> { - const current = await this.server.dal.getCurrentBlockOrNull() + const current = await this.server.dal.getCurrentBlockOrNull(); if (!current) { - throw Error(DataErrors[DataErrors.BLOCKCHAIN_NOT_INITIALIZED_YET]) + throw Error(DataErrors[DataErrors.BLOCKCHAIN_NOT_INITIALIZED_YET]); } const number = (current && current.number) || 0; - const issuers = await this.server.dal.getUniqueIssuersBetween(number - 1 - current.issuersFrame, number - 1); + const issuers = await this.server.dal.getUniqueIssuersBetween( + number - 1 - current.issuersFrame, + number - 1 + ); const difficulties = []; for (const issuer of issuers) { - const member = await this.server.dal.getWrittenIdtyByPubkeyForUidAndPubkey(issuer); - const difficulty = await this.server.getBcContext().getIssuerPersonalizedDifficulty(member.pub); + const member = await this.server.dal.getWrittenIdtyByPubkeyForUidAndPubkey( + issuer + ); + const difficulty = await this.server + .getBcContext() + .getIssuerPersonalizedDifficulty(member.pub); difficulties.push({ uid: member.uid, - level: difficulty + level: difficulty, }); } return { - "block": number + 1, - "levels": Underscore.sortBy(difficulties, (diff:any) => diff.level) + block: number + 1, + levels: Underscore.sortBy(difficulties, (diff: any) => diff.level), }; } - async memberships(req:any): Promise<HttpMemberships> { + async memberships(req: any): Promise<HttpMemberships> { const search = await ParametersService.getSearchP(req); const { idty, memberships } = await this.IdentityService.findMember(search); const json = { pubkey: idty.pubkey, uid: idty.uid, sigDate: idty.buid, - memberships: memberships.map((msObj:any) => { + memberships: memberships.map((msObj: any) => { const ms = MembershipDTO.fromJSONObject(msObj); return { version: ms.version, @@ -241,11 +254,14 @@ export class BlockchainBinding extends AbstractController { membership: ms.membership, blockNumber: ms.block_number, blockHash: ms.block_hash, - written: (!msObj.written_number && msObj.written_number !== 0) ? null : msObj.written_number + written: + !msObj.written_number && msObj.written_number !== 0 + ? null + : msObj.written_number, }; - }) - } - json.memberships = Underscore.sortBy(json.memberships, 'blockNumber') + }), + }; + json.memberships = Underscore.sortBy(json.memberships, "blockNumber"); json.memberships.reverse(); return json; } @@ -254,7 +270,7 @@ export class BlockchainBinding extends AbstractController { const branches = await this.BlockchainService.branches(); const blocks = branches.map((b) => toJson.block(b)); return { - blocks: blocks + blocks: blocks, }; } } diff --git a/app/modules/bma/lib/controllers/network.ts b/app/modules/bma/lib/controllers/network.ts index 61bfc34dcfc4f2a1476c20a3f0d170d041ca309e..ff9edf3f0e78a3d916cbe6905afe4219c0b36ff1 100644 --- a/app/modules/bma/lib/controllers/network.ts +++ b/app/modules/bma/lib/controllers/network.ts @@ -11,16 +11,21 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {AbstractController} from "./AbstractController" -import {BMAConstants} from "../constants" -import {HttpMerkleOfPeers, HttpPeer, HttpPeers, HttpWS2PHeads, HttpWS2PInfo} from "../dtos" -import {WS2PHead} from "../../../ws2p/lib/WS2PCluster" -import {DBPeer} from "../../../../lib/db/DBPeer" +import { AbstractController } from "./AbstractController"; +import { BMAConstants } from "../constants"; +import { + HttpMerkleOfPeers, + HttpPeer, + HttpPeers, + HttpWS2PHeads, + HttpWS2PInfo, +} from "../dtos"; +import { WS2PHead } from "../../../ws2p/lib/WS2PCluster"; +import { DBPeer } from "../../../../lib/db/DBPeer"; -const http2raw = require('../http2raw'); +const http2raw = require("../http2raw"); export class NetworkBinding extends AbstractController { - async peer(): Promise<HttpPeer> { const p = await this.PeeringService.peer(); if (!p) { @@ -29,13 +34,13 @@ export class NetworkBinding extends AbstractController { return p.json(); } - async peersGet(req:any): Promise<HttpMerkleOfPeers> { + async peersGet(req: any): Promise<HttpMerkleOfPeers> { let merkle = await this.server.dal.merkleForPeers(); - return await this.MerkleService(req, merkle, async (hashes:string[]) => { + return await this.MerkleService(req, merkle, async (hashes: string[]) => { try { let peers = await this.server.dal.findPeersWhoseHashIsIn(hashes); - const map:any = {}; - peers.forEach((peer:any) => { + const map: any = {}; + peers.forEach((peer: any) => { map[peer.hash] = peer; }); if (peers.length == 0) { @@ -45,11 +50,13 @@ export class NetworkBinding extends AbstractController { } catch (e) { throw e; } - }) + }); } - async peersPost(req:any): Promise<HttpPeer> { - const peerDTO = await this.pushEntity(req, http2raw.peer, (raw:string) => this.server.writeRawPeer(raw)) + async peersPost(req: any): Promise<HttpPeer> { + const peerDTO = await this.pushEntity(req, http2raw.peer, (raw: string) => + this.server.writeRawPeer(raw) + ); return { version: peerDTO.version, currency: peerDTO.currency, @@ -57,39 +64,39 @@ export class NetworkBinding extends AbstractController { block: peerDTO.blockstamp, endpoints: peerDTO.endpoints, signature: peerDTO.signature, - raw: peerDTO.getRaw() - } + raw: peerDTO.getRaw(), + }; } async peers(): Promise<HttpPeers> { let peers = await this.server.dal.listAllPeers(); return { - peers: peers.map(p => DBPeer.json(p)) - } + peers: peers.map((p) => DBPeer.json(p)), + }; } async ws2pInfo(): Promise<HttpWS2PInfo> { - const cluster = this.server.ws2pCluster - let level1 = 0 - let level2 = 0 + const cluster = this.server.ws2pCluster; + let level1 = 0; + let level2 = 0; if (cluster) { - level1 = await cluster.clientsCount() - level2 = await cluster.servedCount() + level1 = await cluster.clientsCount(); + level2 = await cluster.servedCount(); } return { peers: { level1, - level2 - } + level2, + }, }; } async ws2pHeads(): Promise<HttpWS2PHeads> { - const cluster = this.server.ws2pCluster - let heads: WS2PHead[] = [] + const cluster = this.server.ws2pCluster; + let heads: WS2PHead[] = []; if (cluster) { - heads = await cluster.getKnownHeads() + heads = await cluster.getKnownHeads(); } - return { heads } + return { heads }; } } diff --git a/app/modules/bma/lib/controllers/node.ts b/app/modules/bma/lib/controllers/node.ts index d75ed955a991cc5a7a5cb1f5c32408ce32da8a73..a2edfedaa59453321260e131aa7166196bed3c1d 100644 --- a/app/modules/bma/lib/controllers/node.ts +++ b/app/modules/bma/lib/controllers/node.ts @@ -12,33 +12,32 @@ // GNU Affero General Public License for more details. 
"use strict"; -import {AbstractController} from "./AbstractController" -import {HttpSandbox, HttpSandboxes, HttpSummary} from "../dtos"; +import { AbstractController } from "./AbstractController"; +import { HttpSandbox, HttpSandboxes, HttpSummary } from "../dtos"; export class NodeBinding extends AbstractController { - summary = (): HttpSummary => { return { - "duniter": { - "software": "duniter", - "version": this.server.version, - "forkWindowSize": this.server.conf.forksize - } - } - } + duniter: { + software: "duniter", + version: this.server.version, + forkWindowSize: this.server.conf.forksize, + }, + }; + }; async sandboxes(): Promise<HttpSandboxes> { return { identities: await sandboxIt(this.server.dal.idtyDAL.sandbox), memberships: await sandboxIt(this.server.dal.msDAL.sandbox), - transactions: await sandboxIt(this.server.dal.txsDAL.sandbox) - } + transactions: await sandboxIt(this.server.dal.txsDAL.sandbox), + }; } } -async function sandboxIt(sandbox:any): Promise<HttpSandbox> { +async function sandboxIt(sandbox: any): Promise<HttpSandbox> { return { size: sandbox.maxSize, - free: await sandbox.getSandboxRoom() - } + free: await sandbox.getSandboxRoom(), + }; } diff --git a/app/modules/bma/lib/controllers/transactions.ts b/app/modules/bma/lib/controllers/transactions.ts index 3706b8daabdf7dd4fe2f0272906aae57e2e789da..84ea38d13d0928123ccc9ca96e6f8006faac6545 100644 --- a/app/modules/bma/lib/controllers/transactions.ts +++ b/app/modules/bma/lib/controllers/transactions.ts @@ -11,20 +11,29 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractController} from "./AbstractController"; -import {ParametersService} from "../parameters"; -import {BMAConstants} from "../constants"; -import {TransactionDTO} from "../../../../lib/dto/TransactionDTO"; -import {HttpSources, HttpTransaction, HttpTxHistory, HttpTxOfHistory, HttpTxPending} from "../dtos"; -import {DBTx} from "../../../../lib/db/DBTx" -import {Underscore} from "../../../../lib/common-libs/underscore" +import { AbstractController } from "./AbstractController"; +import { ParametersService } from "../parameters"; +import { BMAConstants } from "../constants"; +import { TransactionDTO } from "../../../../lib/dto/TransactionDTO"; +import { + HttpSources, + HttpTransaction, + HttpTxHistory, + HttpTxOfHistory, + HttpTxPending, +} from "../dtos"; +import { DBTx } from "../../../../lib/db/DBTx"; +import { Underscore } from "../../../../lib/common-libs/underscore"; -const http2raw = require('../http2raw'); +const http2raw = require("../http2raw"); export class TransactionBinding extends AbstractController { - - async parseTransaction(req:any): Promise<HttpTransaction> { - const res = await this.pushEntity(req, http2raw.transaction, (raw:string) => this.server.writeRawTransaction(raw)) + async parseTransaction(req: any): Promise<HttpTransaction> { + const res = await this.pushEntity( + req, + http2raw.transaction, + (raw: string) => this.server.writeRawTransaction(raw) + ); return { version: res.version, currency: res.currency, @@ -37,28 +46,28 @@ export class TransactionBinding extends AbstractController { locktime: res.locktime, hash: res.hash, written_block: res.blockNumber, - raw: res.getRaw() - } + raw: res.getRaw(), + }; } - async getSources(req:any): Promise<HttpSources> { + async getSources(req: any): Promise<HttpSources> { const pubkey = await ParametersService.getPubkeyP(req); const sources = await this.server.dal.getAvailableSourcesByPubkey(pubkey); 
return { currency: this.conf.currency, pubkey, - sources - } + sources, + }; } - async getByHash(req:any): Promise<HttpTransaction> { + async getByHash(req: any): Promise<HttpTransaction> { const hash = ParametersService.getHash(req); - const tx:DBTx = await this.server.dal.getTxByHash(hash); + const tx: DBTx = await this.server.dal.getTxByHash(hash); if (!tx) { throw BMAConstants.ERRORS.TX_NOT_FOUND; } - tx.inputs = tx.inputs.map((i:any) => i.raw || i) - tx.outputs = tx.outputs.map((o:any) => o.raw || o) + tx.inputs = tx.inputs.map((i: any) => i.raw || i); + tx.outputs = tx.outputs.map((o: any) => o.raw || o); return { version: tx.version, currency: tx.currency, @@ -76,44 +85,52 @@ export class TransactionBinding extends AbstractController { // block_number: tx.block_number, written_block: tx.block_number, // received: tx.received, - raw: "" - } + raw: "", + }; } - async getHistory(req:any): Promise<HttpTxHistory> { + async getHistory(req: any): Promise<HttpTxHistory> { const pubkey = await ParametersService.getPubkeyP(req); - return this.getFilteredHistory(pubkey, (results:any) => results); + return this.getFilteredHistory(pubkey, (results: any) => results); } - async getHistoryBetweenBlocks(req:any): Promise<HttpTxHistory> { + async getHistoryBetweenBlocks(req: any): Promise<HttpTxHistory> { const pubkey = await ParametersService.getPubkeyP(req); const from = await ParametersService.getFromP(req); const to = await ParametersService.getToP(req); - return this.getFilteredHistory(pubkey, (res:any) => { + return this.getFilteredHistory(pubkey, (res: any) => { const histo = res.history; - histo.sent = Underscore.filter(histo.sent, function(tx:any){ return tx && tx.block_number >= from && tx.block_number <= to; }); - histo.received = Underscore.filter(histo.received, function(tx:any){ return tx && tx.block_number >= from && tx.block_number <= to; }); + histo.sent = Underscore.filter(histo.sent, function (tx: any) { + return tx && tx.block_number >= from && tx.block_number <= to; + }); + histo.received = Underscore.filter(histo.received, function (tx: any) { + return tx && tx.block_number >= from && tx.block_number <= to; + }); Underscore.extend(histo, { sending: [], receiving: [] }); return res; }); } - async getHistoryBetweenTimes(req:any): Promise<HttpTxHistory> { + async getHistoryBetweenTimes(req: any): Promise<HttpTxHistory> { const pubkey = await ParametersService.getPubkeyP(req); const from = await ParametersService.getFromP(req); const to = await ParametersService.getToP(req); - return this.getFilteredHistory(pubkey, (res:any) => { + return this.getFilteredHistory(pubkey, (res: any) => { const histo = res.history; - histo.sent = Underscore.filter(histo.sent, function(tx:any){ return tx && tx.time >= from && tx.time <= to; }); - histo.received = Underscore.filter(histo.received, function(tx:any){ return tx && tx.time >= from && tx.time <= to; }); + histo.sent = Underscore.filter(histo.sent, function (tx: any) { + return tx && tx.time >= from && tx.time <= to; + }); + histo.received = Underscore.filter(histo.received, function (tx: any) { + return tx && tx.time >= from && tx.time <= to; + }); Underscore.extend(histo, { sending: [], receiving: [] }); return res; }); } - async getPendingForPubkey(req:any): Promise<HttpTxHistory> { + async getPendingForPubkey(req: any): Promise<HttpTxHistory> { const pubkey = await ParametersService.getPubkeyP(req); - return this.getFilteredHistory(pubkey, function(res:any) { + return this.getFilteredHistory(pubkey, function (res: any) { const histo = 
res.history; Underscore.extend(histo, { sent: [], received: [] }); return res; @@ -124,8 +141,8 @@ export class TransactionBinding extends AbstractController { const pending = await this.server.dal.getTransactionsPending(); return { currency: this.conf.currency, - pending: pending.map(t => { - const tx = TransactionDTO.fromJSONObject(t) + pending: pending.map((t) => { + const tx = TransactionDTO.fromJSONObject(t); return { version: tx.version, issuers: tx.issuers, @@ -137,30 +154,33 @@ export class TransactionBinding extends AbstractController { blockstamp: tx.blockstamp, blockstampTime: tx.blockstampTime, signatures: tx.signatures, - hash: tx.hash - } - }) - } + hash: tx.hash, + }; + }), + }; } - private async getFilteredHistory(pubkey:string, filter:any): Promise<HttpTxHistory> { + private async getFilteredHistory( + pubkey: string, + filter: any + ): Promise<HttpTxHistory> { let history = await this.server.dal.getTransactionsHistory(pubkey); let result = { - "currency": this.conf.currency, - "pubkey": pubkey, - "history": { + currency: this.conf.currency, + pubkey: pubkey, + history: { sending: history.sending.map(dbtx2HttpTxOfHistory), received: history.received.map(dbtx2HttpTxOfHistory), receiving: history.receiving.map(dbtx2HttpTxOfHistory), sent: history.sent.map(dbtx2HttpTxOfHistory), - pending: history.pending.map(dbtx2HttpTxOfHistory) - } - } + pending: history.pending.map(dbtx2HttpTxOfHistory), + }, + }; return filter(result); } } -function dbtx2HttpTxOfHistory(tx:DBTx): HttpTxOfHistory { +function dbtx2HttpTxOfHistory(tx: DBTx): HttpTxOfHistory { return { version: tx.version, locktime: tx.locktime, @@ -175,6 +195,6 @@ function dbtx2HttpTxOfHistory(tx:DBTx): HttpTxOfHistory { hash: tx.hash, time: tx.time, block_number: tx.block_number, - received: tx.received - } + received: tx.received, + }; } diff --git a/app/modules/bma/lib/controllers/uds.ts b/app/modules/bma/lib/controllers/uds.ts index f2378438797664ce5f6d1c547503f5f66ef9dfd6..c73546866b70fb30ddfa57eea72a316e3169e5cb 100644 --- a/app/modules/bma/lib/controllers/uds.ts +++ b/app/modules/bma/lib/controllers/uds.ts @@ -11,52 +11,64 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {AbstractController} from "./AbstractController" -import {ParametersService} from "../parameters" -import {Source} from "../entity/source" -import {HttpUDHistory} from "../dtos"; -import {Underscore} from "../../../../lib/common-libs/underscore" +import { AbstractController } from "./AbstractController"; +import { ParametersService } from "../parameters"; +import { Source } from "../entity/source"; +import { HttpUDHistory } from "../dtos"; +import { Underscore } from "../../../../lib/common-libs/underscore"; export class UDBinding extends AbstractController { - - async getHistory(req:any): Promise<HttpUDHistory> { + async getHistory(req: any): Promise<HttpUDHistory> { const pubkey = await ParametersService.getPubkeyP(req); - return this.getUDSources(pubkey, (results:any) => results); + return this.getUDSources(pubkey, (results: any) => results); } - async getHistoryBetweenBlocks(req:any) { + async getHistoryBetweenBlocks(req: any) { const pubkey = await ParametersService.getPubkeyP(req); const from = await ParametersService.getFromP(req); const to = await ParametersService.getToP(req); - return this.getUDSources(pubkey, (results:any) => { - results.history.history = Underscore.filter(results.history.history, function(ud:any){ return ud.block_number >= from && ud.block_number <= to; }); + return this.getUDSources(pubkey, (results: any) => { + results.history.history = Underscore.filter( + results.history.history, + function (ud: any) { + return ud.block_number >= from && ud.block_number <= to; + } + ); return results; - }) + }); } - async getHistoryBetweenTimes(req:any) { + async getHistoryBetweenTimes(req: any) { const pubkey = await ParametersService.getPubkeyP(req); const from = await ParametersService.getFromP(req); const to = await ParametersService.getToP(req); - return this.getUDSources(pubkey, (results:any) => { - results.history.history = Underscore.filter(results.history.history, function(ud:any){ return ud.time >= from && ud.time <= to; }); + return this.getUDSources(pubkey, (results: any) => { + results.history.history = Underscore.filter( + results.history.history, + function (ud: any) { + return ud.time >= from && ud.time <= to; + } + ); return results; }); } - private async getUDSources(pubkey:string, filter:any) { - const history:any = await this.server.dal.getUDHistory(pubkey); - const result = { - "currency": this.conf.currency, - "pubkey": pubkey, - "history": history - }; - Underscore.keys(history).map((key:any) => { - history[key].map((src:any, index:number) => { - history[key][index] = new Source(src).UDjson() - Underscore.extend(history[key][index], { block_number: src && src.block_number, time: src && src.time }); + private async getUDSources(pubkey: string, filter: any) { + const history: any = await this.server.dal.getUDHistory(pubkey); + const result = { + currency: this.conf.currency, + pubkey: pubkey, + history: history, + }; + Underscore.keys(history).map((key: any) => { + history[key].map((src: any, index: number) => { + history[key][index] = new Source(src).UDjson(); + Underscore.extend(history[key][index], { + block_number: src && src.block_number, + time: src && src.time, }); }); - return filter(result); + }); + return filter(result); } } diff --git a/app/modules/bma/lib/controllers/wot.ts b/app/modules/bma/lib/controllers/wot.ts index 6a5e8bb23b2d1eea8ad43c1e918f15ec4dcc18b6..61e869b06e7ae01296a122ab523f990b6cc943dc 100644 --- a/app/modules/bma/lib/controllers/wot.ts +++ b/app/modules/bma/lib/controllers/wot.ts @@ -11,10 +11,10 @@ // 
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {AbstractController} from "./AbstractController"; -import {BMAConstants} from "../constants"; -import {DBIdentity} from "../../../../lib/dal/sqliteDAL/IdentityDAL"; -import {IdentityForRequirements} from '../../../../service/BlockchainService'; +import { AbstractController } from "./AbstractController"; +import { BMAConstants } from "../constants"; +import { DBIdentity } from "../../../../lib/dal/sqliteDAL/IdentityDAL"; +import { IdentityForRequirements } from "../../../../service/BlockchainService"; import { HttpCert, HttpCertIdentity, @@ -27,56 +27,70 @@ import { HttpMembershipList, HttpRequirements, HttpResult, - HttpSimpleIdentity + HttpSimpleIdentity, } from "../dtos"; -import {IdentityDTO} from "../../../../lib/dto/IdentityDTO" -import {FullIindexEntry} from "../../../../lib/indexer" -import {DBMembership} from "../../../../lib/dal/sqliteDAL/MembershipDAL" -import {Underscore} from "../../../../lib/common-libs/underscore" -import {Map} from "../../../../lib/common-libs/crypto/map" +import { IdentityDTO } from "../../../../lib/dto/IdentityDTO"; +import { FullIindexEntry } from "../../../../lib/indexer"; +import { DBMembership } from "../../../../lib/dal/sqliteDAL/MembershipDAL"; +import { Underscore } from "../../../../lib/common-libs/underscore"; +import { Map } from "../../../../lib/common-libs/crypto/map"; -const http2raw = require('../http2raw'); -const constants = require('../../../../lib/constants'); +const http2raw = require("../http2raw"); +const constants = require("../../../../lib/constants"); -const ParametersService = require('../parameters').ParametersService +const ParametersService = require("../parameters").ParametersService; export class WOTBinding extends AbstractController { - - async lookup(req:any): Promise<HttpLookup> { + async lookup(req: any): Promise<HttpLookup> { // Get the search parameter from HTTP query const search = await ParametersService.getSearchP(req); // Make the research const identities = await this.IdentityService.searchIdentities(search); // Entitify each result - identities.forEach((idty, index) => identities[index] = DBIdentity.copyFromExisting(idty)); + identities.forEach( + (idty, index) => (identities[index] = DBIdentity.copyFromExisting(idty)) + ); // Prepare some data to avoid displaying expired certifications for (const idty of identities) { - const certs: any[] = await this.server.dal.certsToTarget(idty.pubkey, idty.getTargetHash()); + const certs: any[] = await this.server.dal.certsToTarget( + idty.pubkey, + idty.getTargetHash() + ); const validCerts = []; for (const cert of certs) { - const member = await this.server.dal.getWrittenIdtyByPubkeyForUidAndIsMemberAndWasMember(cert.from); + const member = await this.server.dal.getWrittenIdtyByPubkeyForUidAndIsMemberAndWasMember( + cert.from + ); if (member) { cert.uids = [member.uid]; cert.isMember = member.member; cert.wasMember = member.wasMember; } else { - const potentials = await this.IdentityService.getPendingFromPubkey(cert.from); - cert.uids = potentials.map(p => p.uid) + const potentials = await this.IdentityService.getPendingFromPubkey( + cert.from + ); + cert.uids = potentials.map((p) => p.uid); cert.isMember = false; cert.wasMember = false; } validCerts.push(cert); } idty.certs = validCerts; - const signed:any = await this.server.dal.certsFrom(idty.pubkey); + const signed: any = await this.server.dal.certsFrom(idty.pubkey); const validSigned = []; for (let 
j = 0; j < signed.length; j++) { const cert = Underscore.clone(signed[j]); - cert.idty = await this.server.dal.getGlobalIdentityByHashForLookup(cert.target) + cert.idty = await this.server.dal.getGlobalIdentityByHashForLookup( + cert.target + ); if (cert.idty) { validSigned.push(cert); } else { - this.logger.debug('A certification to an unknown identity was found (%s => %s)', cert.from, cert.to); + this.logger.debug( + "A certification to an unknown identity was found (%s => %s)", + cert.from, + cert.to + ); } } idty.signed = validSigned; @@ -84,9 +98,9 @@ export class WOTBinding extends AbstractController { if (identities.length == 0) { throw BMAConstants.ERRORS.NO_MATCHING_IDENTITY; } - const resultsByPubkey:Map<HttpIdentity> = {}; + const resultsByPubkey: Map<HttpIdentity> = {}; identities.forEach((identity) => { - const copy = DBIdentity.copyFromExisting(identity) + const copy = DBIdentity.copyFromExisting(identity); const jsoned = copy.json(); if (!resultsByPubkey[jsoned.pubkey]) { // Create the first matching identity with this pubkey in the map @@ -101,28 +115,39 @@ export class WOTBinding extends AbstractController { }); return { partial: false, - results: Underscore.values(resultsByPubkey) + results: Underscore.values(resultsByPubkey), }; } async members(): Promise<HttpMembers> { const identities = await this.server.dal.getMembers(); - const json:any = { - results: [] + const json: any = { + results: [], }; - identities.forEach((identity:any) => json.results.push({ pubkey: identity.pubkey, uid: identity.uid })); + identities.forEach((identity: any) => + json.results.push({ pubkey: identity.pubkey, uid: identity.uid }) + ); return json; } - async certifiersOf(req:any): Promise<HttpCertifications> { + async certifiersOf(req: any): Promise<HttpCertifications> { const search = await ParametersService.getSearchP(req); - const idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(search)) as FullIindexEntry - const certs = await this.server.dal.certsToTarget(idty.pub, IdentityDTO.getTargetHash(idty)) - const theCerts:HttpCertification[] = []; + const idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember( + search + )) as FullIindexEntry; + const certs = await this.server.dal.certsToTarget( + idty.pub, + IdentityDTO.getTargetHash(idty) + ); + const theCerts: HttpCertification[] = []; for (const cert of certs) { - const certifier = await this.server.dal.getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn(cert.from); + const certifier = await this.server.dal.getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn( + cert.from + ); if (certifier) { - let certBlock = await this.server.dal.getBlockWeHaveItForSure(cert.block_number) + let certBlock = await this.server.dal.getBlockWeHaveItForSure( + cert.block_number + ); theCerts.push({ pubkey: cert.from, uid: certifier.uid, @@ -130,15 +155,18 @@ export class WOTBinding extends AbstractController { wasMember: true, // a member is necessarily certified by members cert_time: { block: certBlock.number, - medianTime: certBlock.medianTime + medianTime: certBlock.medianTime, }, sigDate: certifier.created_on, - written: (cert.written_block !== null && cert.written_hash) ? { - number: cert.written_block, - hash: cert.written_hash - } : null, - signature: cert.sig - }) + written: + cert.written_block !== null && cert.written_hash + ? 
{ + number: cert.written_block, + hash: cert.written_hash, + } + : null, + signature: cert.sig, + }); } } return { @@ -146,29 +174,34 @@ export class WOTBinding extends AbstractController { uid: idty.uid, sigDate: idty.created_on, isMember: idty.member, - certifications: theCerts - } + certifications: theCerts, + }; } - async requirements(req:any): Promise<HttpRequirements> { + async requirements(req: any): Promise<HttpRequirements> { const search = await ParametersService.getSearchP(req); - const identities:any = await this.IdentityService.searchIdentities(search); - const all:HttpIdentityRequirement[] = await this.BlockchainService.requirementsOfIdentities(identities); + const identities: any = await this.IdentityService.searchIdentities(search); + const all: HttpIdentityRequirement[] = await this.BlockchainService.requirementsOfIdentities( + identities + ); if (!all || !all.length) { throw BMAConstants.ERRORS.NO_IDTY_MATCHING_PUB_OR_UID; } return { - identities: all + identities: all, }; } - async requirementsOfPending(req:any): Promise<HttpRequirements> { - const minsig = ParametersService.getMinSig(req) - let identities:IdentityForRequirements[] = (await this.server.dal.idtyDAL.query( - 'SELECT i.*, count(c.sig) as nbSig ' + - 'FROM idty i, cert c ' + - 'WHERE c.target = i.hash group by i.hash having nbSig >= ?', - [minsig])).map(i => ({ + async requirementsOfPending(req: any): Promise<HttpRequirements> { + const minsig = ParametersService.getMinSig(req); + let identities: IdentityForRequirements[] = ( + await this.server.dal.idtyDAL.query( + "SELECT i.*, count(c.sig) as nbSig " + + "FROM idty i, cert c " + + "WHERE c.target = i.hash group by i.hash having nbSig >= ?", + [minsig] + ) + ).map((i) => ({ hash: i.hash || "", member: i.member || false, wasMember: i.wasMember || false, @@ -178,28 +211,37 @@ export class WOTBinding extends AbstractController { sig: i.sig || "", revocation_sig: i.revocation_sig, revoked: i.revoked, - revoked_on: i.revoked_on ? 1 : 0 - })) - const members = await this.server.dal.findReceiversAbove(minsig) - identities = identities.concat(members) - const all = await this.BlockchainService.requirementsOfIdentities(identities, false); + revoked_on: i.revoked_on ? 
1 : 0, + })); + const members = await this.server.dal.findReceiversAbove(minsig); + identities = identities.concat(members); + const all = await this.BlockchainService.requirementsOfIdentities( + identities, + false + ); if (!all || !all.length) { throw BMAConstants.ERRORS.NO_IDTY_MATCHING_PUB_OR_UID; } return { - identities: all + identities: all, }; } - async certifiedBy(req:any): Promise<HttpCertifications> { + async certifiedBy(req: any): Promise<HttpCertifications> { const search = await ParametersService.getSearchP(req); - const idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(search)) as FullIindexEntry + const idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember( + search + )) as FullIindexEntry; const certs = await this.server.dal.certsFrom(idty.pub); - const theCerts:HttpCertification[] = []; + const theCerts: HttpCertification[] = []; for (const cert of certs) { - const certified = await this.server.dal.getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn(cert.to); + const certified = await this.server.dal.getWrittenIdtyByPubkeyForUidAndMemberAndCreatedOn( + cert.to + ); if (certified) { - let certBlock = await this.server.dal.getBlockWeHaveItForSure(cert.block_number) + let certBlock = await this.server.dal.getBlockWeHaveItForSure( + cert.block_number + ); theCerts.push({ pubkey: cert.to, uid: certified.uid, @@ -207,15 +249,18 @@ export class WOTBinding extends AbstractController { wasMember: true, // a member is necessarily certified by members cert_time: { block: certBlock.number, - medianTime: certBlock.medianTime + medianTime: certBlock.medianTime, }, sigDate: certified.created_on, - written: (cert.written_block !== null && cert.written_hash) ? { - number: cert.written_block, - hash: cert.written_hash - } : null, - signature: cert.sig - }) + written: + cert.written_block !== null && cert.written_hash + ? 
{ + number: cert.written_block, + hash: cert.written_hash, + } + : null, + signature: cert.sig, + }); } } return { @@ -223,62 +268,72 @@ export class WOTBinding extends AbstractController { uid: idty.uid, sigDate: idty.created_on, isMember: idty.member, - certifications: theCerts - } + certifications: theCerts, + }; } - async identityOf(req:any): Promise<HttpSimpleIdentity> { + async identityOf(req: any): Promise<HttpSimpleIdentity> { let search = await ParametersService.getSearchP(req); - const idty = await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(search) + const idty = await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember( + search + ); if (!idty) { throw constants.ERRORS.NO_MEMBER_MATCHING_PUB_OR_UID; } if (!idty.member) { - throw 'Not a member'; + throw "Not a member"; } return { pubkey: idty.pub, uid: idty.uid, - sigDate: idty.created_on + sigDate: idty.created_on, }; } - async add(req:any): Promise<HttpIdentity> { - const res = await this.pushEntity(req, http2raw.identity, (raw:string) => this.server.writeRawIdentity(raw)) + async add(req: any): Promise<HttpIdentity> { + const res = await this.pushEntity(req, http2raw.identity, (raw: string) => + this.server.writeRawIdentity(raw) + ); return { pubkey: res.pubkey, uids: [], - signed: [] - } + signed: [], + }; } - async certify(req:any): Promise<HttpCert> { - const res = await this.pushEntity(req, http2raw.certification, (raw:string) => this.server.writeRawCertification(raw)) - const target:HttpCertIdentity = { + async certify(req: any): Promise<HttpCert> { + const res = await this.pushEntity( + req, + http2raw.certification, + (raw: string) => this.server.writeRawCertification(raw) + ); + const target: HttpCertIdentity = { issuer: res.idty_issuer, uid: res.idty_uid, timestamp: res.idty_buid, - sig: res.idty_sig - } + sig: res.idty_sig, + }; return { issuer: res.issuer, timestamp: res.buid, sig: res.sig, - target - } + target, + }; } - async revoke(req:any): Promise<HttpResult> { - const res = await this.pushEntity(req, http2raw.revocation, (raw:string) => this.server.writeRawRevocation(raw)) + async revoke(req: any): Promise<HttpResult> { + const res = await this.pushEntity(req, http2raw.revocation, (raw: string) => + this.server.writeRawRevocation(raw) + ); return { - result: true - } + result: true, + }; } async pendingMemberships(): Promise<HttpMembershipList> { const memberships = await this.server.dal.findNewcomers(); const json = { - memberships: memberships.map((ms:DBMembership) => { + memberships: memberships.map((ms: DBMembership) => { return { pubkey: ms.issuer, uid: ms.userid, @@ -287,11 +342,14 @@ export class WOTBinding extends AbstractController { membership: ms.membership, blockNumber: ms.blockNumber, blockHash: ms.blockHash, - written: (!ms.written_number && ms.written_number !== 0) ? null : ms.written_number + written: + !ms.written_number && ms.written_number !== 0 + ? null + : ms.written_number, }; - }) + }), }; - json.memberships = Underscore.sortBy(json.memberships, 'blockNumber'); + json.memberships = Underscore.sortBy(json.memberships, "blockNumber"); json.memberships.reverse(); return json; } diff --git a/app/modules/bma/lib/dtos.ts b/app/modules/bma/lib/dtos.ts index 4756b2415d1ce24612ae10921421dbcc6fd153a3..6a17335d51e3a70bfb6505b43bcd7bbe32da4ce4 100644 --- a/app/modules/bma/lib/dtos.ts +++ b/app/modules/bma/lib/dtos.ts @@ -11,24 +11,24 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {WS2PHead} from "../../ws2p/lib/WS2PCluster" -import {JSONDBPeer} from "../../../lib/db/DBPeer" +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { WS2PHead } from "../../ws2p/lib/WS2PCluster"; +import { JSONDBPeer } from "../../../lib/db/DBPeer"; export const Summary = { duniter: { - "software": String, - "version": String, - "forkWindowSize": Number - } + software: String, + version: String, + forkWindowSize: Number, + }, }; export interface HttpSummary { duniter: { - software: string - version: string - forkWindowSize: number - } + software: string; + version: string; + forkWindowSize: number; + }; } export const Parameters = { @@ -54,217 +54,217 @@ export const Parameters = { percentRot: Number, udTime0: Number, udReevalTime0: Number, - dtReeval: Number + dtReeval: Number, }; export interface HttpParameters { - currency: string - c: number - dt: number - ud0: number - sigPeriod: number - sigStock: number - sigWindow: number - sigValidity: number - sigReplay: number - sigQty: number - idtyWindow: number - msWindow: number - msPeriod: number - xpercent: number - msValidity: number - stepMax: number - medianTimeBlocks: number - avgGenTime: number - dtDiffEval: number - percentRot: number - udTime0: number - udReevalTime0: number - dtReeval: number + currency: string; + c: number; + dt: number; + ud0: number; + sigPeriod: number; + sigStock: number; + sigWindow: number; + sigValidity: number; + sigReplay: number; + sigQty: number; + idtyWindow: number; + msWindow: number; + msPeriod: number; + xpercent: number; + msValidity: number; + stepMax: number; + medianTimeBlocks: number; + avgGenTime: number; + dtDiffEval: number; + percentRot: number; + udTime0: number; + udReevalTime0: number; + dtReeval: number; } export const Membership = { - "signature": String, - "membership": { - "version": Number, - "currency": String, - "issuer": String, - "membership": String, - "date": Number, - "sigDate": Number, - "raw": String - } + signature: String, + membership: { + version: Number, + currency: String, + issuer: String, + membership: String, + date: Number, + sigDate: Number, + raw: String, + }, }; export interface HttpMembership { - signature: string + signature: string; membership: { - version: number - currency: string - issuer: string - membership: string - date: number - sigDate: number - raw: string - } + version: number; + currency: string; + issuer: string; + membership: string; + date: number; + sigDate: number; + raw: string; + }; } export const Memberships = { - "pubkey": String, - "uid": String, - "sigDate": String, - "memberships": [ + pubkey: String, + uid: String, + sigDate: String, + memberships: [ { - "version": Number, - "currency": String, - "membership": String, - "blockNumber": Number, - "blockHash": String, - "written": Number - } - ] + version: Number, + currency: String, + membership: String, + blockNumber: Number, + blockHash: String, + written: Number, + }, + ], }; export interface HttpMemberships { - pubkey: string - uid: string - sigDate: string + pubkey: string; + uid: string; + sigDate: string; memberships: { - version: number - currency: string - membership: string - blockNumber: number - blockHash: string - written: number - }[] + version: number; + currency: string; + membership: string; + blockNumber: number; + blockHash: string; + written: number; + }[]; } export const MembershipList = { - "memberships": [ + memberships: [ { - "pubkey": String, - "uid": String, - "version": Number, - 
"currency": String, - "membership": String, - "blockNumber": Number, - "blockHash": String, - "written": Number - } - ] + pubkey: String, + uid: String, + version: Number, + currency: String, + membership: String, + blockNumber: Number, + blockHash: String, + written: Number, + }, + ], }; export interface HttpMembershipList { memberships: { - pubkey: string - uid: string - version: number - currency: string - membership: string - blockNumber: number - blockHash: string - written: number|null - }[] + pubkey: string; + uid: string; + version: number; + currency: string; + membership: string; + blockNumber: number; + blockHash: string; + written: number | null; + }[]; } export const TransactionOfBlock = { - "version": Number, - "currency": String, - "comment": String, - "locktime": Number, - "signatures": [String], - "outputs": [String], - "inputs": [String], - "unlocks": [String], - "block_number": Number, - "blockstamp": String, - "blockstampTime": Number, - "time": Number, - "issuers": [String] + version: Number, + currency: String, + comment: String, + locktime: Number, + signatures: [String], + outputs: [String], + inputs: [String], + unlocks: [String], + block_number: Number, + blockstamp: String, + blockstampTime: Number, + time: Number, + issuers: [String], }; export interface HttpTransactionOfBlock { - version: number - currency: string - locktime: number - hash: string - blockstamp: string - blockstampTime: number - issuers: string[] - inputs: string[] - outputs: string[] - unlocks: string[] - signatures: string[] - comment: string + version: number; + currency: string; + locktime: number; + hash: string; + blockstamp: string; + blockstampTime: number; + issuers: string[]; + inputs: string[]; + outputs: string[]; + unlocks: string[]; + signatures: string[]; + comment: string; } export const Block = { - "version": Number, - "currency": String, - "number": Number, - "issuer": String, - "issuersFrame": Number, - "issuersFrameVar": Number, - "issuersCount": Number, - "parameters": String, - "membersCount": Number, - "monetaryMass": Number, - "powMin": Number, - "time": Number, - "medianTime": Number, - "dividend": Number, - "unitbase": Number, - "hash": String, - "previousHash": String, - "previousIssuer": String, - "identities": [String], - "certifications": [String], - "joiners": [String], - "actives": [String], - "leavers": [String], - "revoked": [String], - "excluded": [String], - "transactions": [TransactionOfBlock], - "nonce": Number, - "inner_hash": String, - "signature": String, - "raw": String + version: Number, + currency: String, + number: Number, + issuer: String, + issuersFrame: Number, + issuersFrameVar: Number, + issuersCount: Number, + parameters: String, + membersCount: Number, + monetaryMass: Number, + powMin: Number, + time: Number, + medianTime: Number, + dividend: Number, + unitbase: Number, + hash: String, + previousHash: String, + previousIssuer: String, + identities: [String], + certifications: [String], + joiners: [String], + actives: [String], + leavers: [String], + revoked: [String], + excluded: [String], + transactions: [TransactionOfBlock], + nonce: Number, + inner_hash: String, + signature: String, + raw: String, }; export interface HttpBlock { - version: number - currency: string - number: number - issuer: string - issuersFrame: number - issuersFrameVar: number - issuersCount: number - parameters: string - membersCount: number - monetaryMass: number - powMin: number - time: number - medianTime: number - dividend: number|null - unitbase: number - hash: 
string - previousHash: string - previousIssuer: string - identities: string[] - certifications: string[] - joiners: string[] - actives: string[] - leavers: string[] - revoked: string[] - excluded: string[] - transactions: HttpTransactionOfBlock[] - nonce: number - inner_hash: string - signature: string - raw: string -} - -export function block2HttpBlock(blockDTO:BlockDTO): HttpBlock { + version: number; + currency: string; + number: number; + issuer: string; + issuersFrame: number; + issuersFrameVar: number; + issuersCount: number; + parameters: string; + membersCount: number; + monetaryMass: number; + powMin: number; + time: number; + medianTime: number; + dividend: number | null; + unitbase: number; + hash: string; + previousHash: string; + previousIssuer: string; + identities: string[]; + certifications: string[]; + joiners: string[]; + actives: string[]; + leavers: string[]; + revoked: string[]; + excluded: string[]; + transactions: HttpTransactionOfBlock[]; + nonce: number; + inner_hash: string; + signature: string; + raw: string; +} + +export function block2HttpBlock(blockDTO: BlockDTO): HttpBlock { return { version: blockDTO.version, currency: blockDTO.currency, @@ -291,667 +291,673 @@ export function block2HttpBlock(blockDTO:BlockDTO): HttpBlock { leavers: blockDTO.leavers, revoked: blockDTO.revoked, excluded: blockDTO.excluded, - transactions: blockDTO.transactions.map((tx):HttpTransactionOfBlock => { - return { - version: tx.version, - currency: tx.currency, - comment: tx.comment, - locktime: tx.locktime, - issuers: tx.issuers, - signatures: tx.signatures, - outputs: tx.outputs, - inputs: tx.inputs, - unlocks: tx.unlocks, - hash: tx.hash, - blockstamp: tx.blockstamp, - blockstampTime: tx.blockstampTime, + transactions: blockDTO.transactions.map( + (tx): HttpTransactionOfBlock => { + return { + version: tx.version, + currency: tx.currency, + comment: tx.comment, + locktime: tx.locktime, + issuers: tx.issuers, + signatures: tx.signatures, + outputs: tx.outputs, + inputs: tx.inputs, + unlocks: tx.unlocks, + hash: tx.hash, + blockstamp: tx.blockstamp, + blockstampTime: tx.blockstampTime, + }; } - }), + ), nonce: blockDTO.nonce, inner_hash: blockDTO.inner_hash, signature: blockDTO.signature, - raw: blockDTO.getRawSigned() - } + raw: blockDTO.getRawSigned(), + }; } export const Hardship = { - "block": Number, - "level": Number + block: Number, + level: Number, }; export interface HttpHardship { - block: number - level: number + block: number; + level: number; } export const Difficulty = { - "uid": String, - "level": Number + uid: String, + level: Number, }; export interface HttpDifficulty { - uid: string - level: number + uid: string; + level: number; } export const Difficulties = { - "block": Number, - "levels": [Difficulty] + block: Number, + levels: [Difficulty], }; export interface HttpDifficulties { - block: number - levels: HttpDifficulty[] + block: number; + levels: HttpDifficulty[]; } export const Blocks = [Block]; export const Stat = { - "result": { - "blocks": [Number] - } + result: { + blocks: [Number], + }, }; export interface HttpStat { result: { - blocks: number[] - } + blocks: number[]; + }; } export const Branches = { - "blocks": [Block] + blocks: [Block], }; export interface HttpBranches { - blocks: HttpBlock[] + blocks: HttpBlock[]; } export const Peer = { - "version": Number, - "currency": String, - "pubkey": String, - "block": String, - "endpoints": [String], - "signature": String, - "raw": String + version: Number, + currency: String, + pubkey: String, + block: 
String, + endpoints: [String], + signature: String, + raw: String, }; export interface HttpPeer { - version: number - currency: string - pubkey: string - block: string - endpoints: string[] - signature: string - raw: string + version: number; + currency: string; + pubkey: string; + block: string; + endpoints: string[]; + signature: string; + raw: string; } export const DBPeer = { - "version": Number, - "currency": String, - "pubkey": String, - "block": String, - "status": String, - "first_down": Number, - "last_try": Number, - "endpoints": [String], - "signature": String, - "raw": String + version: Number, + currency: String, + pubkey: String, + block: String, + status: String, + first_down: Number, + last_try: Number, + endpoints: [String], + signature: String, + raw: String, }; export const Peers = { - "peers": [DBPeer] + peers: [DBPeer], }; export interface HttpPeers { - peers: JSONDBPeer[] + peers: JSONDBPeer[]; } export interface HttpWS2PInfo { peers: { - level1: number, - level2: number - } + level1: number; + level2: number; + }; } export interface HttpWS2PHeads { - heads: WS2PHead[] + heads: WS2PHead[]; } export const MerkleOfPeers = { - "depth": Number, - "nodesCount": Number, - "leavesCount": Number, - "root": String, - "leaves": [String], - "leaf": { - "hash": String, - "value": DBPeer - } + depth: Number, + nodesCount: Number, + leavesCount: Number, + root: String, + leaves: [String], + leaf: { + hash: String, + value: DBPeer, + }, }; export interface HttpMerkleOfPeers { - depth: number - nodesCount: number - leavesCount: number - root: string - leaves: string[] + depth: number; + nodesCount: number; + leavesCount: number; + root: string; + leaves: string[]; leaf: { - hash: string - value: JSONDBPeer - } + hash: string; + value: JSONDBPeer; + }; } export const Other = { - "pubkey": String, - "meta": { - "block_number": Number, - "block_hash": String + pubkey: String, + meta: { + block_number: Number, + block_hash: String, }, - "uids": [String], - "isMember": Boolean, - "wasMember": Boolean, - "signature": String + uids: [String], + isMember: Boolean, + wasMember: Boolean, + signature: String, }; export interface HttpOther { - pubkey: string, + pubkey: string; meta: { - block_number: number, - block_hash: string - }, - uids: string[], - isMember: boolean, - wasMember: boolean, - signature: string + block_number: number; + block_hash: string; + }; + uids: string[]; + isMember: boolean; + wasMember: boolean; + signature: string; } export const UID = { - "uid": String, - "meta": { - "timestamp": String + uid: String, + meta: { + timestamp: String, }, - "self": String, - "revocation_sig": String, - "revoked": Boolean, - "revoked_on": Number, - "others": [Other] + self: String, + revocation_sig: String, + revoked: Boolean, + revoked_on: Number, + others: [Other], }; export interface HttpUID { - uid: string, + uid: string; meta: { - timestamp: string - }, - self: string, - revocation_sig: string|null, - revoked: boolean, - revoked_on: number|null, - others: HttpOther[] + timestamp: string; + }; + self: string; + revocation_sig: string | null; + revoked: boolean; + revoked_on: number | null; + others: HttpOther[]; } export const Signed = { - "uid": String, - "pubkey": String, - "meta": { - "timestamp": String + uid: String, + pubkey: String, + meta: { + timestamp: String, }, - "cert_time": { - "block": Number, - "block_hash": String + cert_time: { + block: Number, + block_hash: String, }, - "isMember": Boolean, - "wasMember": Boolean, - "signature": String + isMember: Boolean, + 
   wasMember: Boolean,
+  signature: String,
 };
 
 export interface HttpSigned {
-  uid: string,
-  pubkey: string,
+  uid: string;
+  pubkey: string;
   meta: {
-    timestamp: string
-  },
+    timestamp: string;
+  };
   cert_time: {
-    block: number,
-    block_hash: string
-  },
-  isMember: boolean,
-  wasMember: boolean,
-  signature: string
+    block: number;
+    block_hash: string;
+  };
+  isMember: boolean;
+  wasMember: boolean;
+  signature: string;
 }
 
 export const CertIdentity = {
-  "issuer": String,
-  "uid": String,
-  "timestamp": String,
-  "sig": String
+  issuer: String,
+  uid: String,
+  timestamp: String,
+  sig: String,
 };
 
 export interface HttpCertIdentity {
-  issuer: string
-  uid: string
-  timestamp: string
-  sig: string
+  issuer: string;
+  uid: string;
+  timestamp: string;
+  sig: string;
 }
 
 export const Cert = {
-  "issuer": String,
-  "timestamp": String,
-  "sig": String,
-  "target": CertIdentity
+  issuer: String,
+  timestamp: String,
+  sig: String,
+  target: CertIdentity,
 };
 
 export interface HttpCert {
-  issuer: string
-  timestamp: string
-  sig: string
-  target: HttpCertIdentity
+  issuer: string;
+  timestamp: string;
+  sig: string;
+  target: HttpCertIdentity;
 }
 
 export const Identity = {
-  "pubkey": String,
-  "uids": [UID],
-  "signed": [Signed]
+  pubkey: String,
+  uids: [UID],
+  signed: [Signed],
 };
 
 export interface HttpIdentity {
-  pubkey: string,
-  uids: HttpUID[],
-  signed: HttpSigned[]
+  pubkey: string;
+  uids: HttpUID[];
+  signed: HttpSigned[];
 }
 
 export const Result = {
-  "result": Boolean
+  result: Boolean,
 };
 
 export interface HttpResult {
-  result: boolean
+  result: boolean;
 }
 
 export const Lookup = {
-  "partial": Boolean,
-  "results": [Identity]
+  partial: Boolean,
+  results: [Identity],
 };
 
 export interface HttpLookup {
-  partial: boolean
-  results: HttpIdentity[]
+  partial: boolean;
+  results: HttpIdentity[];
 }
 
 export const Members = {
-  "results": [{
-    pubkey: String,
-    uid: String
-  }]
+  results: [
+    {
+      pubkey: String,
+      uid: String,
+    },
+  ],
 };
 
 export interface HttpMembers {
   results: {
-    pubkey: string,
-    uid: string
-  }[]
+    pubkey: string;
+    uid: string;
+  }[];
 }
 
 export const RequirementsCert = {
   from: String,
   to: String,
   expiresIn: Number,
-  sig: String
+  sig: String,
 };
 
 export interface HttpRequirementsCert {
-  from: string
-  to: string
-  expiresIn: number
-  sig: string
+  from: string;
+  to: string;
+  expiresIn: number;
+  sig: string;
 }
 
 export const RequirementsPendingCert = {
   from: String,
   to: String,
   blockstamp: String,
-  sig: String
+  sig: String,
 };
 
 export interface HttpRequirementsPendingCert {
-  from: string
-  to: string
-  blockstamp: string
-  sig: string
+  from: string;
+  to: string;
+  blockstamp: string;
+  sig: string;
 }
 
 export const RequirementsPendingMembership = {
   type: String,
   blockstamp: String,
-  sig: String
+  sig: String,
 };
 
 export interface HttpRequirementsPendingMembership {
-  type: string,
-  blockstamp: string,
-  sig: string
+  type: string;
+  blockstamp: string;
+  sig: string;
 }
 
 export const Requirements = {
-  "identities": [{
-    pubkey: String,
-    uid: String,
-    meta: {
-      timestamp: String
+  identities: [
+    {
+      pubkey: String,
+      uid: String,
+      meta: {
+        timestamp: String,
+      },
+      sig: String,
+      revocation_sig: String,
+      revoked: Boolean,
+      revoked_on: Number,
+      expired: Boolean,
+      outdistanced: Boolean,
+      isSentry: Boolean,
+      wasMember: Boolean,
+      certifications: [RequirementsCert],
+      pendingCerts: [RequirementsPendingCert],
+      pendingMemberships: [RequirementsPendingMembership],
+      membershipPendingExpiresIn: Number,
+      membershipExpiresIn: Number,
     },
-    sig: String,
-    revocation_sig: String,
-    revoked: Boolean,
-    revoked_on: Number,
-    expired: Boolean,
-    outdistanced: Boolean,
-    isSentry: Boolean,
-    wasMember: Boolean,
-    certifications: [RequirementsCert],
-    pendingCerts: [RequirementsPendingCert],
-    pendingMemberships: [RequirementsPendingMembership],
-    membershipPendingExpiresIn: Number,
-    membershipExpiresIn: Number
-  }]
+  ],
 };
 
 export interface HttpRequirements {
-  identities: HttpIdentityRequirement[]
+  identities: HttpIdentityRequirement[];
 }
 
 export interface HttpIdentityRequirement {
-  pubkey: string
-  uid: string
+  pubkey: string;
+  uid: string;
   meta: {
-    timestamp: string
-  }
-  sig: string
-  revocation_sig: string | null
-  revoked: boolean
-  revoked_on: number | null
-  expired: boolean
-  outdistanced: boolean
-  isSentry: boolean
-  wasMember: boolean
-  certifications: HttpRequirementsCert[]
-  pendingCerts: HttpRequirementsPendingCert[]
-  pendingMemberships: HttpRequirementsPendingMembership[]
-  membershipPendingExpiresIn: number
-  membershipExpiresIn: number
+    timestamp: string;
+  };
+  sig: string;
+  revocation_sig: string | null;
+  revoked: boolean;
+  revoked_on: number | null;
+  expired: boolean;
+  outdistanced: boolean;
+  isSentry: boolean;
+  wasMember: boolean;
+  certifications: HttpRequirementsCert[];
+  pendingCerts: HttpRequirementsPendingCert[];
+  pendingMemberships: HttpRequirementsPendingMembership[];
+  membershipPendingExpiresIn: number;
+  membershipExpiresIn: number;
 }
 
 export const Certification = {
-  "pubkey": String,
-  "uid": String,
-  "isMember": Boolean,
-  "wasMember": Boolean,
-  "cert_time": {
-    "block": Number,
-    "medianTime": Number
+  pubkey: String,
+  uid: String,
+  isMember: Boolean,
+  wasMember: Boolean,
+  cert_time: {
+    block: Number,
+    medianTime: Number,
   },
-  "sigDate": String,
-  "written": {
-    "number": Number,
-    "hash": String
+  sigDate: String,
+  written: {
+    number: Number,
+    hash: String,
   },
-  "signature": String
+  signature: String,
 };
 
 export interface HttpCertification {
-  pubkey: string
-  uid: string
-  isMember: boolean
-  wasMember: boolean
+  pubkey: string;
+  uid: string;
+  isMember: boolean;
+  wasMember: boolean;
   cert_time: {
-    block: number
-    medianTime: number
-  }
-  sigDate: string
+    block: number;
+    medianTime: number;
+  };
+  sigDate: string;
   written: {
-    number: number
-    hash: string
-  } | null
-  signature: string
+    number: number;
+    hash: string;
+  } | null;
+  signature: string;
 }
 
 export const Certifications = {
-  "pubkey": String,
-  "uid": String,
-  "sigDate": String,
-  "isMember": Boolean,
-  "certifications": [Certification]
+  pubkey: String,
+  uid: String,
+  sigDate: String,
+  isMember: Boolean,
+  certifications: [Certification],
 };
 
 export interface HttpCertifications {
-  pubkey: string
-  uid: string
-  sigDate: string
-  isMember: boolean
-  certifications: HttpCertification[]
+  pubkey: string;
+  uid: string;
+  sigDate: string;
+  isMember: boolean;
+  certifications: HttpCertification[];
 }
 
 export const SimpleIdentity = {
-  "pubkey": String,
-  "uid": String,
-  "sigDate": String
+  pubkey: String,
+  uid: String,
+  sigDate: String,
 };
 
 export interface HttpSimpleIdentity {
-  pubkey: string
-  uid: string
-  sigDate: string
+  pubkey: string;
+  uid: string;
+  sigDate: string;
 }
 
 export const Transaction = {
-  "version": Number,
-  "currency": String,
-  "issuers": [String],
-  "inputs": [String],
-  "unlocks": [String],
-  "outputs": [String],
-  "comment": String,
-  "locktime": Number,
-  "signatures": [String],
-  "raw": String,
-  "written_block": Number,
-  "hash": String
+  version: Number,
+  currency: String,
+  issuers: [String],
+  inputs: [String],
+  unlocks: [String],
+  outputs: [String],
+  comment: String,
+  locktime: Number,
+  signatures: [String],
+  raw: String,
+  written_block: Number,
+  hash: String,
 };
 
 export interface HttpTransaction {
-  version: number
-  currency: string
-  issuers: string[]
-  inputs: string[]
-  unlocks: string[]
-  outputs: string[]
-  comment: string
-  locktime: number
-  signatures: string[]
-  raw: string
-  written_block: number|null
-  hash: string
+  version: number;
+  currency: string;
+  issuers: string[];
+  inputs: string[];
+  unlocks: string[];
+  outputs: string[];
+  comment: string;
+  locktime: number;
+  signatures: string[];
+  raw: string;
+  written_block: number | null;
+  hash: string;
 }
 
 export interface HttpTransactionPending {
-  version: number
-  issuers: string[]
-  inputs: string[]
-  unlocks: string[]
-  outputs: string[]
-  comment: string
-  locktime: number
-  signatures: string[]
-  hash: string
+  version: number;
+  issuers: string[];
+  inputs: string[];
+  unlocks: string[];
+  outputs: string[];
+  comment: string;
+  locktime: number;
+  signatures: string[];
+  hash: string;
 }
 
 export const Source = {
-  "type": String,
-  "noffset": Number,
-  "identifier": String,
-  "amount": Number,
-  "base": Number,
-  "conditions": String
+  type: String,
+  noffset: Number,
+  identifier: String,
+  amount: Number,
+  base: Number,
+  conditions: String,
 };
 
 export interface HttpSource {
-  type: string
-  noffset: number
-  identifier: string
-  amount: number
-  base: number
-  conditions: string
+  type: string;
+  noffset: number;
+  identifier: string;
+  amount: number;
+  base: number;
+  conditions: string;
 }
 
 export const Sources = {
-  "currency": String,
-  "pubkey": String,
-  "sources": [Source]
+  currency: String,
+  pubkey: String,
+  sources: [Source],
 };
 
 export interface HttpSources {
-  currency: string
-  pubkey: string
-  sources: HttpSource[]
+  currency: string;
+  pubkey: string;
+  sources: HttpSource[];
 }
 
 export const TxOfHistory = {
-  "version": Number,
-  "issuers": [String],
-  "inputs": [String],
-  "unlocks": [String],
-  "outputs": [String],
-  "comment": String,
-  "locktime": Number,
-  "received": Number,
-  "signatures": [String],
-  "hash": String,
-  "block_number": Number,
-  "time": Number,
-  "blockstamp": String,
-  "blockstampTime": Number
+  version: Number,
+  issuers: [String],
+  inputs: [String],
+  unlocks: [String],
+  outputs: [String],
+  comment: String,
+  locktime: Number,
+  received: Number,
+  signatures: [String],
+  hash: String,
+  block_number: Number,
+  time: Number,
+  blockstamp: String,
+  blockstampTime: Number,
 };
 
 export interface HttpTxOfHistory {
-  version: number
-  issuers: string[]
-  inputs: string[]
-  unlocks: string[]
-  outputs: string[]
-  comment: string
-  locktime: number
-  received: number
-  signatures: string[]
-  hash: string
-  block_number: number|null
-  time: number|null
-  blockstamp: string
-  blockstampTime: number|null
+  version: number;
+  issuers: string[];
+  inputs: string[];
+  unlocks: string[];
+  outputs: string[];
+  comment: string;
+  locktime: number;
+  received: number;
+  signatures: string[];
+  hash: string;
+  block_number: number | null;
+  time: number | null;
+  blockstamp: string;
+  blockstampTime: number | null;
 }
 
 export const TxHistory = {
-  "currency": String,
-  "pubkey": String,
-  "history": {
-    "sent": [TxOfHistory],
-    "received": [TxOfHistory],
-    "sending": [TxOfHistory],
-    "receiving": [TxOfHistory],
-    "pending": [TxOfHistory]
-  }
+  currency: String,
+  pubkey: String,
+  history: {
+    sent: [TxOfHistory],
+    received: [TxOfHistory],
+    sending: [TxOfHistory],
+    receiving: [TxOfHistory],
+    pending: [TxOfHistory],
+  },
 };
 
 export interface HttpTxHistory {
-  currency: string
-  pubkey: string
+  currency: string;
+  pubkey: string;
   history: {
-    sent: HttpTxOfHistory[]
-    received: HttpTxOfHistory[]
-    sending: HttpTxOfHistory[]
-    receiving: HttpTxOfHistory[]
-    pending: HttpTxOfHistory[]
-  }
+    sent: HttpTxOfHistory[];
+    received: HttpTxOfHistory[];
+    sending: HttpTxOfHistory[];
+    receiving: HttpTxOfHistory[];
+    pending: HttpTxOfHistory[];
+  };
 }
 
 export const TxPending = {
-  "currency": String,
-  "pending": [Transaction]
+  currency: String,
+  pending: [Transaction],
 };
 
 export interface HttpTxPending {
-  currency: string
-  pending: HttpTransactionPending[]
+  currency: string;
+  pending: HttpTransactionPending[];
 }
 
 export const UD = {
-  "block_number": Number,
-  "consumed": Boolean,
-  "time": Number,
-  "amount": Number,
-  "base": Number
+  block_number: Number,
+  consumed: Boolean,
+  time: Number,
+  amount: Number,
+  base: Number,
 };
 
 export interface HttpUD {
-  block_number: number
-  consumed: boolean
-  time: number
-  amount: number
-  base: number
+  block_number: number;
+  consumed: boolean;
+  time: number;
+  amount: number;
+  base: number;
 }
 
 export const UDHistory = {
-  "currency": String,
-  "pubkey": String,
-  "history": {
-    "history": [UD]
-  }
+  currency: String,
+  pubkey: String,
+  history: {
+    history: [UD],
+  },
 };
 
 export interface HttpUDHistory {
-  currency: string
-  pubkey: string
+  currency: string;
+  pubkey: string;
   history: {
-    history: HttpUD[]
-  }
+    history: HttpUD[];
+  };
 }
 
 export const BooleanDTO = {
-  "success": Boolean
+  success: Boolean,
 };
 
 export const SummaryConf = {
-  "cpu": Number
+  cpu: Number,
 };
 
 export const AdminSummary = {
-  "version": String,
-  "host": String,
-  "current": Block,
-  "rootBlock": Block,
-  "pubkey": String,
-  "seckey": String,
-  "conf": SummaryConf,
-  "parameters": Parameters,
-  "lastUDBlock": Block
+  version: String,
+  host: String,
+  current: Block,
+  rootBlock: Block,
+  pubkey: String,
+  seckey: String,
+  conf: SummaryConf,
+  parameters: Parameters,
+  lastUDBlock: Block,
 };
 
 export const PoWSummary = {
-  "total": Number,
-  "mirror": Boolean,
-  "waiting": Boolean
+  total: Number,
+  mirror: Boolean,
+  waiting: Boolean,
 };
 
 export const PreviewPubkey = {
-  "pubkey": String
+  pubkey: String,
 };
 
 export const Sandbox = {
   size: Number,
-  free: Number
+  free: Number,
 };
 
 export interface HttpSandbox {
-  size: number
-  free: number
+  size: number;
+  free: number;
 }
 
 export const IdentitySandbox = Sandbox;
@@ -961,25 +967,25 @@ export const TransactionSandbox = Sandbox;
 
 export const Sandboxes = {
   identities: IdentitySandbox,
   memberships: MembershipSandbox,
-  transactions: TransactionSandbox
+  transactions: TransactionSandbox,
 };
 
 export interface HttpSandboxes {
-  identities: HttpSandbox
-  memberships: HttpSandbox
-  transactions: HttpSandbox
+  identities: HttpSandbox;
+  memberships: HttpSandbox;
+  transactions: HttpSandbox;
 }
 
 export const LogLink = {
-  link: String
+  link: String,
 };
 
 export interface HttpMilestonePage {
-  totalPages: number
-  chunkSize: number
-  milestonesPerPage: number
-  currentPage?: number
-  blocks?: HttpBlock[]
+  totalPages: number;
+  chunkSize: number;
+  milestonesPerPage: number;
+  currentPage?: number;
+  blocks?: HttpBlock[];
 }
 
 export const Milestones = {
@@ -987,11 +993,11 @@ export const Milestones = {
   chunkSize: Number,
   milestonesPerPage: Number,
   currentPage: Number,
-  "blocks": [Block]
-}
+  blocks: [Block],
+};
 
 export const MilestonesPage = {
   totalPages: Number,
   chunkSize: Number,
   milestonesPerPage: Number,
-}
+};
diff --git a/app/modules/bma/lib/entity/source.ts b/app/modules/bma/lib/entity/source.ts
index 15e678aacb9569b4d1ed78da3cb97efc3f6ee4c9..38bdb1fd5bcef0145e788efc6d804e827098b04e 100644
--- a/app/modules/bma/lib/entity/source.ts
+++ b/app/modules/bma/lib/entity/source.ts
@@ -11,43 +11,41 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {Underscore} from "../../../../lib/common-libs/underscore"
+import { Underscore } from "../../../../lib/common-libs/underscore";
 
 export class Source {
+  [k: string]: any;
 
-  [k:string]: any
-
-  constructor(json:any) {
-    Underscore.keys(json || {}).forEach((key:string) => {
+  constructor(json: any) {
+    Underscore.keys(json || {}).forEach((key: string) => {
       let value = json[key];
       if (key == "number") {
         value = parseInt(value);
-      }
-      else if (key == "consumed") {
+      } else if (key == "consumed") {
         value = !!value;
       }
       this[key] = value;
-    })
+    });
   }
 
   json() {
     return {
-      "type": this.type,
-      "noffset": this.pos,
-      "identifier": this.identifier,
-      "amount": this.amount,
-      "conditions": this.conditions,
-      "base": this.base
+      type: this.type,
+      noffset: this.pos,
+      identifier: this.identifier,
+      amount: this.amount,
+      conditions: this.conditions,
+      base: this.base,
     };
-  };
+  }
 
   UDjson() {
     return {
-      "block_number": this.number,
-      "consumed": this.consumed,
-      "time": this.time,
-      "amount": this.amount,
-      "base": this.base
+      block_number: this.number,
+      consumed: this.consumed,
+      time: this.time,
+      amount: this.amount,
+      base: this.base,
     };
-  };
+  }
 }
diff --git a/app/modules/bma/lib/http2raw.ts b/app/modules/bma/lib/http2raw.ts
index cc7df590dca894a585281a242db643fe6e84a6b8..6b1d1926281521d8073724f0fffaa15a7a928d25 100644
--- a/app/modules/bma/lib/http2raw.ts
+++ b/app/modules/bma/lib/http2raw.ts
@@ -11,38 +11,53 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {BMAConstants} from "./constants"
+import { BMAConstants } from "./constants";
 
 module.exports = {
-  identity: requiresParameter('identity', BMAConstants.ERRORS.HTTP_PARAM_IDENTITY_REQUIRED),
-  certification: requiresParameter('cert', BMAConstants.ERRORS.HTTP_PARAM_CERT_REQUIRED),
-  revocation: requiresParameter('revocation', BMAConstants.ERRORS.HTTP_PARAM_REVOCATION_REQUIRED),
-  transaction: requiresParameter('transaction', BMAConstants.ERRORS.HTTP_PARAM_TX_REQUIRED),
-  peer: requiresParameter('peer', BMAConstants.ERRORS.HTTP_PARAM_PEER_REQUIRED),
-  membership: Http2RawMembership,
-  block: requiresParameter('block', BMAConstants.ERRORS.HTTP_PARAM_BLOCK_REQUIRED),
-  conf: requiresParameter('conf', BMAConstants.ERRORS.HTTP_PARAM_CONF_REQUIRED),
-  cpu: requiresParameter('cpu', BMAConstants.ERRORS.HTTP_PARAM_CPU_REQUIRED)
+  identity: requiresParameter(
+    "identity",
+    BMAConstants.ERRORS.HTTP_PARAM_IDENTITY_REQUIRED
+  ),
+  certification: requiresParameter(
+    "cert",
+    BMAConstants.ERRORS.HTTP_PARAM_CERT_REQUIRED
+  ),
+  revocation: requiresParameter(
+    "revocation",
+    BMAConstants.ERRORS.HTTP_PARAM_REVOCATION_REQUIRED
+  ),
+  transaction: requiresParameter(
+    "transaction",
+    BMAConstants.ERRORS.HTTP_PARAM_TX_REQUIRED
+  ),
+  peer: requiresParameter("peer", BMAConstants.ERRORS.HTTP_PARAM_PEER_REQUIRED),
+  membership: Http2RawMembership,
+  block: requiresParameter(
+    "block",
+    BMAConstants.ERRORS.HTTP_PARAM_BLOCK_REQUIRED
+  ),
+  conf: requiresParameter("conf", BMAConstants.ERRORS.HTTP_PARAM_CONF_REQUIRED),
+  cpu: requiresParameter("cpu", BMAConstants.ERRORS.HTTP_PARAM_CPU_REQUIRED),
 };
 
-function requiresParameter(parameter:string, err:any) {
-  return (req:any) => {
-    if(!req.body || req.body[parameter] === undefined){
+function requiresParameter(parameter: string, err: any) {
+  return (req: any) => {
+    if (!req.body || req.body[parameter] === undefined) {
       throw err;
     }
     return req.body[parameter];
   };
 }
 
-function Http2RawMembership (req:any) {
-  if(!(req.body && req.body.membership)){
+function Http2RawMembership(req: any) {
+  if (!(req.body && req.body.membership)) {
    throw BMAConstants.ERRORS.HTTP_PARAM_MEMBERSHIP_REQUIRED;
   }
   let ms = req.body.membership;
-  if(req.body && req.body.signature){
-    ms = [ms, req.body.signature].join('');
+  if (req.body && req.body.signature) {
+    ms = [ms, req.body.signature].join("");
     if (!ms.match(/\n$/)) {
-      ms += '\n';
+      ms += "\n";
     }
   }
   return ms;
diff --git a/app/modules/bma/lib/limiter.ts b/app/modules/bma/lib/limiter.ts
index f44c13790f7d2ce5b0f1ea8af1abc73bf865e4c2..1dff85357e41afd346c946c82089ccc6a9f61099 100644
--- a/app/modules/bma/lib/limiter.ts
+++ b/app/modules/bma/lib/limiter.ts
@@ -17,24 +17,23 @@
 const A_MINUTE = 60 * 1000;
 const A_SECOND = 1000;
 
 export class Limiter {
-
-  private limitPerSecond:number
-  private limitPerMinute:number
+  private limitPerSecond: number;
+  private limitPerMinute: number;
 
   // Stock of request times
-  private reqsSec:number[] = []
-
+  private reqsSec: number[] = [];
+
   // The length of reqs.
   // It is better to have it instead of calling reqs.length
-  private reqsSecLen:number
+  private reqsSecLen: number;
 
   // Minute specific
-  private reqsMin:number[] = []
-  private reqsMinLen:number
+  private reqsMin: number[] = [];
+  private reqsMinLen: number;
 
-  constructor(strategy: { limitPerSecond:number, limitPerMinute:number }) {
-    this.limitPerSecond = strategy.limitPerSecond
-    this.limitPerMinute = strategy.limitPerMinute
+  constructor(strategy: { limitPerSecond: number; limitPerMinute: number }) {
+    this.limitPerSecond = strategy.limitPerSecond;
+    this.limitPerMinute = strategy.limitPerMinute;
   }
 
   /**
@@ -43,11 +42,17 @@ export class Limiter {
   canAnswerNow() {
     // Rapid decision first.
     // Note: we suppose limitPerSecond < limitPerMinute
-    if (this.reqsSecLen < this.limitPerSecond && this.reqsMinLen < this.limitPerMinute) {
+    if (
+      this.reqsSecLen < this.limitPerSecond &&
+      this.reqsMinLen < this.limitPerMinute
+    ) {
       return true;
     }
     this.updateRequests();
-    return this.reqsSecLen < this.limitPerSecond && this.reqsMinLen < this.limitPerMinute;
+    return (
+      this.reqsSecLen < this.limitPerSecond &&
+      this.reqsMinLen < this.limitPerMinute
+    );
   }
 
   /**
@@ -56,7 +61,9 @@ export class Limiter {
   updateRequests() {
     // Clean current requests stock and make the test again
     const now = Date.now();
-    let i = 0, reqs = this.reqsMin, len = this.reqsMinLen;
+    let i = 0,
+      reqs = this.reqsMin,
+      len = this.reqsMinLen;
     // Reinit specific indicators
     this.reqsSec = [];
     this.reqsMin = [];
@@ -73,7 +80,7 @@ export class Limiter {
     this.reqsSecLen = this.reqsSec.length;
     this.reqsMinLen = this.reqsMin.length;
   }
-
+
   processRequest() {
     const now = Date.now();
     this.reqsSec.push(now);
@@ -85,43 +92,48 @@ export class Limiter {
 
 let LOW_USAGE_STRATEGY = {
   limitPerSecond: 1,
-  limitPerMinute: 30
-}
+  limitPerMinute: 30,
+};
 
 let HIGH_USAGE_STRATEGY = {
   limitPerSecond: 10,
-  limitPerMinute: 300
-}
+  limitPerMinute: 300,
+};
 
 let VERY_HIGH_USAGE_STRATEGY = {
   limitPerSecond: 30,
-  limitPerMinute: 30 * 60 // Limit is only per secon
-}
+  limitPerMinute: 30 * 60, // Limit is only per second
+};
 
 let TEST_STRATEGY = {
   limitPerSecond: 5,
-  limitPerMinute: 6
-}
+  limitPerMinute: 6,
+};
 
 let NO_LIMIT_STRATEGY = {
   limitPerSecond: 1000000,
-  limitPerMinute: 1000000 * 60
-}
+  limitPerMinute: 1000000 * 60,
+};
 
 let disableLimits = false;
 
 export const BMALimitation = {
-
   limitAsLowUsage() {
-    return disableLimits ? new Limiter(NO_LIMIT_STRATEGY) : new Limiter(LOW_USAGE_STRATEGY);
+    return disableLimits
+      ? new Limiter(NO_LIMIT_STRATEGY)
+      : new Limiter(LOW_USAGE_STRATEGY);
   },
 
   limitAsHighUsage() {
-    return disableLimits ? new Limiter(NO_LIMIT_STRATEGY) : new Limiter(HIGH_USAGE_STRATEGY);
+    return disableLimits
+      ? new Limiter(NO_LIMIT_STRATEGY)
+      : new Limiter(HIGH_USAGE_STRATEGY);
   },
 
   limitAsVeryHighUsage() {
-    return disableLimits ? new Limiter(NO_LIMIT_STRATEGY) : new Limiter(VERY_HIGH_USAGE_STRATEGY);
+    return disableLimits
+      ? new Limiter(NO_LIMIT_STRATEGY)
+      : new Limiter(VERY_HIGH_USAGE_STRATEGY);
   },
 
   limitAsUnlimited() {
@@ -129,7 +141,9 @@ export const BMALimitation = {
   },
 
   limitAsTest() {
-    return disableLimits ? new Limiter(NO_LIMIT_STRATEGY) : new Limiter(TEST_STRATEGY);
+    return disableLimits
+      ? new Limiter(NO_LIMIT_STRATEGY)
+      : new Limiter(TEST_STRATEGY);
   },
 
   noLimit() {
@@ -138,5 +152,5 @@ export const BMALimitation = {
 
   withLimit() {
     disableLimits = false;
-  }
+  },
 };
diff --git a/app/modules/bma/lib/network.ts b/app/modules/bma/lib/network.ts
index 1d68a32a2fa2e06e20f8dda89471e31c4b6b20f5..059c4b2c27139c53b6e62283ef06573456318cd3 100644
--- a/app/modules/bma/lib/network.ts
+++ b/app/modules/bma/lib/network.ts
@@ -11,33 +11,32 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {NetworkConfDTO} from "../../../lib/dto/ConfDTO"
-import {Server} from "../../../../server"
-import {BMAConstants} from "./constants"
-import {BMALimitation} from "./limiter"
-import {Underscore} from "../../../lib/common-libs/underscore"
-import {CommonConstants} from "../../../lib/common-libs/constants"
-import {NewLogger} from "../../../lib/logger";
-
-const os = require('os');
-const Q = require('q');
-const ddos = require('ddos');
-const http = require('http');
-const express = require('express');
-const morgan = require('morgan');
-const errorhandler = require('errorhandler');
-const bodyParser = require('body-parser');
-const cors = require('cors');
-const fileUpload = require('express-fileupload');
-const logger = NewLogger()
+import { NetworkConfDTO } from "../../../lib/dto/ConfDTO";
+import { Server } from "../../../../server";
+import { BMAConstants } from "./constants";
+import { BMALimitation } from "./limiter";
+import { Underscore } from "../../../lib/common-libs/underscore";
+import { CommonConstants } from "../../../lib/common-libs/constants";
+import { NewLogger } from "../../../lib/logger";
+
+const os = require("os");
+const Q = require("q");
+const ddos = require("ddos");
+const http = require("http");
+const express = require("express");
+const morgan = require("morgan");
+const errorhandler = require("errorhandler");
+const bodyParser = require("body-parser");
+const cors = require("cors");
+const fileUpload = require("express-fileupload");
+const logger = NewLogger();
 
 export interface NetworkInterface {
-  ip:string|null
-  port:number|null
+  ip: string | null;
+  port: number | null;
 }
 
 export const Network = {
-
   getBestLocalIPv4,
   getBestLocalIPv6: getBestLocalIPv6,
@@ -47,29 +46,45 @@ export const Network = {
   getRandomPort: getRandomPort,
 
-  createServersAndListen: async (name:string, server:Server, interfaces:NetworkInterface[], httpLogs:boolean, logger:any, staticPath:string|null, routingCallback:any, listenWebSocket:any, enableFileUpload:boolean = false) => {
-
+  createServersAndListen: async (
+    name: string,
+    server: Server,
+    interfaces: NetworkInterface[],
+    httpLogs: boolean,
+    logger: any,
+    staticPath: string | null,
+    routingCallback: any,
+    listenWebSocket: any,
+    enableFileUpload: boolean = false
+  ) => {
     const app = express();
 
     // all environments
     if (httpLogs) {
-      app.use(morgan('\x1b[90m:remote-addr - :method :url HTTP/:http-version :status :res[content-length] - :response-time ms\x1b[0m', {
-        stream: {
-          write: function(message:string){
-            message && logger && logger.trace(message.replace(/\n$/,''));
+      app.use(
+        morgan(
+          "\x1b[90m:remote-addr - :method :url HTTP/:http-version :status :res[content-length] - :response-time ms\x1b[0m",
+          {
+            stream: {
+              write: function (message: string) {
+                message && logger && logger.trace(message.replace(/\n$/, ""));
+              },
+            },
           }
-        }
-      }));
+        )
+      );
     }
 
     // DDOS protection
-    const whitelist = interfaces.map(i => i.ip);
-    if (whitelist.indexOf('127.0.0.1') === -1) {
-      whitelist.push('127.0.0.1');
+    const whitelist = interfaces.map((i) => i.ip);
+    if (whitelist.indexOf("127.0.0.1") === -1) {
+      whitelist.push("127.0.0.1");
     }
     const ddosConf = server.conf.dos || {};
-    ddosConf.silentStart = true
-    ddosConf.whitelist = Underscore.uniq((ddosConf.whitelist || []).concat(whitelist));
+    ddosConf.silentStart = true;
+    ddosConf.whitelist = Underscore.uniq(
+      (ddosConf.whitelist || []).concat(whitelist)
+    );
     const ddosInstance = new ddos(ddosConf);
     app.use(ddosInstance.express);
 
@@ -81,21 +96,28 @@ export const Network = {
       app.use(fileUpload());
     }
 
-    app.use(bodyParser.urlencoded({
-      extended: true
-    }));
-    app.use(bodyParser.json({ limit: '10mb' }));
+    app.use(
+      bodyParser.urlencoded({
+        extended: true,
+      })
+    );
+    app.use(bodyParser.json({ limit: "10mb" }));
 
     // development only
-    if (app.get('env') == 'development') {
+    if (app.get("env") == "development") {
       app.use(errorhandler());
     }
 
-    const handleRequest = (method:any, uri:string, promiseFunc:(...args:any[])=>Promise<any>, theLimiter:any) => {
+    const handleRequest = (
+      method: any,
+      uri: string,
+      promiseFunc: (...args: any[]) => Promise<any>,
+      theLimiter: any
+    ) => {
       const limiter = theLimiter || BMALimitation.limitAsUnlimited();
-      method(uri, async function(req:any, res:any) {
-        res.set('Access-Control-Allow-Origin', '*');
-        res.type('application/json');
+      method(uri, async function (req: any, res: any) {
+        res.set("Access-Control-Allow-Origin", "*");
+        res.type("application/json");
         try {
           if (!limiter.canAnswerNow()) {
            throw BMAConstants.ERRORS.HTTP_LIMITATION;
@@ -107,36 +129,57 @@ export const Network = {
         } catch (e) {
           let error = getResultingError(e, logger);
           // HTTP error
-          res.status(error.httpCode).send(JSON.stringify(error.uerr, null, " "));
+          res
+            .status(error.httpCode)
+            .send(JSON.stringify(error.uerr, null, " "));
         }
       });
     };
 
-    const handleFileRequest = (method:any, uri:string, promiseFunc:(...args:any[])=>Promise<any>, theLimiter:any) => {
+    const handleFileRequest = (
+      method: any,
+      uri: string,
+      promiseFunc: (...args: any[]) => Promise<any>,
+      theLimiter: any
+    ) => {
      const limiter = theLimiter || BMALimitation.limitAsUnlimited();
-      method(uri, async function(req:any, res:any) {
-        res.set('Access-Control-Allow-Origin', '*');
+      method(uri, async function (req: any, res: any) {
+        res.set("Access-Control-Allow-Origin", "*");
         try {
           if (!limiter.canAnswerNow()) {
            throw BMAConstants.ERRORS.HTTP_LIMITATION;
           }
           limiter.processRequest();
-          let fileStream:any = await promiseFunc(req);
+          let fileStream: any = await promiseFunc(req);
           // HTTP answer
           fileStream.pipe(res);
         } catch (e) {
           let error = getResultingError(e, logger);
           // HTTP error
-          res.status(error.httpCode).send(JSON.stringify(error.uerr, null, " "));
-          throw e
+          res
+            .status(error.httpCode)
+            .send(JSON.stringify(error.uerr, null, " "));
+          throw e;
         }
       });
     };
 
     routingCallback(app, {
-      httpGET: (uri:string, promiseFunc:(...args:any[])=>Promise<any>, limiter:any) => handleRequest(app.get.bind(app), uri, promiseFunc, limiter),
-      httpPOST: (uri:string, promiseFunc:(...args:any[])=>Promise<any>, limiter:any) => handleRequest(app.post.bind(app), uri, promiseFunc, limiter),
-      httpGETFile: (uri:string, promiseFunc:(...args:any[])=>Promise<any>, limiter:any) => handleFileRequest(app.get.bind(app), uri, promiseFunc, limiter)
+      httpGET: (
+        uri: string,
+        promiseFunc: (...args: any[]) => Promise<any>,
+        limiter: any
+      ) => handleRequest(app.get.bind(app), uri, promiseFunc, limiter),
+      httpPOST: (
+        uri: string,
+        promiseFunc: (...args: any[]) => Promise<any>,
+        limiter: any
+      ) => handleRequest(app.post.bind(app), uri, promiseFunc, limiter),
+      httpGETFile: (
+        uri: string,
+        promiseFunc: (...args: any[]) => Promise<any>,
+        limiter: any
+      ) => handleFileRequest(app.get.bind(app), uri, promiseFunc, limiter),
     });
 
     if (staticPath) {
@@ -145,81 +188,79 @@ export const Network = {
 
     const httpServers = interfaces.map(() => {
       const httpServer = http.createServer(app);
-      const sockets:any = {};
+      const sockets: any = {};
       let nextSocketId = 0;
-      httpServer.on('connection', (socket:any) => {
+      httpServer.on("connection", (socket: any) => {
         const socketId = nextSocketId++;
         sockets[socketId] = socket;
         //logger && logger.debug('socket %s opened', socketId);
-        socket.on('close', () => {
+        socket.on("close", () => {
          //logger && logger.debug('socket %s closed', socketId);
          delete sockets[socketId];
         });
       });
-      httpServer.on('error', (err:any) => {
+      httpServer.on("error", (err: any) => {
         httpServer.errorPropagates(err);
       });
       listenWebSocket && listenWebSocket(httpServer);
       return {
         http: httpServer,
         closeSockets: () => {
-          Underscore.keys(sockets).map((socketId:string) => {
+          Underscore.keys(sockets).map((socketId: string) => {
            sockets[socketId].destroy();
           });
-        }
+        },
       };
     });
 
-    if (httpServers.length == 0){
-      throw 'Duniter does not have any interface to listen to.';
+    if (httpServers.length == 0) {
+      throw "Duniter does not have any interface to listen to.";
     }
 
     // Return API
-    return new BmaApi(name, interfaces, ddosInstance, httpServers, logger)
-  }
-}
+    return new BmaApi(name, interfaces, ddosInstance, httpServers, logger);
+  },
+};
 
 export class BmaApi {
-
-  private listenings:boolean[]
+  private listenings: boolean[];
 
   constructor(
-    private name:string,
-    private interfaces:any,
-    private ddosInstance:any,
-    private httpServers:any,
-    private logger:any
+    private name: string,
+    private interfaces: any,
+    private ddosInstance: any,
+    private httpServers: any,
+    private logger: any
   ) {
-    // May be removed when using Node 5.x where httpServer.listening boolean exists
-    this.listenings = interfaces.map(() => false)
+    this.listenings = interfaces.map(() => false);
   }
 
   getDDOS() {
-    return this.ddosInstance
+    return this.ddosInstance;
   }
 
   async closeConnections() {
     for (let i = 0, len = this.httpServers.length; i < len; i++) {
-      const httpServer = this.httpServers[i].http;
-      const isListening = this.listenings[i];
-      if (isListening) {
-        this.listenings[i] = false;
-        this.logger && this.logger.info(this.name + ' stop listening');
-        await Q.Promise((resolve:any, reject:any) => {
-          httpServer.errorPropagates((err:any) => {
-            reject(err);
-          });
-          this.httpServers[i].closeSockets();
-          httpServer.close((err:any) => {
-            err && this.logger && this.logger.error(err.stack || err);
-            resolve();
+      const httpServer = this.httpServers[i].http;
+      const isListening = this.listenings[i];
+      if (isListening) {
+        this.listenings[i] = false;
+        this.logger && this.logger.info(this.name + " stop listening");
+        await Q.Promise((resolve: any, reject: any) => {
+          httpServer.errorPropagates((err: any) => {
+            reject(err);
+          });
+          this.httpServers[i].closeSockets();
+          httpServer.close((err: any) => {
+            err && this.logger && this.logger.error(err.stack || err);
+            resolve();
+          });
           });
-        });
+      }
     }
-    }
-    return [];
+    return [];
   }
 
   async openConnections() {
@@ -230,21 +271,31 @@ export class BmaApi {
       const netInterface = this.interfaces[i].ip;
       const port = this.interfaces[i].port;
       try {
-        await Q.Promise((resolve:any, reject:any) => {
+        await Q.Promise((resolve: any, reject: any) => {
           // Weird the need of such a hack to catch an exception...
-          httpServer.errorPropagates = function(err:any) {
+          httpServer.errorPropagates = function (err: any) {
            reject(err);
          };
          //httpServer.on('listening', resolve.bind(this, httpServer));
-          httpServer.listen(port, netInterface, (err:any) => {
+          httpServer.listen(port, netInterface, (err: any) => {
            if (err) return reject(err);
            this.listenings[i] = true;
            resolve(httpServer);
          });
        });
-        logger.info(this.name + ' listening on http://' + (netInterface.match(/:/) ? '[' + netInterface + ']' : netInterface) + ':' + port);
+        logger.info(
+          this.name +
+            " listening on http://" +
+            (netInterface.match(/:/)
+              ? "[" + netInterface + "]"
+              : netInterface) +
+            ":" +
+            port
+        );
       } catch (e) {
-        logger.warn('Could NOT listen to http://' + netInterface + ':' + port);
+        logger.warn(
+          "Could NOT listen to http://" + netInterface + ":" + port
+        );
         logger.warn(e);
       }
     }
@@ -253,12 +304,15 @@
   }
 }
 
-function getResultingError(e:any, logger:any) {
+function getResultingError(e: any, logger: any) {
   // Default is 500 unknown error
   let error = BMAConstants.ERRORS.UNKNOWN;
   if (e) {
     // Print eventual stack trace
-    typeof e == 'string' && e !== "Block already known" && logger && logger.error(e);
+    typeof e == "string" &&
+      e !== "Block already known" &&
+      logger &&
+      logger.error(e);
     e.stack && logger && logger.error(e.stack);
     e.message && logger && logger.warn(e.message);
     // BusinessException
@@ -270,8 +324,8 @@ function getResultingError(e:any, logger:any) {
         httpCode: cp.httpCode,
         uerr: {
           ucode: cp.uerr.ucode,
-          message: e.message || e || error.uerr.message
-        }
+          message: e.message || e || error.uerr.message,
+        },
       };
     }
   }
@@ -279,15 +333,23 @@ function getResultingError(e:any, logger:any) {
 }
 
 function getBestLocalIPv4() {
-  return getBestLocal('IPv4');
+  return getBestLocal("IPv4");
 }
 
 function getBestLocalIPv6() {
   const osInterfaces = listInterfaces();
   for (let netInterface of osInterfaces) {
     const addresses = netInterface.addresses;
-    const filtered = Underscore.where(addresses, {family: 'IPv6', scopeid: 0, internal: false })
-    const filtered2 = Underscore.filter(filtered, (address:any) => !address.address.match(/^fe80/) && !address.address.match(/^::1/));
+    const filtered = Underscore.where(addresses, {
+      family: "IPv6",
+      scopeid: 0,
+      internal: false,
+    });
+    const filtered2 = Underscore.filter(
+      filtered,
+      (address: any) =>
+        !address.address.match(/^fe80/) && !address.address.match(/^::1/)
+    );
     if (filtered2[0]) {
       return filtered2[0].address;
     }
@@ -295,7 +357,7 @@ function getBestLocalIPv6() {
   return null;
 }
 
-function getBestLocal(family:string) {
+function getBestLocal(family: string) {
   let netInterfaces = os.networkInterfaces();
   let keys = Underscore.keys(netInterfaces);
   let res = [];
@@ -305,7 +367,7 @@ function getBestLocal(family:string) {
       if (!family || addr.family == family) {
         res.push({
           name: name,
-          value: addr.address
+          value: addr.address,
         });
       }
     }
@@ -321,9 +383,9 @@ function getBestLocal(family:string) {
     /^Wi-Fi/,
     /^lo/,
     /^Loopback/,
-    /^None/
+    /^None/,
   ];
-  const best = Underscore.sortBy(res, function(entry:any) {
+  const best = Underscore.sortBy(res, function (entry: any) {
     for (let i = 0; i < interfacePriorityRegCatcher.length; i++) {
       // `i` is the priority (0 is the better, 1 is the second, ...)
       if (entry.name.match(interfacePriorityRegCatcher[i])) return i;
@@ -340,24 +402,24 @@ function listInterfaces() {
   for (const name of keys) {
     res.push({
       name: name,
-      addresses: netInterfaces[name]
+      addresses: netInterfaces[name],
     });
   }
   return res;
 }
 
-async function upnpConf (noupnp:boolean, logger:any) {
-  const client = require('nat-upnp').createClient();
+async function upnpConf(noupnp: boolean, logger: any) {
+  const client = require("nat-upnp").createClient();
   // Look for 2 random ports
-  const publicPort = await getAvailablePort(client)
-  const privatePort = publicPort
-  const conf:NetworkConfDTO = {
+  const publicPort = await getAvailablePort(client);
+  const privatePort = publicPort;
+  const conf: NetworkConfDTO = {
     proxiesConf: undefined,
     nobma: true,
     bmaWithCrawler: false,
     port: privatePort,
-    ipv4: '127.0.0.1',
-    ipv6: '::1',
+    ipv4: "127.0.0.1",
+    ipv6: "::1",
     dos: null,
     upnp: false,
     httplogs: false,
@@ -365,20 +427,23 @@ async function upnpConf (noupnp:boolean, logger:any) {
   remotehost: null,
   remoteipv4: null,
   remoteipv6: null,
-    nonWoTPeersLimit: CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT
-  }
-  logger && logger.info('Checking UPnP features...');
+    nonWoTPeersLimit: CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT,
+  };
+  logger && logger.info("Checking UPnP features...");
   if (noupnp) {
-    throw Error('No UPnP');
+    throw Error("No UPnP");
   }
   const publicIP = await Q.nbind(client.externalIp, client)();
-  await Q.nbind(client.portMapping, client)({
+  await Q.nbind(
+    client.portMapping,
+    client
+  )({
     public: publicPort,
     private: privatePort,
-    ttl: BMAConstants.UPNP_TTL
+    ttl: BMAConstants.UPNP_TTL,
   });
-  const privateIP = await Q.Promise((resolve:any, reject:any) => {
-    client.findGateway((err:any, res:any, localIP:any) => {
+  const privateIP = await Q.Promise((resolve: any, reject: any) => {
+    client.findGateway((err: any, res: any, localIP: any) => {
      if (err) return reject(err);
      resolve(localIP);
    });
@@ -390,23 +455,32 @@ async function upnpConf (noupnp:boolean, logger:any) {
   return conf;
 }
 
-async function getAvailablePort(client:any) {
-  const mappings:{ public: { port:number }}[] = await Q.nbind(client.getMappings, client)();
-  const externalPortsUsed = mappings.map(m => m.public.port)
-  let availablePort = BMAConstants.BMA_PORTS_START
-  while (externalPortsUsed.indexOf(availablePort) !== -1 && availablePort <= BMAConstants.BMA_PORTS_END) {
-    availablePort++
+async function getAvailablePort(client: any) {
+  const mappings: { public: { port: number } }[] = await Q.nbind(
+    client.getMappings,
+    client
+  )();
+  const externalPortsUsed = mappings.map((m) => m.public.port);
+  let availablePort = BMAConstants.BMA_PORTS_START;
+  while (
+    externalPortsUsed.indexOf(availablePort) !== -1 &&
+    availablePort <= BMAConstants.BMA_PORTS_END
+  ) {
+    availablePort++;
   }
   if (availablePort > BMAConstants.BMA_PORTS_END) {
-    throw "No port available for UPnP"
+    throw "No port available for UPnP";
   }
-  return availablePort
+  return availablePort;
 }
 
-function getRandomPort(conf:NetworkConfDTO) {
+function getRandomPort(conf: NetworkConfDTO) {
   if (conf && conf.remoteport) {
     return conf.remoteport;
   } else {
-    return ~~(Math.random() * (65536 - BMAConstants.PORT_START)) + BMAConstants.PORT_START;
+    return (
+      ~~(Math.random() * (65536 - BMAConstants.PORT_START)) +
+      BMAConstants.PORT_START
+    );
   }
 }
diff --git a/app/modules/bma/lib/parameters.ts b/app/modules/bma/lib/parameters.ts
index 9126088fad473585e7d20f97b4e7282afb9eeab8..49a437f26c53ad1e00efcd9146f2bdcea021c730 100644
--- a/app/modules/bma/lib/parameters.ts
+++ b/app/modules/bma/lib/parameters.ts
@@ -12,143 +12,142 @@
 // GNU Affero General Public License for more details.
 
 "use strict";
-import {BMAConstants} from "./constants"
+import { BMAConstants } from "./constants";
 
-const Q = require('q');
+const Q = require("q");
 
 export class ParametersService {
-
-  static getSearch(req:any, callback:any) {
-    if(!req.params || !req.params.search){
+  static getSearch(req: any, callback: any) {
+    if (!req.params || !req.params.search) {
      callback("No search criteria given");
      return;
    }
    callback(null, req.params.search);
   }
 
-  static getSearchP(req:any) {
-    return Q.nbind(ParametersService.getSearch, this)(req)
+  static getSearchP(req: any) {
+    return Q.nbind(ParametersService.getSearch, this)(req);
   }
 
-  static getCountAndFrom(req:any) {
-    if(!req.params.from){
+  static getCountAndFrom(req: any) {
+    if (!req.params.from) {
      throw "From is required";
    }
-    if(!req.params.count){
+    if (!req.params.count) {
      throw "Count is required";
    }
    const matches = req.params.from.match(/^(\d+)$/);
-    if(!matches){
+    if (!matches) {
      throw "From format is incorrect, must be a positive integer";
    }
    const matches2 = req.params.count.match(/^(\d+)$/);
-    if(!matches2){
+    if (!matches2) {
      throw "Count format is incorrect, must be a positive integer";
    }
    return {
      count: matches2[1],
-      from: matches[1]
+      from: matches[1],
    };
   }
 
-  static getHash(req:any) {
-    if(!req.params.hash){
+  static getHash(req: any) {
+    if (!req.params.hash) {
      throw Error("`hash` is required");
    }
    const matches = req.params.hash.match(BMAConstants.SHA256_HASH);
-    if(!matches){
+    if (!matches) {
      throw Error("`hash` format is incorrect, must be a SHA256 hash");
    }
    return req.params.hash;
-  };
+  }
 
-  static getMinSig(req:any): number {
-    if(!req.params.minsig){
-      return 4 // Default value
+  static getMinSig(req: any): number {
+    if (!req.params.minsig) {
+      return 4; // Default value
    }
-    const matches = req.params.minsig.match(/\d+/)
-    if(!matches){
-      throw Error("`minsig` format is incorrect, must be an integer")
+    const matches = req.params.minsig.match(/\d+/);
+    if (!matches) {
+      throw Error("`minsig` format is incorrect, must be an integer");
    }
-    return parseInt(req.params.minsig)
+    return parseInt(req.params.minsig);
   }
 
-  static getPage(req:any): number|undefined {
-    if(!req.params.page){
-      return undefined
+  static getPage(req: any): number | undefined {
+    if (!req.params.page) {
+      return undefined;
    }
-    const matches = req.params.page.match(/\d+/)
-    if(!matches){
-      throw Error("`page` format is incorrect, must be an integer")
+    const matches = req.params.page.match(/\d+/);
+    if (!matches) {
+      throw Error("`page` format is incorrect, must be an integer");
    }
-    return parseInt(req.params.page)
+    return parseInt(req.params.page);
   }
 
-  static getPubkey = function (req:any, callback:any){
-    if(!req.params.pubkey){
-      callback('Parameter `pubkey` is required');
+  static getPubkey = function (req: any, callback: any) {
+    if (!req.params.pubkey) {
+      callback("Parameter `pubkey` is required");
      return;
    }
    const matches = req.params.pubkey.match(BMAConstants.PUBLIC_KEY);
-    if(!matches){
+    if (!matches) {
      callback("Pubkey format is incorrect, must be a Base58 string");
      return;
    }
    callback(null, matches[0]);
-  }
+  };
 
-  static getPubkeyP(req:any) {
-    return Q.nbind(ParametersService.getPubkey, this)(req)
+  static getPubkeyP(req: any) {
+    return Q.nbind(ParametersService.getPubkey, this)(req);
   }
 
-  static getFrom(req:any, callback:any){
-    if(!req.params.from){
-      callback('Parameter `from` is required');
+  static getFrom(req: any, callback: any) {
+    if (!req.params.from) {
+      callback("Parameter `from` is required");
      return;
    }
    const matches = req.params.from.match(/^(\d+)$/);
-    if(!matches){
+    if (!matches) {
      callback("From format is incorrect, must be a positive or zero integer");
      return;
    }
    callback(null, matches[0]);
   }
 
-  static getFromP(req:any) {
-    return Q.nbind(ParametersService.getFrom, this)(req)
+  static getFromP(req: any) {
+    return Q.nbind(ParametersService.getFrom, this)(req);
   }
 
-  static getTo(req:any, callback:any){
-    if(!req.params.to){
-      callback('Parameter `to` is required');
+  static getTo(req: any, callback: any) {
+    if (!req.params.to) {
+      callback("Parameter `to` is required");
      return;
    }
    const matches = req.params.to.match(/^(\d+)$/);
-    if(!matches){
+    if (!matches) {
      callback("To format is incorrect, must be a positive or zero integer");
      return;
    }
    callback(null, matches[0]);
   }
 
-  static getToP(req:any) {
-    return Q.nbind(ParametersService.getTo, this)(req)
+  static getToP(req: any) {
+    return Q.nbind(ParametersService.getTo, this)(req);
   }
 
-  static getNumber(req:any, callback:any){
-    if(!req.params.number){
+  static getNumber(req: any, callback: any) {
+    if (!req.params.number) {
      callback("Number is required");
      return;
    }
    const matches = req.params.number.match(/^(\d+)$/);
-    if(!matches){
+    if (!matches) {
      callback("Number format is incorrect, must be a positive integer");
      return;
    }
    callback(null, parseInt(matches[1]));
   }
 
-  static getNumberP(req:any) {
-    return Q.nbind(ParametersService.getNumber, this)(req)
+  static getNumberP(req: any) {
+    return Q.nbind(ParametersService.getNumber, this)(req);
   }
 }
diff --git a/app/modules/bma/lib/sanitize.ts b/app/modules/bma/lib/sanitize.ts
index 0a2716741e5d207690ed214b07bd9019fd552553..f45f3469ab34a425c9b19d23167734bd277e3322 100644
--- a/app/modules/bma/lib/sanitize.ts
+++ b/app/modules/bma/lib/sanitize.ts
@@ -11,13 +11,11 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {Underscore} from "../../../lib/common-libs/underscore"
-
-module.exports = function sanitize (json:any, contract:any) {
+import { Underscore } from "../../../lib/common-libs/underscore";
+module.exports = function sanitize(json: any, contract: any) {
   // Tries to sanitize only if contract is given
   if (contract) {
-
     if (Object.prototype.toString.call(contract) === "[object Array]") {
       // Contract is an array
@@ -42,9 +40,9 @@ module.exports = function sanitize (json:any, contract:any) {
     }
   }
 
-  let contractFields = Underscore.keys(contract)
-  let objectFields = Underscore.keys(json)
-  let toDeleteFromObj = Underscore.difference(objectFields, contractFields)
+  let contractFields = Underscore.keys(contract);
+  let objectFields = Underscore.keys(json);
+  let toDeleteFromObj = Underscore.difference(objectFields, contractFields);
 
   // Remove unwanted fields
   for (let i = 0, len = toDeleteFromObj.length; i < len; i++) {
@@ -60,57 +58,49 @@ module.exports = function sanitize (json:any, contract:any) {
     if (propType.name) {
       t = propType.name;
     } else if (propType.length != undefined) {
-      t = 'Array';
+      t = "Array";
     } else {
-      t = 'Object';
+      t = "Object";
     }
     // Test json member type
-    let tjson:any = typeof json[prop];
-    if (~['Array', 'Object'].indexOf(t)) {
-      if (tjson == 'object' && json[prop] !== null) {
-        tjson = json[prop].length == undefined ? 'Object' : 'Array';
+    let tjson: any = typeof json[prop];
+    if (~["Array", "Object"].indexOf(t)) {
+      if (tjson == "object" && json[prop] !== null) {
+        tjson = json[prop].length == undefined ? "Object" : "Array";
       }
     }
     // Check coherence & alter member if needed
     if (json[prop] !== null && t.toLowerCase() != tjson.toLowerCase()) {
       try {
         if (t == "String") {
-          let s = json[prop] == undefined ? '' : json[prop];
+          let s = json[prop] == undefined ? "" : json[prop];
           json[prop] = String(s).valueOf();
-        }
-        else if (t == "Number") {
-          let s = json[prop] == undefined ? '' : json[prop];
+        } else if (t == "Number") {
+          let s = json[prop] == undefined ? "" : json[prop];
           json[prop] = Number(s).valueOf();
-        }
-        else if (t == "Array") {
+        } else if (t == "Array") {
           json[prop] = [];
-        }
-        else if (t == "Object") {
+        } else if (t == "Object") {
           json[prop] = {};
-        }
-        else {
+        } else {
           json[prop] = Boolean();
         }
       } catch (ex) {
         if (t == "String") {
           json[prop] = String();
-        }
-        else if (t == "Number") {
+        } else if (t == "Number") {
           json[prop] = Number();
-        }
-        else if (t == "Array") {
+        } else if (t == "Array") {
           json[prop] = [];
-        }
-        else if (t == "Object") {
+        } else if (t == "Object") {
           json[prop] = {};
-        }
-        else {
+        } else {
           json[prop] = Boolean();
         }
       }
     }
     // Arrays
-    if (t == 'Array') {
+    if (t == "Array") {
       let subt = propType[0];
       for (let j = 0, len2 = json[prop].length; j < len2; j++) {
         if (!(subt == "String" || subt == "Number")) {
@@ -119,7 +109,7 @@ module.exports = function sanitize (json:any, contract:any) {
       }
     }
     // Recursivity
-    if (t == 'Object' && json[prop] !== null) {
+    if (t == "Object" && json[prop] !== null) {
       json[prop] = sanitize(json[prop], contract[prop]);
     }
   }
diff --git a/app/modules/bma/lib/tojson.ts b/app/modules/bma/lib/tojson.ts
index 181e9b215f32fd0509254e34adac520ef42c5046..8330c2cf3cd4d72c61e88798cb135ffb25055106 100644
--- a/app/modules/bma/lib/tojson.ts
+++ b/app/modules/bma/lib/tojson.ts
@@ -11,52 +11,52 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {BlockDTO} from "../../../lib/dto/BlockDTO"
-import {Underscore} from "../../../lib/common-libs/underscore"
+import { BlockDTO } from "../../../lib/dto/BlockDTO";
+import { Underscore } from "../../../lib/common-libs/underscore";
 
 export const stat = (stat: number[]) => {
-  return { "blocks": stat }
-}
+  return { blocks: stat };
+};
 
-export const block = (block:any) => {
-  const json:any = {};
-  json.version = parseInt(block.version)
-  json.nonce = parseInt(block.nonce)
-  json.number = parseInt(block.number)
-  json.powMin = parseInt(block.powMin)
-  json.time = parseInt(block.time)
-  json.medianTime = parseInt(block.medianTime)
-  json.membersCount = parseInt(block.membersCount)
-  json.monetaryMass = parseInt(block.monetaryMass)
-  json.unitbase = parseInt(block.unitbase)
-  json.issuersCount = parseInt(block.issuersCount)
-  json.issuersFrame = parseInt(block.issuersFrame)
-  json.issuersFrameVar = parseInt(block.issuersFrameVar)
-  json.currency = block.currency || ""
-  json.issuer = block.issuer || ""
-  json.signature = block.signature || ""
-  json.hash = block.hash || ""
-  json.parameters = block.parameters || ""
-  json.previousHash = block.previousHash || null
-  json.previousIssuer = block.previousIssuer || null
-  json.inner_hash = block.inner_hash || null
-  json.dividend = parseInt(block.dividend) || null
-  json.identities = (block.identities || [])
-  json.joiners = (block.joiners || [])
-  json.actives = (block.actives || [])
-  json.leavers = (block.leavers || [])
-  json.revoked = (block.revoked || [])
-  json.excluded = (block.excluded || [])
-  json.certifications = (block.certifications || [])
+export const block = (block: any) => {
+  const json: any = {};
+  json.version = parseInt(block.version);
+  json.nonce = parseInt(block.nonce);
+  json.number = parseInt(block.number);
+  json.powMin = parseInt(block.powMin);
+  json.time = parseInt(block.time);
+  json.medianTime = parseInt(block.medianTime);
+  json.membersCount = parseInt(block.membersCount);
+  json.monetaryMass = parseInt(block.monetaryMass);
+  json.unitbase = parseInt(block.unitbase);
+  json.issuersCount = parseInt(block.issuersCount);
+  json.issuersFrame = parseInt(block.issuersFrame);
+  json.issuersFrameVar = parseInt(block.issuersFrameVar);
+  json.currency = block.currency || "";
+  json.issuer = block.issuer || "";
+  json.signature = block.signature || "";
+  json.hash = block.hash || "";
+  json.parameters = block.parameters || "";
+  json.previousHash = block.previousHash || null;
+  json.previousIssuer = block.previousIssuer || null;
+  json.inner_hash = block.inner_hash || null;
+  json.dividend = parseInt(block.dividend) || null;
+  json.identities = block.identities || [];
+  json.joiners = block.joiners || [];
+  json.actives = block.actives || [];
+  json.leavers = block.leavers || [];
+  json.revoked = block.revoked || [];
+  json.excluded = block.excluded || [];
+  json.certifications = block.certifications || [];
   json.transactions = [];
-  block.transactions.forEach((obj:any) => {
-    json.transactions.push(Underscore.omit(obj, 'raw', 'certifiers', 'hash'))
+  block.transactions.forEach((obj: any) => {
+    json.transactions.push(Underscore.omit(obj, "raw", "certifiers", "hash"));
   });
-  json.transactions = block.transactions.map((tx:any) => {
-    tx.inputs = tx.inputs.map((i:any) => i.raw || i)
-    tx.outputs = tx.outputs.map((o:any) => o.raw || o)
-    return tx
-  })
-  json.raw = BlockDTO.fromJSONObject(block).getRawUnSigned()
+  json.transactions = block.transactions.map((tx: any) => {
+    tx.inputs = tx.inputs.map((i: any) => i.raw || i);
+    tx.outputs = tx.outputs.map((o: any) => o.raw || o);
+    return tx;
+  });
+  json.raw = BlockDTO.fromJSONObject(block).getRawUnSigned();
   return json;
-}
\ No newline at end of file
+};
diff --git a/app/modules/bma/lib/upnp.ts b/app/modules/bma/lib/upnp.ts
index 068e804efb5130008ad1beeb5f97b98b5323bc18..3c5eecb980b80cf0018e5753dcfd9f73220ebcae 100644
--- a/app/modules/bma/lib/upnp.ts
+++ b/app/modules/bma/lib/upnp.ts
@@ -11,65 +11,76 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {BMAConstants} from "./constants"
-import {ConfDTO} from "../../../lib/dto/ConfDTO"
+import { BMAConstants } from "./constants";
+import { ConfDTO } from "../../../lib/dto/ConfDTO";
 
-const upnp = require('nat-upnp');
-const Q = require('q');
+const upnp = require("nat-upnp");
+const Q = require("q");
 
-export const Upnp = async function (localPort:number, remotePort:number, logger:any, conf:ConfDTO) {
+export const Upnp = async function (
+  localPort: number,
+  remotePort: number,
+  logger: any,
+  conf: ConfDTO
+) {
   "use strict";
 
-  logger.info('UPnP: configuring...');
-  const api = new UpnpApi(localPort, remotePort, logger, conf)
+  logger.info("UPnP: configuring...");
+  const api = new UpnpApi(localPort, remotePort, logger, conf);
   try {
-    await api.openPort()
+    await api.openPort();
   } catch (e) {
     const client = upnp.createClient();
     try {
      await Q.nbind(client.externalIp, client)();
    } catch (err) {
-      if (err && err.message == 'timeout') {
-        throw 'No UPnP gateway found: your node won\'t be reachable from the Internet. Use --noupnp option to avoid this message.'
+      if (err && err.message == "timeout") {
+        throw "No UPnP gateway found: your node won't be reachable from the Internet. Use --noupnp option to avoid this message.";
      }
      throw err;
    } finally {
      client.close();
    }
   }
-  return api
+  return api;
 };
 
 export class UpnpApi {
-
-  private interval:NodeJS.Timer|null
+  private interval: NodeJS.Timer | null;
 
   constructor(
-    private localPort:number,
-    private remotePort:number,
-    private logger:any,
-    private conf:ConfDTO
-  ) {}
+    private localPort: number,
+    private remotePort: number,
+    private logger: any,
+    private conf: ConfDTO
+  ) {}
 
   openPort() {
     "use strict";
-    return Q.Promise((resolve:any, reject:any) => {
-      const suffix = this.conf.pair.pub.substr(0, 6)
-      this.logger.trace('UPnP: mapping external port %s to local %s...', this.remotePort, this.localPort);
+    return Q.Promise((resolve: any, reject: any) => {
+      const suffix = this.conf.pair.pub.substr(0, 6);
+      this.logger.trace(
+        "UPnP: mapping external port %s to local %s...",
+        this.remotePort,
+        this.localPort
+      );
       const client = upnp.createClient();
-      client.portMapping({
-        'public': this.remotePort,
-        'private': this.localPort,
-        'ttl': BMAConstants.UPNP_TTL,
-        'description': 'duniter:bma:' + suffix
-      }, (err:any) => {
-        client.close();
-        if (err) {
-          this.logger.warn(err);
-          return reject(err);
+      client.portMapping(
+        {
+          public: this.remotePort,
+          private: this.localPort,
+          ttl: BMAConstants.UPNP_TTL,
+          description: "duniter:bma:" + suffix,
+        },
+        (err: any) => {
+          client.close();
+          if (err) {
+            this.logger.warn(err);
+            return reject(err);
+          }
+          resolve();
         }
-        resolve();
-      });
+      );
     });
   }
 
@@ -93,12 +104,15 @@ export class UpnpApi {
   startRegular() {
     this.stopRegular();
     // Update UPnP IGD every INTERVAL seconds
-    this.interval = setInterval(() => this.openPort(), 1000 * BMAConstants.UPNP_INTERVAL)
+    this.interval = setInterval(
+      () => this.openPort(),
+      1000 * BMAConstants.UPNP_INTERVAL
+    );
   }
 
   stopRegular() {
     if (this.interval) {
-      clearInterval(this.interval)
+      clearInterval(this.interval);
     }
   }
-}
\ No newline at end of file
+}
diff --git a/app/modules/check-config.ts b/app/modules/check-config.ts
index 1c93db89983f835b0235c0603d2d1f6565c86b76..c20d17e0d8586c513967ff76c849bfd374a7c5f9 100644
--- a/app/modules/check-config.ts
+++ b/app/modules/check-config.ts
@@ -11,23 +11,24 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {Server} from "../../server"
+import { Server } from "../../server";
 
-const constants = require('../lib/constants');
-const wizard = require('../lib/wizard');
+const constants = require("../lib/constants");
+const wizard = require("../lib/wizard");
 
 module.exports = {
   duniter: {
+    cli: [
+      {
+        name: "check-config",
+        desc: "Checks the node's configuration",
 
-    cli: [{
-      name: 'check-config',
-      desc: 'Checks the node\'s configuration',
-
-      onConfiguredExecute: async (server:Server) => {
-        await server.checkConfig()
-        const logger = require('../lib/logger').NewLogger('wizard')
-        logger.warn('Configuration seems correct.');
-      }
-    }]
-  }
-}
+        onConfiguredExecute: async (server: Server) => {
+          await server.checkConfig();
+          const logger = require("../lib/logger").NewLogger("wizard");
+          logger.warn("Configuration seems correct.");
+        },
+      },
+    ],
+  },
+};
diff --git a/app/modules/config.ts b/app/modules/config.ts
index 441808b4663bb0de9d91f4f6116ea49f1968cdad..cbbff7b09e8ee9794a3e606836755c2994744640 100644
--- a/app/modules/config.ts
+++ b/app/modules/config.ts
@@ -12,157 +12,176 @@
 // GNU Affero General Public License for more details.
 
 "use strict";
-import {ConfDTO} from "../lib/dto/ConfDTO"
-import {Server} from "../../server"
-import {CommonConstants} from "../lib/common-libs/constants"
-import {Directory} from "../lib/system/directory"
-import {Underscore} from "../lib/common-libs/underscore"
-import {ProgramOptions} from "../lib/common-libs/programOptions"
+import { ConfDTO } from "../lib/dto/ConfDTO";
+import { Server } from "../../server";
+import { CommonConstants } from "../lib/common-libs/constants";
+import { Directory } from "../lib/system/directory";
+import { Underscore } from "../lib/common-libs/underscore";
+import { ProgramOptions } from "../lib/common-libs/programOptions";
 
 module.exports = {
   duniter: {
-
     cliOptions: [
-      { value: '--store-txs', desc: 'Enable full transaction history storage.' },
-      { value: '--store-ww', desc: 'Enable WotWizard regular export.' },
+      {
+        value: "--store-txs",
+        desc: "Enable full transaction history storage.",
+      },
+      { value: "--store-ww", desc: "Enable WotWizard regular export." },
     ],
 
     config: {
-      onLoading: async (conf:ConfDTO, program: ProgramOptions) => {
-        conf.msPeriod = conf.msWindow
-        conf.sigReplay = conf.msPeriod
-        conf.switchOnHeadAdvance = CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS
+      onLoading: async (conf: ConfDTO, program: ProgramOptions) => {
+        conf.msPeriod = conf.msWindow;
+        conf.sigReplay = conf.msPeriod;
+        conf.switchOnHeadAdvance =
+          CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS;
         // Transactions storage
         if (program.storeTxs) {
           if (!conf.storage) {
-            conf.storage = { transactions: true, wotwizard: false }
-          }
-          else {
-            conf.storage.transactions = true
+            conf.storage = { transactions: true, wotwizard: false };
+          } else {
+            conf.storage.transactions = true;
           }
         }
         if (program.storeWw) {
           if (!conf.storage) {
-            conf.storage = { transactions: false, wotwizard: true }
-          }
-          else {
-            conf.storage.wotwizard = true
+            conf.storage = { transactions: false, wotwizard: true };
+          } else {
+            conf.storage.wotwizard = true;
           }
         }
       },
 
-      beforeSave: async (conf:ConfDTO) => {
-        conf.msPeriod = conf.msWindow
-        conf.sigReplay = conf.msPeriod
-        conf.switchOnHeadAdvance = CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS
+      beforeSave: async (conf: ConfDTO) => {
+        conf.msPeriod = conf.msWindow;
+        conf.sigReplay = conf.msPeriod;
+        conf.switchOnHeadAdvance =
+          CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS;
         if (!conf.storage) {
           conf.storage = {
             transactions: false,
-            wotwizard: false
-          }
+            wotwizard: false,
+          };
         }
-      }
+      },
     },
-
-
-    cli: [{
-      name: 'config',
-      desc: 'Register configuration in database',
-      // The command does nothing particular, it just stops the process right after configuration phase is over
-      onConfiguredExecute: (server:Server, conf:ConfDTO) => Promise.resolve(conf)
-    }, {
-      name: 'parse-logs',
-      desc: 'Extract data from logs.',
-      logs: true,
-      onConfiguredExecute: async (server:Server, conf:ConfDTO) => {
-        const fs = await Directory.getHomeFS(false, Directory.INSTANCE_HOME, false)
-        const lines = (await fs.fs.fsReadFile(Directory.INSTANCE_HOMELOG_FILE)).split('\n')
-        const aggregates = Underscore.uniq(
-          lines
-            .map(l => l.match(/: (\[\w+\](\[\w+\])*)/))
-            .filter(l => l)
-            .map((l:string[]) => l[1])
-        )
-        console.log(aggregates)
-        const results = aggregates.map((a:string) => {
-          return {
-            name: a,
-            time: lines
-              .filter(l => l.match(new RegExp(a
-                .replace(/\[/g, '\\[')
-                .replace(/\]/g, '\\]')
-              )))
-              .map(l => {
-                const m = l.match(/ (\d+)(\.\d+)?(ms|µs)( \d+)?$/)
-                if (!m) {
-                  throw Error('Wrong match')
-                }
-                return m
-              })
-              .map(match => {
-                return {
-                  qty: parseInt(match[1]),
-                  unit: match[3],
-                }
-              })
-              .reduce((sumMicroSeconds, entry) => {
-                return sumMicroSeconds + (entry.qty * (entry.unit === 'ms' ? 1000 : 1))
-              }, 0) / 1000000
+    cli: [
+      {
+        name: "config",
+        desc: "Register configuration in database",
+        // The command does nothing particular, it just stops the process right after configuration phase is over
+        onConfiguredExecute: (server: Server, conf: ConfDTO) =>
+          Promise.resolve(conf),
+      },
+      {
+        name: "parse-logs",
+        desc: "Extract data from logs.",
+        logs: true,
+        onConfiguredExecute: async (server: Server, conf: ConfDTO) => {
+          const fs = await Directory.getHomeFS(
+            false,
+            Directory.INSTANCE_HOME,
+            false
+          );
+          const lines = (
+            await fs.fs.fsReadFile(Directory.INSTANCE_HOMELOG_FILE)
+          ).split("\n");
+          const aggregates = Underscore.uniq(
+            lines
+              .map((l) => l.match(/: (\[\w+\](\[\w+\])*)/))
+              .filter((l) => l)
+              .map((l: string[]) => l[1])
+          );
+          console.log(aggregates);
+          const results = aggregates.map((a: string) => {
+            return {
+              name: a,
+              time:
+                lines
+                  .filter((l) =>
+                    l.match(
+                      new RegExp(a.replace(/\[/g, "\\[").replace(/\]/g, "\\]"))
+                    )
+                  )
+                  .map((l) => {
+                    const m = l.match(/ (\d+)(\.\d+)?(ms|µs)( \d+)?$/);
+                    if (!m) {
+                      throw Error("Wrong match");
+                    }
+                    return m;
+                  })
+                  .map((match) => {
+                    return {
+                      qty: parseInt(match[1]),
+                      unit: match[3],
+                    };
+                  })
+                  .reduce((sumMicroSeconds, entry) => {
+                    return (
+                      sumMicroSeconds +
+                      entry.qty * (entry.unit === "ms" ? 1000 : 1)
+                    );
+                  }, 0) / 1000000,
+            };
+          });
+          const root: Tree = {
+            name: "root",
+            leaves: {},
+          };
+          for (const r of results) {
+            recursiveReduce(root, r.name, r.time);
           }
-        })
-        const root:Tree = {
-          name: 'root',
-          leaves: {}
-        }
-        for (const r of results) {
-          recursiveReduce(root, r.name, r.time)
-        }
-        recursiveDump(root)
-      }
-    }]
-  }
-}
+          recursiveDump(root);
+        },
+      },
+    ],
   },
 };
 
 interface Leaf {
-  name:string
-  value:number
+  name: string;
+  value: number;
 }
 
 interface Tree {
-  name:string
-  leaves: { [k:string]: Tree|Leaf }
+  name: string;
+  leaves: { [k: string]: Tree | Leaf };
 }
 
-function recursiveReduce(tree:Tree, path:string, duration:number) {
+function recursiveReduce(tree: Tree, path: string, duration: number) {
   if (path.match(/\]\[/)) {
-    const m = (path.match(/^(\[\w+\])(\[.+)/) as string[])
-    const key = m[1]
+    const m = path.match(/^(\[\w+\])(\[.+)/) as string[];
+    const key = m[1];
     if (!tree.leaves[key]) {
       tree.leaves[key] = {
         name: key,
-        leaves: {}
-      }
+        leaves: {},
+      };
     }
-    recursiveReduce(tree.leaves[key] as Tree, m[2], duration)
+    recursiveReduce(tree.leaves[key] as Tree, m[2], duration);
   } else {
     tree.leaves[path] = {
       name: path,
-      value: duration
-    }
+      value: duration,
+    };
   }
 }
 
-function recursiveDump(tree:Tree, level = -1) {
+function recursiveDump(tree: Tree, level = -1) {
   if (level >= 0) {
-    console.log(" ".repeat(level), tree.name)
+    console.log(" ".repeat(level), tree.name);
   }
   for (const k of Object.keys(tree.leaves)) {
-    const element = tree.leaves[k]
+    const element = tree.leaves[k];
     if ((<Tree>element).leaves) {
-      recursiveDump(<Tree>element, level + 1)
+      recursiveDump(<Tree>element, level + 1);
     } else {
-      console.log(" ".repeat(level + 1), (<Leaf>element).name, (<Leaf>element).value + 's')
+      console.log(
+        " ".repeat(level + 1),
+        (<Leaf>element).name,
+        (<Leaf>element).value + "s"
+      );
     }
   }
-}
\ No newline at end of file
+}
diff --git a/app/modules/crawler/index.ts b/app/modules/crawler/index.ts
index ed7594e9a7024d6b9046aae1ecbd9c0f9fddb380..77a3f2d7b9af186fbc024b78a100210732e9fb1a 100644
--- a/app/modules/crawler/index.ts
+++ b/app/modules/crawler/index.ts
@@ -11,62 +11,81 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {ConfDTO} from "../../lib/dto/ConfDTO"
-import {Server} from "../../../server"
-import {Contacter} from "./lib/contacter"
-import {Crawler} from "./lib/crawler"
-import {Synchroniser} from "./lib/sync"
-import {req2fwd} from "./lib/req2fwd"
-import {rawer} from "../../lib/common-libs/index"
-import {PeerDTO} from "../../lib/dto/PeerDTO"
-import {Buid} from "../../lib/common-libs/buid"
-import {BlockDTO} from "../../lib/dto/BlockDTO"
-import {Directory} from "../../lib/system/directory"
-import {FileDAL} from "../../lib/dal/fileDAL"
-import {RemoteSynchronizer} from "./lib/sync/RemoteSynchronizer"
-import {AbstractSynchronizer} from "./lib/sync/AbstractSynchronizer"
-import {LocalPathSynchronizer} from "./lib/sync/LocalPathSynchronizer"
-import {CommonConstants} from "../../lib/common-libs/constants"
-import {DataErrors} from "../../lib/common-libs/errors"
-import {NewLogger} from "../../lib/logger"
-import {CrawlerConstants} from "./lib/constants"
-import {ExitCodes} from "../../lib/common-libs/exit-codes"
-import {connect} from "./lib/connect"
-import {BMARemoteContacter} from "./lib/sync/BMARemoteContacter"
-import {applyMempoolRequirements, forwardToServer, pullSandboxToLocalServer} from "./lib/sandbox"
-import {DBBlock} from "../../lib/db/DBBlock"
+import { ConfDTO } from "../../lib/dto/ConfDTO";
+import { Server } from "../../../server";
+import { Contacter } from "./lib/contacter";
+import { Crawler } from "./lib/crawler";
+import { Synchroniser } from "./lib/sync";
+import { req2fwd } from "./lib/req2fwd";
+import { rawer } from "../../lib/common-libs/index";
+import { PeerDTO } from "../../lib/dto/PeerDTO";
+import { Buid } from "../../lib/common-libs/buid";
+import { BlockDTO } from "../../lib/dto/BlockDTO";
+import { Directory } from "../../lib/system/directory";
+import { FileDAL } from "../../lib/dal/fileDAL";
+import { RemoteSynchronizer } from "./lib/sync/RemoteSynchronizer";
+import { AbstractSynchronizer } from "./lib/sync/AbstractSynchronizer";
+import { LocalPathSynchronizer } from "./lib/sync/LocalPathSynchronizer";
+import { CommonConstants } from "../../lib/common-libs/constants";
+import { DataErrors } from "../../lib/common-libs/errors";
+import { NewLogger } from "../../lib/logger";
+import { CrawlerConstants } from "./lib/constants";
+import { ExitCodes } from "../../lib/common-libs/exit-codes";
+import { connect } from "./lib/connect";
+import { BMARemoteContacter } from "./lib/sync/BMARemoteContacter";
+import {
+  applyMempoolRequirements,
+  forwardToServer,
+  pullSandboxToLocalServer,
+} from "./lib/sandbox";
+import { DBBlock } from "../../lib/db/DBBlock";
 
-const HOST_PATTERN = /^[^:/]+(:[0-9]{1,5})?(\/.*)?$/
-const FILE_PATTERN = /^(\/.+)$/
+const HOST_PATTERN = /^[^:/]+(:[0-9]{1,5})?(\/.*)?$/;
+const FILE_PATTERN = /^(\/.+)$/;
 
 export const CrawlerDependency = {
   duniter: {
-
     service: {
-      process: (server:Server, conf:ConfDTO, logger:any) => new Crawler(server, conf, logger)
+      process: (server: Server, conf: ConfDTO, logger: any) =>
+        new Crawler(server, conf, logger),
     },
 
     methods: {
+      contacter: (host: string, port: number, opts?: any) =>
+        new Contacter(host, port, opts),
 
-      contacter: (host:string, port:number, opts?:any) => new Contacter(host, port, opts),
-
-      pullBlocks: async (server:Server, pubkey = "") => {
+      pullBlocks: async (server: Server, pubkey = "") => {
        const crawler = new Crawler(server, server.conf, server.logger);
        return crawler.pullBlocks(server, pubkey);
      },
 
-      pullSandbox: 
async (server:Server) => {
+    pullSandbox: async (server: Server) => {
       const crawler = new Crawler(server, server.conf, server.logger);
-      return crawler.sandboxPull(server)
+      return crawler.sandboxPull(server);
     },
 
-    synchronize: (server:Server, onHost:string, onPort:number, upTo:number, chunkLength:number, allowLocalSync = false) => {
-      const strategy = new RemoteSynchronizer(onHost, onPort, server, chunkLength, undefined, undefined, allowLocalSync)
-      const remote = new Synchroniser(server, strategy)
-      const syncPromise = remote.sync(upTo, chunkLength)
+    synchronize: (
+      server: Server,
+      onHost: string,
+      onPort: number,
+      upTo: number,
+      chunkLength: number,
+      allowLocalSync = false
+    ) => {
+      const strategy = new RemoteSynchronizer(
+        onHost,
+        onPort,
+        server,
+        chunkLength,
+        undefined,
+        undefined,
+        allowLocalSync
+      );
+      const remote = new Synchroniser(server, strategy);
+      const syncPromise = remote.sync(upTo, chunkLength);
       return {
         flow: remote,
-        syncPromise
+        syncPromise,
       };
     },
 
@@ -77,470 +96,697 @@ export const CrawlerDependency = {
      * @param {number} onPort
      * @returns {Promise<any>}
      */
-    testForSync: (server:Server, onHost:string, onPort:number) => {
-      return RemoteSynchronizer.test(onHost, onPort, server.conf.pair)
-    }
+    testForSync: (server: Server, onHost: string, onPort: number) => {
+      return RemoteSynchronizer.test(onHost, onPort, server.conf.pair);
+    },
   },
 
   cliOptions: [
-    { value: '--nointeractive', desc: 'Disable interactive sync UI.'},
-    { value: '--nocautious', desc: 'Do not check blocks validity during sync.'},
-    { value: '--cautious', desc: 'Check blocks validity during sync (overrides --nocautious option).'},
-    { value: '--nopeers', desc: 'Do not retrieve peers during sync.'},
-    { value: '--nop2p', desc: 'Disables P2P downloading of blocs during sync.'},
-    { value: '--localsync', desc: 'Allow to synchronize on nodes with local network IP address for `sync` command' },
-    { value: '--nosources', desc: 'Do not parse sources (UD, TX) during sync (debug purposes).'},
-    { value: '--nosbx', desc: 'Do not retrieve sandboxes during sync.'},
-    { value: '--onlypeers', desc: 'Will only try to sync peers.'},
-    { value: '--slow', desc: 'Download slowly the blokchcain (for low connnections).'},
-    { value: '--readfilesystem',desc: 'Also read the filesystem to speed up block downloading.'},
-    { value: '--minsig <minsig>', desc: 'Minimum pending signatures count for `crawl-lookup`. Default is 5.'},
+    { value: "--nointeractive", desc: "Disable interactive sync UI." },
+    {
+      value: "--nocautious",
+      desc: "Do not check blocks validity during sync.",
+    },
+    {
+      value: "--cautious",
+      desc:
+        "Check blocks validity during sync (overrides --nocautious option).",
+    },
+    { value: "--nopeers", desc: "Do not retrieve peers during sync." },
+    {
+      value: "--nop2p",
+      desc: "Disables P2P downloading of blocks during sync.",
+    },
+    {
+      value: "--localsync",
+      desc:
+        "Allow synchronizing from nodes with a local network IP address (`sync` command)",
+    },
+    {
+      value: "--nosources",
+      desc: "Do not parse sources (UD, TX) during sync (debug purposes).",
+    },
+    { value: "--nosbx", desc: "Do not retrieve sandboxes during sync." },
+    { value: "--onlypeers", desc: "Will only try to sync peers." },
+    {
+      value: "--slow",
+      desc: "Download the blockchain slowly (for slow connections).",
+    },
+    {
+      value: "--readfilesystem",
+      desc: "Also read the filesystem to speed up block downloading.",
+    },
+    {
+      value: "--minsig <minsig>",
+      desc:
+        "Minimum pending signatures count for `crawl-lookup`. 
Default is 5.", + }, ], - cli: [{ - name: 'sync [source] [to]', - desc: 'Synchronize blockchain from a remote Duniter node. [source] is [host][:port]. [to] defaults to remote current block number.', - preventIfRunning: true, - onConfiguredExecute: async (server:Server) => { - await server.resetData(); - }, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any): Promise<any> => { - const source = params[0] - const to = params[1] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (either a host:port or a file path)' - } - let cautious; - if (program.nocautious) { - cautious = false; - } - if (program.cautious) { - cautious = true; - } - const upTo = parseInt(to); - const chunkLength = 0; - const interactive = !program.nointeractive; - const askedCautious = cautious; - const noShufflePeers = program.noshuffle; + cli: [ + { + name: "sync [source] [to]", + desc: + "Synchronize blockchain from a remote Duniter node. [source] is [host][:port]. [to] defaults to remote current block number.", + preventIfRunning: true, + onConfiguredExecute: async (server: Server) => { + await server.resetData(); + }, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ): Promise<any> => { + const source = params[0]; + const to = params[1]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. (either a host:port or a file path)"; + } + let cautious; + if (program.nocautious) { + cautious = false; + } + if (program.cautious) { + cautious = true; + } + const upTo = parseInt(to); + const chunkLength = 0; + const interactive = !program.nointeractive; + const askedCautious = cautious; + const noShufflePeers = program.noshuffle; - let otherDAL = undefined - if (program.readfilesystem) { - const dbName = program.mdb; - const dbHome = program.home; - const home = Directory.getHome(dbName, dbHome); - const params = await Directory.getHomeParams(false, home) - otherDAL = new FileDAL(params, async() => null as any, async() => null as any) - } + let otherDAL = undefined; + if (program.readfilesystem) { + const dbName = program.mdb; + const dbHome = program.home; + const home = Directory.getHome(dbName, dbHome); + const params = await Directory.getHomeParams(false, home); + otherDAL = new FileDAL( + params, + async () => null as any, + async () => null as any + ); + } - let strategy: AbstractSynchronizer - if (source.match(HOST_PATTERN)) { - const sp = source.split(':') - const onHost = sp[0] - const onPort = parseInt(sp[1] ? sp[1] : '443') // Defaults to 443 - strategy = new RemoteSynchronizer(onHost, onPort, server, CommonConstants.SYNC_BLOCKS_CHUNK, noShufflePeers === true, otherDAL, program.localsync !== undefined) - } else { - strategy = new LocalPathSynchronizer(source, server, CommonConstants.SYNC_BLOCKS_CHUNK) - } - if (program.onlypeers === true) { - return strategy.syncPeers(true) - } else { - const remote = new Synchroniser(server, strategy, interactive === true) + let strategy: AbstractSynchronizer; + if (source.match(HOST_PATTERN)) { + const sp = source.split(":"); + const onHost = sp[0]; + const onPort = parseInt(sp[1] ? 
sp[1] : "443"); // Defaults to 443 + strategy = new RemoteSynchronizer( + onHost, + onPort, + server, + CommonConstants.SYNC_BLOCKS_CHUNK, + noShufflePeers === true, + otherDAL, + program.localsync !== undefined + ); + } else { + strategy = new LocalPathSynchronizer( + source, + server, + CommonConstants.SYNC_BLOCKS_CHUNK + ); + } + if (program.onlypeers === true) { + return strategy.syncPeers(true); + } else { + const remote = new Synchroniser( + server, + strategy, + interactive === true + ); - // If the sync fail, stop the program - process.on('unhandledRejection', (reason: any) => { - if (reason.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) { - NewLogger().error('Synchronization interrupted: no node was found to continue downloading after %s tries.', CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND) - process.exit(ExitCodes.SYNC_FAIL) - } - }) + // If the sync fail, stop the program + process.on("unhandledRejection", (reason: any) => { + if ( + reason.message === + DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK] + ) { + NewLogger().error( + "Synchronization interrupted: no node was found to continue downloading after %s tries.", + CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND + ); + process.exit(ExitCodes.SYNC_FAIL); + } + }); - return remote.sync(upTo, chunkLength, askedCautious) - } - } - }, { - name: 'peer [host] [port]', - desc: 'Exchange peerings with another node', - preventIfRunning: true, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const host = params[0]; - const port = params[1]; - const logger = server.logger; - try { - const ERASE_IF_ALREADY_RECORDED = true; - logger.info('Fetching peering record at %s:%s...', host, port); - let peering = await Contacter.fetchPeer(host, port); - logger.info('Apply peering ...'); - await server.PeeringService.submitP(peering, ERASE_IF_ALREADY_RECORDED, !program.nocautious, true); - logger.info('Applied'); - let selfPeer = await server.dal.getPeer(server.PeeringService.pubkey); - if (!selfPeer) { - await server.PeeringService.generateSelfPeer(server.conf) - selfPeer = await server.dal.getPeer(server.PeeringService.pubkey); + return remote.sync(upTo, chunkLength, askedCautious); + } + }, + }, + { + name: "peer [host] [port]", + desc: "Exchange peerings with another node", + preventIfRunning: true, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const host = params[0]; + const port = params[1]; + const logger = server.logger; + try { + const ERASE_IF_ALREADY_RECORDED = true; + logger.info("Fetching peering record at %s:%s...", host, port); + let peering = await Contacter.fetchPeer(host, port); + logger.info("Apply peering ..."); + await server.PeeringService.submitP( + peering, + ERASE_IF_ALREADY_RECORDED, + !program.nocautious, + true + ); + logger.info("Applied"); + let selfPeer = await server.dal.getPeer( + server.PeeringService.pubkey + ); + if (!selfPeer) { + await server.PeeringService.generateSelfPeer(server.conf); + selfPeer = await server.dal.getPeer(server.PeeringService.pubkey); + } + logger.info("Send self peering ..."); + const p = PeerDTO.fromJSONObject(peering); + const contact = new Contacter( + p.getHostPreferDNS(), + p.getPort() as number, + {} + ); + await contact.postPeer(PeerDTO.fromJSONObject(selfPeer)); + logger.info("Sent."); + await server.disconnect(); + } catch (e) { + logger.error(e.code || e.message || e); + throw Error("Exiting"); + } + }, + }, + { + name: "import <fromHost> <fromPort> 
<search> <toHost> <toPort>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const fromHost = params[0]; + const fromPort = params[1]; + const search = params[2]; + const toHost = params[3]; + const toPort = params[4]; + const logger = server.logger; + try { + const peers = + fromHost && fromPort + ? [ + { + endpoints: [ + ["BASIC_MERKLED_API", fromHost, fromPort].join(" "), + ], + }, + ] + : await server.dal.peerDAL.withUPStatus(); + // Memberships + for (const p of peers) { + const peer = PeerDTO.fromJSONObject(p); + const fromHost = peer.getHostPreferDNS(); + const fromPort = peer.getPort(); + logger.info("Looking at %s:%s...", fromHost, fromPort); + try { + const node = new Contacter(fromHost, fromPort as number, { + timeout: 10000, + }); + const requirements = await node.getRequirements(search); + await req2fwd(requirements, toHost, toPort, logger); + } catch (e) { + logger.error(e); + } + } + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - logger.info('Send self peering ...'); - const p = PeerDTO.fromJSONObject(peering) - const contact = new Contacter(p.getHostPreferDNS(), p.getPort() as number, {}) - await contact.postPeer(PeerDTO.fromJSONObject(selfPeer)) - logger.info('Sent.'); - await server.disconnect(); - } catch(e) { - logger.error(e.code || e.message || e); - throw Error("Exiting"); - } - } - }, { - name: 'import <fromHost> <fromPort> <search> <toHost> <toPort>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const fromHost = params[0]; - const fromPort = params[1]; - const search = params[2]; - const toHost = params[3]; - const toPort = params[4]; - const logger = server.logger; - try { - const peers = fromHost && fromPort ? [{ endpoints: [['BASIC_MERKLED_API', fromHost, fromPort].join(' ')] }] : await server.dal.peerDAL.withUPStatus() - // Memberships - for (const p of peers) { - const peer = PeerDTO.fromJSONObject(p) + }, + }, + { + name: "sync-mempool <from>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const from: string = params[0]; + const { host, port } = extractHostPort(from); + try { + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); const fromHost = peer.getHostPreferDNS(); const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); + logger.info("Looking at %s:%s...", fromHost, fromPort); try { - const node = new Contacter(fromHost, fromPort as number, { timeout: 10000 }); - const requirements = await node.getRequirements(search); - await req2fwd(requirements, toHost, toPort, logger) + const fromHost = await connect(peer, 60 * 1000); + const api = new BMARemoteContacter(fromHost); + await pullSandboxToLocalServer( + server.conf.currency, + api, + server, + logger + ); } catch (e) { logger.error(e); } - } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'sync-mempool <from>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - const logger = NewLogger() - const from: string = params[0] - const { host, port } = extractHostPort(from) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - const fromHost = peer.getHostPreferDNS(); - const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); - try { - const fromHost = await connect(peer, 60*1000) - const api = new BMARemoteContacter(fromHost) - await pullSandboxToLocalServer(server.conf.currency, api, server, logger) + + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'sync-mempool-search <from> <search>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - const search: string = params[1] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - const logger = NewLogger() - const from: string = params[0] - const { host, port } = extractHostPort(from) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - const fromHost = peer.getHostPreferDNS(); - const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); + }, + }, + { + name: "sync-mempool-search <from> <search>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + const search: string = params[1]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const from: string = params[0]; + const { host, port } = extractHostPort(from); try { - const fromHost = await connect(peer) - const res = await fromHost.getRequirements(search) - await applyMempoolRequirements(server.conf.currency, res, server, logger) + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + const fromHost = peer.getHostPreferDNS(); + const fromPort = peer.getPort(); + logger.info("Looking at %s:%s...", fromHost, fromPort); + try { + const fromHost = await connect(peer); + const res = await fromHost.getRequirements(search); + await applyMempoolRequirements( + server.conf.currency, + res, + server, + logger + ); + } catch (e) { + logger.error(e); + } + + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'sync-mempool-fwd <from> <to> <search>', - desc: 'Import all pending data from matching <search>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - const target: string = params[1] - const search: string = params[2] - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - if (!target || !(target.match(HOST_PATTERN) || target.match(FILE_PATTERN))) { - throw 'Target of sync is required. (host[:port])' - } - const logger = NewLogger() - const { host, port } = extractHostPort(source) - const { host: toHost, port: toPort } = extractHostPort(target) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - logger.info('Looking at %s...', source) + }, + }, + { + name: "sync-mempool-fwd <from> <to> <search>", + desc: "Import all pending data from matching <search>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + const target: string = params[1]; + const search: string = params[2]; + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. (host[:port])"; + } + if ( + !target || + !(target.match(HOST_PATTERN) || target.match(FILE_PATTERN)) + ) { + throw "Target of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const { host, port } = extractHostPort(source); + const { host: toHost, port: toPort } = extractHostPort(target); try { - const fromHost = await connect(peer) - const res = await fromHost.getRequirements(search) - await forwardToServer(server.conf.currency, res, toHost, toPort, logger) + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + logger.info("Looking at %s...", source); + try { + const fromHost = await connect(peer); + const res = await fromHost.getRequirements(search); + await forwardToServer( + server.conf.currency, + res, + toHost, + toPort, + logger + ); + } catch (e) { + logger.error(e); + } + + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'pull <from> [<number>]', - desc: 'Pull blocks from <from> source up to block <number>', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const source: string = params[0] - const to = parseInt(params[1]) - if (!source || !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))) { - throw 'Source of sync is required. (host[:port])' - } - const logger = NewLogger() - const { host, port } = extractHostPort(source) - try { - const peer = PeerDTO.fromJSONObject({ endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] }) - logger.info('Looking at %s...', source) + }, + }, + { + name: "pull <from> [<number>]", + desc: "Pull blocks from <from> source up to block <number>", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const source: string = params[0]; + const to = parseInt(params[1]); + if ( + !source || + !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN)) + ) { + throw "Source of sync is required. 
(host[:port])"; + } + const logger = NewLogger(); + const { host, port } = extractHostPort(source); try { - const fromHost = await connect(peer) - let current: DBBlock|null = await server.dal.getCurrentBlockOrNull() - // Loop until an error occurs - while (current && (isNaN(to) || current.number < to)) { - current = await fromHost.getBlock(current.number + 1) - await server.writeBlock(current, false) + const peer = PeerDTO.fromJSONObject({ + endpoints: [["BASIC_MERKLED_API", host, port].join(" ")], + }); + logger.info("Looking at %s...", source); + try { + const fromHost = await connect(peer); + let current: DBBlock | null = await server.dal.getCurrentBlockOrNull(); + // Loop until an error occurs + while (current && (isNaN(to) || current.number < to)) { + current = await fromHost.getBlock(current.number + 1); + await server.writeBlock(current, false); + } + } catch (e) { + logger.error(e); } } catch (e) { logger.error(e); + throw Error("Exiting"); } - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'forward <number> <fromHost> <fromPort> <toHost> <toPort>', - desc: 'Forward existing block <number> from a host to another', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const number = params[0]; - const fromHost = params[1]; - const fromPort = params[2]; - const toHost = params[3]; - const toPort = params[4]; - const logger = server.logger; - try { - logger.info('Looking at %s:%s...', fromHost, fromPort) + }, + }, + { + name: "forward <number> <fromHost> <fromPort> <toHost> <toPort>", + desc: "Forward existing block <number> from a host to another", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const number = params[0]; + const fromHost = params[1]; + const fromPort = params[2]; + const toHost = params[3]; + const toPort = params[4]; + const logger = server.logger; try { - const source = new Contacter(fromHost, fromPort, { timeout: 10000 }) - const target = new Contacter(toHost, toPort, { timeout: 10000 }) - const block = await source.getBlock(number) - const raw = BlockDTO.fromJSONObject(block).getRawSigned() - await target.postBlock(raw) + logger.info("Looking at %s:%s...", fromHost, fromPort); + try { + const source = new Contacter(fromHost, fromPort, { + timeout: 10000, + }); + const target = new Contacter(toHost, toPort, { timeout: 10000 }); + const block = await source.getBlock(number); + const raw = BlockDTO.fromJSONObject(block).getRawSigned(); + await target.postBlock(raw); + } catch (e) { + logger.error(e); + } + await server.disconnect(); } catch (e) { logger.error(e); + throw Error("Exiting"); } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'import-lookup [search] [fromhost] [fromport] [tohost] [toport]', - desc: 'Exchange peerings with another node', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const search = params[0]; - const fromhost = params[1]; - const fromport = params[2]; - const tohost = params[3]; - const toport = params[4]; - const logger = server.logger; - try { - logger.info('Looking for "%s" at %s:%s...', search, fromhost, fromport); - const sourcePeer = new Contacter(fromhost, fromport); - const targetPeer = new Contacter(tohost, toport); - const lookup = await sourcePeer.getLookup(search); - for (const res of lookup.results) { - for (const uid of res.uids) { - const rawIdty = rawer.getOfficialIdentity({ - currency: 'g1', - 
issuer: res.pubkey, - uid: uid.uid, - buid: uid.meta.timestamp, - sig: uid.self - }); - logger.info('Success idty %s', uid.uid); - try { - await targetPeer.postIdentity(rawIdty); - } catch (e) { - logger.error(e); + }, + }, + { + name: "import-lookup [search] [fromhost] [fromport] [tohost] [toport]", + desc: "Exchange peerings with another node", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const search = params[0]; + const fromhost = params[1]; + const fromport = params[2]; + const tohost = params[3]; + const toport = params[4]; + const logger = server.logger; + try { + logger.info( + 'Looking for "%s" at %s:%s...', + search, + fromhost, + fromport + ); + const sourcePeer = new Contacter(fromhost, fromport); + const targetPeer = new Contacter(tohost, toport); + const lookup = await sourcePeer.getLookup(search); + for (const res of lookup.results) { + for (const uid of res.uids) { + const rawIdty = rawer.getOfficialIdentity({ + currency: "g1", + issuer: res.pubkey, + uid: uid.uid, + buid: uid.meta.timestamp, + sig: uid.self, + }); + logger.info("Success idty %s", uid.uid); + try { + await targetPeer.postIdentity(rawIdty); + } catch (e) { + logger.error(e); + } + for (const received of uid.others) { + const rawCert = rawer.getOfficialCertification({ + currency: "g1", + issuer: received.pubkey, + idty_issuer: res.pubkey, + idty_uid: uid.uid, + idty_buid: uid.meta.timestamp, + idty_sig: uid.self, + buid: Buid.format.buid( + received.meta.block_number, + received.meta.block_hash + ), + sig: received.signature, + }); + try { + logger.info( + "Success cert %s -> %s", + received.pubkey.slice(0, 8), + uid.uid + ); + await targetPeer.postCert(rawCert); + } catch (e) { + logger.error(e); + } + } } - for (const received of uid.others) { + } + const certBy = await sourcePeer.getCertifiedBy(search); + const mapBlocks: any = {}; + for (const signed of certBy.certifications) { + if (signed.written) { + logger.info( + "Already written cert %s -> %s", + certBy.pubkey.slice(0, 8), + signed.uid + ); + } else { + const lookupIdty = await sourcePeer.getLookup(signed.pubkey); + let idty = null; + for (const result of lookupIdty.results) { + for (const uid of result.uids) { + if ( + uid.uid === signed.uid && + result.pubkey === signed.pubkey && + uid.meta.timestamp === signed.sigDate + ) { + idty = uid; + } + } + } + let block = mapBlocks[signed.cert_time.block]; + if (!block) { + block = await sourcePeer.getBlock(signed.cert_time.block); + mapBlocks[block.number] = block; + } const rawCert = rawer.getOfficialCertification({ - currency: 'g1', - issuer: received.pubkey, - idty_issuer: res.pubkey, - idty_uid: uid.uid, - idty_buid: uid.meta.timestamp, - idty_sig: uid.self, - buid: Buid.format.buid(received.meta.block_number, received.meta.block_hash), - sig: received.signature + currency: "g1", + issuer: certBy.pubkey, + idty_issuer: signed.pubkey, + idty_uid: signed.uid, + idty_buid: idty.meta.timestamp, + idty_sig: idty.self, + buid: Buid.format.buid(block.number, block.hash), + sig: signed.signature, }); try { - logger.info('Success cert %s -> %s', received.pubkey.slice(0, 8), uid.uid); + logger.info( + "Success cert %s -> %s", + certBy.pubkey.slice(0, 8), + signed.uid + ); await targetPeer.postCert(rawCert); } catch (e) { logger.error(e); } } } + logger.info("Sent."); + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - const certBy = await sourcePeer.getCertifiedBy(search) - const mapBlocks:any = {} - for 
(const signed of certBy.certifications) { - if (signed.written) { - logger.info('Already written cert %s -> %s', certBy.pubkey.slice(0, 8), signed.uid) - } else { - const lookupIdty = await sourcePeer.getLookup(signed.pubkey); - let idty = null - for (const result of lookupIdty.results) { - for (const uid of result.uids) { - if (uid.uid === signed.uid && result.pubkey === signed.pubkey && uid.meta.timestamp === signed.sigDate) { - idty = uid - } - } - } - let block = mapBlocks[signed.cert_time.block] - if (!block) { - block = await sourcePeer.getBlock(signed.cert_time.block) - mapBlocks[block.number] = block - } - const rawCert = rawer.getOfficialCertification({ - currency: 'g1', - issuer: certBy.pubkey, - idty_issuer: signed.pubkey, - idty_uid: signed.uid, - idty_buid: idty.meta.timestamp, - idty_sig: idty.self, - buid: Buid.format.buid(block.number, block.hash), - sig: signed.signature - }); + }, + }, + { + name: "crawl-lookup <toHost> <toPort> [<fromHost> [<fromPort>]]", + desc: + "Make a full network scan and rebroadcast every WoT pending document (identity, certification, membership)", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const toHost = params[0]; + const toPort = params[1]; + const fromHost = params[2]; + const fromPort = params[3]; + const logger = server.logger; + try { + const peers = + fromHost && fromPort + ? [ + { + endpoints: [ + ["BASIC_MERKLED_API", fromHost, fromPort].join(" "), + ], + }, + ] + : await server.dal.peerDAL.withUPStatus(); + // Memberships + for (const p of peers) { + const peer = PeerDTO.fromJSONObject(p); + const fromHost = peer.getHostPreferDNS(); + const fromPort = peer.getPort(); + logger.info("Looking at %s:%s...", fromHost, fromPort); try { - logger.info('Success cert %s -> %s', certBy.pubkey.slice(0, 8), signed.uid); - await targetPeer.postCert(rawCert); + const node = new Contacter(fromHost, fromPort as number, { + timeout: 10000, + }); + const requirements = await node.getRequirementsPending( + program.minsig || 5 + ); + await req2fwd(requirements, toHost, toPort, logger); } catch (e) { logger.error(e); } } + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - logger.info('Sent.'); - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'crawl-lookup <toHost> <toPort> [<fromHost> [<fromPort>]]', - desc: 'Make a full network scan and rebroadcast every WoT pending document (identity, certification, membership)', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const toHost = params[0] - const toPort = params[1] - const fromHost = params[2] - const fromPort = params[3] - const logger = server.logger; - try { - const peers = fromHost && fromPort ? 
[{ endpoints: [['BASIC_MERKLED_API', fromHost, fromPort].join(' ')] }] : await server.dal.peerDAL.withUPStatus() - // Memberships - for (const p of peers) { - const peer = PeerDTO.fromJSONObject(p) - const fromHost = peer.getHostPreferDNS(); - const fromPort = peer.getPort(); - logger.info('Looking at %s:%s...', fromHost, fromPort); - try { - const node = new Contacter(fromHost, fromPort as number, { timeout: 10000 }); - const requirements = await node.getRequirementsPending(program.minsig || 5); - await req2fwd(requirements, toHost, toPort, logger) - } catch (e) { - logger.error(e); - } - } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }, { - name: 'fwd-pending-ms', - desc: 'Forwards all the local pending memberships to target node', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const logger = server.logger; - try { - const pendingMSS = await server.dal.msDAL.getPendingIN() - const targetPeer = new Contacter('g1.cgeek.fr', 80, { timeout: 5000 }); - // Membership - let rawMS - for (const theMS of pendingMSS) { - console.log('New membership pending for %s', theMS.userid); - try { - rawMS = rawer.getMembership({ - currency: 'g1', - issuer: theMS.issuer, - block: theMS.block, - membership: theMS.membership, - userid: theMS.userid, - certts: theMS.certts, - signature: theMS.signature - }); - await targetPeer.postRenew(rawMS); - logger.info('Success ms idty %s', theMS.userid); - } catch (e) { - logger.warn(e); + }, + }, + { + name: "fwd-pending-ms", + desc: "Forwards all the local pending memberships to target node", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const logger = server.logger; + try { + const pendingMSS = await server.dal.msDAL.getPendingIN(); + const targetPeer = new Contacter("g1.cgeek.fr", 80, { + timeout: 5000, + }); + // Membership + let rawMS; + for (const theMS of pendingMSS) { + console.log("New membership pending for %s", theMS.userid); + try { + rawMS = rawer.getMembership({ + currency: "g1", + issuer: theMS.issuer, + block: theMS.block, + membership: theMS.membership, + userid: theMS.userid, + certts: theMS.certts, + signature: theMS.signature, + }); + await targetPeer.postRenew(rawMS); + logger.info("Success ms idty %s", theMS.userid); + } catch (e) { + logger.warn(e); + } } + await server.disconnect(); + } catch (e) { + logger.error(e); + throw Error("Exiting"); } - await server.disconnect(); - } catch(e) { - logger.error(e); - throw Error("Exiting"); - } - } - }] - } -} + }, + }, + ], + }, +}; function extractHostPort(source: string) { - const sp = source.split(':') - const onHost = sp[0] - const onPort = sp[1] ? sp[1] : '443' // Defaults to 443 + const sp = source.split(":"); + const onHost = sp[0]; + const onPort = sp[1] ? sp[1] : "443"; // Defaults to 443 return { host: onHost, port: onPort, - } -} \ No newline at end of file + }; +} diff --git a/app/modules/crawler/lib/connect.ts b/app/modules/crawler/lib/connect.ts index 401f431f6e8178d836d4ec4827599f481886100f..0715aeb81f081959d113f53e2171358be3511fe7 100644 --- a/app/modules/crawler/lib/connect.ts +++ b/app/modules/crawler/lib/connect.ts @@ -11,18 +11,21 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
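The `connect()` helper reformatted just below resolves a peer document into a live BMA client: it prefers the DNS endpoint over IPv4/IPv6, falls back to `localhost`, and applies `CrawlerConstants.DEFAULT_TIMEOUT` when no timeout is given. A minimal usage sketch, under the assumption that it is called from a sibling module (the surrounding function and the peer JSON are hypothetical; `getCurrent()` is the BMA accessor shown in contacter.ts further down):

    import { connect } from "./connect";
    import { PeerDTO } from "../../../lib/dto/PeerDTO";

    // Fetch the current block of a peer, whatever endpoint kind it advertises.
    async function currentBlockOf(peerJson: object) {
      const peer = PeerDTO.fromJSONObject(peerJson);
      const node = await connect(peer, 5000); // explicit 5 s timeout instead of the default
      return node.getCurrent();
    }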
-import {CrawlerConstants} from "./constants" -import {Contacter} from "./contacter" -import {PeerDTO} from "../../../lib/dto/PeerDTO"; +import { CrawlerConstants } from "./constants"; +import { Contacter } from "./contacter"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; -const DEFAULT_HOST = 'localhost'; +const DEFAULT_HOST = "localhost"; -export const connect = (peer:PeerDTO, timeout:number|null = null) => { - return Promise.resolve(Contacter.fromHostPortPath( - peer.getDns() || peer.getIPv4() || peer.getIPv6() || DEFAULT_HOST, - peer.getPort() as number, - peer.getPath() as string, - { - timeout: timeout || CrawlerConstants.DEFAULT_TIMEOUT - })) -} +export const connect = (peer: PeerDTO, timeout: number | null = null) => { + return Promise.resolve( + Contacter.fromHostPortPath( + peer.getDns() || peer.getIPv4() || peer.getIPv6() || DEFAULT_HOST, + peer.getPort() as number, + peer.getPath() as string, + { + timeout: timeout || CrawlerConstants.DEFAULT_TIMEOUT, + } + ) + ); +}; diff --git a/app/modules/crawler/lib/constants.ts b/app/modules/crawler/lib/constants.ts index bff0afbfd58eb8beff89655330b0ffdf49206a19..1887af52f9faffc7f6a2d179b766ca26b0d1ad40 100644 --- a/app/modules/crawler/lib/constants.ts +++ b/app/modules/crawler/lib/constants.ts @@ -11,10 +11,9 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CommonConstants} from "../../../lib/common-libs/constants" +import { CommonConstants } from "../../../lib/common-libs/constants"; export const CrawlerConstants = { - PEER_LONG_DOWN: 3600 * 24 * 2, // 48h SYNC_LONG_TIMEOUT: 30 * 1000, // 30 seconds DEFAULT_TIMEOUT: 10 * 1000, // 10 seconds @@ -42,16 +41,19 @@ export const CrawlerConstants = { AN_HOUR: 3600, A_DAY: 3600 * 24, A_WEEK: 3600 * 24 * 7, - A_MONTH: (3600 * 24 * 365.25) / 12 + A_MONTH: (3600 * 24 * 365.25) / 12, }, ERRORS: { - NEWER_PEER_DOCUMENT_AVAILABLE: { httpCode: 409, uerr: { ucode: 2022, message: "A newer peer document is available" }}, + NEWER_PEER_DOCUMENT_AVAILABLE: { + httpCode: 409, + uerr: { ucode: 2022, message: "A newer peer document is available" }, + }, }, ERROR: { PEER: { - UNKNOWN_REFERENCE_BLOCK: 'Unknown reference block of peer' - } - } -} \ No newline at end of file + UNKNOWN_REFERENCE_BLOCK: "Unknown reference block of peer", + }, + }, +}; diff --git a/app/modules/crawler/lib/contacter.ts b/app/modules/crawler/lib/contacter.ts index b4abbb98659640cfcfa841a801c1b4151ffb8eaf..8bac0518997ec3f305759f556f65ac9b222ba469 100644 --- a/app/modules/crawler/lib/contacter.ts +++ b/app/modules/crawler/lib/contacter.ts @@ -11,136 +11,162 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
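`Contacter`, reformatted below, is a thin typed wrapper around `request-promise` in which every response is passed through `sanitize()` against a DTO contract to prevent JSON injection. A usage sketch (the host string and timeout are placeholders, not values from the original):

    import { Contacter } from "./contacter";

    async function probe(host: string) {
      // Port 443 makes Contacter.protocol() pick "https://"; any other port gives "http://".
      const node = new Contacter(host, 443, { timeout: 10000 });
      const peering = await node.getPeer(); // GET /network/peering
      const current = await node.getCurrent(); // GET /blockchain/current
      return { peering, current };
    }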
-import {CrawlerConstants} from "./constants" -import {HttpMembershipList} from "../../bma/lib/dtos" +import { CrawlerConstants } from "./constants"; +import { HttpMembershipList } from "../../bma/lib/dtos"; -const rp = require('request-promise'); -const sanitize = require('../../../modules/bma/lib/sanitize') -const dtos = require('../../../modules/bma').BmaDependency.duniter.methods.dtos; +const rp = require("request-promise"); +const sanitize = require("../../../modules/bma/lib/sanitize"); +const dtos = require("../../../modules/bma").BmaDependency.duniter.methods.dtos; export class Contacter { - - path: string = '' - options:{ timeout:number } - fullyQualifiedHost:string - - constructor(public readonly host:string, public readonly port:number, opts:any = {}) { + path: string = ""; + options: { timeout: number }; + fullyQualifiedHost: string; + + constructor( + public readonly host: string, + public readonly port: number, + opts: any = {} + ) { this.options = { - timeout: (opts && opts.timeout) || CrawlerConstants.DEFAULT_TIMEOUT - } + timeout: (opts && opts.timeout) || CrawlerConstants.DEFAULT_TIMEOUT, + }; // We suppose that IPv6 is already wrapped by [], for example 'http://[::1]:80/index.html' - this.fullyQualifiedHost = [host, port].join(':'); + this.fullyQualifiedHost = [host, port].join(":"); } - public static fromHostPortPath(host:string, port:number, path:string, opts: { timeout?: number }) { - const contacter = new Contacter(host, port, opts) - contacter.path = path - return contacter + public static fromHostPortPath( + host: string, + port: number, + path: string, + opts: { timeout?: number } + ) { + const contacter = new Contacter(host, port, opts); + contacter.path = path; + return contacter; } getSummary() { - return this.get('/node/summary/', dtos.Summary) + return this.get("/node/summary/", dtos.Summary); } - - getCertifiedBy(search:string) { - return this.get('/wot/certified-by/' + search, dtos.Certifications) + + getCertifiedBy(search: string) { + return this.get("/wot/certified-by/" + search, dtos.Certifications); } - - getRequirements(search:string) { - return this.get('/wot/requirements/' + search, dtos.Requirements) + + getRequirements(search: string) { + return this.get("/wot/requirements/" + search, dtos.Requirements); } - - getRequirementsPending(minsig:number) { - return this.get('/wot/requirements-of-pending/' + minsig, dtos.Requirements) + + getRequirementsPending(minsig: number) { + return this.get( + "/wot/requirements-of-pending/" + minsig, + dtos.Requirements + ); } - - getLookup(search:string) { - return this.get('/wot/lookup/', dtos.Lookup, search) + + getLookup(search: string) { + return this.get("/wot/lookup/", dtos.Lookup, search); } - - getBlock(number:number) { - return this.get('/blockchain/block/', dtos.Block, number) + + getBlock(number: number) { + return this.get("/blockchain/block/", dtos.Block, number); } - + getCurrent() { - return this.get('/blockchain/current', dtos.Block) + return this.get("/blockchain/current", dtos.Block); } getMilestonesPage() { - return this.get('/blockchain/milestones', dtos.MilestonesPage) + return this.get("/blockchain/milestones", dtos.MilestonesPage); } getMilestones(page: number) { - return this.get('/blockchain/milestones/' + page, dtos.Milestones) + return this.get("/blockchain/milestones/" + page, dtos.Milestones); } - + getPeer() { - return this.get('/network/peering', dtos.Peer) + return this.get("/network/peering", dtos.Peer); } - - getPeers(obj?:any) { - return this.get('/network/peering/peers', 
dtos.MerkleOfPeers, obj) + + getPeers(obj?: any) { + return this.get("/network/peering/peers", dtos.MerkleOfPeers, obj); } getPeersArray() { - return this.get('/network/peers', dtos.Peers) + return this.get("/network/peers", dtos.Peers); } - - getSources(pubkey:string) { - return this.get('/tx/sources/', dtos.Sources, pubkey) + + getSources(pubkey: string) { + return this.get("/tx/sources/", dtos.Sources, pubkey); } - - getBlocks(count:number, fromNumber:number) { - return this.get('/blockchain/blocks/', dtos.Blocks, [count, fromNumber].join('/')) + + getBlocks(count: number, fromNumber: number) { + return this.get( + "/blockchain/blocks/", + dtos.Blocks, + [count, fromNumber].join("/") + ); } - - postPeer(peer:any) { - return this.post('/network/peering/peers', dtos.Peer, { peer: peer }) + + postPeer(peer: any) { + return this.post("/network/peering/peers", dtos.Peer, { peer: peer }); } - - postIdentity(raw:string) { - return this.post('/wot/add', dtos.Identity, { identity: raw }) + + postIdentity(raw: string) { + return this.post("/wot/add", dtos.Identity, { identity: raw }); } - - postCert(cert:string) { - return this.post('/wot/certify', dtos.Cert, { cert: cert}) + + postCert(cert: string) { + return this.post("/wot/certify", dtos.Cert, { cert: cert }); } - - postRenew(ms:string) { - return this.post('/blockchain/membership', dtos.Membership, { membership: ms }) + + postRenew(ms: string) { + return this.post("/blockchain/membership", dtos.Membership, { + membership: ms, + }); } - postRevocation(rev:string) { - return this.post('/wot/revoke', dtos.Identity, { revocation: rev }) + postRevocation(rev: string) { + return this.post("/wot/revoke", dtos.Identity, { revocation: rev }); } - + wotPending(): Promise<HttpMembershipList> { - return this.get('/wot/pending', dtos.MembershipList) + return this.get("/wot/pending", dtos.MembershipList); } - + wotMembers() { - return this.get('/wot/members', dtos.Members) + return this.get("/wot/members", dtos.Members); } - - postBlock(rawBlock:string) { - return this.post('/blockchain/block', dtos.Block, { block: rawBlock }) + + postBlock(rawBlock: string) { + return this.post("/blockchain/block", dtos.Block, { block: rawBlock }); } - - processTransaction(rawTX:string) { - return this.post('/tx/process', dtos.Transaction, { transaction: rawTX }) + + processTransaction(rawTX: string) { + return this.post("/tx/process", dtos.Transaction, { transaction: rawTX }); } - private async get(url:string, dtoContract:any, param?:any) { - if (typeof param === 'object') { + private async get(url: string, dtoContract: any, param?: any) { + if (typeof param === "object") { // Classical URL params (a=1&b=2&...) - param = '?' + Object.keys(param).map((k) => [k, param[k]].join('=')).join('&'); + param = + "?" + + Object.keys(param) + .map((k) => [k, param[k]].join("=")) + .join("&"); } try { - const path = this.path || '' + const path = this.path || ""; const json = await rp.get({ - url: Contacter.protocol(this.port) + this.fullyQualifiedHost + path + url + (param !== undefined ? param : ''), + url: + Contacter.protocol(this.port) + + this.fullyQualifiedHost + + path + + url + + (param !== undefined ? 
param : ""), json: true, - timeout: this.options.timeout + timeout: this.options.timeout, }); // Prevent JSON injection return sanitize(json, dtoContract); @@ -149,14 +175,15 @@ export class Contacter { } } - private async post(url:string, dtoContract:any, data:any) { + private async post(url: string, dtoContract: any, data: any) { try { - const path = this.path || '' + const path = this.path || ""; const json = await rp.post({ - url: Contacter.protocol(this.port) + this.fullyQualifiedHost + path + url, + url: + Contacter.protocol(this.port) + this.fullyQualifiedHost + path + url, body: data, json: true, - timeout: this.options.timeout + timeout: this.options.timeout, }); // Prevent JSON injection return sanitize(json, dtoContract); @@ -165,32 +192,37 @@ export class Contacter { } } - static protocol(port:number) { - return port == 443 ? 'https://' : 'http://'; + static protocol(port: number) { + return port == 443 ? "https://" : "http://"; } - static async quickly(host:string, port:number, opts:any, callbackPromise:any) { + static async quickly( + host: string, + port: number, + opts: any, + callbackPromise: any + ) { const node = new Contacter(host, port, opts); return callbackPromise(node); } - static async quickly2(peer:any, opts:any, callbackPromise:any) { - const Peer = require('./entity/peer'); + static async quickly2(peer: any, opts: any, callbackPromise: any) { + const Peer = require("./entity/peer"); const p = Peer.fromJSON(peer); const node = new Contacter(p.getHostPreferDNS(), p.getPort(), opts); return callbackPromise(node); } - static fetchPeer(host:string, port:number, opts:any = {}) { - return Contacter.quickly(host, port, opts, (node:any) => node.getPeer()) + static fetchPeer(host: string, port: number, opts: any = {}) { + return Contacter.quickly(host, port, opts, (node: any) => node.getPeer()); } - static fetchBlock(number:number, peer:any, opts:any = {}) { - return Contacter.quickly2(peer, opts, (node:any) => node.getBlock(number)) + static fetchBlock(number: number, peer: any, opts: any = {}) { + return Contacter.quickly2(peer, opts, (node: any) => node.getBlock(number)); } - static async isReachableFromTheInternet(peer:any, opts:any) { - const Peer = require('./entity/peer'); + static async isReachableFromTheInternet(peer: any, opts: any) { + const Peer = require("./entity/peer"); const p = Peer.fromJSON(peer); const node = new Contacter(p.getHostPreferDNS(), p.getPort(), opts); try { @@ -200,4 +232,4 @@ export class Contacter { return false; } } -} \ No newline at end of file +} diff --git a/app/modules/crawler/lib/crawler.ts b/app/modules/crawler/lib/crawler.ts index 9a187fa8740640b248fbfb72ff2c14a9e717b6d9..b46420a9b42161032a5b76b5d630671358a53616 100644 --- a/app/modules/crawler/lib/crawler.ts +++ b/app/modules/crawler/lib/crawler.ts @@ -11,101 +11,113 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
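The `Crawler` service reformatted below is a facade over four sub-services (peer crawling, peer testing, block pulling, sandbox pulling) that are started and stopped together, and that become no-ops when `conf.nobma` is set or `conf.bmaWithCrawler` is not. A wiring sketch, with the `server` and `logger` values assumed to be supplied by the host application:

    import { Crawler } from "./crawler";

    async function runCrawler(server: any, logger: any) {
      const crawler = new Crawler(server, server.conf, logger);
      await crawler.startService(); // resolves immediately when BMA crawling is disabled
      // ... later, on shutdown:
      await crawler.stopService();
    }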
-import * as stream from "stream" -import {Server} from "../../../../server" -import {ConfDTO} from "../../../lib/dto/ConfDTO" -import {DuniterService} from "../../../../index" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {AbstractDAO} from "./pulling" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {DBBlock} from "../../../lib/db/DBBlock" -import {tx_cleaner} from "./tx_cleaner" -import {connect} from "./connect" -import {CrawlerConstants} from "./constants" -import {pullSandboxToLocalServer} from "./sandbox" -import {cleanLongDownPeers} from "./garbager" -import {Underscore} from "../../../lib/common-libs/underscore" -import {BMARemoteContacter} from "./sync/BMARemoteContacter" - -const async = require('async'); +import * as stream from "stream"; +import { Server } from "../../../../server"; +import { ConfDTO } from "../../../lib/dto/ConfDTO"; +import { DuniterService } from "../../../../index"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { AbstractDAO } from "./pulling"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { tx_cleaner } from "./tx_cleaner"; +import { connect } from "./connect"; +import { CrawlerConstants } from "./constants"; +import { pullSandboxToLocalServer } from "./sandbox"; +import { cleanLongDownPeers } from "./garbager"; +import { Underscore } from "../../../lib/common-libs/underscore"; +import { BMARemoteContacter } from "./sync/BMARemoteContacter"; + +const async = require("async"); /** * Service which triggers the server's peering generation (actualization of the Peer document). * @constructor */ export class Crawler extends stream.Transform implements DuniterService { - - peerCrawler:PeerCrawler - peerTester:PeerTester - blockCrawler:BlockCrawler - sandboxCrawler:SandboxCrawler + peerCrawler: PeerCrawler; + peerTester: PeerTester; + blockCrawler: BlockCrawler; + sandboxCrawler: SandboxCrawler; constructor( - private server:Server, - private conf:ConfDTO, - private logger:any) { - super({ objectMode: true }) - - this.peerCrawler = new PeerCrawler(server, conf, logger) - this.peerTester = new PeerTester(server, conf, logger) - this.blockCrawler = new BlockCrawler(server, logger) - this.sandboxCrawler = new SandboxCrawler(server, conf, logger) + private server: Server, + private conf: ConfDTO, + private logger: any + ) { + super({ objectMode: true }); + + this.peerCrawler = new PeerCrawler(server, conf, logger); + this.peerTester = new PeerTester(server, conf, logger); + this.blockCrawler = new BlockCrawler(server, logger); + this.sandboxCrawler = new SandboxCrawler(server, conf, logger); } - pullBlocks(server:Server, pubkey = "") { - return this.blockCrawler.pullBlocks(server, pubkey) + pullBlocks(server: Server, pubkey = "") { + return this.blockCrawler.pullBlocks(server, pubkey); } - sandboxPull(server:Server) { - return this.sandboxCrawler.sandboxPull(server) + sandboxPull(server: Server) { + return this.sandboxCrawler.sandboxPull(server); } startService() { if (this.conf.nobma || !this.conf.bmaWithCrawler) { - return Promise.resolve() + return Promise.resolve(); } return Promise.all([ this.peerCrawler.startService(), this.peerTester.startService(), this.blockCrawler.startService(), - this.sandboxCrawler.startService() - ]) + this.sandboxCrawler.startService(), + ]); } stopService() { if (this.conf.nobma || !this.conf.bmaWithCrawler) { - return Promise.resolve() + return Promise.resolve(); } return Promise.all([ this.peerCrawler.stopService(), 
this.peerTester.stopService(), this.blockCrawler.stopService(), - this.sandboxCrawler.stopService() - ]) + this.sandboxCrawler.stopService(), + ]); } // Unused - _write(str:string, enc:any, done:any) { + _write(str: string, enc: any, done: any) { done && done(); - }; + } } export class PeerCrawler implements DuniterService { - private DONT_IF_MORE_THAN_FOUR_PEERS = true; - private crawlPeersInterval:NodeJS.Timer - private crawlPeersFifo = async.queue((task:any, callback:any) => task(callback), 1); + private crawlPeersInterval: NodeJS.Timer; + private crawlPeersFifo = async.queue( + (task: any, callback: any) => task(callback), + 1 + ); constructor( - private server:Server, - private conf:ConfDTO, - private logger:any) {} + private server: Server, + private conf: ConfDTO, + private logger: any + ) {} async startService() { - if (this.crawlPeersInterval) - clearInterval(this.crawlPeersInterval); - this.crawlPeersInterval = setInterval(() => this.crawlPeersFifo.push((cb:any) => this.crawlPeers(this.server, this.conf).then(cb).catch(cb)), 1000 * this.conf.avgGenTime * CrawlerConstants.SYNC_PEERS_INTERVAL); - await this.crawlPeers(this.server, this.conf, this.DONT_IF_MORE_THAN_FOUR_PEERS); + if (this.crawlPeersInterval) clearInterval(this.crawlPeersInterval); + this.crawlPeersInterval = setInterval( + () => + this.crawlPeersFifo.push((cb: any) => + this.crawlPeers(this.server, this.conf).then(cb).catch(cb) + ), + 1000 * this.conf.avgGenTime * CrawlerConstants.SYNC_PEERS_INTERVAL + ); + await this.crawlPeers( + this.server, + this.conf, + this.DONT_IF_MORE_THAN_FOUR_PEERS + ); } async stopService() { @@ -113,23 +125,34 @@ export class PeerCrawler implements DuniterService { clearInterval(this.crawlPeersInterval); } - private async crawlPeers(server:Server, conf:ConfDTO, dontCrawlIfEnoughPeers = false) { - this.logger.info('Crawling the network...'); - const peers = await server.dal.listAllPeersWithStatusNewUPWithtout(conf.pair.pub); - if (peers.length > CrawlerConstants.COUNT_FOR_ENOUGH_PEERS && dontCrawlIfEnoughPeers == this.DONT_IF_MORE_THAN_FOUR_PEERS) { + private async crawlPeers( + server: Server, + conf: ConfDTO, + dontCrawlIfEnoughPeers = false + ) { + this.logger.info("Crawling the network..."); + const peers = await server.dal.listAllPeersWithStatusNewUPWithtout( + conf.pair.pub + ); + if ( + peers.length > CrawlerConstants.COUNT_FOR_ENOUGH_PEERS && + dontCrawlIfEnoughPeers == this.DONT_IF_MORE_THAN_FOUR_PEERS + ) { return; } - let peersToTest = peers.slice().map(p => PeerDTO.fromJSONObject(p)) - let tested:string[] = []; + let peersToTest = peers.slice().map((p) => PeerDTO.fromJSONObject(p)); + let tested: string[] = []; const found = []; while (peersToTest.length > 0) { - const results = await Promise.all(peersToTest.map((p:PeerDTO) => this.crawlPeer(server, p))) - tested = tested.concat(peersToTest.map((p:PeerDTO) => p.pubkey)); + const results = await Promise.all( + peersToTest.map((p: PeerDTO) => this.crawlPeer(server, p)) + ); + tested = tested.concat(peersToTest.map((p: PeerDTO) => p.pubkey)); // End loop condition peersToTest.splice(0); // Eventually continue the loop for (let i = 0, len = results.length; i < len; i++) { - const res:any = results[i]; + const res: any = results[i]; for (let j = 0, len2 = res.length; j < len2; j++) { try { const subpeer = res[j].leaf.value; @@ -139,30 +162,38 @@ export class PeerCrawler implements DuniterService { found.push(p); } } catch (e) { - this.logger.warn('Invalid peer %s', res[j]); + this.logger.warn("Invalid peer %s", res[j]); } } } // 
Make unique list - peersToTest = Underscore.uniq(peersToTest, false, (p:PeerDTO) => p.pubkey) + peersToTest = Underscore.uniq( + peersToTest, + false, + (p: PeerDTO) => p.pubkey + ); } - this.logger.info('Crawling done.'); + this.logger.info("Crawling done."); for (let i = 0, len = found.length; i < len; i++) { let p = found[i]; try { // Try to write it - await server.writePeer(p) - } catch(e) { + await server.writePeer(p); + } catch (e) { // Silent error } } await cleanLongDownPeers(server, Date.now()); } - private async crawlPeer(server:Server, aPeer:PeerDTO) { - let subpeers:any[] = []; + private async crawlPeer(server: Server, aPeer: PeerDTO) { + let subpeers: any[] = []; try { - this.logger.debug('Crawling peers of %s %s', aPeer.pubkey.substr(0, 6), aPeer.getNamedURL()); + this.logger.debug( + "Crawling peers of %s %s", + aPeer.pubkey.substr(0, 6), + aPeer.getNamedURL() + ); const node = await connect(aPeer); await checkPeerValidity(server, aPeer, node); const json = await node.getPeers.bind(node)({ leaves: true }); @@ -179,22 +210,32 @@ export class PeerCrawler implements DuniterService { } export class SandboxCrawler implements DuniterService { - - private pullInterval:NodeJS.Timer - private pullFifo = async.queue((task:any, callback:any) => task(callback), 1); + private pullInterval: NodeJS.Timer; + private pullFifo = async.queue( + (task: any, callback: any) => task(callback), + 1 + ); constructor( - private server:Server, - private conf:ConfDTO, - private logger:any) {} + private server: Server, + private conf: ConfDTO, + private logger: any + ) {} async startService() { - if (this.pullInterval) - clearInterval(this.pullInterval); - this.pullInterval = setInterval(() => this.pullFifo.push((cb:any) => this.sandboxPull(this.server).then(cb).catch(cb)), 1000 * this.conf.avgGenTime * CrawlerConstants.SANDBOX_CHECK_INTERVAL); + if (this.pullInterval) clearInterval(this.pullInterval); + this.pullInterval = setInterval( + () => + this.pullFifo.push((cb: any) => + this.sandboxPull(this.server).then(cb).catch(cb) + ), + 1000 * this.conf.avgGenTime * CrawlerConstants.SANDBOX_CHECK_INTERVAL + ); setTimeout(() => { - this.pullFifo.push((cb:any) => this.sandboxPull(this.server).then(cb).catch(cb)) - }, CrawlerConstants.SANDBOX_FIRST_PULL_DELAY) + this.pullFifo.push((cb: any) => + this.sandboxPull(this.server).then(cb).catch(cb) + ); + }, CrawlerConstants.SANDBOX_FIRST_PULL_DELAY); } async stopService() { @@ -202,35 +243,54 @@ export class SandboxCrawler implements DuniterService { clearInterval(this.pullInterval); } - async sandboxPull(server:Server) { - this.logger && this.logger.info('Sandbox pulling started...'); - const peers = await server.dal.getRandomlyUPsWithout([this.conf.pair.pub]) - const randoms = chooseXin(peers.map(p => PeerDTO.fromDBPeer(p)), CrawlerConstants.SANDBOX_PEERS_COUNT) - let peersToTest = randoms.slice().map((p) => PeerDTO.fromJSONObject(p)); - for (const peer of peersToTest) { - const fromHost = await connect(peer) - const api = new BMARemoteContacter(fromHost) - await pullSandboxToLocalServer(server.conf.currency, api, server, this.logger) - } - this.logger && this.logger.info('Sandbox pulling done.'); + async sandboxPull(server: Server) { + this.logger && this.logger.info("Sandbox pulling started..."); + const peers = await server.dal.getRandomlyUPsWithout([this.conf.pair.pub]); + const randoms = chooseXin( + peers.map((p) => PeerDTO.fromDBPeer(p)), + CrawlerConstants.SANDBOX_PEERS_COUNT + ); + let peersToTest = randoms.slice().map((p) => 
PeerDTO.fromJSONObject(p)); + for (const peer of peersToTest) { + const fromHost = await connect(peer); + const api = new BMARemoteContacter(fromHost); + await pullSandboxToLocalServer( + server.conf.currency, + api, + server, + this.logger + ); + } + this.logger && this.logger.info("Sandbox pulling done."); } } export class PeerTester implements DuniterService { - - private FIRST_CALL = true - private testPeerFifo = async.queue((task:any, callback:any) => task(callback), 1); - private testPeerFifoInterval:NodeJS.Timer + private FIRST_CALL = true; + private testPeerFifo = async.queue( + (task: any, callback: any) => task(callback), + 1 + ); + private testPeerFifoInterval: NodeJS.Timer; constructor( - private server:Server, - private conf:ConfDTO, - private logger:any) {} + private server: Server, + private conf: ConfDTO, + private logger: any + ) {} async startService() { - if (this.testPeerFifoInterval) - clearInterval(this.testPeerFifoInterval); - this.testPeerFifoInterval = setInterval(() => this.testPeerFifo.push((cb:any) => this.testPeers.bind(this, this.server, this.conf, !this.FIRST_CALL)().then(cb).catch(cb)), 1000 * CrawlerConstants.TEST_PEERS_INTERVAL); + if (this.testPeerFifoInterval) clearInterval(this.testPeerFifoInterval); + this.testPeerFifoInterval = setInterval( + () => + this.testPeerFifo.push((cb: any) => + this.testPeers + .bind(this, this.server, this.conf, !this.FIRST_CALL)() + .then(cb) + .catch(cb) + ), + 1000 * CrawlerConstants.TEST_PEERS_INTERVAL + ); await this.testPeers(this.server, this.conf, this.FIRST_CALL); } @@ -239,81 +299,116 @@ export class PeerTester implements DuniterService { this.testPeerFifo.kill(); } - private async testPeers(server:Server, conf:ConfDTO, displayDelays:boolean) { + private async testPeers( + server: Server, + conf: ConfDTO, + displayDelays: boolean + ) { let peers = await server.dal.listAllPeers(); - let now = (new Date().getTime()); - peers = Underscore.filter(peers, (p:any) => p.pubkey != conf.pair.pub); - await Promise.all(peers.map(async (thePeer:any) => { - let p = PeerDTO.fromJSONObject(thePeer); - if (thePeer.status == 'DOWN') { - let shouldDisplayDelays = displayDelays; - let downAt = thePeer.first_down || now; - let waitRemaining = this.getWaitRemaining(now, downAt, thePeer.last_try); - let nextWaitRemaining = this.getWaitRemaining(now, downAt, now); - let testIt = waitRemaining <= 0; - if (testIt) { - // We try to reconnect only with peers marked as DOWN - try { - this.logger.trace('Checking if node %s is UP... (%s:%s) ', p.pubkey.substr(0, 6), p.getHostPreferDNS(), p.getPort()); - // We register the try anyway - await server.dal.setPeerDown(p.pubkey); - // Now we test - let node = await connect(p); - let peering = await node.getPeer(); - await checkPeerValidity(server, p, node); - // The node answered, it is no more DOWN! 
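One preexisting quirk is worth flagging while passing through this code: in the block just below, `sp1` and `sp2` both split the same `peering.block`, so `currentBlockNumber == blockNumber && currentBlockHash == blockHash` always holds and the `submitP` branch can never fire. If the intent was to compare the stored peer record against the freshly fetched peering document (an assumption — nothing in this patch confirms it), the check would look like:

// Hedged sketch of the assumed intent, NOT what the code below does:
// compare the stored blockstamp with the freshly fetched one.
function peeringChanged(knownBlock: string, freshBlock: string): boolean {
  const [knownNumber, knownHash] = knownBlock.split("-");
  const [freshNumber, freshHash] = freshBlock.split("-");
  return !(knownNumber === freshNumber && knownHash === freshHash);
}
// e.g. peeringChanged(p.block, peering.block) would then gate
// server.PeeringService.submitP(peering).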
- this.logger.info('Node %s (%s:%s) is UP!', p.pubkey.substr(0, 6), p.getHostPreferDNS(), p.getPort()); - await server.dal.setPeerUP(p.pubkey); - // We try to forward its peering entry - let sp1 = peering.block.split('-'); - let currentBlockNumber = sp1[0]; - let currentBlockHash = sp1[1]; - let sp2 = peering.block.split('-'); - let blockNumber = sp2[0]; - let blockHash = sp2[1]; - if (!(currentBlockNumber == blockNumber && currentBlockHash == blockHash)) { - // The peering changed - await server.PeeringService.submitP(peering); - } - // Do not need to display when next check will occur: the node is now UP - shouldDisplayDelays = false; - } catch (err) { - if (!err) { - err = "NO_REASON" + let now = new Date().getTime(); + peers = Underscore.filter(peers, (p: any) => p.pubkey != conf.pair.pub); + await Promise.all( + peers.map(async (thePeer: any) => { + let p = PeerDTO.fromJSONObject(thePeer); + if (thePeer.status == "DOWN") { + let shouldDisplayDelays = displayDelays; + let downAt = thePeer.first_down || now; + let waitRemaining = this.getWaitRemaining( + now, + downAt, + thePeer.last_try + ); + let nextWaitRemaining = this.getWaitRemaining(now, downAt, now); + let testIt = waitRemaining <= 0; + if (testIt) { + // We try to reconnect only with peers marked as DOWN + try { + this.logger.trace( + "Checking if node %s is UP... (%s:%s) ", + p.pubkey.substr(0, 6), + p.getHostPreferDNS(), + p.getPort() + ); + // We register the try anyway + await server.dal.setPeerDown(p.pubkey); + // Now we test + let node = await connect(p); + let peering = await node.getPeer(); + await checkPeerValidity(server, p, node); + // The node answered, it is no more DOWN! + this.logger.info( + "Node %s (%s:%s) is UP!", + p.pubkey.substr(0, 6), + p.getHostPreferDNS(), + p.getPort() + ); + await server.dal.setPeerUP(p.pubkey); + // We try to forward its peering entry + let sp1 = peering.block.split("-"); + let currentBlockNumber = sp1[0]; + let currentBlockHash = sp1[1]; + let sp2 = peering.block.split("-"); + let blockNumber = sp2[0]; + let blockHash = sp2[1]; + if ( + !( + currentBlockNumber == blockNumber && + currentBlockHash == blockHash + ) + ) { + // The peering changed + await server.PeeringService.submitP(peering); + } + // Do not need to display when next check will occur: the node is now UP + shouldDisplayDelays = false; + } catch (err) { + if (!err) { + err = "NO_REASON"; + } + // Error: we set the peer as DOWN + this.logger.trace( + "Peer %s is DOWN (%s)", + p.pubkey, + (err.httpCode && "HTTP " + err.httpCode) || + err.code || + err.message || + err + ); + await server.dal.setPeerDown(p.pubkey); + shouldDisplayDelays = true; } - // Error: we set the peer as DOWN - this.logger.trace("Peer %s is DOWN (%s)", p.pubkey, (err.httpCode && 'HTTP ' + err.httpCode) || err.code || err.message || err); - await server.dal.setPeerDown(p.pubkey); - shouldDisplayDelays = true; + } + if (shouldDisplayDelays) { + this.logger.debug( + "Will check that node %s (%s:%s) is UP in %s min...", + p.pubkey.substr(0, 6), + p.getHostPreferDNS(), + p.getPort(), + (nextWaitRemaining / 60).toFixed(0) + ); } } - if (shouldDisplayDelays) { - this.logger.debug('Will check that node %s (%s:%s) is UP in %s min...', p.pubkey.substr(0, 6), p.getHostPreferDNS(), p.getPort(), (nextWaitRemaining / 60).toFixed(0)); - } - } - })) + }) + ); } - private getWaitRemaining(now:number, downAt:number, last_try:number) { + private getWaitRemaining(now: number, downAt: number, last_try: number) { let downDelay = Math.floor((now - downAt) / 1000); let 
waitedSinceLastTest = Math.floor((now - (last_try || now)) / 1000); let waitRemaining = 1; if (downDelay <= CrawlerConstants.DURATIONS.A_MINUTE) { - waitRemaining = CrawlerConstants.DURATIONS.TEN_SECONDS - waitedSinceLastTest; - } - else if (downDelay <= CrawlerConstants.DURATIONS.TEN_MINUTES) { + waitRemaining = + CrawlerConstants.DURATIONS.TEN_SECONDS - waitedSinceLastTest; + } else if (downDelay <= CrawlerConstants.DURATIONS.TEN_MINUTES) { waitRemaining = CrawlerConstants.DURATIONS.A_MINUTE - waitedSinceLastTest; - } - else if (downDelay <= CrawlerConstants.DURATIONS.AN_HOUR) { - waitRemaining = CrawlerConstants.DURATIONS.TEN_MINUTES - waitedSinceLastTest; - } - else if (downDelay <= CrawlerConstants.DURATIONS.A_DAY) { + } else if (downDelay <= CrawlerConstants.DURATIONS.AN_HOUR) { + waitRemaining = + CrawlerConstants.DURATIONS.TEN_MINUTES - waitedSinceLastTest; + } else if (downDelay <= CrawlerConstants.DURATIONS.A_DAY) { waitRemaining = CrawlerConstants.DURATIONS.AN_HOUR - waitedSinceLastTest; - } - else if (downDelay <= CrawlerConstants.DURATIONS.A_WEEK) { + } else if (downDelay <= CrawlerConstants.DURATIONS.A_WEEK) { waitRemaining = CrawlerConstants.DURATIONS.A_DAY - waitedSinceLastTest; - } - else if (downDelay <= CrawlerConstants.DURATIONS.A_MONTH) { + } else if (downDelay <= CrawlerConstants.DURATIONS.A_MONTH) { waitRemaining = CrawlerConstants.DURATIONS.A_WEEK - waitedSinceLastTest; } // Else do not check it, DOWN for too long @@ -322,22 +417,27 @@ export class PeerTester implements DuniterService { } export class BlockCrawler { - - private CONST_BLOCKS_CHUNK = CrawlerConstants.CRAWL_BLOCK_CHUNK - private pullingActualIntervalDuration = CrawlerConstants.PULLING_MINIMAL_DELAY - private programStart = Date.now() - private syncBlockFifo = async.queue((task:any, callback:any) => task(callback), 1) - private syncBlockInterval:NodeJS.Timer - - constructor( - private server:Server, - private logger:any) { - } + private CONST_BLOCKS_CHUNK = CrawlerConstants.CRAWL_BLOCK_CHUNK; + private pullingActualIntervalDuration = + CrawlerConstants.PULLING_MINIMAL_DELAY; + private programStart = Date.now(); + private syncBlockFifo = async.queue( + (task: any, callback: any) => task(callback), + 1 + ); + private syncBlockInterval: NodeJS.Timer; + + constructor(private server: Server, private logger: any) {} async startService() { - if (this.syncBlockInterval) - clearInterval(this.syncBlockInterval); - this.syncBlockInterval = setInterval(() => this.syncBlockFifo.push((cb:any) => this.syncBlock(this.server).then(cb).catch(cb)), 1000 * this.pullingActualIntervalDuration); + if (this.syncBlockInterval) clearInterval(this.syncBlockInterval); + this.syncBlockInterval = setInterval( + () => + this.syncBlockFifo.push((cb: any) => + this.syncBlock(this.server).then(cb).catch(cb) + ), + 1000 * this.pullingActualIntervalDuration + ); this.syncBlock(this.server); } @@ -346,195 +446,278 @@ export class BlockCrawler { this.syncBlockFifo.kill(); } - pullBlocks(server:Server, pubkey = "") { - return this.syncBlock(server, pubkey) + pullBlocks(server: Server, pubkey = "") { + return this.syncBlock(server, pubkey); } - private async syncBlock(server:Server, pubkey:string = "") { + private async syncBlock(server: Server, pubkey: string = "") { // Eventually change the interval duration - const minutesElapsed = Math.ceil((Date.now() - this.programStart) / (60 * 1000)); - const FACTOR = Math.sin((minutesElapsed / CrawlerConstants.PULLING_INTERVAL_TARGET) * (Math.PI / 2)); + const minutesElapsed = Math.ceil( + 
(Date.now() - this.programStart) / (60 * 1000) + ); + const FACTOR = Math.sin( + (minutesElapsed / CrawlerConstants.PULLING_INTERVAL_TARGET) * + (Math.PI / 2) + ); // Make the interval always higher than before - const pullingTheoreticalIntervalNow = Math.max(Math.max(FACTOR * CrawlerConstants.PULLING_INTERVAL_TARGET, CrawlerConstants.PULLING_MINIMAL_DELAY), this.pullingActualIntervalDuration); + const pullingTheoreticalIntervalNow = Math.max( + Math.max( + FACTOR * CrawlerConstants.PULLING_INTERVAL_TARGET, + CrawlerConstants.PULLING_MINIMAL_DELAY + ), + this.pullingActualIntervalDuration + ); if (pullingTheoreticalIntervalNow !== this.pullingActualIntervalDuration) { this.pullingActualIntervalDuration = pullingTheoreticalIntervalNow; // Change the interval - if (this.syncBlockInterval) - clearInterval(this.syncBlockInterval); - this.syncBlockInterval = setInterval(() => this.syncBlockFifo.push((cb:any) => this.syncBlock(server).then(cb).catch(cb)), 1000 * this.pullingActualIntervalDuration); + if (this.syncBlockInterval) clearInterval(this.syncBlockInterval); + this.syncBlockInterval = setInterval( + () => + this.syncBlockFifo.push((cb: any) => + this.syncBlock(server).then(cb).catch(cb) + ), + 1000 * this.pullingActualIntervalDuration + ); } try { - let current: DBBlock|null = await server.dal.getCurrentBlockOrNull(); + let current: DBBlock | null = await server.dal.getCurrentBlockOrNull(); if (current) { - this.pullingEvent(server, 'start', current.number); + this.pullingEvent(server, "start", current.number); this.logger && this.logger.info("Pulling blocks from the network..."); let peers = await server.dal.findAllPeersBut([server.conf.pair.pub]); peers = Underscore.shuffle(peers); if (pubkey) { - peers = Underscore.filter(peers, (p:any) => p.pubkey == pubkey) + peers = Underscore.filter(peers, (p: any) => p.pubkey == pubkey); } // Shuffle the peers peers = Underscore.shuffle(peers); // Only take at max X of them - peers = peers.slice(0, CrawlerConstants.MAX_NUMBER_OF_PEERS_FOR_PULLING); - await Promise.all(peers.map(async (thePeer:any, i:number) => { - let p = PeerDTO.fromJSONObject(thePeer); - this.pullingEvent(server, 'peer', Underscore.extend({number: i, length: peers.length}, p)); - this.logger && this.logger.trace("Try with %s %s", p.getURL(), p.pubkey.substr(0, 6)); - try { - let node:any = await connect(p); - let nodeCurrent:BlockDTO|null = null - node.pubkey = p.pubkey; - await checkPeerValidity(server, p, node); - - let dao = new (class extends AbstractDAO { - - private lastDownloaded:BlockDTO|null - - constructor(private crawler:BlockCrawler) { - super() - } + peers = peers.slice( + 0, + CrawlerConstants.MAX_NUMBER_OF_PEERS_FOR_PULLING + ); + await Promise.all( + peers.map(async (thePeer: any, i: number) => { + let p = PeerDTO.fromJSONObject(thePeer); + this.pullingEvent( + server, + "peer", + Underscore.extend({ number: i, length: peers.length }, p) + ); + this.logger && + this.logger.trace( + "Try with %s %s", + p.getURL(), + p.pubkey.substr(0, 6) + ); + try { + let node: any = await connect(p); + let nodeCurrent: BlockDTO | null = null; + node.pubkey = p.pubkey; + await checkPeerValidity(server, p, node); + + let dao = new (class extends AbstractDAO { + private lastDownloaded: BlockDTO | null; + + constructor(private crawler: BlockCrawler) { + super(); + } - async localCurrent(): Promise<DBBlock | null> { - return server.dal.getCurrentBlockOrNull() - } - async remoteCurrent(source?: any): Promise<BlockDTO | null> { - nodeCurrent = await source.getCurrent() - return 
nodeCurrent - } - async remotePeers(source?: any): Promise<PeerDTO[]> { - return Promise.resolve([node]) - } - async getLocalBlock(number: number): Promise<DBBlock> { - return server.dal.getBlockWeHaveItForSure(number) - } - async getRemoteBlock(thePeer: any, number: number): Promise<BlockDTO> { - let block = null; - try { - block = await thePeer.getBlock(number); - tx_cleaner(block.transactions); - } catch (e) { - if (e.httpCode != 404) { - throw e; - } + async localCurrent(): Promise<DBBlock | null> { + return server.dal.getCurrentBlockOrNull(); } - return block; - } - async applyMainBranch(block: BlockDTO): Promise<boolean> { - const existing = await server.dal.getAbsoluteBlockByNumberAndHash(block.number, block.hash) - if (!existing) { - let addedBlock = await server.writeBlock(block, false, true) - if (!this.lastDownloaded) { - this.lastDownloaded = await dao.remoteCurrent(node); + async remoteCurrent(source?: any): Promise<BlockDTO | null> { + nodeCurrent = await source.getCurrent(); + return nodeCurrent; + } + async remotePeers(source?: any): Promise<PeerDTO[]> { + return Promise.resolve([node]); + } + async getLocalBlock(number: number): Promise<DBBlock> { + return server.dal.getBlockWeHaveItForSure(number); + } + async getRemoteBlock( + thePeer: any, + number: number + ): Promise<BlockDTO> { + let block = null; + try { + block = await thePeer.getBlock(number); + tx_cleaner(block.transactions); + } catch (e) { + if (e.httpCode != 404) { + throw e; + } } - this.crawler.pullingEvent(server, 'applying', {number: block.number, last: this.lastDownloaded && this.lastDownloaded.number}); - if (addedBlock) { - current = DBBlock.fromBlockDTO(addedBlock); - // Emit block events (for sharing with the network) only in forkWindowSize - if (nodeCurrent && nodeCurrent.number - addedBlock.number < server.conf.forksize) { - server.streamPush(addedBlock); + return block; + } + async applyMainBranch(block: BlockDTO): Promise<boolean> { + const existing = await server.dal.getAbsoluteBlockByNumberAndHash( + block.number, + block.hash + ); + if (!existing) { + let addedBlock = await server.writeBlock( + block, + false, + true + ); + if (!this.lastDownloaded) { + this.lastDownloaded = await dao.remoteCurrent(node); + } + this.crawler.pullingEvent(server, "applying", { + number: block.number, + last: this.lastDownloaded && this.lastDownloaded.number, + }); + if (addedBlock) { + current = DBBlock.fromBlockDTO(addedBlock); + // Emit block events (for sharing with the network) only in forkWindowSize + if ( + nodeCurrent && + nodeCurrent.number - addedBlock.number < + server.conf.forksize + ) { + server.streamPush(addedBlock); + } } } + return true; } - return true - } - async removeForks(): Promise<boolean> { - return true - } - async isMemberPeer(thePeer: PeerDTO): Promise<boolean> { - return true - } - async downloadBlocks(thePeer: any, fromNumber: number, count?: number | undefined): Promise<BlockDTO[]> { - if (!count) { - count = this.crawler.CONST_BLOCKS_CHUNK; + async removeForks(): Promise<boolean> { + return true; } - let blocks = await thePeer.getBlocks(count, fromNumber); - // Fix for #734 - for (const block of blocks) { - for (const tx of block.transactions) { - tx.version = CrawlerConstants.TRANSACTION_VERSION; + async isMemberPeer(thePeer: PeerDTO): Promise<boolean> { + return true; + } + async downloadBlocks( + thePeer: any, + fromNumber: number, + count?: number | undefined + ): Promise<BlockDTO[]> { + if (!count) { + count = this.crawler.CONST_BLOCKS_CHUNK; + } + let blocks = await 
thePeer.getBlocks(count, fromNumber); + // Fix for #734 + for (const block of blocks) { + for (const tx of block.transactions) { + tx.version = CrawlerConstants.TRANSACTION_VERSION; + } } + return blocks; } - return blocks; + })(this); + await dao.pull(server.conf, server.logger); + } catch (e) { + if (this.isConnectionError(e)) { + this.logger && + this.logger.info( + "Peer %s unreachable: now considered as DOWN.", + p.pubkey + ); + await server.dal.setPeerDown(p.pubkey); + } else if (e.httpCode == 404) { + this.logger && + this.logger.trace( + "No new block from %s %s", + p.pubkey.substr(0, 6), + p.getURL() + ); + } else { + this.logger && this.logger.warn(e); } - })(this) - await dao.pull(server.conf, server.logger) - } catch (e) { - if (this.isConnectionError(e)) { - this.logger && this.logger.info("Peer %s unreachable: now considered as DOWN.", p.pubkey); - await server.dal.setPeerDown(p.pubkey); - } - else if (e.httpCode == 404) { - this.logger && this.logger.trace("No new block from %s %s", p.pubkey.substr(0, 6), p.getURL()); - } - else { - this.logger && this.logger.warn(e); } + }) + ); + + await this.server.BlockchainService.pushFIFO( + "crawlerResolution", + async () => { + await server.BlockchainService.blockResolution(); + await server.BlockchainService.forkResolution(); } - })) - - await this.server.BlockchainService.pushFIFO("crawlerResolution", async () => { - await server.BlockchainService.blockResolution() - await server.BlockchainService.forkResolution() - }) + ); - this.pullingEvent(server, 'end', current.number); + this.pullingEvent(server, "end", current.number); } - this.logger && this.logger.info('Will pull blocks from the network in %s min %s sec', Math.floor(this.pullingActualIntervalDuration / 60), Math.floor(this.pullingActualIntervalDuration % 60)); - } catch(err) { - this.pullingEvent(server, 'error'); - this.logger && this.logger.warn(err.code || err.stack || err.message || err); + this.logger && + this.logger.info( + "Will pull blocks from the network in %s min %s sec", + Math.floor(this.pullingActualIntervalDuration / 60), + Math.floor(this.pullingActualIntervalDuration % 60) + ); + } catch (err) { + this.pullingEvent(server, "error"); + this.logger && + this.logger.warn(err.code || err.stack || err.message || err); } } - private pullingEvent(server:Server, type:string, number:any = null) { - server.pullingEvent(type, number) + private pullingEvent(server: Server, type: string, number: any = null) { + server.pullingEvent(type, number); } - private isConnectionError(err:any) { - return err && ( - err.code == "E_DUNITER_PEER_CHANGED" - || err.code == "EINVAL" - || err.code == "ECONNREFUSED" - || err.code == "ETIMEDOUT" - || (err.httpCode !== undefined && err.httpCode !== 404)); + private isConnectionError(err: any) { + return ( + err && + (err.code == "E_DUNITER_PEER_CHANGED" || + err.code == "EINVAL" || + err.code == "ECONNREFUSED" || + err.code == "ETIMEDOUT" || + (err.httpCode !== undefined && err.httpCode !== 404)) + ); } } -function chooseXin (peers:PeerDTO[], max:number) { +function chooseXin(peers: PeerDTO[], max: number) { const chosen = []; const nbPeers = peers.length; for (let i = 0; i < Math.min(nbPeers, max); i++) { - const randIndex = Math.max(Math.floor(Math.random() * 10) - (10 - nbPeers) - i, 0); + const randIndex = Math.max( + Math.floor(Math.random() * 10) - (10 - nbPeers) - i, + 0 + ); chosen.push(peers[randIndex]); peers.splice(randIndex, 1); } return chosen; } -const checkPeerValidity = async (server:Server, p:PeerDTO, node:any) => { 
+const checkPeerValidity = async (server: Server, p: PeerDTO, node: any) => { try { let document = await node.getPeer(); let thePeer = PeerDTO.fromJSONObject(document); let goodSignature = server.PeeringService.checkPeerSignature(thePeer); if (!goodSignature) { - throw 'Signature from a peer must match'; + throw "Signature from a peer must match"; } if (p.currency !== thePeer.currency) { - throw 'Currency has changed from ' + p.currency + ' to ' + thePeer.currency; + throw ( + "Currency has changed from " + p.currency + " to " + thePeer.currency + ); } if (p.pubkey !== thePeer.pubkey) { - throw 'Public key of the peer has changed from ' + p.pubkey + ' to ' + thePeer.pubkey; + throw ( + "Public key of the peer has changed from " + + p.pubkey + + " to " + + thePeer.pubkey + ); } - let sp1 = p.block.split('-'); - let sp2 = thePeer.block.split('-'); + let sp1 = p.block.split("-"); + let sp2 = thePeer.block.split("-"); let blockNumber1 = parseInt(sp1[0]); let blockNumber2 = parseInt(sp2[0]); if (blockNumber2 < blockNumber1) { - throw 'Signature date has changed from block ' + blockNumber1 + ' to older block ' + blockNumber2; + throw ( + "Signature date has changed from block " + + blockNumber1 + + " to older block " + + blockNumber2 + ); } } catch (e) { throw { code: "E_DUNITER_PEER_CHANGED" }; } -} +}; diff --git a/app/modules/crawler/lib/garbager.ts b/app/modules/crawler/lib/garbager.ts index 1533ba49bbf7360bff124b80ac3734600ef13509..fc2f011a57c086924fdb142ca0b6a6e122d52032 100644 --- a/app/modules/crawler/lib/garbager.ts +++ b/app/modules/crawler/lib/garbager.ts @@ -11,10 +11,14 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {CrawlerConstants} from "./constants" -import {Server} from "../../../../server" +import { CrawlerConstants } from "./constants"; +import { Server } from "../../../../server"; -export const cleanLongDownPeers = async (server:Server, now:number) => { - const first_down_limit = Math.floor((now - CrawlerConstants.PEER_LONG_DOWN * 1000) / 1000) - await server.dal.peerDAL.deleteNonWotPeersWhoseLastContactIsAbove(first_down_limit) -} +export const cleanLongDownPeers = async (server: Server, now: number) => { + const first_down_limit = Math.floor( + (now - CrawlerConstants.PEER_LONG_DOWN * 1000) / 1000 + ); + await server.dal.peerDAL.deleteNonWotPeersWhoseLastContactIsAbove( + first_down_limit + ); +}; diff --git a/app/modules/crawler/lib/pulling.ts b/app/modules/crawler/lib/pulling.ts index e82e5c2e948c7de7b14fd0bdfad6a5ee5386b443..dd619015e795614dbf5153dd7cc06444c39e4dc9 100644 --- a/app/modules/crawler/lib/pulling.ts +++ b/app/modules/crawler/lib/pulling.ts @@ -12,32 +12,35 @@ // GNU Affero General Public License for more details. 
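A note on the `garbager.ts` arithmetic above: `now` arrives in milliseconds (`Date.now()`), `PEER_LONG_DOWN` is expressed in seconds, and the resulting `first_down_limit` is a seconds timestamp, hence the multiply-then-divide by 1000. A minimal worked example — the constant's value here is an assumption for illustration, the real one lives in `CrawlerConstants`:

// Assumed value for illustration only; see CrawlerConstants.PEER_LONG_DOWN.
const PEER_LONG_DOWN = 60 * 60 * 24 * 10; // ten days, in seconds
const now = Date.now(); // milliseconds
const firstDownLimit = Math.floor((now - PEER_LONG_DOWN * 1000) / 1000);
// firstDownLimit is in seconds: it is the cutoff handed to
// deleteNonWotPeersWhoseLastContactIsAbove(firstDownLimit).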
"use strict"; -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {DBBlock} from "../../../lib/db/DBBlock" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {BranchingDTO} from "../../../lib/dto/ConfDTO" -import {Underscore} from "../../../lib/common-libs/underscore" +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { BranchingDTO } from "../../../lib/dto/ConfDTO"; +import { Underscore } from "../../../lib/common-libs/underscore"; export abstract class PullingDao { - abstract applyBranch(blocks:BlockDTO[]): Promise<boolean> - abstract localCurrent(): Promise<DBBlock|null> - abstract remoteCurrent(source?:any): Promise<BlockDTO|null> - abstract remotePeers(source?:any): Promise<PeerDTO[]> - abstract getLocalBlock(number:number): Promise<DBBlock> - abstract getRemoteBlock(thePeer:PeerDTO, number:number): Promise<BlockDTO> - abstract applyMainBranch(block:BlockDTO): Promise<boolean> - abstract removeForks(): Promise<boolean> - abstract isMemberPeer(thePeer:PeerDTO): Promise<boolean> - abstract downloadBlocks(thePeer:PeerDTO, fromNumber:number, count?:number): Promise<BlockDTO[]> + abstract applyBranch(blocks: BlockDTO[]): Promise<boolean>; + abstract localCurrent(): Promise<DBBlock | null>; + abstract remoteCurrent(source?: any): Promise<BlockDTO | null>; + abstract remotePeers(source?: any): Promise<PeerDTO[]>; + abstract getLocalBlock(number: number): Promise<DBBlock>; + abstract getRemoteBlock(thePeer: PeerDTO, number: number): Promise<BlockDTO>; + abstract applyMainBranch(block: BlockDTO): Promise<boolean>; + abstract removeForks(): Promise<boolean>; + abstract isMemberPeer(thePeer: PeerDTO): Promise<boolean>; + abstract downloadBlocks( + thePeer: PeerDTO, + fromNumber: number, + count?: number + ): Promise<BlockDTO[]>; } export abstract class AbstractDAO extends PullingDao { - /** * Sugar function. Apply a bunch of blocks instead of one. * @param blocks */ - async applyBranch (blocks:BlockDTO[]) { + async applyBranch(blocks: BlockDTO[]) { for (const block of blocks) { await this.applyMainBranch(block); } @@ -50,46 +53,46 @@ export abstract class AbstractDAO extends PullingDao { * @param forksize The maximum length we can look at to find common root block. * @returns {*|Promise} */ - async findCommonRoot(fork:any, forksize:number) { + async findCommonRoot(fork: any, forksize: number) { let commonRoot = null; let localCurrent = await this.localCurrent(); if (!localCurrent) { - throw Error('Local blockchain is empty, cannot find a common root') + throw Error("Local blockchain is empty, cannot find a common root"); } // We look between the top block that is known as fork ... let topBlock = fork.block; // ... and the bottom which is bounded by `forksize` - let bottomBlock = await this.getRemoteBlock(fork.peer, Math.max(0, localCurrent.number - forksize)); + let bottomBlock = await this.getRemoteBlock( + fork.peer, + Math.max(0, localCurrent.number - forksize) + ); let lookBlock = bottomBlock; let localEquivalent = await this.getLocalBlock(bottomBlock.number); let isCommonBlock = lookBlock.hash == localEquivalent.hash; if (isCommonBlock) { - // Then common root can be found between top and bottom. We process. 
- let position = -1, wrongRemotechain = false; + let position = -1, + wrongRemotechain = false; do { - isCommonBlock = lookBlock.hash == localEquivalent.hash; if (!isCommonBlock) { - // Too high, look downward topBlock = lookBlock; position = middle(topBlock.number, bottomBlock.number); - } - else { - let upperBlock = await this.getRemoteBlock(fork.peer, lookBlock.number + 1); + } else { + let upperBlock = await this.getRemoteBlock( + fork.peer, + lookBlock.number + 1 + ); let localUpper = await this.getLocalBlock(upperBlock.number); let isCommonUpper = upperBlock.hash == localUpper.hash; if (isCommonUpper) { - // Too low, look upward bottomBlock = lookBlock; position = middle(topBlock.number, bottomBlock.number); - } - else { - + } else { // Spotted! commonRoot = lookBlock; } @@ -113,9 +116,9 @@ export abstract class AbstractDAO extends PullingDao { } static defaultLocalBlock() { - const localCurrent = new DBBlock() - localCurrent.number = -1 - return localCurrent + const localCurrent = new DBBlock(); + localCurrent.number = -1; + return localCurrent; } /** @@ -125,47 +128,52 @@ export abstract class AbstractDAO extends PullingDao { * @param dao An abstract layer to retrieve peers data (blocks). * @param logger Logger of the main application. */ - async pull(conf:BranchingDTO, logger:any) { - let localCurrent:DBBlock = await this.localCurrent() || AbstractDAO.defaultLocalBlock() - const forks:any = []; + async pull(conf: BranchingDTO, logger: any) { + let localCurrent: DBBlock = + (await this.localCurrent()) || AbstractDAO.defaultLocalBlock(); + const forks: any = []; if (!localCurrent) { - localCurrent = new DBBlock() - localCurrent.number = -1 + localCurrent = new DBBlock(); + localCurrent.number = -1; } - const applyCoroutine = async (peer:PeerDTO, blocks:BlockDTO[]) => { + const applyCoroutine = async (peer: PeerDTO, blocks: BlockDTO[]) => { if (blocks.length > 0) { - let isFork = localCurrent - && localCurrent.number !== -1 - && !(blocks[0].previousHash == localCurrent.hash - && blocks[0].number == localCurrent.number + 1); + let isFork = + localCurrent && + localCurrent.number !== -1 && + !( + blocks[0].previousHash == localCurrent.hash && + blocks[0].number == localCurrent.number + 1 + ); if (!isFork) { await this.applyBranch(blocks); - const newLocalCurrent = await this.localCurrent() - localCurrent = newLocalCurrent || AbstractDAO.defaultLocalBlock() - const appliedSuccessfully = localCurrent.number == blocks[blocks.length - 1].number - && localCurrent.hash == blocks[blocks.length - 1].hash; + const newLocalCurrent = await this.localCurrent(); + localCurrent = newLocalCurrent || AbstractDAO.defaultLocalBlock(); + const appliedSuccessfully = + localCurrent.number == blocks[blocks.length - 1].number && + localCurrent.hash == blocks[blocks.length - 1].hash; return appliedSuccessfully; } else { let remoteCurrent = await this.remoteCurrent(peer); forks.push({ peer: peer, block: blocks[0], - current: remoteCurrent + current: remoteCurrent, }); return false; } } return true; - } + }; - const downloadCoroutine = async (peer:any, number:number) => { + const downloadCoroutine = async (peer: any, number: number) => { return await this.downloadBlocks(peer, number); - } + }; - const downloadChuncks = async (peer:PeerDTO) => { - let blocksToApply:BlockDTO[] = []; + const downloadChuncks = async (peer: PeerDTO) => { + let blocksToApply: BlockDTO[] = []; const currentBlock = await this.localCurrent(); let currentChunckStart; if (currentBlock) { @@ -173,21 +181,21 @@ export abstract class 
AbstractDAO extends PullingDao { } else { currentChunckStart = 0; } - let res:any = { applied: {}, downloaded: [] } + let res: any = { applied: {}, downloaded: [] }; do { - let [ applied, downloaded ] = await Promise.all([ + let [applied, downloaded] = await Promise.all([ applyCoroutine(peer, blocksToApply), - downloadCoroutine(peer, currentChunckStart) - ]) - res.applied = applied - res.downloaded = downloaded + downloadCoroutine(peer, currentChunckStart), + ]); + res.applied = applied; + res.downloaded = downloaded; blocksToApply = downloaded; currentChunckStart += downloaded.length; if (!applied) { - logger && logger.info("Blocks were not applied.") + logger && logger.info("Blocks were not applied."); } } while (res.downloaded.length > 0 && res.applied); - } + }; let peers = await this.remotePeers(); // Try to get new legit blocks for local blockchain @@ -195,7 +203,7 @@ export abstract class AbstractDAO extends PullingDao { for (const peer of peers) { downloadChuncksTasks.push(downloadChuncks(peer)); } - await Promise.all(downloadChuncksTasks) + await Promise.all(downloadChuncksTasks); // Filter forks: do not include mirror peers (non-member peers) let memberForks = []; for (const fork of forks) { @@ -211,26 +219,45 @@ export abstract class AbstractDAO extends PullingDao { } return result; }); - memberForks = Underscore.filter(memberForks, (fork:any) => { - let blockDistanceInBlocks = (fork.current.number - localCurrent.number) - let timeDistanceInBlocks = (fork.current.medianTime - localCurrent.medianTime) / conf.avgGenTime - const requiredTimeAdvance = conf.switchOnHeadAdvance - logger && logger.debug('Fork of %s has blockDistance %s ; timeDistance %s ; required is >= %s for both values to try to follow the fork', fork.peer.pubkey.substr(0, 6), blockDistanceInBlocks.toFixed(2), timeDistanceInBlocks.toFixed(2), requiredTimeAdvance); - return blockDistanceInBlocks >= requiredTimeAdvance - && timeDistanceInBlocks >= requiredTimeAdvance + memberForks = Underscore.filter(memberForks, (fork: any) => { + let blockDistanceInBlocks = fork.current.number - localCurrent.number; + let timeDistanceInBlocks = + (fork.current.medianTime - localCurrent.medianTime) / conf.avgGenTime; + const requiredTimeAdvance = conf.switchOnHeadAdvance; + logger && + logger.debug( + "Fork of %s has blockDistance %s ; timeDistance %s ; required is >= %s for both values to try to follow the fork", + fork.peer.pubkey.substr(0, 6), + blockDistanceInBlocks.toFixed(2), + timeDistanceInBlocks.toFixed(2), + requiredTimeAdvance + ); + return ( + blockDistanceInBlocks >= requiredTimeAdvance && + timeDistanceInBlocks >= requiredTimeAdvance + ); }); // Remove any previous fork block await this.removeForks(); // Find the common root block - let j = 0, successFork = false; + let j = 0, + successFork = false; while (!successFork && j < memberForks.length) { let fork = memberForks[j]; let commonRootBlock = await this.findCommonRoot(fork, conf.forksize); if (commonRootBlock) { - let blocksToApply = await this.downloadBlocks(fork.peer, commonRootBlock.number + 1, conf.forksize); + let blocksToApply = await this.downloadBlocks( + fork.peer, + commonRootBlock.number + 1, + conf.forksize + ); successFork = await this.applyBranch(blocksToApply); } else { - logger && logger.debug('No common root block with peer %s', fork.peer.pubkey.substr(0, 6)); + logger && + logger.debug( + "No common root block with peer %s", + fork.peer.pubkey.substr(0, 6) + ); } j++; } @@ -238,7 +265,7 @@ export abstract class AbstractDAO extends PullingDao { } } 
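The `downloadChuncks` loop above is a two-stage pipeline: each turn applies the chunk downloaded on the previous turn while concurrently fetching the next one, which is why `applyCoroutine` and `downloadCoroutine` are joined in a single `Promise.all`. A stripped-down sketch of the same pattern (the block type and both callbacks are placeholders):

// Pipeline sketch: apply(chunk i) overlaps with fetch(chunk i + 1).
async function pipelinedPull(
  fetch: (fromNumber: number) => Promise<number[]>,
  apply: (blocks: number[]) => Promise<boolean>,
  start: number
): Promise<void> {
  let toApply: number[] = [];
  let from = start;
  let applied = true;
  let downloaded: number[] = [];
  do {
    // Apply the previous chunk while the next one downloads.
    [applied, downloaded] = await Promise.all([apply(toApply), fetch(from)]);
    toApply = downloaded;
    from += downloaded.length;
  } while (downloaded.length > 0 && applied);
}

Note that `apply([])` must be a cheap success on the first turn, exactly as `applyCoroutine` returns `true` when handed an empty array.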
-function compare(f1:any, f2:any, field:string) { +function compare(f1: any, f2: any, field: string) { if (f1[field] > f2[field]) { return 1; } @@ -248,11 +275,11 @@ function compare(f1:any, f2:any, field:string) { return 0; } -function middle(top:number, bottom:number) { +function middle(top: number, bottom: number) { let difference = top - bottom; if (difference % 2 == 1) { // We look one step below to not forget any block difference++; } - return bottom + (difference / 2); + return bottom + difference / 2; } diff --git a/app/modules/crawler/lib/req2fwd.ts b/app/modules/crawler/lib/req2fwd.ts index 8e7519030871b9b9912b597f959c15df65e9d971..39b71672546cc8d07a9bae52bb3484d1ca23b306 100644 --- a/app/modules/crawler/lib/req2fwd.ts +++ b/app/modules/crawler/lib/req2fwd.ts @@ -11,105 +11,116 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {Contacter} from "./contacter" -import {verify} from "duniteroxyde" -import {rawer} from "../../../lib/common-libs/index" -import {HttpRequirements} from "../../bma/lib/dtos" +import { Contacter } from "./contacter"; +import { verify } from "duniteroxyde"; +import { rawer } from "../../../lib/common-libs/index"; +import { HttpRequirements } from "../../bma/lib/dtos"; -export const req2fwd = async (requirements: HttpRequirements, toHost:string, toPort:number, logger:any) => { - const mss:any = {}; - const identities:any = {}; - const certs:any = {}; +export const req2fwd = async ( + requirements: HttpRequirements, + toHost: string, + toPort: number, + logger: any +) => { + const mss: any = {}; + const identities: any = {}; + const certs: any = {}; const targetPeer = new Contacter(toHost, toPort, { timeout: 10000 }); // Identities for (const idty of requirements.identities) { try { - const iid = [idty.pubkey, idty.uid, idty.meta.timestamp].join('-'); + const iid = [idty.pubkey, idty.uid, idty.meta.timestamp].join("-"); if (!identities[iid]) { - logger.info('New identity %s', idty.uid); + logger.info("New identity %s", idty.uid); identities[iid] = idty; try { const rawIdty = rawer.getOfficialIdentity({ - currency: 'g1', + currency: "g1", issuer: idty.pubkey, uid: idty.uid, buid: idty.meta.timestamp, - sig: idty.sig + sig: idty.sig, }); await targetPeer.postIdentity(rawIdty); - logger.info('Success idty %s', idty.uid); + logger.info("Success idty %s", idty.uid); } catch (e) { - logger.warn('Rejected idty %s...', idty.uid, e); + logger.warn("Rejected idty %s...", idty.uid, e); } if (idty.revocation_sig) { - logger.info('New revocation %s', idty.uid); + logger.info("New revocation %s", idty.uid); const revocation = rawer.getOfficialRevocation({ - currency: 'g1', // TODO: generalize - uid: idty.uid, - issuer: idty.pubkey, - buid: idty.meta.timestamp, - sig: idty.sig, - revocation: idty.revocation_sig - }) + currency: "g1", // TODO: generalize + uid: idty.uid, + issuer: idty.pubkey, + buid: idty.meta.timestamp, + sig: idty.sig, + revocation: idty.revocation_sig, + }); await targetPeer.postRevocation(revocation); } } for (const received of idty.pendingCerts) { - const cid = [received.from, iid].join('-'); + const cid = [received.from, iid].join("-"); if (!certs[cid]) { await new Promise((res) => setTimeout(res, 300)); certs[cid] = received; const rawCert = rawer.getOfficialCertification({ - currency: 'g1', + currency: "g1", issuer: received.from, idty_issuer: idty.pubkey, idty_uid: idty.uid, idty_buid: idty.meta.timestamp, idty_sig: idty.sig, buid: received.blockstamp, - sig: 
received.sig + sig: received.sig, }); const rawCertNoSig = rawer.getOfficialCertification({ - currency: 'g1', + currency: "g1", issuer: received.from, idty_issuer: idty.pubkey, idty_uid: idty.uid, idty_buid: idty.meta.timestamp, idty_sig: idty.sig, - buid: received.blockstamp + buid: received.blockstamp, }); try { - const chkSig = verify(rawCertNoSig, received.sig, received.from) + const chkSig = verify(rawCertNoSig, received.sig, received.from); if (!chkSig) { - throw "Wrong signature for certification?!" + throw "Wrong signature for certification?!"; } await targetPeer.postCert(rawCert); - logger.info('Success cert %s -> %s', received.from, idty.uid); + logger.info("Success cert %s -> %s", received.from, idty.uid); } catch (e) { - logger.warn('Rejected cert %s -> %s', received.from, idty.uid, received.blockstamp.substr(0,18), e); + logger.warn( + "Rejected cert %s -> %s", + received.from, + idty.uid, + received.blockstamp.substr(0, 18), + e + ); } } } for (const theMS of idty.pendingMemberships) { // + Membership - const id = [idty.pubkey, idty.uid, theMS.blockstamp].join('-'); + const id = [idty.pubkey, idty.uid, theMS.blockstamp].join("-"); if (!mss[id]) { - mss[id] = theMS + mss[id] = theMS; try { const rawMS = rawer.getMembership({ - currency: 'g1', + currency: "g1", issuer: idty.pubkey, userid: idty.uid, block: theMS.blockstamp, membership: theMS.type, certts: idty.meta.timestamp, - signature: theMS.sig + signature: theMS.sig, }); await targetPeer.postRenew(rawMS); - logger.info('Success ms idty %s', idty.uid); + logger.info("Success ms idty %s", idty.uid); } catch (e) { - logger.warn('Rejected ms idty %s', idty.uid, e); + logger.warn("Rejected ms idty %s", idty.uid, e); } } } @@ -117,4 +128,4 @@ export const req2fwd = async (requirements: HttpRequirements, toHost:string, toP logger.warn(e); } } -} \ No newline at end of file +}; diff --git a/app/modules/crawler/lib/sandbox.ts b/app/modules/crawler/lib/sandbox.ts index 7f1715d0b3529a84f902a69bac8d6d7b8fbbc4f3..82add1d4c20bc7f35637618be74ef79454661c61 100644 --- a/app/modules/crawler/lib/sandbox.ts +++ b/app/modules/crawler/lib/sandbox.ts @@ -12,225 +12,307 @@ // GNU Affero General Public License for more details. 
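One detail of `req2fwd` above deserves a callout: before posting a certification, it rebuilds the raw document *without* its signature and verifies the signature against that unsigned form; only the fully signed raw is forwarded. The shape of that check, with the verifier passed in rather than imported (a hedged stand-in for duniteroxyde's `verify`):

// Sketch of the verify-then-forward step; verifyFn stands in for
// duniteroxyde's verify(raw, sig, pubkey) used in the diff.
async function checkThenForward(
  rawWithSig: string,
  rawWithoutSig: string,
  sig: string,
  issuerPubkey: string,
  verifyFn: (raw: string, sig: string, pub: string) => boolean,
  post: (raw: string) => Promise<void>
): Promise<void> {
  if (!verifyFn(rawWithoutSig, sig, issuerPubkey)) {
    // Mirrors the code above: a bad signature aborts the forward.
    throw "Wrong signature for certification?!";
  }
  await post(rawWithSig);
}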
"use strict"; -import {Server} from "../../../../server" -import {rawer} from "../../../lib/common-libs/index" -import {parsers} from "../../../lib/common-libs/parsers/index" -import {IRemoteContacter} from "./sync/IRemoteContacter" -import {HttpRequirements} from "../../bma/lib/dtos" -import {Watcher} from "./sync/Watcher" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {connect} from "./connect" - -export const pullSandboxToLocalServer = async (currency:string, fromHost:IRemoteContacter, toServer:Server, logger:any, watcher:any = null, nbCertsMin = 1, notify = true) => { - let res +import { Server } from "../../../../server"; +import { rawer } from "../../../lib/common-libs/index"; +import { parsers } from "../../../lib/common-libs/parsers/index"; +import { IRemoteContacter } from "./sync/IRemoteContacter"; +import { HttpRequirements } from "../../bma/lib/dtos"; +import { Watcher } from "./sync/Watcher"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { connect } from "./connect"; + +export const pullSandboxToLocalServer = async ( + currency: string, + fromHost: IRemoteContacter, + toServer: Server, + logger: any, + watcher: any = null, + nbCertsMin = 1, + notify = true +) => { + let res; try { - res = await fromHost.getRequirementsPending(nbCertsMin || 1) + res = await fromHost.getRequirementsPending(nbCertsMin || 1); } catch (e) { - watcher && watcher.writeStatus('Sandbox pulling: could not fetch requirements on %s', fromHost.getName()) + watcher && + watcher.writeStatus( + "Sandbox pulling: could not fetch requirements on %s", + fromHost.getName() + ); } if (res) { - await applyMempoolRequirements(currency, res, toServer) + await applyMempoolRequirements(currency, res, toServer); } -} - -export async function applyMempoolRequirements(currency: string, res: HttpRequirements, toServer: Server, notify = true, logger?: any, watcher?: Watcher) { +}; - const docs = getDocumentsTree(currency, res) +export async function applyMempoolRequirements( + currency: string, + res: HttpRequirements, + toServer: Server, + notify = true, + logger?: any, + watcher?: Watcher +) { + const docs = getDocumentsTree(currency, res); - let t = 0 - let T = docs.identities.length + docs.certifications.length + docs.revocations.length + docs.memberships.length + let t = 0; + let T = + docs.identities.length + + docs.certifications.length + + docs.revocations.length + + docs.memberships.length; for (let i = 0; i < docs.identities.length; i++) { const idty = docs.identities[i]; - watcher && watcher.writeStatus('Identity ' + (i+1) + '/' + docs.identities.length) - watcher && watcher.sbxPercent((t++) / T * 100) - await submitIdentityToServer(idty, toServer, notify, logger) + watcher && + watcher.writeStatus("Identity " + (i + 1) + "/" + docs.identities.length); + watcher && watcher.sbxPercent((t++ / T) * 100); + await submitIdentityToServer(idty, toServer, notify, logger); } for (let i = 0; i < docs.revocations.length; i++) { const idty = docs.revocations[i]; - watcher && watcher.writeStatus('Revocation ' + (i+1) + '/' + docs.revocations.length) - watcher && watcher.sbxPercent((t++) / T * 100) - await submitRevocationToServer(idty, toServer, notify, logger) + watcher && + watcher.writeStatus( + "Revocation " + (i + 1) + "/" + docs.revocations.length + ); + watcher && watcher.sbxPercent((t++ / T) * 100); + await submitRevocationToServer(idty, toServer, notify, logger); } for (let i = 0; i < docs.certifications.length; i++) { const cert = docs.certifications[i]; - watcher && 
watcher.writeStatus('Certification ' + (i+1) + '/' + docs.certifications.length) - watcher && watcher.sbxPercent((t++) / T * 100) - await submitCertificationToServer(cert, toServer, notify, logger) + watcher && + watcher.writeStatus( + "Certification " + (i + 1) + "/" + docs.certifications.length + ); + watcher && watcher.sbxPercent((t++ / T) * 100); + await submitCertificationToServer(cert, toServer, notify, logger); } for (let i = 0; i < docs.memberships.length; i++) { const ms = docs.memberships[i]; - watcher && watcher.writeStatus('Membership ' + (i+1) + '/' + docs.memberships.length) - watcher && watcher.sbxPercent((t++) / T * 100) - await submitMembershipToServer(ms, toServer, notify, logger) + watcher && + watcher.writeStatus( + "Membership " + (i + 1) + "/" + docs.memberships.length + ); + watcher && watcher.sbxPercent((t++ / T) * 100); + await submitMembershipToServer(ms, toServer, notify, logger); } - watcher && watcher.sbxPercent(100) + watcher && watcher.sbxPercent(100); } -export async function forwardToServer(currency: string, res: HttpRequirements, toHost: string, toPort: string, logger?: any, watcher?: Watcher) { - - const docs = getDocumentsTree(currency, res) - const [port, path] = toPort.split('/') - let ep = [port == '443' ? 'BMAS' : 'BASIC_MERKLED_API', toHost, port].join(' ') +export async function forwardToServer( + currency: string, + res: HttpRequirements, + toHost: string, + toPort: string, + logger?: any, + watcher?: Watcher +) { + const docs = getDocumentsTree(currency, res); + const [port, path] = toPort.split("/"); + let ep = [port == "443" ? "BMAS" : "BASIC_MERKLED_API", toHost, port].join( + " " + ); if (path) { - ep += ' /' + path + ep += " /" + path; } - toPort = ':' + toPort - const peer = PeerDTO.fromJSONObject({endpoints: [ep]}) - logger.info('Forwarded to %s...', toHost + toPort) - const target = await connect(peer) + toPort = ":" + toPort; + const peer = PeerDTO.fromJSONObject({ endpoints: [ep] }); + logger.info("Forwarded to %s...", toHost + toPort); + const target = await connect(peer); - let t = 0 - let T = docs.identities.length + docs.certifications.length + docs.revocations.length + docs.memberships.length + let t = 0; + let T = + docs.identities.length + + docs.certifications.length + + docs.revocations.length + + docs.memberships.length; for (let i = 0; i < docs.identities.length; i++) { const idty = docs.identities[i]; - watcher && watcher.writeStatus('Identity ' + (i+1) + '/' + docs.identities.length) - watcher && watcher.sbxPercent((t++) / T * 100) + watcher && + watcher.writeStatus("Identity " + (i + 1) + "/" + docs.identities.length); + watcher && watcher.sbxPercent((t++ / T) * 100); try { - await target.postIdentity(idty) - logger.info('Forwarded identity to %s...', toHost + toPort) + await target.postIdentity(idty); + logger.info("Forwarded identity to %s...", toHost + toPort); } catch (e) { - logger.warn(e) + logger.warn(e); } } for (let i = 0; i < docs.revocations.length; i++) { const revo = docs.revocations[i]; - watcher && watcher.writeStatus('Revocation ' + (i+1) + '/' + docs.revocations.length) - watcher && watcher.sbxPercent((t++) / T * 100) + watcher && + watcher.writeStatus( + "Revocation " + (i + 1) + "/" + docs.revocations.length + ); + watcher && watcher.sbxPercent((t++ / T) * 100); try { - await target.postRevocation(revo) - logger.info('Forwarded revocation to %s...', toHost + toPort) + await target.postRevocation(revo); + logger.info("Forwarded revocation to %s...", toHost + toPort); } catch (e) { - logger.warn(e) + 
logger.warn(e); } } for (let i = 0; i < docs.certifications.length; i++) { const cert = docs.certifications[i]; - watcher && watcher.writeStatus('Certification ' + (i+1) + '/' + docs.certifications.length) - watcher && watcher.sbxPercent((t++) / T * 100) + watcher && + watcher.writeStatus( + "Certification " + (i + 1) + "/" + docs.certifications.length + ); + watcher && watcher.sbxPercent((t++ / T) * 100); try { - await target.postCert(cert) - logger.info('Forwarded cert to %s...', toHost + toPort) + await target.postCert(cert); + logger.info("Forwarded cert to %s...", toHost + toPort); } catch (e) { - logger.warn(e) + logger.warn(e); } } for (let i = 0; i < docs.memberships.length; i++) { const ms = docs.memberships[i]; - watcher && watcher.writeStatus('Membership ' + (i+1) + '/' + docs.memberships.length) - watcher && watcher.sbxPercent((t++) / T * 100) + watcher && + watcher.writeStatus( + "Membership " + (i + 1) + "/" + docs.memberships.length + ); + watcher && watcher.sbxPercent((t++ / T) * 100); try { - await target.postRenew(ms) - logger.info('Forwarded membership to %s...', toHost + toPort) + await target.postRenew(ms); + logger.info("Forwarded membership to %s...", toHost + toPort); } catch (e) { - logger.warn(e) + logger.warn(e); } } - watcher && watcher.sbxPercent(100) + watcher && watcher.sbxPercent(100); } -function getDocumentsTree(currency:string, res:HttpRequirements) { - const documents:any = { +function getDocumentsTree(currency: string, res: HttpRequirements) { + const documents: any = { identities: [], certifications: [], memberships: [], - revocations: [] - } - for(const idty of res.identities) { + revocations: [], + }; + for (const idty of res.identities) { const identity = rawer.getOfficialIdentity({ currency, - uid: idty.uid, - pubkey: idty.pubkey, - buid: idty.meta.timestamp, - sig: idty.sig - }) + uid: idty.uid, + pubkey: idty.pubkey, + buid: idty.meta.timestamp, + sig: idty.sig, + }); if (idty.revocation_sig) { const revocation = rawer.getOfficialRevocation({ currency, - uid: idty.uid, - issuer: idty.pubkey, - buid: idty.meta.timestamp, - sig: idty.sig, - revocation: idty.revocation_sig - }) - documents.revocations.push(revocation) + uid: idty.uid, + issuer: idty.pubkey, + buid: idty.meta.timestamp, + sig: idty.sig, + revocation: idty.revocation_sig, + }); + documents.revocations.push(revocation); } - documents.identities.push(identity) + documents.identities.push(identity); for (const cert of idty.pendingCerts) { const certification = rawer.getOfficialCertification({ currency, idty_issuer: idty.pubkey, - idty_uid: idty.uid, - idty_buid: idty.meta.timestamp, - idty_sig: idty.sig, - issuer: cert.from, - buid: cert.blockstamp, - sig: cert.sig - }) - documents.certifications.push(certification) + idty_uid: idty.uid, + idty_buid: idty.meta.timestamp, + idty_sig: idty.sig, + issuer: cert.from, + buid: cert.blockstamp, + sig: cert.sig, + }); + documents.certifications.push(certification); } for (const ms of idty.pendingMemberships) { const membership = rawer.getMembership({ currency, - userid: idty.uid, - issuer: idty.pubkey, - certts: idty.meta.timestamp, + userid: idty.uid, + issuer: idty.pubkey, + certts: idty.meta.timestamp, membership: ms.type, - block: ms.blockstamp, - signature: ms.sig - }) - documents.memberships.push(membership) + block: ms.blockstamp, + signature: ms.sig, + }); + documents.memberships.push(membership); } } - return documents + return documents; } -async function submitIdentityToServer(idty:any, toServer:any, notify:boolean, logger:any) { 
+async function submitIdentityToServer( + idty: any, + toServer: any, + notify: boolean, + logger: any +) { try { - const obj = parsers.parseIdentity.syncWrite(idty) - await toServer.writeIdentity(obj, notify) - logger && logger.trace('Sandbox pulling: success with identity \'%s\'', obj.uid) + const obj = parsers.parseIdentity.syncWrite(idty); + await toServer.writeIdentity(obj, notify); + logger && + logger.trace("Sandbox pulling: success with identity '%s'", obj.uid); } catch (e) { // Silent error } } -async function submitRevocationToServer(revocation:any, toServer:any, notify:boolean, logger:any) { +async function submitRevocationToServer( + revocation: any, + toServer: any, + notify: boolean, + logger: any +) { try { - const obj = parsers.parseRevocation.syncWrite(revocation) - await toServer.writeRevocation(obj, notify) - logger && logger.trace('Sandbox pulling: success with revocation \'%s\'', obj.uid) + const obj = parsers.parseRevocation.syncWrite(revocation); + await toServer.writeRevocation(obj, notify); + logger && + logger.trace("Sandbox pulling: success with revocation '%s'", obj.uid); } catch (e) { // Silent error } } -async function submitCertificationToServer(cert:any, toServer:any, notify:boolean, logger:any) { +async function submitCertificationToServer( + cert: any, + toServer: any, + notify: boolean, + logger: any +) { try { - const obj = parsers.parseCertification.syncWrite(cert) - await toServer.writeCertification(obj, notify) - logger && logger.trace('Sandbox pulling: success with cert key %s => %s', cert.from.substr(0, 6), cert.idty_uid) + const obj = parsers.parseCertification.syncWrite(cert); + await toServer.writeCertification(obj, notify); + logger && + logger.trace( + "Sandbox pulling: success with cert key %s => %s", + cert.from.substr(0, 6), + cert.idty_uid + ); } catch (e) { // Silent error } } -async function submitMembershipToServer(ms:any, toServer:any, notify:boolean, logger:any) { +async function submitMembershipToServer( + ms: any, + toServer: any, + notify: boolean, + logger: any +) { try { - const obj = parsers.parseMembership.syncWrite(ms) - await toServer.writeMembership(obj, notify) - logger && logger.trace('Sandbox pulling: success with membership \'%s\'', ms.uid) + const obj = parsers.parseMembership.syncWrite(ms); + await toServer.writeMembership(obj, notify); + logger && + logger.trace("Sandbox pulling: success with membership '%s'", ms.uid); } catch (e) { // Silent error } diff --git a/app/modules/crawler/lib/sync.ts b/app/modules/crawler/lib/sync.ts index 8b3f56cc78e465bfc2c97bd9bb944f358c51ce71..1cab9817ff344f4bf5534da1efee9e61f32d7c71 100644 --- a/app/modules/crawler/lib/sync.ts +++ b/app/modules/crawler/lib/sync.ts @@ -11,56 +11,87 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
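Across `applyMempoolRequirements` and `forwardToServer` above, progress is reported with one counter for all four document kinds: `T` totals identities, revocations, certifications and memberships, each submission bumps `t`, and `(t++ / T) * 100` yields a percentage that rises monotonically through the phases. In miniature (batch labels and the callback are illustrative):

// Single-counter progress over heterogeneous batches, as in the sandbox code.
function reportProgress(
  batches: { label: string; items: string[] }[],
  onPercent: (pct: number) => void
): void {
  const T = batches.reduce((sum, b) => sum + b.items.length, 0); // grand total
  let t = 0;
  for (const batch of batches) {
    for (const _item of batch.items) {
      onPercent((t++ / T) * 100); // same formula as the diff
      // ... submit _item to the target server here ...
    }
  }
  onPercent(100); // final tick, mirroring watcher.sbxPercent(100)
}

(When `T` is zero the loops never run, so the division is never evaluated and only the final 100% tick fires.)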
-import * as stream from "stream" -import * as moment from "moment" -import {Server} from "../../../../server" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {DBBlock} from "../../../lib/db/DBBlock" -import {ConfDTO} from "../../../lib/dto/ConfDTO" -import {PeeringService} from "../../../service/PeeringService" -import {EventWatcher, LoggerWatcher, MultimeterWatcher} from "./sync/Watcher" -import {AbstractSynchronizer} from "./sync/AbstractSynchronizer" -import {DownloadStream} from "./sync/v2/DownloadStream" -import {LocalIndexStream} from "./sync/v2/LocalIndexStream" -import {GlobalIndexStream} from "./sync/v2/GlobalIndexStream" -import {BlockchainService} from "../../../service/BlockchainService" -import {FileDAL} from "../../../lib/dal/fileDAL" -import {cliprogram} from "../../../lib/common-libs/programOptions" -import {ValidatorStream} from "./sync/v2/ValidatorStream" +import * as stream from "stream"; +import * as moment from "moment"; +import { Server } from "../../../../server"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { ConfDTO } from "../../../lib/dto/ConfDTO"; +import { PeeringService } from "../../../service/PeeringService"; +import { EventWatcher, LoggerWatcher, MultimeterWatcher } from "./sync/Watcher"; +import { AbstractSynchronizer } from "./sync/AbstractSynchronizer"; +import { DownloadStream } from "./sync/v2/DownloadStream"; +import { LocalIndexStream } from "./sync/v2/LocalIndexStream"; +import { GlobalIndexStream } from "./sync/v2/GlobalIndexStream"; +import { BlockchainService } from "../../../service/BlockchainService"; +import { FileDAL } from "../../../lib/dal/fileDAL"; +import { cliprogram } from "../../../lib/common-libs/programOptions"; +import { ValidatorStream } from "./sync/v2/ValidatorStream"; export class Synchroniser extends stream.Duplex { - - private watcher:EventWatcher - private speed = 0 - private blocksApplied = 0 + private watcher: EventWatcher; + private speed = 0; + private blocksApplied = 0; constructor( - private server:Server, + private server: Server, private syncStrategy: AbstractSynchronizer, - interactive = false) { - - super({ objectMode: true }) + interactive = false + ) { + super({ objectMode: true }); // Wrapper to also push event stream - this.watcher = new EventWatcher(interactive ? 
new MultimeterWatcher() : new LoggerWatcher(this.logger)) - this.watcher.onEvent('downloadChange', () => this.push(this.watcher.getStats())) - this.watcher.onEvent('storageChange', () => this.push(this.watcher.getStats())) - this.watcher.onEvent('appliedChange', () => this.push(this.watcher.getStats())) - this.watcher.onEvent('sbxChange', () => this.push(this.watcher.getStats())) - this.watcher.onEvent('peersChange', () => this.push(this.watcher.getStats())) - this.watcher.onEvent('addWrongChunkFailure', (data) => this.push({ p2pData: { name: 'addWrongChunkFailure', data }})) - this.watcher.onEvent('failToGetChunk', (data) => this.push({ p2pData: { name: 'failToGetChunk', data }})) - this.watcher.onEvent('gettingChunk', (data) => this.push({ p2pData: { name: 'gettingChunk', data }})) - this.watcher.onEvent('gotChunk', (data) => this.push({ p2pData: { name: 'gotChunk', data }})) - this.watcher.onEvent('reserveNodes', (data) => this.push({ p2pData: { name: 'reserveNodes', data }})) - this.watcher.onEvent('unableToDownloadChunk', (data) => this.push({ p2pData: { name: 'unableToDownloadChunk', data }})) - this.watcher.onEvent('wantToDownload', (data) => this.push({ p2pData: { name: 'wantToDownload', data }})) - this.watcher.onEvent('wantToLoad', (data) => this.push({ p2pData: { name: 'wantToLoad', data }})) - this.watcher.onEvent('beforeReadyNodes', (data) => this.push({ p2pData: { name: 'beforeReadyNodes', data }})) - this.watcher.onEvent('syncFailNoNodeFound', (data) => this.push({ p2pData: { name: 'syncFailNoNodeFound', data }})) - this.watcher.onEvent('syncFailCannotConnectToRemote', (data) => this.push({ p2pData: { name: 'syncFailCannotConnectToRemote', data }})) - - this.syncStrategy.setWatcher(this.watcher) + this.watcher = new EventWatcher( + interactive ? 
new MultimeterWatcher() : new LoggerWatcher(this.logger) + ); + this.watcher.onEvent("downloadChange", () => + this.push(this.watcher.getStats()) + ); + this.watcher.onEvent("storageChange", () => + this.push(this.watcher.getStats()) + ); + this.watcher.onEvent("appliedChange", () => + this.push(this.watcher.getStats()) + ); + this.watcher.onEvent("sbxChange", () => this.push(this.watcher.getStats())); + this.watcher.onEvent("peersChange", () => + this.push(this.watcher.getStats()) + ); + this.watcher.onEvent("addWrongChunkFailure", (data) => + this.push({ p2pData: { name: "addWrongChunkFailure", data } }) + ); + this.watcher.onEvent("failToGetChunk", (data) => + this.push({ p2pData: { name: "failToGetChunk", data } }) + ); + this.watcher.onEvent("gettingChunk", (data) => + this.push({ p2pData: { name: "gettingChunk", data } }) + ); + this.watcher.onEvent("gotChunk", (data) => + this.push({ p2pData: { name: "gotChunk", data } }) + ); + this.watcher.onEvent("reserveNodes", (data) => + this.push({ p2pData: { name: "reserveNodes", data } }) + ); + this.watcher.onEvent("unableToDownloadChunk", (data) => + this.push({ p2pData: { name: "unableToDownloadChunk", data } }) + ); + this.watcher.onEvent("wantToDownload", (data) => + this.push({ p2pData: { name: "wantToDownload", data } }) + ); + this.watcher.onEvent("wantToLoad", (data) => + this.push({ p2pData: { name: "wantToLoad", data } }) + ); + this.watcher.onEvent("beforeReadyNodes", (data) => + this.push({ p2pData: { name: "beforeReadyNodes", data } }) + ); + this.watcher.onEvent("syncFailNoNodeFound", (data) => + this.push({ p2pData: { name: "syncFailNoNodeFound", data } }) + ); + this.watcher.onEvent("syncFailCannotConnectToRemote", (data) => + this.push({ p2pData: { name: "syncFailCannotConnectToRemote", data } }) + ); + + this.syncStrategy.setWatcher(this.watcher); if (interactive) { this.logger.mute(); @@ -68,93 +99,96 @@ export class Synchroniser extends stream.Duplex { } get conf(): ConfDTO { - return this.server.conf + return this.server.conf; } get logger() { - return this.server.logger + return this.server.logger; } get PeeringService(): PeeringService { - return this.server.PeeringService + return this.server.PeeringService; } get BlockchainService(): BlockchainService { - return this.server.BlockchainService + return this.server.BlockchainService; } get dal(): FileDAL { - return this.server.dal + return this.server.dal; } // Unused, but made mandatory by Duplex interface _read() {} _write() {} - - private async logRemaining(to:number) { + private async logRemaining(to: number) { const lCurrent = await this.dal.getCurrentBlockOrNull(); const localNumber = lCurrent ? 
lCurrent.number : -1; if (to > 1 && this.speed > 0) { - const remain = (to - (localNumber + 1 + this.blocksApplied)); + const remain = to - (localNumber + 1 + this.blocksApplied); const secondsLeft = remain / this.speed; const momDuration = moment.duration(secondsLeft * 1000); - this.watcher.writeStatus('Remaining ' + momDuration.humanize() + ''); + this.watcher.writeStatus("Remaining " + momDuration.humanize() + ""); } } - async sync(to:number, chunkLen:number, askedCautious = false) { - + async sync(to: number, chunkLen: number, askedCautious = false) { try { - await this.syncStrategy.init() - this.logger.info('Sync started.'); + await this.syncStrategy.init(); + this.logger.info("Sync started."); const fullSync = !to; //============ // Blockchain headers //============ - this.logger.info('Getting remote blockchain info...'); - const lCurrent:DBBlock|null = await this.dal.getCurrentBlockOrNull(); + this.logger.info("Getting remote blockchain info..."); + const lCurrent: DBBlock | null = await this.dal.getCurrentBlockOrNull(); const localNumber = lCurrent ? lCurrent.number : -1; - let rCurrent:BlockDTO|null + let rCurrent: BlockDTO | null; if (isNaN(to)) { rCurrent = await this.syncStrategy.getCurrent(); if (!rCurrent) { - throw 'Remote does not have a current block. Sync aborted.' + throw "Remote does not have a current block. Sync aborted."; } } else { - rCurrent = await this.syncStrategy.getBlock(to) + rCurrent = await this.syncStrategy.getBlock(to); if (!rCurrent) { - throw 'Remote does not have a target block. Sync aborted.' + throw "Remote does not have a target block. Sync aborted."; } } - to = rCurrent.number || 0 + to = rCurrent.number || 0; - const rootBlock = await this.syncStrategy.getBlock(0) + const rootBlock = await this.syncStrategy.getBlock(0); if (!rootBlock) { - throw 'Could not get root block. Sync aborted.' + throw "Could not get root block. 
Sync aborted.";
       }
-      await this.BlockchainService.saveParametersForRootBlock(rootBlock)
-      await this.server.reloadConf()
+      await this.BlockchainService.saveParametersForRootBlock(rootBlock);
+      await this.server.reloadConf();

-      await this.syncStrategy.initWithKnownLocalAndToAndCurrency(to, localNumber, rCurrent.currency)
+      await this.syncStrategy.initWithKnownLocalAndToAndCurrency(
+        to,
+        localNumber,
+        rCurrent.currency
+      );

       //============
       // Blockchain
       //============
-      this.logger.info('Downloading Blockchain...');
+      this.logger.info("Downloading Blockchain...");

       // We use cautious mode if it is asked, or not particularly asked but blockchain has been started
-      const cautious = (askedCautious === true || localNumber >= 0);
+      const cautious = askedCautious === true || localNumber >= 0;
       const milestonesStream = new ValidatorStream(
         localNumber,
         to,
         rCurrent.hash,
         this.syncStrategy,
-        this.watcher)
+        this.watcher
+      );
       const download = new DownloadStream(
         localNumber,
         to,
@@ -162,9 +196,10 @@ export class Synchroniser extends stream.Duplex {
         this.syncStrategy,
         this.server.dal,
         !cautious,
-        this.watcher)
+        this.watcher
+      );

-      const localIndexer = new LocalIndexStream()
+      const localIndexer = new LocalIndexStream();
       const globalIndexer = new GlobalIndexStream(
         this.server.conf,
         this.server.dal,
@@ -172,18 +207,17 @@ export class Synchroniser extends stream.Duplex {
         localNumber,
         cautious,
         this.syncStrategy,
-        this.watcher,
-
-      )
+        this.watcher
+      );

       await new Promise((res, rej) => {
         milestonesStream
           .pipe(download)
           .pipe(localIndexer)
           .pipe(globalIndexer)
-          .on('finish', res)
-          .on('error', rej);
-      })
+          .on("finish", res)
+          .on("error", rej);
+      });

       // Finished blocks
       this.watcher.downloadPercent(100.0);
@@ -196,22 +230,25 @@ export class Synchroniser extends stream.Duplex {
         //=======
         // Sandboxes
         //=======
-        await this.syncStrategy.syncSandbox()
+        await this.syncStrategy.syncSandbox();
       }
       if (!cliprogram.nopeers) {
         //=======
         // Peers
         //=======
-        await this.syncStrategy.syncPeers(fullSync, to)
+        await this.syncStrategy.syncPeers(fullSync, to);
       }
       this.watcher.end();
       this.push({ sync: true });
-      this.logger.info('Sync finished.');
+      this.logger.info("Sync finished.");
     } catch (err) {
       this.push({ sync: false, msg: err });
-      err && this.watcher.writeStatus(err.message || (err.uerr && err.uerr.message) || String(err));
+      err &&
+        this.watcher.writeStatus(
+          err.message || (err.uerr && err.uerr.message) || String(err)
+        );
       this.watcher.end();
       throw err;
     }
diff --git a/app/modules/crawler/lib/sync/ASyncDownloader.ts b/app/modules/crawler/lib/sync/ASyncDownloader.ts
index 3501f2699c4127710e554256f4e7a9da5aab17ce..a65a109e0a9fa892377a7ca3e66d3886a4fbb6d4 100644
--- a/app/modules/crawler/lib/sync/ASyncDownloader.ts
+++ b/app/modules/crawler/lib/sync/ASyncDownloader.ts
@@ -1,18 +1,16 @@
-import {ISyncDownloader} from "./ISyncDownloader"
-import {BlockDTO} from "../../../../lib/dto/BlockDTO"
+import { ISyncDownloader } from "./ISyncDownloader";
+import { BlockDTO } from "../../../../lib/dto/BlockDTO";

 export abstract class ASyncDownloader implements ISyncDownloader {
+  protected constructor(public chunkSize: number) {}

-  protected constructor(
-    public chunkSize: number) {}
-
-  async getBlock(number: number): Promise<BlockDTO|null> {
-    const chunkNumber = parseInt(String(number / this.chunkSize))
-    const position = number % this.chunkSize
-    const chunk = await this.getChunk(chunkNumber)
-    return chunk[position]
+  async getBlock(number: number): Promise<BlockDTO | null> {
+    const chunkNumber = 
parseInt(String(number / this.chunkSize)); + const position = number % this.chunkSize; + const chunk = await this.getChunk(chunkNumber); + return chunk[position]; } - abstract maxSlots: number - abstract getChunk(i: number): Promise<BlockDTO[]> + abstract maxSlots: number; + abstract getChunk(i: number): Promise<BlockDTO[]>; } diff --git a/app/modules/crawler/lib/sync/AbstractSynchronizer.ts b/app/modules/crawler/lib/sync/AbstractSynchronizer.ts index 61455b3812fe81fc0ff7d7e8d1c5536302ef28d7..1384ae0e4d3f1a99705436bdf322e902d788d677 100644 --- a/app/modules/crawler/lib/sync/AbstractSynchronizer.ts +++ b/app/modules/crawler/lib/sync/AbstractSynchronizer.ts @@ -11,39 +11,41 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../../../../lib/dto/BlockDTO" -import {ISyncDownloader} from "./ISyncDownloader" -import {CommonConstants} from "../../../../lib/common-libs/constants" -import {PeerDTO} from "../../../../lib/dto/PeerDTO" -import {Watcher} from "./Watcher" -import {FileDAL} from "../../../../lib/dal/fileDAL" -import * as path from 'path' +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { ISyncDownloader } from "./ISyncDownloader"; +import { CommonConstants } from "../../../../lib/common-libs/constants"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { Watcher } from "./Watcher"; +import { FileDAL } from "../../../../lib/dal/fileDAL"; +import * as path from "path"; export abstract class AbstractSynchronizer { + constructor(public readonly chunkSize: number) {} - constructor(public readonly chunkSize: number) { - } - - abstract init(): Promise<void> - abstract initWithKnownLocalAndToAndCurrency(to: number, localNumber: number, currency: string): Promise<void> - abstract getCurrent(): Promise<BlockDTO|null> - abstract getBlock(number: number): Promise<BlockDTO|null> - abstract getMilestone(number: number): Promise<BlockDTO|null> - abstract p2pDownloader(): ISyncDownloader - abstract fsDownloader(): ISyncDownloader - abstract syncPeers(fullSync:boolean, to?:number): Promise<void> - abstract syncSandbox(): Promise<void> - abstract getPeer(): PeerDTO - abstract setWatcher(watcher: Watcher): void - public abstract getCurrency(): string - public abstract getChunksPath(): string - public abstract get readDAL(): FileDAL + abstract init(): Promise<void>; + abstract initWithKnownLocalAndToAndCurrency( + to: number, + localNumber: number, + currency: string + ): Promise<void>; + abstract getCurrent(): Promise<BlockDTO | null>; + abstract getBlock(number: number): Promise<BlockDTO | null>; + abstract getMilestone(number: number): Promise<BlockDTO | null>; + abstract p2pDownloader(): ISyncDownloader; + abstract fsDownloader(): ISyncDownloader; + abstract syncPeers(fullSync: boolean, to?: number): Promise<void>; + abstract syncSandbox(): Promise<void>; + abstract getPeer(): PeerDTO; + abstract setWatcher(watcher: Watcher): void; + public abstract getCurrency(): string; + public abstract getChunksPath(): string; + public abstract get readDAL(): FileDAL; public getChunkRelativePath(i: number) { - return path.join(this.getCurrency(), this.getChunkName(i)) + return path.join(this.getCurrency(), this.getChunkName(i)); } public getChunkName(i: number) { - return CommonConstants.CHUNK_PREFIX + i + "-" + this.chunkSize + ".json" + return CommonConstants.CHUNK_PREFIX + i + "-" + this.chunkSize + ".json"; } } diff --git a/app/modules/crawler/lib/sync/BMARemoteContacter.ts 
b/app/modules/crawler/lib/sync/BMARemoteContacter.ts index 63ddb845d247205ede03bd9b77aea2dc7d709971..9e08866d65bd138062d98790b18386e6f9dff175 100644 --- a/app/modules/crawler/lib/sync/BMARemoteContacter.ts +++ b/app/modules/crawler/lib/sync/BMARemoteContacter.ts @@ -11,55 +11,65 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {NewLogger} from "../../../../lib/logger" -import {IRemoteContacter} from "./IRemoteContacter"; -import {Contacter} from "../contacter"; -import {HttpRequirements} from "../../../bma/lib/dtos"; -import {JSONDBPeer} from "../../../../lib/db/DBPeer"; -import {BlockDTO} from "../../../../lib/dto/BlockDTO"; +import { NewLogger } from "../../../../lib/logger"; +import { IRemoteContacter } from "./IRemoteContacter"; +import { Contacter } from "../contacter"; +import { HttpRequirements } from "../../../bma/lib/dtos"; +import { JSONDBPeer } from "../../../../lib/db/DBPeer"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; -const logger = NewLogger() +const logger = NewLogger(); export class BMARemoteContacter implements IRemoteContacter { + public type: "BMA" | "WS2P" = "BMA"; - public type: 'BMA'| 'WS2P' = 'BMA' - - constructor(protected contacter: Contacter) { - } + constructor(protected contacter: Contacter) {} getBlock(number: number): Promise<BlockDTO | null> { - return this.contacter.getBlock(number) + return this.contacter.getBlock(number); } getCurrent(): Promise<BlockDTO | null> { - return this.contacter.getCurrent() + return this.contacter.getCurrent(); } getBlocks(count: number, from: number): Promise<BlockDTO[]> { - return this.contacter.getBlocks(count, from) + return this.contacter.getBlocks(count, from); } - getMilestones(page: number): Promise<{ chunkSize: number; totalPages: number; currentPage: number; milestonesPerPage: number; blocks: BlockDTO[] }> { - return this.contacter.getMilestones(page) + getMilestones( + page: number + ): Promise<{ + chunkSize: number; + totalPages: number; + currentPage: number; + milestonesPerPage: number; + blocks: BlockDTO[]; + }> { + return this.contacter.getMilestones(page); } - getMilestonesPage(): Promise<{ chunkSize: number; totalPages: number; milestonesPerPage: number }> { - return this.contacter.getMilestonesPage() + getMilestonesPage(): Promise<{ + chunkSize: number; + totalPages: number; + milestonesPerPage: number; + }> { + return this.contacter.getMilestonesPage(); } - async getPeers(): Promise<(JSONDBPeer|null)[]> { - return (await this.contacter.getPeersArray()).peers + async getPeers(): Promise<(JSONDBPeer | null)[]> { + return (await this.contacter.getPeersArray()).peers; } getRequirementsPending(minsig: number): Promise<HttpRequirements> { - return this.contacter.getRequirementsPending(minsig) + return this.contacter.getRequirementsPending(minsig); } getName(): string { - return "BMA remote '" + this.contacter.fullyQualifiedHost + "'" + return "BMA remote '" + this.contacter.fullyQualifiedHost + "'"; } get hostName() { - return this.contacter.host + return this.contacter.host; } } diff --git a/app/modules/crawler/lib/sync/FsSyncDownloader.ts b/app/modules/crawler/lib/sync/FsSyncDownloader.ts index ecd2708115df669b5e1109f76942d11f60fbd2b7..f948c0f147fda9700a174fc98c265ba276657fb3 100644 --- a/app/modules/crawler/lib/sync/FsSyncDownloader.ts +++ b/app/modules/crawler/lib/sync/FsSyncDownloader.ts @@ -1,46 +1,46 @@ -import {ISyncDownloader} from "./ISyncDownloader" -import {BlockDTO} from "../../../../lib/dto/BlockDTO" 
-import {FileSystem} from "../../../../lib/system/directory" -import * as path from 'path' -import {CommonConstants} from "../../../../lib/common-libs/constants" -import {ASyncDownloader} from "./ASyncDownloader" +import { ISyncDownloader } from "./ISyncDownloader"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { FileSystem } from "../../../../lib/system/directory"; +import * as path from "path"; +import { CommonConstants } from "../../../../lib/common-libs/constants"; +import { ASyncDownloader } from "./ASyncDownloader"; -export class FsSyncDownloader extends ASyncDownloader implements ISyncDownloader { - - private ls: Promise<string[]> - private ttas: number[] = [] +export class FsSyncDownloader extends ASyncDownloader + implements ISyncDownloader { + private ls: Promise<string[]>; + private ttas: number[] = []; constructor( private fs: FileSystem, private basePath: string, - private getChunkName:(i: number) => string, + private getChunkName: (i: number) => string, public chunkSize: number - ) { - super(chunkSize) - this.ls = this.fs.fsList(basePath) + ) { + super(chunkSize); + this.ls = this.fs.fsList(basePath); } async getChunk(i: number): Promise<BlockDTO[]> { - const start = Date.now() - const files = await this.ls - const filepath = path.join(this.basePath, this.getChunkName(i)) - const basename = path.basename(filepath) - let existsOnDAL = files.filter(f => f === basename).length === 1 + const start = Date.now(); + const files = await this.ls; + const filepath = path.join(this.basePath, this.getChunkName(i)); + const basename = path.basename(filepath); + let existsOnDAL = files.filter((f) => f === basename).length === 1; if (!existsOnDAL) { // We make another try in case the file was created after the initial `ls` test - existsOnDAL = await this.fs.fsExists(filepath) + existsOnDAL = await this.fs.fsExists(filepath); } if (existsOnDAL) { - const content: any = JSON.parse(await this.fs.fsReadFile(filepath)) + const content: any = JSON.parse(await this.fs.fsReadFile(filepath)); // Record the reading duration - this.ttas.push(Date.now() - start) + this.ttas.push(Date.now() - start); // Returns a promise of file content - return content.blocks + return content.blocks; } - return [] + return []; } get maxSlots(): number { - return CommonConstants.MAX_READING_SLOTS_FOR_FILE_SYNC + return CommonConstants.MAX_READING_SLOTS_FOR_FILE_SYNC; } } diff --git a/app/modules/crawler/lib/sync/IRemoteContacter.ts b/app/modules/crawler/lib/sync/IRemoteContacter.ts index 9a4ea10d238b666762692ad55355acfd044eb403..85e8ff267506d443ec55280a26b064135b845af3 100644 --- a/app/modules/crawler/lib/sync/IRemoteContacter.ts +++ b/app/modules/crawler/lib/sync/IRemoteContacter.ts @@ -11,29 +11,40 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
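FsSyncDownloader above serves blocks out of fixed-size chunk files whose names come from getChunkName(), i.e. CommonConstants.CHUNK_PREFIX + i + "-" + chunkSize + ".json", and whose JSON body carries the blocks under a blocks field. A small worked example of the addressing arithmetic, assuming a prefix of "chunk_" (the real value lives in CommonConstants.CHUNK_PREFIX):

const chunkSize = 250; // illustrative; the actual size is fixed at sync time
const number = 7531;   // the block we want to read

// Equivalent to the parseInt(String(...)) truncation used by
// ASyncDownloader.getBlock(), since block numbers are never negative:
const chunkNumber = Math.floor(number / chunkSize); // 30
const position = number % chunkSize;                // 31

// Block #7531 is therefore entry 31 of "chunk_30-250.json":
const fileName = "chunk_" + chunkNumber + "-" + chunkSize + ".json";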
-import {JSONDBPeer} from "../../../../lib/db/DBPeer"; -import {BlockDTO} from "../../../../lib/dto/BlockDTO"; -import {HttpRequirements} from "../../../bma/lib/dtos"; +import { JSONDBPeer } from "../../../../lib/db/DBPeer"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { HttpRequirements } from "../../../bma/lib/dtos"; export interface IRemoteContacter { + getName(): string; - getName(): string + getPeers(): Promise<(JSONDBPeer | null)[]>; - getPeers(): Promise<(JSONDBPeer|null)[]> + getCurrent(): Promise<BlockDTO | null>; - getCurrent(): Promise<BlockDTO|null> + getBlock(number: number): Promise<BlockDTO | null>; - getBlock(number: number): Promise<BlockDTO|null> + getMilestonesPage(): Promise<{ + chunkSize: number; + totalPages: number; + milestonesPerPage: number; + }>; - getMilestonesPage(): Promise<{ chunkSize: number, totalPages: number, milestonesPerPage: number }> + getMilestones( + page: number + ): Promise<{ + chunkSize: number; + totalPages: number; + currentPage: number; + milestonesPerPage: number; + blocks: BlockDTO[]; + }>; - getMilestones(page: number): Promise<{ chunkSize: number, totalPages: number, currentPage: number, milestonesPerPage: number, blocks: BlockDTO[] }> + getBlocks(count: number, from: number): Promise<BlockDTO[]>; - getBlocks(count: number, from: number): Promise<BlockDTO[]> + getRequirementsPending(number: number): Promise<HttpRequirements>; - getRequirementsPending(number: number): Promise<HttpRequirements> + hostName: string; - hostName: string - - type: 'BMA' | 'WS2P' + type: "BMA" | "WS2P"; } diff --git a/app/modules/crawler/lib/sync/ISyncDownloader.ts b/app/modules/crawler/lib/sync/ISyncDownloader.ts index 456f1d171c4d3524ac89243bc75b690f081d687b..f2f1472ed504be3ea2b61f025a5df8c001cb3373 100644 --- a/app/modules/crawler/lib/sync/ISyncDownloader.ts +++ b/app/modules/crawler/lib/sync/ISyncDownloader.ts @@ -1,8 +1,8 @@ -import {BlockDTO} from "../../../../lib/dto/BlockDTO" +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; export interface ISyncDownloader { - getChunk(i: number): Promise<BlockDTO[]> - getBlock(number: number): Promise<BlockDTO|null> - maxSlots: number - chunkSize: number + getChunk(i: number): Promise<BlockDTO[]>; + getBlock(number: number): Promise<BlockDTO | null>; + maxSlots: number; + chunkSize: number; } diff --git a/app/modules/crawler/lib/sync/LocalPathSynchronizer.ts b/app/modules/crawler/lib/sync/LocalPathSynchronizer.ts index 3bb3881c01038054e79f051ddd1bb9342b322a8e..ab2da0707f3a1ce1cf0cbd5fdf4582e169ffd69d 100644 --- a/app/modules/crawler/lib/sync/LocalPathSynchronizer.ts +++ b/app/modules/crawler/lib/sync/LocalPathSynchronizer.ts @@ -11,102 +11,116 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
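Every remote operation the synchronizer needs is expressed once in the IRemoteContacter interface above, which both BMARemoteContacter and WS2PRemoteContacter implement, so the sync pipeline never branches on the transport. A minimal sketch of a transport-agnostic caller (describeRemote is a hypothetical helper, not part of the patch):

async function describeRemote(remote: IRemoteContacter): Promise<string> {
  // Identical code path whether the remote speaks BMA or WS2P
  const current = await remote.getCurrent();
  const height = current ? current.number : "none";
  return remote.getName() + " (" + remote.type + ") at block #" + height;
}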
-import {ISyncDownloader} from "./ISyncDownloader" -import {BlockDTO} from "../../../../lib/dto/BlockDTO" -import {PeerDTO} from "../../../../lib/dto/PeerDTO" -import {Watcher} from "./Watcher" -import {PeeringService} from "../../../../service/PeeringService" -import {Server} from "../../../../../server" -import {FileDAL} from "../../../../lib/dal/fileDAL" -import {FsSyncDownloader} from "./FsSyncDownloader" -import {AbstractSynchronizer} from "./AbstractSynchronizer" -import {CommonConstants} from "../../../../lib/common-libs/constants" -import {RealFS} from "../../../../lib/system/directory" +import { ISyncDownloader } from "./ISyncDownloader"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { Watcher } from "./Watcher"; +import { PeeringService } from "../../../../service/PeeringService"; +import { Server } from "../../../../../server"; +import { FileDAL } from "../../../../lib/dal/fileDAL"; +import { FsSyncDownloader } from "./FsSyncDownloader"; +import { AbstractSynchronizer } from "./AbstractSynchronizer"; +import { CommonConstants } from "../../../../lib/common-libs/constants"; +import { RealFS } from "../../../../lib/system/directory"; export class LocalPathSynchronizer extends AbstractSynchronizer { - - private theP2pDownloader: ISyncDownloader - private theFsDownloader: ISyncDownloader - private currency: string - private watcher: Watcher - private ls: Promise<string[]> - - constructor( - private path: string, - private server:Server, - chunkSize: number, - ) { - super(chunkSize) - const fs = RealFS() - this.ls = fs.fsList(path) + private theP2pDownloader: ISyncDownloader; + private theFsDownloader: ISyncDownloader; + private currency: string; + private watcher: Watcher; + private ls: Promise<string[]>; + + constructor(private path: string, private server: Server, chunkSize: number) { + super(chunkSize); + const fs = RealFS(); + this.ls = fs.fsList(path); // We read from the real file system here, directly. 
- this.theFsDownloader = new FsSyncDownloader(fs, this.path, this.getChunkName.bind(this), chunkSize) - this.theP2pDownloader = new FsSyncDownloader(fs, this.path, this.getChunkName.bind(this), chunkSize) + this.theFsDownloader = new FsSyncDownloader( + fs, + this.path, + this.getChunkName.bind(this), + chunkSize + ); + this.theP2pDownloader = new FsSyncDownloader( + fs, + this.path, + this.getChunkName.bind(this), + chunkSize + ); } get dal(): FileDAL { - return this.server.dal + return this.server.dal; } get readDAL(): FileDAL { - return this.dal + return this.dal; } get PeeringService(): PeeringService { - return this.server.PeeringService + return this.server.PeeringService; } getCurrency(): string { - return this.currency + return this.currency; } getPeer(): PeerDTO { - return this as any + return this as any; } getChunksPath(): string { - return this.path + return this.path; } setWatcher(watcher: Watcher): void { - this.watcher = watcher + this.watcher = watcher; } async init(): Promise<void> { // TODO: check that path exists and that files seem consistent } - async initWithKnownLocalAndToAndCurrency(to: number, localNumber: number, currency: string): Promise<void> { - this.currency = currency + async initWithKnownLocalAndToAndCurrency( + to: number, + localNumber: number, + currency: string + ): Promise<void> { + this.currency = currency; } p2pDownloader(): ISyncDownloader { - return this.theP2pDownloader + return this.theP2pDownloader; } fsDownloader(): ISyncDownloader { - return this.theFsDownloader + return this.theFsDownloader; } - async getCurrent(): Promise<BlockDTO|null> { - const chunkNumbers: number[] = (await this.ls).map(s => parseInt(s.replace(CommonConstants.CHUNK_PREFIX, ''))) - const topChunk = chunkNumbers.reduce((number, max) => Math.max(number, max), -1) + async getCurrent(): Promise<BlockDTO | null> { + const chunkNumbers: number[] = (await this.ls).map((s) => + parseInt(s.replace(CommonConstants.CHUNK_PREFIX, "")) + ); + const topChunk = chunkNumbers.reduce( + (number, max) => Math.max(number, max), + -1 + ); if (topChunk === -1) { - return null + return null; } - const chunk = await this.theFsDownloader.getChunk(topChunk) - return chunk[chunk.length - 1] // This is the top block of the top chunk = the current block + const chunk = await this.theFsDownloader.getChunk(topChunk); + return chunk[chunk.length - 1]; // This is the top block of the top chunk = the current block } - async getBlock(number: number): Promise<BlockDTO|null> { - const chunkNumber = parseInt(String(number / this.chunkSize)) - const position = number % this.chunkSize - const chunk = await this.theFsDownloader.getChunk(chunkNumber) - return chunk[position] + async getBlock(number: number): Promise<BlockDTO | null> { + const chunkNumber = parseInt(String(number / this.chunkSize)); + const position = number % this.chunkSize; + const chunk = await this.theFsDownloader.getChunk(chunkNumber); + return chunk[position]; } getMilestone(number: number) { - return this.getBlock(number) + return this.getBlock(number); } async syncPeers(fullSync: boolean, to?: number): Promise<void> { diff --git a/app/modules/crawler/lib/sync/P2PSyncDownloader.ts b/app/modules/crawler/lib/sync/P2PSyncDownloader.ts index ffdf4f2bcb5ff71bec11aaa9b2f56dda429f6946..a677cdb4b1dafeccfaa6cb064a614b5ce4fe45dc 100644 --- a/app/modules/crawler/lib/sync/P2PSyncDownloader.ts +++ b/app/modules/crawler/lib/sync/P2PSyncDownloader.ts @@ -1,75 +1,98 @@ -import {JSONDBPeer} from "../../../../lib/db/DBPeer" -import {PeerDTO} from 
"../../../../lib/dto/PeerDTO" -import {Underscore} from "../../../../lib/common-libs/underscore" -import {BlockDTO} from "../../../../lib/dto/BlockDTO" -import {Watcher} from "./Watcher" -import {ISyncDownloader} from "./ISyncDownloader" -import {cliprogram} from "../../../../lib/common-libs/programOptions" -import {Keypair} from "../../../../lib/dto/ConfDTO" -import {IRemoteContacter} from "./IRemoteContacter" -import {ManualPromise} from "../../../../lib/common-libs/manual-promise" -import {GlobalFifoPromise} from "../../../../service/GlobalFifoPromise" -import {getNanosecondsTime} from "../../../../ProcessCpuProfiler" -import {CommonConstants} from "../../../../lib/common-libs/constants" -import {DataErrors} from "../../../../lib/common-libs/errors" -import {ASyncDownloader} from "./ASyncDownloader" -import {P2pCandidate} from "./p2p/p2p-candidate" -import {CrawlerConstants} from "../constants" - -export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloader { +import { JSONDBPeer } from "../../../../lib/db/DBPeer"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { Underscore } from "../../../../lib/common-libs/underscore"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { Watcher } from "./Watcher"; +import { ISyncDownloader } from "./ISyncDownloader"; +import { cliprogram } from "../../../../lib/common-libs/programOptions"; +import { Keypair } from "../../../../lib/dto/ConfDTO"; +import { IRemoteContacter } from "./IRemoteContacter"; +import { ManualPromise } from "../../../../lib/common-libs/manual-promise"; +import { GlobalFifoPromise } from "../../../../service/GlobalFifoPromise"; +import { getNanosecondsTime } from "../../../../ProcessCpuProfiler"; +import { CommonConstants } from "../../../../lib/common-libs/constants"; +import { DataErrors } from "../../../../lib/common-libs/errors"; +import { ASyncDownloader } from "./ASyncDownloader"; +import { P2pCandidate } from "./p2p/p2p-candidate"; +import { CrawlerConstants } from "../constants"; +export class P2PSyncDownloader extends ASyncDownloader + implements ISyncDownloader { private PARALLEL_PER_CHUNK = 1; private MAX_DELAY_PER_DOWNLOAD = cliprogram.slow ? 
2 * 60000 : 15000; - private TOO_LONG_TIME_DOWNLOAD:string - private nbBlocksToDownload:number - private numberOfChunksToDownload:number - private processing:any - private handler:any - private p2pCandidates: P2pCandidate[] = [] - private nbDownloadsTried = 0 - private nbDownloading = 0 - private downloads: { [chunk: number]: P2pCandidate } = {} - private fifoPromise = new GlobalFifoPromise() - private noNodeFoundCumulation = 0 + private TOO_LONG_TIME_DOWNLOAD: string; + private nbBlocksToDownload: number; + private numberOfChunksToDownload: number; + private processing: any; + private handler: any; + private p2pCandidates: P2pCandidate[] = []; + private nbDownloadsTried = 0; + private nbDownloading = 0; + private downloads: { [chunk: number]: P2pCandidate } = {}; + private fifoPromise = new GlobalFifoPromise(); + private noNodeFoundCumulation = 0; constructor( private currency: string, private keypair: Keypair, - private localNumber:number, - private to:number, - private peers:JSONDBPeer[], - private watcher:Watcher, - private logger:any, + private localNumber: number, + private to: number, + private peers: JSONDBPeer[], + private watcher: Watcher, + private logger: any, public chunkSize: number, - public allowLocalSync: boolean, - ) { - super(chunkSize) - this.TOO_LONG_TIME_DOWNLOAD = "No answer after " + this.MAX_DELAY_PER_DOWNLOAD + "ms, will retry download later."; + public allowLocalSync: boolean + ) { + super(chunkSize); + this.TOO_LONG_TIME_DOWNLOAD = + "No answer after " + + this.MAX_DELAY_PER_DOWNLOAD + + "ms, will retry download later."; this.nbBlocksToDownload = Math.max(0, to - localNumber); - this.numberOfChunksToDownload = Math.ceil(this.nbBlocksToDownload / this.chunkSize); - this.processing = Array.from({ length: this.numberOfChunksToDownload }).map(() => false); - this.handler = Array.from({ length: this.numberOfChunksToDownload }).map(() => null); + this.numberOfChunksToDownload = Math.ceil( + this.nbBlocksToDownload / this.chunkSize + ); + this.processing = Array.from({ length: this.numberOfChunksToDownload }).map( + () => false + ); + this.handler = Array.from({ length: this.numberOfChunksToDownload }).map( + () => null + ); - this.p2pCandidates = peers.map(p => new P2pCandidate(PeerDTO.fromJSONObject(p), this.keypair, this.logger, allowLocalSync)) + this.p2pCandidates = peers.map( + (p) => + new P2pCandidate( + PeerDTO.fromJSONObject(p), + this.keypair, + this.logger, + allowLocalSync + ) + ); } get maxSlots(): number { - return this.p2pCandidates.filter(p => p.hasAvailableApi()).length + return this.p2pCandidates.filter((p) => p.hasAvailableApi()).length; } - private async waitForAvailableNodesAndReserve(needed = 1): Promise<P2pCandidate[]> { - this.watcher.beforeReadyNodes(this.p2pCandidates) - let nodesToWaitFor = this.p2pCandidates.slice() - let nodesAvailable: P2pCandidate[] = [] - let i = 0 + private async waitForAvailableNodesAndReserve( + needed = 1 + ): Promise<P2pCandidate[]> { + this.watcher.beforeReadyNodes(this.p2pCandidates); + let nodesToWaitFor = this.p2pCandidates.slice(); + let nodesAvailable: P2pCandidate[] = []; + let i = 0; while (nodesAvailable.length < needed && i < needed) { - await Promise.race(nodesToWaitFor.map(p => p.waitAvailability(CommonConstants.WAIT_P2P_CANDIDATE_HEARTBEAT))) - const readyNodes = nodesToWaitFor.filter(p => p.isReady()) - nodesToWaitFor = nodesToWaitFor.filter(p => !p.isReady()) - nodesAvailable = nodesAvailable.concat(readyNodes) - i++ + await Promise.race( + nodesToWaitFor.map((p) => + 
p.waitAvailability(CommonConstants.WAIT_P2P_CANDIDATE_HEARTBEAT) + ) + ); + const readyNodes = nodesToWaitFor.filter((p) => p.isReady()); + nodesToWaitFor = nodesToWaitFor.filter((p) => !p.isReady()); + nodesAvailable = nodesAvailable.concat(readyNodes); + i++; } - return nodesAvailable + return nodesAvailable; } /** @@ -78,25 +101,33 @@ export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloade * this method would not return it. */ private async getP2Pcandidates(chunkIndex: number): Promise<P2pCandidate[]> { - return this.fifoPromise.pushFIFOPromise('getP2Pcandidates_' + getNanosecondsTime(), async () => { - const needed = 1 - // We wait a bit to have some available nodes - const readyNodes = await this.waitForAvailableNodesAndReserve() - // We remove the nodes impossible to reach (timeout) - let byAvgAnswerTime = Underscore.sortBy(readyNodes, p => p.avgResponseTime()) - const parallelMax = Math.min(this.PARALLEL_PER_CHUNK, byAvgAnswerTime.length) - byAvgAnswerTime = byAvgAnswerTime.slice(0, parallelMax) - this.watcher.reserveNodes(byAvgAnswerTime) - byAvgAnswerTime.slice(0, needed).forEach(n => { - n.reserve() - }) - if (byAvgAnswerTime.length === 0) { - this.logger.warn('No node found to download chunk #%s.', chunkIndex) - this.watcher.unableToDownloadChunk(chunkIndex) - throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) + return this.fifoPromise.pushFIFOPromise( + "getP2Pcandidates_" + getNanosecondsTime(), + async () => { + const needed = 1; + // We wait a bit to have some available nodes + const readyNodes = await this.waitForAvailableNodesAndReserve(); + // We remove the nodes impossible to reach (timeout) + let byAvgAnswerTime = Underscore.sortBy(readyNodes, (p) => + p.avgResponseTime() + ); + const parallelMax = Math.min( + this.PARALLEL_PER_CHUNK, + byAvgAnswerTime.length + ); + byAvgAnswerTime = byAvgAnswerTime.slice(0, parallelMax); + this.watcher.reserveNodes(byAvgAnswerTime); + byAvgAnswerTime.slice(0, needed).forEach((n) => { + n.reserve(); + }); + if (byAvgAnswerTime.length === 0) { + this.logger.warn("No node found to download chunk #%s.", chunkIndex); + this.watcher.unableToDownloadChunk(chunkIndex); + throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]); + } + return byAvgAnswerTime; } - return byAvgAnswerTime - }) + ); } /** @@ -105,47 +136,72 @@ export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloade * @param count The number of blocks to download. 
* @param chunkIndex The # of the chunk in local algorithm (logging purposes only)
    */
-  private async p2pDownload(from:number, count:number, chunkIndex:number) {
+  private async p2pDownload(from: number, count: number, chunkIndex: number) {
     // if this chunk has already been downloaded before, we exclude its supplier node from the download list as it won't give correct answer now
-    const lastSupplier = this.downloads[chunkIndex]
+    const lastSupplier = this.downloads[chunkIndex];
     if (lastSupplier) {
-      this.watcher.addWrongChunkFailure(chunkIndex, lastSupplier)
-      lastSupplier.addFailure()
+      this.watcher.addWrongChunkFailure(chunkIndex, lastSupplier);
+      lastSupplier.addFailure();
     }
-    this.watcher.wantToDownload(chunkIndex)
+    this.watcher.wantToDownload(chunkIndex);
     // Only 1 candidate for now
-    const candidates = await this.getP2Pcandidates(chunkIndex)
+    const candidates = await this.getP2Pcandidates(chunkIndex);
     // Book the nodes
-    this.watcher.gettingChunk(chunkIndex, candidates)
-    return await this.raceOrCancelIfTimeout(this.MAX_DELAY_PER_DOWNLOAD, candidates.map(async (node) => {
-      try {
-        this.handler[chunkIndex] = node;
-        this.nbDownloading++;
-        this.watcher.writeStatus('Getting chunck #' + chunkIndex + '/' + (this.numberOfChunksToDownload - 1) + ' from ' + from + ' to ' + (from + count - 1) + ' on peer ' + node.hostName);
-        let blocks = await node.downloadBlocks(count, from);
-        this.watcher.gotChunk(chunkIndex, node)
-        this.watcher.writeStatus('GOT chunck #' + chunkIndex + '/' + (this.numberOfChunksToDownload - 1) + ' from ' + from + ' to ' + (from + count - 1) + ' on peer ' + node.hostName);
-        if (this.PARALLEL_PER_CHUNK === 1) {
-          // Only works if we have 1 concurrent peer per chunk
-          this.downloads[chunkIndex] = node
+    this.watcher.gettingChunk(chunkIndex, candidates);
+    return await this.raceOrCancelIfTimeout(
+      this.MAX_DELAY_PER_DOWNLOAD,
+      candidates.map(async (node) => {
+        try {
+          this.handler[chunkIndex] = node;
+          this.nbDownloading++;
+          this.watcher.writeStatus(
+            "Getting chunk #" +
+              chunkIndex +
+              "/" +
+              (this.numberOfChunksToDownload - 1) +
+              " from " +
+              from +
+              " to " +
+              (from + count - 1) +
+              " on peer " +
+              node.hostName
+          );
+          let blocks = await node.downloadBlocks(count, from);
+          this.watcher.gotChunk(chunkIndex, node);
+          this.watcher.writeStatus(
+            "GOT chunk #" +
+              chunkIndex +
+              "/" +
+              (this.numberOfChunksToDownload - 1) +
+              " from " +
+              from +
+              " to " +
+              (from + count - 1) +
+              " on peer " +
+              node.hostName
+          );
+          if (this.PARALLEL_PER_CHUNK === 1) {
+            // Only works if we have 1 concurrent peer per chunk
+            this.downloads[chunkIndex] = node;
+          }
+          this.nbDownloading--;
+          this.nbDownloadsTried++;
+          return blocks;
+        } catch (e) {
+          this.watcher.failToGetChunk(chunkIndex, node);
+          this.nbDownloading--;
+          this.nbDownloadsTried++;
+          throw e;
         }
-        this.nbDownloading--;
-        this.nbDownloadsTried++;
-        return blocks;
-      } catch (e) {
-        this.watcher.failToGetChunk(chunkIndex, node)
-        this.nbDownloading--;
-        this.nbDownloadsTried++;
-        throw e;
-      }
-    }))
+      })
+    );
   }
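The call above races every candidate against raceOrCancelIfTimeout() (defined below), so a stalled peer costs at most MAX_DELAY_PER_DOWNLOAD milliseconds before the chunk is retried elsewhere. Reduced to a standalone hedged sketch (withTimeout is a hypothetical name and the rejection message is illustrative):

function withTimeout<T>(timeoutMs: number, work: Promise<T>): Promise<T> {
  // Reject first if the work has not settled within timeoutMs
  const timer = new Promise<never>((_, reject) => {
    setTimeout(() => reject("No answer after " + timeoutMs + "ms"), timeoutMs);
  });
  return Promise.race([timer, work]);
}

As in the original, a losing download is not actually cancelled; the race only stops the caller from waiting on it.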
   /**
    * Function for downloading a chunk by its number.
    * @param index Number of the chunk.
    */
-  private async downloadChunk(index:number): Promise<BlockDTO[]> {
+  private async downloadChunk(index: number): Promise<BlockDTO[]> {
     // The algorithm to download a chunk
     const from = this.localNumber + 1 + index * this.chunkSize;
     let count = this.chunkSize;
@@ -153,19 +209,24 @@ export class P2PSyncDownloader extends ASyncDownloade
       count = this.nbBlocksToDownload % this.chunkSize || this.chunkSize;
     }
     try {
-      const res = await this.p2pDownload(from, count, index) as BlockDTO[]
-      this.noNodeFoundCumulation = 0
-      return res
+      const res = (await this.p2pDownload(from, count, index)) as BlockDTO[];
+      this.noNodeFoundCumulation = 0;
+      return res;
     } catch (e) {
       this.logger.error(e);
-      if (e.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) {
-        this.noNodeFoundCumulation++
-        if (this.noNodeFoundCumulation >= CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND) {
-          this.watcher.syncFailNoNodeFound()
-          throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK])
+      if (
+        e.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]
+      ) {
+        this.noNodeFoundCumulation++;
+        if (
+          this.noNodeFoundCumulation >=
+          CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND
+        ) {
+          this.watcher.syncFailNoNodeFound();
+          throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]);
         }
       }
-      await new Promise(res => setTimeout(res, 1000)) // Wait 1s before retrying
+      await new Promise((res) => setTimeout(res, 1000)); // Wait 1s before retrying
       return this.downloadChunk(index);
     }
   }
@@ -176,16 +237,18 @@ export class P2PSyncDownloader extends ASyncDownloade
    * @param races
    * @returns {Promise}
    */
-  private raceOrCancelIfTimeout(timeout:number, races:any[]) {
-    return Promise.race([
-      // Process the race, but cancel it if we don't get an anwser quickly enough
-      new Promise((resolve, reject) => {
-        setTimeout(() => {
-          reject(this.TOO_LONG_TIME_DOWNLOAD);
-        }, timeout)
-      })
-    ].concat(races));
-  };
+  private raceOrCancelIfTimeout(timeout: number, races: any[]) {
+    return Promise.race(
+      [
+        // Process the race, but cancel it if we don't get an answer quickly enough
+        new Promise((resolve, reject) => {
+          setTimeout(() => {
+            reject(this.TOO_LONG_TIME_DOWNLOAD);
+          }, timeout);
+        }),
+      ].concat(races)
+    );
+  }

   /**
    * PUBLIC API
@@ -195,17 +258,17 @@ export class P2PSyncDownloader extends ASyncDownloade
    * Promises a chunk to be downloaded and returned
    * @param index The number of the chunk to download & return
    */
-  getChunk(index:number): Promise<BlockDTO[]> {
-    return this.downloadChunk(index)
+  getChunk(index: number): Promise<BlockDTO[]> {
+    return this.downloadChunk(index);
   }
 }

 interface ProfiledNode {
-  api: IRemoteContacter
-  tta: number
-  ttas: number[]
-  nbSuccess: number
-  hostName: string
-  excluded: boolean
-  readyForDownload: ManualPromise<boolean>
+  api: IRemoteContacter;
+  tta: number;
+  ttas: number[];
+  nbSuccess: number;
+  hostName: string;
+  excluded: boolean;
+  readyForDownload: ManualPromise<boolean>;
 }
diff --git a/app/modules/crawler/lib/sync/PromiseOfBlockReading.ts b/app/modules/crawler/lib/sync/PromiseOfBlockReading.ts
index 8b35249ada4daf2fad0b1660bfcb42cb1c463204..1b22a18cdc962bf7663f11051dcd4aaf6d0b5e61 100644
--- a/app/modules/crawler/lib/sync/PromiseOfBlockReading.ts
+++ b/app/modules/crawler/lib/sync/PromiseOfBlockReading.ts
@@ -1,5 +1,5 @@
-import {BlockDTO} from "../../../../lib/dto/BlockDTO"
+import { BlockDTO } from "../../../../lib/dto/BlockDTO";

 export interface 
PromiseOfBlocksReading { - (): Promise<BlockDTO[]> + (): Promise<BlockDTO[]>; } diff --git a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts index b6307ee0d717128db405c984d1c459744161ded9..f173627cc4a47fedbb42f6dfac25892c8236706e 100644 --- a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts +++ b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts @@ -11,158 +11,207 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ISyncDownloader} from "./ISyncDownloader" -import {BlockDTO} from "../../../../lib/dto/BlockDTO" -import {PeerDTO} from "../../../../lib/dto/PeerDTO" -import {connect} from "../connect" -import {NewLogger} from "../../../../lib/logger" -import {cliprogram} from "../../../../lib/common-libs/programOptions" -import {Watcher} from "./Watcher" -import {PeeringService} from "../../../../service/PeeringService" -import {Server} from "../../../../../server" -import {DBPeer, JSONDBPeer} from "../../../../lib/db/DBPeer" -import {Underscore} from "../../../../lib/common-libs/underscore" -import {FileDAL} from "../../../../lib/dal/fileDAL" -import {P2PSyncDownloader} from "./P2PSyncDownloader" -import {FsSyncDownloader} from "./FsSyncDownloader" -import {AbstractSynchronizer} from "./AbstractSynchronizer" -import {pullSandboxToLocalServer} from "../sandbox" -import * as path from 'path' -import {IRemoteContacter} from "./IRemoteContacter" -import {BMARemoteContacter} from "./BMARemoteContacter" -import {WS2PConnection, WS2PPubkeyRemoteAuth, WS2PPubkeySyncLocalAuth} from "../../../ws2p/lib/WS2PConnection" -import {WS2PRequester} from "../../../ws2p/lib/WS2PRequester" -import {WS2PMessageHandler} from "../../../ws2p/lib/impl/WS2PMessageHandler" -import {WS2PResponse} from "../../../ws2p/lib/impl/WS2PResponse" -import {DataErrors} from "../../../../lib/common-libs/errors" -import {Key} from "../../../../lib/common-libs/crypto/keyring" -import {WS2PRemoteContacter} from "./WS2PRemoteContacter" -import {Keypair} from "../../../../lib/dto/ConfDTO" +import { ISyncDownloader } from "./ISyncDownloader"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { connect } from "../connect"; +import { NewLogger } from "../../../../lib/logger"; +import { cliprogram } from "../../../../lib/common-libs/programOptions"; +import { Watcher } from "./Watcher"; +import { PeeringService } from "../../../../service/PeeringService"; +import { Server } from "../../../../../server"; +import { DBPeer, JSONDBPeer } from "../../../../lib/db/DBPeer"; +import { Underscore } from "../../../../lib/common-libs/underscore"; +import { FileDAL } from "../../../../lib/dal/fileDAL"; +import { P2PSyncDownloader } from "./P2PSyncDownloader"; +import { FsSyncDownloader } from "./FsSyncDownloader"; +import { AbstractSynchronizer } from "./AbstractSynchronizer"; +import { pullSandboxToLocalServer } from "../sandbox"; +import * as path from "path"; +import { IRemoteContacter } from "./IRemoteContacter"; +import { BMARemoteContacter } from "./BMARemoteContacter"; +import { + WS2PConnection, + WS2PPubkeyRemoteAuth, + WS2PPubkeySyncLocalAuth, +} from "../../../ws2p/lib/WS2PConnection"; +import { WS2PRequester } from "../../../ws2p/lib/WS2PRequester"; +import { WS2PMessageHandler } from "../../../ws2p/lib/impl/WS2PMessageHandler"; +import { WS2PResponse } from "../../../ws2p/lib/impl/WS2PResponse"; +import { DataErrors } from 
"../../../../lib/common-libs/errors"; +import { Key } from "../../../../lib/common-libs/crypto/keyring"; +import { WS2PRemoteContacter } from "./WS2PRemoteContacter"; +import { Keypair } from "../../../../lib/dto/ConfDTO"; -const logger = NewLogger() +const logger = NewLogger(); export class RemoteSynchronizer extends AbstractSynchronizer { - - private currency:string - private node:IRemoteContacter - private peer:PeerDTO - private shuffledPeers: JSONDBPeer[] - private theP2pDownloader: ISyncDownloader - private theFsDownloader: ISyncDownloader - private to: number - private localNumber: number - private watcher: Watcher - private endpoint: string = "" - private hasMilestonesPages: boolean|undefined - private milestones: { [k: number]: BlockDTO } = {} - private milestonesPerPage = 1 - private maxPage = 0 + private currency: string; + private node: IRemoteContacter; + private peer: PeerDTO; + private shuffledPeers: JSONDBPeer[]; + private theP2pDownloader: ISyncDownloader; + private theFsDownloader: ISyncDownloader; + private to: number; + private localNumber: number; + private watcher: Watcher; + private endpoint: string = ""; + private hasMilestonesPages: boolean | undefined; + private milestones: { [k: number]: BlockDTO } = {}; + private milestonesPerPage = 1; + private maxPage = 0; constructor( private host: string, private port: number, - private server:Server, + private server: Server, chunkSize: number, private noShufflePeers = false, - private otherDAL?:FileDAL, - private allowLocalSync = false, + private otherDAL?: FileDAL, + private allowLocalSync = false ) { - super(chunkSize) + super(chunkSize); } get dal(): FileDAL { - return this.server.dal + return this.server.dal; } get readDAL(): FileDAL { - return this.otherDAL || this.dal + return this.otherDAL || this.dal; } get PeeringService(): PeeringService { - return this.server.PeeringService + return this.server.PeeringService; } getCurrency(): string { - return this.currency || 'unknown-currency' + return this.currency || "unknown-currency"; } getPeer(): PeerDTO { - return this.node as any + return this.node as any; } setWatcher(watcher: Watcher): void { - this.watcher = watcher + this.watcher = watcher; } getChunksPath(): string { - return this.getCurrency() + return this.getCurrency(); } async init(): Promise<void> { - const syncApi = await RemoteSynchronizer.getSyncAPI([{ host: this.host, port: this.port }], this.server.conf.pair) + const syncApi = await RemoteSynchronizer.getSyncAPI( + [{ host: this.host, port: this.port }], + this.server.conf.pair + ); if (!syncApi.api) { - this.watcher.syncFailCannotConnectToRemote() - throw Error(DataErrors[DataErrors.CANNOT_CONNECT_TO_REMOTE_FOR_SYNC]) + this.watcher.syncFailCannotConnectToRemote(); + throw Error(DataErrors[DataErrors.CANNOT_CONNECT_TO_REMOTE_FOR_SYNC]); } - this.currency = syncApi.currency - this.endpoint = syncApi.endpoint - this.node = syncApi.api - this.peer = PeerDTO.fromJSONObject(syncApi.peering) - logger.info("Try with %s %s", this.peer.getURL(), this.peer.pubkey.substr(0, 6)) + this.currency = syncApi.currency; + this.endpoint = syncApi.endpoint; + this.node = syncApi.api; + this.peer = PeerDTO.fromJSONObject(syncApi.peering); + logger.info( + "Try with %s %s", + this.peer.getURL(), + this.peer.pubkey.substr(0, 6) + ); // We save this peer as a trusted peer for future contact try { - await this.server.PeeringService.submitP(DBPeer.fromPeerDTO(this.peer), false, false, true) + await this.server.PeeringService.submitP( + DBPeer.fromPeerDTO(this.peer), + false, + 
false, + true + ); } catch (e) { - logger.debug(e) + logger.debug(e); } - ;(this.node as any).pubkey = this.peer.pubkey + (this.node as any).pubkey = this.peer.pubkey; } - public static async getSyncAPI(hosts: { isBMA?: boolean, isWS2P?: boolean, host: string, port: number, path?: string }[], keypair: Keypair) { - let api: IRemoteContacter|undefined - let peering: any - let endpoint = "" + public static async getSyncAPI( + hosts: { + isBMA?: boolean; + isWS2P?: boolean; + host: string; + port: number; + path?: string; + }[], + keypair: Keypair + ) { + let api: IRemoteContacter | undefined; + let peering: any; + let endpoint = ""; for (const access of hosts) { - const host = access.host - const port = access.port - const path = access.path - logger.info(`Connecting to address ${host} :${port}...`) + const host = access.host; + const port = access.port; + const path = access.path; + logger.info(`Connecting to address ${host} :${port}...`); // If we know this is a WS2P connection, don't try BMA if (access.isWS2P !== true) { try { - const contacter = await connect(PeerDTO.fromJSONObject({ endpoints: [`BASIC_MERKLED_API ${host} ${port}${path && ' ' + path || ''}`]}), 3000) - peering = await contacter.getPeer() - api = new BMARemoteContacter(contacter) - endpoint = 'BASIC_MERKLED_API ' + host + ' ' + port + ((path && ' ' + path) || '') - } catch (e) { - } + const contacter = await connect( + PeerDTO.fromJSONObject({ + endpoints: [ + `BASIC_MERKLED_API ${host} ${port}${ + (path && " " + path) || "" + }`, + ], + }), + 3000 + ); + peering = await contacter.getPeer(); + api = new BMARemoteContacter(contacter); + endpoint = + "BASIC_MERKLED_API " + + host + + " " + + port + + ((path && " " + path) || ""); + } catch (e) {} } // If BMA is unreachable and the connection is not marked as strict BMA, let's try WS2P if (!api && access.isBMA !== true) { - const pair = new Key(keypair.pub, keypair.sec) - const connection = WS2PConnection.newConnectionToAddress(1, - `ws://${host}:${port}${path && ' ' + path || ''}`, + const pair = new Key(keypair.pub, keypair.sec); + const connection = WS2PConnection.newConnectionToAddress( + 1, + `ws://${host}:${port}${(path && " " + path) || ""}`, new (class SyncMessageHandler implements WS2PMessageHandler { - async answerToRequest(json: any, c: WS2PConnection): Promise<WS2PResponse> { - throw Error(DataErrors[DataErrors.WS2P_SYNC_PERIMETER_IS_LIMITED]) + async answerToRequest( + json: any, + c: WS2PConnection + ): Promise<WS2PResponse> { + throw Error( + DataErrors[DataErrors.WS2P_SYNC_PERIMETER_IS_LIMITED] + ); } - async handlePushMessage(json: any, c: WS2PConnection): Promise<void> { - logger.warn('Receiving push messages, which are not allowed during a SYNC.', json) + async handlePushMessage( + json: any, + c: WS2PConnection + ): Promise<void> { + logger.warn( + "Receiving push messages, which are not allowed during a SYNC.", + json + ); } - }), - new WS2PPubkeySyncLocalAuth("", pair, '00000000'), + })(), + new WS2PPubkeySyncLocalAuth("", pair, "00000000"), new WS2PPubkeyRemoteAuth("", pair), // The currency will be set by the remote node undefined - ) + ); try { - const requester = WS2PRequester.fromConnection(connection) - peering = await requester.getPeer() - api = new WS2PRemoteContacter(requester) - endpoint = 'WS2P 99999999 ' + host + ' ' + port + ((path && ' ' + path) || '') - } catch (e) { - } + const requester = WS2PRequester.fromConnection(connection); + peering = await requester.getPeer(); + api = new WS2PRemoteContacter(requester); + endpoint = + "WS2P 
99999999 " + host + " " + port + ((path && " " + path) || ""); + } catch (e) {} } // If we have a working API: stop! if (api && peering) { @@ -170,153 +219,191 @@ export class RemoteSynchronizer extends AbstractSynchronizer { } } if (!api) { - throw Error(DataErrors[DataErrors.CANNOT_CONNECT_TO_REMOTE_FOR_SYNC]) + throw Error(DataErrors[DataErrors.CANNOT_CONNECT_TO_REMOTE_FOR_SYNC]); } if (!peering) { - throw Error(DataErrors[DataErrors.NO_PEERING_AVAILABLE_FOR_SYNC]) + throw Error(DataErrors[DataErrors.NO_PEERING_AVAILABLE_FOR_SYNC]); } return { api, peering, endpoint, - currency: peering.currency - } + currency: peering.currency, + }; } - async initWithKnownLocalAndToAndCurrency(to: number, localNumber: number): Promise<void> { - this.to = to - this.localNumber = localNumber + async initWithKnownLocalAndToAndCurrency( + to: number, + localNumber: number + ): Promise<void> { + this.to = to; + this.localNumber = localNumber; //======= // Peers (just for P2P download) //======= - let peers:(JSONDBPeer|null)[] = []; - const p2psync = !cliprogram.nop2p + let peers: (JSONDBPeer | null)[] = []; + const p2psync = !cliprogram.nop2p; if (p2psync) { - this.watcher.writeStatus('Peers...'); - peers = await this.node.getPeers() + this.watcher.writeStatus("Peers..."); + peers = await this.node.getPeers(); } // Add current peer if it is not returned (example of a local node) peers.push({ version: 1, - currency: '', - status: 'UP', + currency: "", + status: "UP", first_down: null, last_try: null, - pubkey: '', - block: '', - signature: '', - endpoints: [this.endpoint] - }) + pubkey: "", + block: "", + signature: "", + endpoints: [this.endpoint], + }); - peers = peers.filter(p => { - if (!p) return false - let hasWS2P = false - let hasBMA = false + peers = peers.filter((p) => { + if (!p) return false; + let hasWS2P = false; + let hasBMA = false; for (const e of p.endpoints) { - if (e.indexOf('MERKLED')) { - hasBMA = true + if (e.indexOf("MERKLED")) { + hasBMA = true; } - if (e.indexOf('WS2P') !== -1) { - hasWS2P = true + if (e.indexOf("WS2P") !== -1) { + hasWS2P = true; } } - return (hasWS2P || hasBMA) && p.status === 'UP' - }) + return (hasWS2P || hasBMA) && p.status === "UP"; + }); if (!peers.length) { - peers.push(DBPeer.fromPeerDTO(this.peer)) + peers.push(DBPeer.fromPeerDTO(this.peer)); } - this.shuffledPeers = (this.noShufflePeers ? peers : Underscore.shuffle(peers)).filter(p => !!(p)) as JSONDBPeer[] + this.shuffledPeers = (this.noShufflePeers + ? 
peers + : Underscore.shuffle(peers) + ).filter((p) => !!p) as JSONDBPeer[]; } p2pDownloader(): ISyncDownloader { if (!this.theP2pDownloader) { - this.theP2pDownloader = new P2PSyncDownloader(this.currency, this.server.conf.pair, this.localNumber, this.to, this.shuffledPeers, this.watcher, logger, this.chunkSize, this.allowLocalSync) + this.theP2pDownloader = new P2PSyncDownloader( + this.currency, + this.server.conf.pair, + this.localNumber, + this.to, + this.shuffledPeers, + this.watcher, + logger, + this.chunkSize, + this.allowLocalSync + ); } - return this.theP2pDownloader + return this.theP2pDownloader; } fsDownloader(): ISyncDownloader { if (!this.theFsDownloader) { - this.theFsDownloader = new FsSyncDownloader(this.readDAL.fs, path.join(this.readDAL.rootPath, this.getChunksPath()), this.getChunkName.bind(this), this.chunkSize) + this.theFsDownloader = new FsSyncDownloader( + this.readDAL.fs, + path.join(this.readDAL.rootPath, this.getChunksPath()), + this.getChunkName.bind(this), + this.chunkSize + ); } - return this.theFsDownloader + return this.theFsDownloader; } - getCurrent(): Promise<BlockDTO|null> { - return this.node.getCurrent() + getCurrent(): Promise<BlockDTO | null> { + return this.node.getCurrent(); } - getBlock(number: number): Promise<BlockDTO|null> { - return this.node.getBlock(number) + getBlock(number: number): Promise<BlockDTO | null> { + return this.node.getBlock(number); } - async getMilestone(number: number): Promise<BlockDTO|null> { + async getMilestone(number: number): Promise<BlockDTO | null> { if (this.hasMilestonesPages === undefined) { try { - const mlPage = await this.node.getMilestonesPage() - this.hasMilestonesPages = mlPage.chunkSize === this.chunkSize - this.milestonesPerPage = mlPage.milestonesPerPage - this.maxPage = mlPage.totalPages + const mlPage = await this.node.getMilestonesPage(); + this.hasMilestonesPages = mlPage.chunkSize === this.chunkSize; + this.milestonesPerPage = mlPage.milestonesPerPage; + this.maxPage = mlPage.totalPages; } catch (e) { - this.hasMilestonesPages = false + this.hasMilestonesPages = false; } } if (!this.hasMilestonesPages) { - return this.getBlock(number) + return this.getBlock(number); } if (this.milestones[number]) { - return this.milestones[number] + return this.milestones[number]; } if ((number + 1) % this.chunkSize !== 0) { // Something went wrong: we cannot rely on milestones method - this.hasMilestonesPages = false - return this.getBlock(number) + this.hasMilestonesPages = false; + return this.getBlock(number); } - const chunkNumber = (number + 1) / this.chunkSize - const pageNumber = (chunkNumber - (chunkNumber % this.milestonesPerPage)) / this.milestonesPerPage + 1 + const chunkNumber = (number + 1) / this.chunkSize; + const pageNumber = + (chunkNumber - (chunkNumber % this.milestonesPerPage)) / + this.milestonesPerPage + + 1; if (pageNumber > this.maxPage) { // The page is not available: we cannot rely on milestones method at this point - this.hasMilestonesPages = false - return this.getBlock(number) + this.hasMilestonesPages = false; + return this.getBlock(number); } - const mlPage = await this.node.getMilestones(pageNumber) - mlPage.blocks.forEach(b => this.milestones[b.number] = b) + const mlPage = await this.node.getMilestones(pageNumber); + mlPage.blocks.forEach((b) => (this.milestones[b.number] = b)); if (this.milestones[number]) { - return this.milestones[number] + return this.milestones[number]; } // Even after the download, it seems we don't have our milestone. We will download normally. 
- this.hasMilestonesPages = false - return this.getBlock(number) + this.hasMilestonesPages = false; + return this.getBlock(number); } - static async test(host: string, port: number, keypair: Keypair): Promise<BlockDTO> { - const syncApi = await RemoteSynchronizer.getSyncAPI([{ host, port }], keypair) - const current = await syncApi.api.getCurrent() + static async test( + host: string, + port: number, + keypair: Keypair + ): Promise<BlockDTO> { + const syncApi = await RemoteSynchronizer.getSyncAPI( + [{ host, port }], + keypair + ); + const current = await syncApi.api.getCurrent(); if (!current) { - throw Error(DataErrors[DataErrors.REMOTE_HAS_NO_CURRENT_BLOCK]) + throw Error(DataErrors[DataErrors.REMOTE_HAS_NO_CURRENT_BLOCK]); } - return current + return current; } async syncPeers(fullSync: boolean, to?: number): Promise<void> { - const peers = await this.node.getPeers() + const peers = await this.node.getPeers(); for (let i = 0; i < peers.length; i++) { - const peer = PeerDTO.fromJSONObject(peers[i]) - this.watcher.writeStatus('Peer ' + peer.pubkey) - this.watcher.peersPercent(Math.ceil(i / peers.length * 100)) + const peer = PeerDTO.fromJSONObject(peers[i]); + this.watcher.writeStatus("Peer " + peer.pubkey); + this.watcher.peersPercent(Math.ceil((i / peers.length) * 100)); try { - await this.PeeringService.submitP(DBPeer.fromPeerDTO(peer)) - } catch (e) { - } + await this.PeeringService.submitP(DBPeer.fromPeerDTO(peer)); + } catch (e) {} } - this.watcher.peersPercent(100) + this.watcher.peersPercent(100); } async syncSandbox(): Promise<void> { - this.watcher.writeStatus('Synchronizing the sandboxes...'); - await pullSandboxToLocalServer(this.currency, this.node, this.server, this.server.logger, this.watcher, 1, false) + this.watcher.writeStatus("Synchronizing the sandboxes..."); + await pullSandboxToLocalServer( + this.currency, + this.node, + this.server, + this.server.logger, + this.watcher, + 1, + false + ); } } diff --git a/app/modules/crawler/lib/sync/WS2PRemoteContacter.ts b/app/modules/crawler/lib/sync/WS2PRemoteContacter.ts index 3af758ce9500452a57f751f0f169fbcc0082d790..cb0c7bfa7d30802d52ca83bdb6aaa9235af31d60 100644 --- a/app/modules/crawler/lib/sync/WS2PRemoteContacter.ts +++ b/app/modules/crawler/lib/sync/WS2PRemoteContacter.ts @@ -11,56 +11,68 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
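// The class below adapts a WS2P (websocket) requester to the same
// IRemoteContacter surface that the BMA (HTTP) contacter implements, so the
// synchronizer can treat both transports uniformly. A minimal sketch of that
// adapter idea, under assumed names (Remote, HttpRemote and the
// /blockchain/block route are illustrative only, not taken from this diff):
interface Remote {
  getBlock(n: number): Promise<unknown | null>;
}

class HttpRemote implements Remote {
  constructor(private baseUrl: string) {}

  async getBlock(n: number): Promise<unknown | null> {
    // One narrow method per remote capability; a websocket-backed class
    // satisfies the very same interface, which is what WS2PRemoteContacter
    // does with its WS2PRequester.
    const res = await fetch(`${this.baseUrl}/blockchain/block/${n}`);
    return res.ok ? res.json() : null;
  }
}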
-import {NewLogger} from "../../../../lib/logger" -import {IRemoteContacter} from "./IRemoteContacter"; -import {WS2PRequester} from "../../../ws2p/lib/WS2PRequester"; -import {DBPeer, JSONDBPeer} from "../../../../lib/db/DBPeer"; -import {BlockDTO} from "../../../../lib/dto/BlockDTO"; -import {PeerDTO} from "../../../../lib/dto/PeerDTO"; -import {HttpRequirements} from "../../../bma/lib/dtos"; +import { NewLogger } from "../../../../lib/logger"; +import { IRemoteContacter } from "./IRemoteContacter"; +import { WS2PRequester } from "../../../ws2p/lib/WS2PRequester"; +import { DBPeer, JSONDBPeer } from "../../../../lib/db/DBPeer"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { HttpRequirements } from "../../../bma/lib/dtos"; -const logger = NewLogger() +const logger = NewLogger(); export class WS2PRemoteContacter implements IRemoteContacter { - - public type: 'BMA'| 'WS2P' = 'WS2P' + public type: "BMA" | "WS2P" = "WS2P"; getRequirementsPending(min: number): Promise<HttpRequirements> { - return this.requester.getRequirementsPending(min) + return this.requester.getRequirementsPending(min); } - constructor(protected requester: WS2PRequester) { - } + constructor(protected requester: WS2PRequester) {} getBlock(number: number): Promise<BlockDTO | null> { - return this.requester.getBlock(number) + return this.requester.getBlock(number); } - getMilestones(page: number): Promise<{ chunkSize: number; totalPages: number; currentPage: number; milestonesPerPage: number; blocks: BlockDTO[] }> { - return this.requester.getMilestones(page) as any + getMilestones( + page: number + ): Promise<{ + chunkSize: number; + totalPages: number; + currentPage: number; + milestonesPerPage: number; + blocks: BlockDTO[]; + }> { + return this.requester.getMilestones(page) as any; } - getMilestonesPage(): Promise<{ chunkSize: number; totalPages: number; milestonesPerPage: number }> { - return this.requester.getMilestonesPage() + getMilestonesPage(): Promise<{ + chunkSize: number; + totalPages: number; + milestonesPerPage: number; + }> { + return this.requester.getMilestonesPage(); } getCurrent(): Promise<BlockDTO | null> { - return this.requester.getCurrent() + return this.requester.getCurrent(); } getBlocks(count: number, from: number): Promise<BlockDTO[]> { - return this.requester.getBlocks(count, from) + return this.requester.getBlocks(count, from); } async getPeers(): Promise<(JSONDBPeer | null)[]> { - return (await this.requester.getPeers()).map(p => DBPeer.fromPeerDTO(PeerDTO.fromJSONObject(p))) + return (await this.requester.getPeers()).map((p) => + DBPeer.fromPeerDTO(PeerDTO.fromJSONObject(p)) + ); } getName(): string { - return "WS2P remote" + return "WS2P remote"; } get hostName() { - return this.requester.hostName + return this.requester.hostName; } } diff --git a/app/modules/crawler/lib/sync/Watcher.ts b/app/modules/crawler/lib/sync/Watcher.ts index 3679f8f877c2a56a115efe176323aaeb459d42a1..796d99b2b3de5ec6196a75f1550d921db33258e4 100644 --- a/app/modules/crawler/lib/sync/Watcher.ts +++ b/app/modules/crawler/lib/sync/Watcher.ts @@ -1,97 +1,125 @@ -import * as events from "events" -import {cliprogram} from "../../../../lib/common-libs/programOptions" -import {P2pCandidate} from "./p2p/p2p-candidate" +import * as events from "events"; +import { cliprogram } from "../../../../lib/common-libs/programOptions"; +import { P2pCandidate } from "./p2p/p2p-candidate"; -const multimeter = require('multimeter') +const multimeter = 
require("multimeter"); export interface Watcher { - writeStatus(str: string): void - downloadPercent(pct?: number): number - storagePercent(pct?: number): number - appliedPercent(pct?: number): number - sbxPercent(pct?: number): number - peersPercent(pct?: number): number - end(): void + writeStatus(str: string): void; + downloadPercent(pct?: number): number; + storagePercent(pct?: number): number; + appliedPercent(pct?: number): number; + sbxPercent(pct?: number): number; + peersPercent(pct?: number): number; + end(): void; - reserveNodes(nodesAvailable: P2pCandidate[]): void + reserveNodes(nodesAvailable: P2pCandidate[]): void; - unableToDownloadChunk(chunkIndex: number): void + unableToDownloadChunk(chunkIndex: number): void; - gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void + gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void; - gotChunk(chunkIndex: number, node: P2pCandidate): void + gotChunk(chunkIndex: number, node: P2pCandidate): void; - failToGetChunk(chunkIndex: number, node: P2pCandidate): void + failToGetChunk(chunkIndex: number, node: P2pCandidate): void; - wantToDownload(chunkIndex: number): void + wantToDownload(chunkIndex: number): void; - addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void + addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void; - wantToLoad(chunkIndex: number): void + wantToLoad(chunkIndex: number): void; - beforeReadyNodes(p2pCandidates: P2pCandidate[]): void + beforeReadyNodes(p2pCandidates: P2pCandidate[]): void; - syncFailNoNodeFound(): void + syncFailNoNodeFound(): void; - syncFailCannotConnectToRemote(): void + syncFailCannotConnectToRemote(): void; } -export type EventName = 'downloadChange'|'storageChange'|'appliedChange'|'sbxChange'|'peersChange' - | 'addWrongChunkFailure' - | 'failToGetChunk' - | 'gettingChunk' - | 'gotChunk' - | 'reserveNodes' - | 'unableToDownloadChunk' - | 'wantToDownload' - | 'wantToLoad' - | 'beforeReadyNodes' - | 'syncFailNoNodeFound' - | 'syncFailCannotConnectToRemote' +export type EventName = + | "downloadChange" + | "storageChange" + | "appliedChange" + | "sbxChange" + | "peersChange" + | "addWrongChunkFailure" + | "failToGetChunk" + | "gettingChunk" + | "gotChunk" + | "reserveNodes" + | "unableToDownloadChunk" + | "wantToDownload" + | "wantToLoad" + | "beforeReadyNodes" + | "syncFailNoNodeFound" + | "syncFailCannotConnectToRemote"; export class EventWatcher extends events.EventEmitter implements Watcher { - - constructor(private innerWatcher:Watcher) { - super() + constructor(private innerWatcher: Watcher) { + super(); } writeStatus(str: string): void { - this.innerWatcher.writeStatus(str) + this.innerWatcher.writeStatus(str); } downloadPercent(pct?: number): number { - return this.change('downloadChange', (pct) => this.innerWatcher.downloadPercent(pct), pct) + return this.change( + "downloadChange", + (pct) => this.innerWatcher.downloadPercent(pct), + pct + ); } storagePercent(pct?: number): number { - return this.change('storageChange', (pct) => this.innerWatcher.storagePercent(pct), pct) + return this.change( + "storageChange", + (pct) => this.innerWatcher.storagePercent(pct), + pct + ); } appliedPercent(pct?: number): number { - return this.change('appliedChange', (pct) => this.innerWatcher.appliedPercent(pct), pct) + return this.change( + "appliedChange", + (pct) => this.innerWatcher.appliedPercent(pct), + pct + ); } sbxPercent(pct?: number): number { - return this.change('sbxChange', (pct) => this.innerWatcher.sbxPercent(pct), pct) + 
return this.change( + "sbxChange", + (pct) => this.innerWatcher.sbxPercent(pct), + pct + ); } peersPercent(pct?: number): number { - return this.change('peersChange', (pct) => this.innerWatcher.peersPercent(pct), pct) - } - - change(changeName: EventName, method: (pct?: number) => number, pct?: number) { + return this.change( + "peersChange", + (pct) => this.innerWatcher.peersPercent(pct), + pct + ); + } + + change( + changeName: EventName, + method: (pct?: number) => number, + pct?: number + ) { if (pct !== undefined && method() < pct) { - this.emit(changeName, pct || 0) + this.emit(changeName, pct || 0); } - return method(pct) + return method(pct); } end(): void { - this.innerWatcher.end() + this.innerWatcher.end(); } onEvent(e: EventName, cb: (pct: number) => void) { - this.on(e, cb) + this.on(e, cb); } getStats() { @@ -101,91 +129,90 @@ export class EventWatcher extends events.EventEmitter implements Watcher { applied: this.appliedPercent(), sandbox: this.sbxPercent(), peersSync: this.peersPercent(), - } + }; } /************* P2P DOWNLOAD EVENTS ****************/ addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void { - this.emit('addWrongChunkFailure', { chunkIndex, node: lastSupplier }) + this.emit("addWrongChunkFailure", { chunkIndex, node: lastSupplier }); } failToGetChunk(chunkIndex: number, node: P2pCandidate): void { - this.emit('failToGetChunk', { chunkIndex, node }) + this.emit("failToGetChunk", { chunkIndex, node }); } gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void { - this.emit('gettingChunk', { chunkIndex, nodes: candidates }) + this.emit("gettingChunk", { chunkIndex, nodes: candidates }); } gotChunk(chunkIndex: number, node: P2pCandidate): void { - this.emit('gotChunk', { chunkIndex, node }) + this.emit("gotChunk", { chunkIndex, node }); } reserveNodes(nodesAvailable: P2pCandidate[]): void { - this.emit('reserveNodes', { nodes: nodesAvailable }) + this.emit("reserveNodes", { nodes: nodesAvailable }); } unableToDownloadChunk(chunkIndex: number): void { - this.emit('unableToDownloadChunk', { chunkIndex }) + this.emit("unableToDownloadChunk", { chunkIndex }); } wantToDownload(chunkIndex: number): void { - this.emit('wantToDownload', { chunkIndex }) + this.emit("wantToDownload", { chunkIndex }); } wantToLoad(chunkIndex: number): void { - this.emit('wantToLoad', { chunkIndex }) + this.emit("wantToLoad", { chunkIndex }); } beforeReadyNodes(p2pCandidates: P2pCandidate[]): void { - this.emit('beforeReadyNodes', { nodes: p2pCandidates }) + this.emit("beforeReadyNodes", { nodes: p2pCandidates }); } syncFailNoNodeFound(): void { - this.emit('syncFailNoNodeFound', {}) + this.emit("syncFailNoNodeFound", {}); } syncFailCannotConnectToRemote(): void { - this.emit('syncFailCannotConnectToRemote', {}) + this.emit("syncFailCannotConnectToRemote", {}); } } export class MultimeterWatcher implements Watcher { - - private xPos:number - private yPos:number - private multi:any - private charm:any - private appliedBar:any - private savedBar:any - private downloadBar:any - private sbxBar:any - private peersBar:any - private writtens:string[] = [] + private xPos: number; + private yPos: number; + private multi: any; + private charm: any; + private appliedBar: any; + private savedBar: any; + private downloadBar: any; + private sbxBar: any; + private peersBar: any; + private writtens: string[] = []; constructor() { this.multi = multimeter(process); this.charm = this.multi.charm; - this.charm.on('^C', process.exit); + this.charm.on("^C", process.exit); 
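    // multimeter renders several in-place progress bars on one terminal;
    // charm is its underlying cursor-control handle, used here to reset the
    // display and to exit cleanly on Ctrl+C.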
this.charm.reset(); - this.multi.write('Progress:\n\n'); + this.multi.write("Progress:\n\n"); - let line = 3 - this.savedBar = this.createBar('Milestones', line++) - this.downloadBar = this.createBar('Download', line++) - this.appliedBar = this.createBar('Apply', line++) + let line = 3; + this.savedBar = this.createBar("Milestones", line++); + this.downloadBar = this.createBar("Download", line++); + this.appliedBar = this.createBar("Apply", line++); if (!cliprogram.nosbx) { - this.sbxBar = this.createBar('Sandbox', line++) + this.sbxBar = this.createBar("Sandbox", line++); } if (!cliprogram.nopeers) { - this.peersBar = this.createBar('Peers', line++) + this.peersBar = this.createBar("Peers", line++); } - this.multi.write('\nStatus: '); + this.multi.write("\nStatus: "); - this.charm.position( (x:number, y:number) => { + this.charm.position((x: number, y: number) => { this.xPos = x; this.yPos = y; }); @@ -203,191 +230,165 @@ export class MultimeterWatcher implements Watcher { } } - writeStatus(str:string) { + writeStatus(str: string) { this.writtens.push(str); - this.charm - .position(this.xPos, this.yPos) - .erase('end') - .write(str) - ; - }; + this.charm.position(this.xPos, this.yPos).erase("end").write(str); + } - downloadPercent(pct:number) { - return this.downloadBar.percent(pct) + downloadPercent(pct: number) { + return this.downloadBar.percent(pct); } - storagePercent(pct:number) { - return this.savedBar.percent(pct) + storagePercent(pct: number) { + return this.savedBar.percent(pct); } - appliedPercent(pct:number) { - return this.appliedBar.percent(pct) + appliedPercent(pct: number) { + return this.appliedBar.percent(pct); } - sbxPercent(pct:number) { + sbxPercent(pct: number) { if (!cliprogram.nosbx) { - return this.sbxBar.percent(pct) + return this.sbxBar.percent(pct); } - return 0 + return 0; } - peersPercent(pct:number) { + peersPercent(pct: number) { if (!cliprogram.nopeers) { - return this.peersBar.percent(pct) + return this.peersBar.percent(pct); } - return 0 + return 0; } end() { - this.multi.write('\nAll done.\n'); + this.multi.write("\nAll done.\n"); this.multi.destroy(); } private createBar(title: string, line: number) { - const header = (title + ':').padEnd(14, ' ') + '\n' - this.multi.write(header) + const header = (title + ":").padEnd(14, " ") + "\n"; + this.multi.write(header); return this.multi(header.length, line, { - width : 20, - solid : { - text : '|', - foreground : 'white', - background : 'blue' + width: 20, + solid: { + text: "|", + foreground: "white", + background: "blue", }, - empty : { text : ' ' } - }) + empty: { text: " " }, + }); } /************* NOT IMPLEMENTED ****************/ - addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void { - } + addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void {} - failToGetChunk(chunkIndex: number, node: P2pCandidate): void { - } + failToGetChunk(chunkIndex: number, node: P2pCandidate): void {} - gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void { - } + gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void {} - gotChunk(chunkIndex: number, node: P2pCandidate): void { - } + gotChunk(chunkIndex: number, node: P2pCandidate): void {} - reserveNodes(nodesAvailable: P2pCandidate[]): void { - } + reserveNodes(nodesAvailable: P2pCandidate[]): void {} - unableToDownloadChunk(chunkIndex: number): void { - } + unableToDownloadChunk(chunkIndex: number): void {} - wantToDownload(chunkIndex: number): void { - } + wantToDownload(chunkIndex: number): void {} - 
wantToLoad(chunkIndex: number): void { - } + wantToLoad(chunkIndex: number): void {} - beforeReadyNodes(p2pCandidates: P2pCandidate[]): void { - } + beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {} - syncFailNoNodeFound(): void { - } - - syncFailCannotConnectToRemote(): void { - } + syncFailNoNodeFound(): void {} + syncFailCannotConnectToRemote(): void {} } export class LoggerWatcher implements Watcher { + private downPct = 0; + private savedPct = 0; + private appliedPct = 0; + private sbxPct = 0; + private peersPct = 0; + private lastMsg = ""; - private downPct = 0 - private savedPct = 0 - private appliedPct = 0 - private sbxPct = 0 - private peersPct = 0 - private lastMsg = "" - - constructor(private logger:any) { - } + constructor(private logger: any) {} showProgress() { - return this.logger.info('Milestones %s%, Downloaded %s%, Applied %s%', this.savedPct, this.downPct, this.appliedPct) + return this.logger.info( + "Milestones %s%, Downloaded %s%, Applied %s%", + this.savedPct, + this.downPct, + this.appliedPct + ); } - writeStatus(str:string) { + writeStatus(str: string) { if (str != this.lastMsg) { this.lastMsg = str; this.logger.info(str); } } - downloadPercent(pct:number) { - return this.change('downPct', pct) + downloadPercent(pct: number) { + return this.change("downPct", pct); } - storagePercent(pct:number) { - return this.change('savedPct', pct) + storagePercent(pct: number) { + return this.change("savedPct", pct); } - appliedPercent(pct:number) { - return this.change('appliedPct', pct) + appliedPercent(pct: number) { + return this.change("appliedPct", pct); } - sbxPercent(pct:number) { + sbxPercent(pct: number) { if (pct > this.sbxPct) { - this.sbxPct = pct + this.sbxPct = pct; } - return this.sbxPct + return this.sbxPct; } - peersPercent(pct:number) { + peersPercent(pct: number) { if (pct > this.peersPct) { - this.peersPct = pct + this.peersPct = pct; } - return this.peersPct + return this.peersPct; } - change(prop: 'downPct'|'savedPct'|'appliedPct', pct:number) { + change(prop: "downPct" | "savedPct" | "appliedPct", pct: number) { if (pct !== undefined) { - let changed = pct > this[prop] - this[prop] = pct - if (changed) this.showProgress() + let changed = pct > this[prop]; + this[prop] = pct; + if (changed) this.showProgress(); } - return this[prop] + return this[prop]; } - end() { - } + end() {} /************* NOT IMPLEMENTED ****************/ - addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void { - } - - failToGetChunk(chunkIndex: number, node: P2pCandidate): void { - } + addWrongChunkFailure(chunkIndex: number, lastSupplier: P2pCandidate): void {} - gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void { - } + failToGetChunk(chunkIndex: number, node: P2pCandidate): void {} - gotChunk(chunkIndex: number, node: P2pCandidate): void { - } + gettingChunk(chunkIndex: number, candidates: P2pCandidate[]): void {} - reserveNodes(nodesAvailable: P2pCandidate[]): void { - } + gotChunk(chunkIndex: number, node: P2pCandidate): void {} - unableToDownloadChunk(chunkIndex: number): void { - } + reserveNodes(nodesAvailable: P2pCandidate[]): void {} - wantToDownload(chunkIndex: number): void { - } + unableToDownloadChunk(chunkIndex: number): void {} - wantToLoad(chunkIndex: number): void { - } + wantToDownload(chunkIndex: number): void {} - beforeReadyNodes(p2pCandidates: P2pCandidate[]): void { - } + wantToLoad(chunkIndex: number): void {} - syncFailNoNodeFound(): void { - } + beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {} - 
syncFailCannotConnectToRemote(): void {
-  }
+  syncFailNoNodeFound(): void {}
+  syncFailCannotConnectToRemote(): void {}
 }
diff --git a/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts b/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts
index 16c59e199d13669bb9a4e1519ebe80dc352178d4..8f81ed886764eee8f5bd31571fc27921ccfc7f5a 100644
--- a/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts
+++ b/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts
@@ -1,109 +1,127 @@
-import {Querable, querablep} from "../../../../../lib/common-libs/querable"
-import {PeerDTO} from "../../../../../lib/dto/PeerDTO"
-import {Keypair} from "../../../../../lib/dto/ConfDTO"
-import {RemoteSynchronizer} from "../RemoteSynchronizer"
-import {IRemoteContacter} from "../IRemoteContacter"
-import {BlockDTO} from "../../../../../lib/dto/BlockDTO"
-import {newResolveTimeoutPromise} from "../../../../../lib/common-libs/timeout-promise"
+import { Querable, querablep } from "../../../../../lib/common-libs/querable";
+import { PeerDTO } from "../../../../../lib/dto/PeerDTO";
+import { Keypair } from "../../../../../lib/dto/ConfDTO";
+import { RemoteSynchronizer } from "../RemoteSynchronizer";
+import { IRemoteContacter } from "../IRemoteContacter";
+import { BlockDTO } from "../../../../../lib/dto/BlockDTO";
+import { newResolveTimeoutPromise } from "../../../../../lib/common-libs/timeout-promise";

 export class P2pCandidate {
-
-  private readonly apiPromise: Querable<any>
-  private dlPromise: Querable<BlockDTO[]|null>
-  private readonly responseTimes: number[] = []
-  private api: IRemoteContacter|null|undefined
-  private nbSuccess = 0
-  private isExcluded: boolean
-  private failures = 0
-  private reserved = false
+  private readonly apiPromise: Querable<any>;
+  private dlPromise: Querable<BlockDTO[] | null>;
+  private readonly responseTimes: number[] = [];
+  private api: IRemoteContacter | null | undefined;
+  private nbSuccess = 0;
+  private isExcluded: boolean;
+  private failures = 0;
+  private reserved = false;

   constructor(
     public p: PeerDTO,
     private keypair: Keypair,
     private logger: any,
-    private allowLocalSync: boolean,
+    private allowLocalSync: boolean
   ) {
-    this.apiPromise = this.initAPI()
-    this.dlPromise = querablep(Promise.resolve(null))
+    this.apiPromise = this.initAPI();
+    this.dlPromise = querablep(Promise.resolve(null));
   }

   addFailure() {
-    this.failures++
+    this.failures++;
     if (this.failures >= 5 && !this.isExcluded) {
-      this.isExcluded = true
-      this.logger.warn('Excluding node %s as it returned unchainable chunks %s times', this.hostName, this.failures)
+      this.isExcluded = true;
+      this.logger.warn(
+        "Excluding node %s as it returned unchainable chunks %s times",
+        this.hostName,
+        this.failures
+      );
     }
   }

   isReady() {
-    return !this.reserved && this.apiPromise.isResolved() && this.dlPromise.isResolved() && this.api && !this.isExcluded
+    return (
+      !this.reserved &&
+      this.apiPromise.isResolved() &&
+      this.dlPromise.isResolved() &&
+      this.api &&
+      !this.isExcluded
+    );
   }

   async waitAvailability(maxWait: number): Promise<boolean> {
     return Promise.race([
       // Wait for availability
-      (async () => !this.isExcluded
-      && !this.reserved
-      && (this.apiPromise.isRejected() ? await newResolveTimeoutPromise(maxWait, false) : !!(await this.apiPromise))
-      && (this.dlPromise.isRejected() ? await newResolveTimeoutPromise(maxWait, false) : !!(await this.dlPromise)))(),
+      (async () =>
+        !this.isExcluded &&
+        !this.reserved &&
+        (this.apiPromise.isRejected()
+          ? 
await newResolveTimeoutPromise(maxWait, false)
+          : !!(await this.apiPromise)) &&
+        (this.dlPromise.isRejected()
+          ? await newResolveTimeoutPromise(maxWait, false)
+          : !!(await this.dlPromise)))(),
       // Maximum wait trigger
-      newResolveTimeoutPromise(maxWait, false)
-    ])
+      newResolveTimeoutPromise(maxWait, false),
+    ]);
   }

   hasAvailableApi() {
-    return !!this.api
+    return !!this.api;
   }

   avgResponseTime() {
     if (!this.responseTimes.length) {
-      return 0
+      return 0;
     }
-    return this.responseTimes.reduce((sum, rt) => sum + rt, 0) / this.responseTimes.length
+    return (
+      this.responseTimes.reduce((sum, rt) => sum + rt, 0) /
+      this.responseTimes.length
+    );
   }

   get hostName() {
-    return (this.api && this.api.hostName) || 'NO_API'
+    return (this.api && this.api.hostName) || "NO_API";
   }

   async downloadBlocks(count: number, from: number) {
-    const start = Date.now()
-    let error: Error|undefined
-    this.reserved = false
-    this.dlPromise = querablep((async () => {
-      // We try to download the blocks
-      let blocks: BlockDTO[]|null
-      try {
-        blocks = await (this.api as IRemoteContacter).getBlocks(count, from)
-      }
-      catch (e) {
-        // Unfortunately this can fail
-        blocks = null
-        error = e
-      }
-      this.responseTimes.push(Date.now() - start);
-      // Only keep a flow of 5 ttas for the node
-      if (this.responseTimes.length > 5) this.responseTimes.shift()
-      this.nbSuccess++
-      if (error) {
-        throw error
-      }
-      return blocks
-    })())
-    return this.dlPromise
+    const start = Date.now();
+    let error: Error | undefined;
+    this.reserved = false;
+    this.dlPromise = querablep(
+      (async () => {
+        // We try to download the blocks
+        let blocks: BlockDTO[] | null;
+        try {
+          blocks = await (this.api as IRemoteContacter).getBlocks(count, from);
+        } catch (e) {
+          // Unfortunately this can fail
+          blocks = null;
+          error = e;
+        }
+        this.responseTimes.push(Date.now() - start);
+        // Only keep the last 5 response times (TTAs) for this node
+        if (this.responseTimes.length > 5) this.responseTimes.shift();
+        this.nbSuccess++;
+        if (error) {
+          throw error;
+        }
+        return blocks;
+      })()
+    );
+    return this.dlPromise;
   }

   private getRemoteAPIs() {
-    const bmaAPI = this.p.getBMA()
-    const ws2pAPI = this.p.getFirstNonTorWS2P()
-    const apis: RemoteAPI[] = []
-    const bmaHost = bmaAPI.dns || bmaAPI.ipv4 || bmaAPI.ipv6
+    const bmaAPI = this.p.getBMA();
+    const ws2pAPI = this.p.getFirstNonTorWS2P();
+    const apis: RemoteAPI[] = [];
+    const bmaHost = bmaAPI.dns || bmaAPI.ipv4 || bmaAPI.ipv6;
     if (bmaAPI.port && bmaHost) {
       apis.push({
         isBMA: true,
         port: bmaAPI.port,
-        host: bmaHost
-      })
+        host: bmaHost,
+      });
     }
     if (ws2pAPI) {
       apis.push({
@@ -111,40 +129,50 @@ export class P2pCandidate {
         host: ws2pAPI.host,
         port: ws2pAPI.port,
         path: ws2pAPI.path,
-      })
+      });
     }
-    return apis
+    return apis;
   }

   private initAPI() {
-    return querablep((async (): Promise<IRemoteContacter|null> => {
-      try {
-        const apis = this.getRemoteAPIs()
-        const syncApi = await RemoteSynchronizer.getSyncAPI(apis, this.keypair)
-        if (!this.allowLocalSync && ((syncApi && syncApi.api.hostName || '').match(/^(localhost|192|127)/))) {
-          return null
+    return querablep(
+      (async (): Promise<IRemoteContacter | null> => {
+        try {
+          const apis = this.getRemoteAPIs();
+          const syncApi = await RemoteSynchronizer.getSyncAPI(
+            apis,
+            this.keypair
+          );
+          if (
+            !this.allowLocalSync &&
+            ((syncApi && syncApi.api.hostName) || "").match(
+              /^(localhost|192|127)/
+            )
+          ) {
+            return null;
+          }
+          this.api = syncApi.api;
+          return syncApi.api;
+        } catch (e) {
+          return null;
        }
-        this.api = syncApi.api
-        return syncApi.api
-      } catch (e) {
-        return null
- } - })()) + })() + ); } reserve() { - this.reserved = true + this.reserved = true; } get apiName() { - return this.api && this.api.type + return this.api && this.api.type; } } interface RemoteAPI { - isBMA?: boolean - isWS2P?: boolean - host: string - port: number - path?: string -} \ No newline at end of file + isBMA?: boolean; + isWS2P?: boolean; + host: string; + port: number; + path?: string; +} diff --git a/app/modules/crawler/lib/sync/v2/DownloadStream.ts b/app/modules/crawler/lib/sync/v2/DownloadStream.ts index e5b861da8910770090ca5c9311a51a8ce5d41cdc..496a55d3a3c49d06c70bb62cd29321a3b1cb87ac 100644 --- a/app/modules/crawler/lib/sync/v2/DownloadStream.ts +++ b/app/modules/crawler/lib/sync/v2/DownloadStream.ts @@ -1,167 +1,214 @@ -import {Duplex} from 'stream' -import {FileDAL} from "../../../../../lib/dal/fileDAL" -import {AbstractSynchronizer} from "../AbstractSynchronizer" -import {Watcher} from "../Watcher" -import {ISyncDownloader} from "../ISyncDownloader" -import {BlockDTO} from "../../../../../lib/dto/BlockDTO" -import {Querable, querablep} from "../../../../../lib/common-libs/querable" -import {DBBlock} from "../../../../../lib/db/DBBlock" -import {ManualPromise, newManualPromise} from "../../../../../lib/common-libs/manual-promise" -import {NewLogger} from "../../../../../lib/logger" -import {getBlockInnerHashAndNonceWithSignature, getBlockInnerPart} from "../../../../../lib/common-libs/rawer" -import {PromiseOfBlocksReading} from "../PromiseOfBlockReading" -import {hashf} from "../../../../../lib/common" -import {CrawlerConstants} from "../../constants" +import { Duplex } from "stream"; +import { FileDAL } from "../../../../../lib/dal/fileDAL"; +import { AbstractSynchronizer } from "../AbstractSynchronizer"; +import { Watcher } from "../Watcher"; +import { ISyncDownloader } from "../ISyncDownloader"; +import { BlockDTO } from "../../../../../lib/dto/BlockDTO"; +import { Querable, querablep } from "../../../../../lib/common-libs/querable"; +import { DBBlock } from "../../../../../lib/db/DBBlock"; +import { + ManualPromise, + newManualPromise, +} from "../../../../../lib/common-libs/manual-promise"; +import { NewLogger } from "../../../../../lib/logger"; +import { + getBlockInnerHashAndNonceWithSignature, + getBlockInnerPart, +} from "../../../../../lib/common-libs/rawer"; +import { PromiseOfBlocksReading } from "../PromiseOfBlockReading"; +import { hashf } from "../../../../../lib/common"; +import { CrawlerConstants } from "../../constants"; -const logger = NewLogger() +const logger = NewLogger(); export class DownloadStream extends Duplex { + private fsDownloader: ISyncDownloader; + private p2PDownloader: ISyncDownloader; + private numberOfChunksToDownload: number; + private currentChunkNumber = 0; + private chunks: BlockDTO[][]; + private milestones: ManualPromise<BlockDTO>[]; + private dowloading: Querable<BlockDTO[]>[]; + private bestDownloaded = -1; - private fsDownloader: ISyncDownloader - private p2PDownloader: ISyncDownloader - private numberOfChunksToDownload:number - private currentChunkNumber = 0 - private chunks: BlockDTO[][] - private milestones: ManualPromise<BlockDTO>[] - private dowloading: Querable<BlockDTO[]>[] - private bestDownloaded = -1 - - private writeDAL: FileDAL + private writeDAL: FileDAL; constructor( - private localNumber:number, - private to:number, - private toHash:string, + private localNumber: number, + private to: number, + private toHash: string, private syncStrategy: AbstractSynchronizer, - dal:FileDAL, - private nocautious:boolean, - 
private watcher:Watcher, + dal: FileDAL, + private nocautious: boolean, + private watcher: Watcher ) { - super({objectMode: true}) - this.writeDAL = dal - const nbBlocksToDownload = Math.max(0, to - localNumber) - this.numberOfChunksToDownload = Math.ceil(nbBlocksToDownload / syncStrategy.chunkSize) - this.p2PDownloader = syncStrategy.p2pDownloader() - this.fsDownloader = syncStrategy.fsDownloader() + super({ objectMode: true }); + this.writeDAL = dal; + const nbBlocksToDownload = Math.max(0, to - localNumber); + this.numberOfChunksToDownload = Math.ceil( + nbBlocksToDownload / syncStrategy.chunkSize + ); + this.p2PDownloader = syncStrategy.p2pDownloader(); + this.fsDownloader = syncStrategy.fsDownloader(); - this.chunks = Array.from({ length: this.numberOfChunksToDownload }) - this.dowloading = Array.from({ length: this.numberOfChunksToDownload }) - this.milestones = Array.from({ length: this.numberOfChunksToDownload }).map(() => newManualPromise()) + this.chunks = Array.from({ length: this.numberOfChunksToDownload }); + this.dowloading = Array.from({ length: this.numberOfChunksToDownload }); + this.milestones = Array.from({ + length: this.numberOfChunksToDownload, + }).map(() => newManualPromise()); - this.downloadChunk(0) + this.downloadChunk(0); } private async downloadChunk(i: number): Promise<BlockDTO[]> { if (i + 1 > this.numberOfChunksToDownload) { - return Promise.resolve([]) + return Promise.resolve([]); } - if (!this.dowloading[i] && !this.chunks[i]) { - this.dowloading[i] = querablep((async (): Promise<BlockDTO[]> => { - const milestone = await this.milestones[i] - let downloader: ISyncDownloader = this.fsDownloader // First, we try with saved file - let chunk: BlockDTO[] - // We don't have the file locally: we loop on P2P download until we have it (or until P2P throws a general error) - do { - this.watcher.wantToLoad(i) - chunk = await downloader.getChunk(i) - if (chunk.length) { - // NewLogger().info("Chunk #%s is COMPLETE", i) - const topIndex = Math.min(milestone.number % this.syncStrategy.chunkSize, chunk.length - 1) - const topBlock = chunk[topIndex] - if (topBlock.number !== milestone.number || topBlock.hash !== milestone.hash) { - // This chunk is invalid, let's try another one - chunk = [] - } - if (i > 0) { - const previous = await this.downloadChunk(i - 1) - const chainsWell = await chainsCorrectly(previous, () => Promise.resolve(chunk), this.to, this.toHash, this.syncStrategy.chunkSize) - if (!chainsWell) { - NewLogger().warn("Chunk #%s DOES NOT CHAIN CORRECTLY. 
Retrying.", i) - chunk = [] + if (!this.dowloading[i] && !this.chunks[i]) { + this.dowloading[i] = querablep( + (async (): Promise<BlockDTO[]> => { + const milestone = await this.milestones[i]; + let downloader: ISyncDownloader = this.fsDownloader; // First, we try with saved file + let chunk: BlockDTO[]; + // We don't have the file locally: we loop on P2P download until we have it (or until P2P throws a general error) + do { + this.watcher.wantToLoad(i); + chunk = await downloader.getChunk(i); + if (chunk.length) { + // NewLogger().info("Chunk #%s is COMPLETE", i) + const topIndex = Math.min( + milestone.number % this.syncStrategy.chunkSize, + chunk.length - 1 + ); + const topBlock = chunk[topIndex]; + if ( + topBlock.number !== milestone.number || + topBlock.hash !== milestone.hash + ) { + // This chunk is invalid, let's try another one + chunk = []; + } + if (i > 0) { + const previous = await this.downloadChunk(i - 1); + const chainsWell = await chainsCorrectly( + previous, + () => Promise.resolve(chunk), + this.to, + this.toHash, + this.syncStrategy.chunkSize + ); + if (!chainsWell) { + NewLogger().warn( + "Chunk #%s DOES NOT CHAIN CORRECTLY. Retrying.", + i + ); + chunk = []; + } } } + if (!chunk.length) { + // Now we try using P2P + downloader = this.p2PDownloader; + } + } while (!chunk.length && i <= this.numberOfChunksToDownload); + // NewLogger().info("Chunk #%s chains well.", i) + const fileName = this.syncStrategy.getChunkRelativePath(i); + let doWrite = + downloader !== this.fsDownloader || + !(await this.writeDAL.coreFS.exists(fileName)); + if (doWrite) { + // Store the file to avoid re-downloading + if ( + this.localNumber <= 0 && + chunk.length === this.syncStrategy.chunkSize + ) { + await this.writeDAL.coreFS.makeTree( + this.syncStrategy.getCurrency() + ); + const content = { + blocks: chunk.map((b: any) => DBBlock.fromBlockDTO(b)), + }; + await this.writeDAL.coreFS.writeJSON(fileName, content); + } } - if (!chunk.length) { - // Now we try using P2P - downloader = this.p2PDownloader - } - } while (!chunk.length && i <= this.numberOfChunksToDownload) - // NewLogger().info("Chunk #%s chains well.", i) - const fileName = this.syncStrategy.getChunkRelativePath(i) - let doWrite = downloader !== this.fsDownloader - || !(await this.writeDAL.coreFS.exists(fileName)) - if (doWrite) { - // Store the file to avoid re-downloading - if (this.localNumber <= 0 && chunk.length === this.syncStrategy.chunkSize) { - await this.writeDAL.coreFS.makeTree(this.syncStrategy.getCurrency()) - const content = { blocks: chunk.map((b:any) => DBBlock.fromBlockDTO(b)) } - await this.writeDAL.coreFS.writeJSON(fileName, content) + if (i > this.bestDownloaded) { + this.bestDownloaded = i; + this.watcher.downloadPercent( + Math.round(((i + 1) / this.numberOfChunksToDownload) * 100) + ); } - } - if (i > this.bestDownloaded) { - this.bestDownloaded = i - this.watcher.downloadPercent(Math.round((i + 1) / this.numberOfChunksToDownload * 100)) - } - return chunk - })()) - this.dowloading[i] - .then(chunk => { - this.chunks[i] = chunk - delete this.dowloading[i] - }) - return this.dowloading[i] || this.chunks[i] + return chunk; + })() + ); + this.dowloading[i].then((chunk) => { + this.chunks[i] = chunk; + delete this.dowloading[i]; + }); + return this.dowloading[i] || this.chunks[i]; } - return this.dowloading[i] || this.chunks[i] + return this.dowloading[i] || this.chunks[i]; } _read(size: number) { if (this.currentChunkNumber == this.numberOfChunksToDownload) { - this.push(null) + this.push(null); } else { // 
Asks for next chunk: do we have it?
       if (this.chunks[this.currentChunkNumber]) {
-        this.push(this.chunks[this.currentChunkNumber])
-        delete this.chunks[this.currentChunkNumber]
+        this.push(this.chunks[this.currentChunkNumber]);
+        delete this.chunks[this.currentChunkNumber];
         // Let's start the download of next chunk
-        this.currentChunkNumber++
-        let p = this.downloadChunk(this.currentChunkNumber)
+        this.currentChunkNumber++;
+        let p = this.downloadChunk(this.currentChunkNumber);
         for (let i = 1; i <= CrawlerConstants.SYNC_CHUNKS_IN_ADVANCE; i++) {
-          p = p.then(() => this.downloadChunk(this.currentChunkNumber + i))
+          p = p.then(() => this.downloadChunk(this.currentChunkNumber + i));
         }
-      }
-      else {
+      } else {
         // We don't have it yet
-        this.push(undefined)
+        this.push(undefined);
       }
     }
   }

-  _write(block: BlockDTO|undefined, encoding: any, callback: (err: any) => void) {
+  _write(
+    block: BlockDTO | undefined,
+    encoding: any,
+    callback: (err: any) => void
+  ) {
     if (block) {
-      const i = Math.ceil(((block.number + 1) / this.syncStrategy.chunkSize) - 1)
+      const i = Math.ceil((block.number + 1) / this.syncStrategy.chunkSize - 1);
       // console.log('Done validation of chunk #%s', i)
-      this.milestones[i].resolve(block)
+      this.milestones[i].resolve(block);
     }
     setTimeout(() => {
-      callback(null)
-    }, 1)
+      callback(null);
+    }, 1);
   }
-
 }
-
-export async function chainsCorrectly(blocks:BlockDTO[], readNextChunk: PromiseOfBlocksReading, topNumber: number, topHash: string, chunkSize: number) {
-
+export async function chainsCorrectly(
+  blocks: BlockDTO[],
+  readNextChunk: PromiseOfBlocksReading,
+  topNumber: number,
+  topHash: string,
+  chunkSize: number
+) {
   if (!blocks.length) {
-    return false
+    return false;
   }
   for (let i = blocks.length - 1; i > 0; i--) {
-    if (blocks[i].number !== blocks[i - 1].number + 1 || blocks[i].previousHash !== blocks[i - 1].hash) {
+    if (
+      blocks[i].number !== blocks[i - 1].number + 1 ||
+      blocks[i].previousHash !== blocks[i - 1].hash
+    ) {
       logger.error("Blocks do not chain correctly", blocks[i].number);
       return false;
     }
-    if (blocks[i].version != blocks[i - 1].version && blocks[i].version != blocks[i - 1].version + 1) {
+    if (
+      blocks[i].version != blocks[i - 1].version &&
+      blocks[i].version != blocks[i - 1].version + 1
+    ) {
       logger.error("Version cannot be downgraded", blocks[i].number);
       return false;
     }
@@ -175,30 +222,46 @@ export async function chainsCorrectly(blocks:BlockDTO[], readNextChunk: PromiseO
         tx.version = CrawlerConstants.TRANSACTION_VERSION;
       }
     }
-    if (blocks[i].inner_hash !== hashf(getBlockInnerPart(blocks[i])).toUpperCase()) {
-      logger.error("Inner hash of block#%s from %s does not match", blocks[i].number)
-      return false
+    if (
+      blocks[i].inner_hash !== hashf(getBlockInnerPart(blocks[i])).toUpperCase()
+    ) {
+      logger.error(
+        "Inner hash of block #%s does not match",
+        blocks[i].number
+      );
+      return false;
     }
-    if (blocks[i].hash !== hashf(getBlockInnerHashAndNonceWithSignature(blocks[i])).toUpperCase()) {
-      logger.error("Hash of block#%s from %s does not match", blocks[i].number)
-      return false
+    if (
+      blocks[i].hash !==
+      hashf(getBlockInnerHashAndNonceWithSignature(blocks[i])).toUpperCase()
+    ) {
+      logger.error("Hash of block #%s does not match", blocks[i].number);
+      return false;
     }
   }
   const lastBlockOfChunk = blocks[blocks.length - 1];
-  if ((lastBlockOfChunk.number === topNumber || blocks.length < chunkSize) && lastBlockOfChunk.hash != topHash) {
+  if (
+    (lastBlockOfChunk.number === topNumber || blocks.length < chunkSize) &&
+    
lastBlockOfChunk.hash != topHash + ) { // Top chunk - logger.error('Top block is not on the right chain') - return false + logger.error("Top block is not on the right chain"); + return false; } else { // Chaining between downloads - const previousChunk = await readNextChunk() - const blockN = blocks[blocks.length - 1] // The block n - const blockNp1 = (await previousChunk)[0] // The block n + 1 - if (blockN && blockNp1 && (blockN.number + 1 !== blockNp1.number || blockN.hash != blockNp1.previousHash)) { - logger.error('Chunk is not referenced by the upper one') - return false + const previousChunk = await readNextChunk(); + const blockN = blocks[blocks.length - 1]; // The block n + const blockNp1 = (await previousChunk)[0]; // The block n + 1 + if ( + blockN && + blockNp1 && + (blockN.number + 1 !== blockNp1.number || + blockN.hash != blockNp1.previousHash) + ) { + logger.error("Chunk is not referenced by the upper one"); + return false; } } - return true + return true; } diff --git a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts index c2bb86ff22f19806b71d15a9305411633a514288..df7aa5b1d2f7039aec4059dea34b7240bbc7fdf4 100644 --- a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts +++ b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts @@ -1,4 +1,4 @@ -import {Duplex} from 'stream' +import { Duplex } from "stream"; import { AccountsGarbagingDAL, CindexEntry, @@ -7,69 +7,74 @@ import { Indexer, MindexEntry, SimpleUdEntryForWallet, - SindexEntry -} from "../../../../../lib/indexer" -import {ConfDTO, CurrencyConfDTO} from "../../../../../lib/dto/ConfDTO" -import {FileDAL} from "../../../../../lib/dal/fileDAL" -import {DuniterBlockchain, requiredBindexSizeForTail} from "../../../../../lib/blockchain/DuniterBlockchain" -import {BlockDTO} from "../../../../../lib/dto/BlockDTO" -import {Underscore} from "../../../../../lib/common-libs/underscore" -import {MonitorExecutionTime} from "../../../../../lib/debug/MonitorExecutionTime" -import {Wot} from "duniteroxyde" -import {NewLogger} from "../../../../../lib/logger" -import {CommonConstants} from "../../../../../lib/common-libs/constants" -import {DBBlock} from "../../../../../lib/db/DBBlock" -import {AbstractSynchronizer} from "../AbstractSynchronizer" -import {cliprogram} from "../../../../../lib/common-libs/programOptions" -import {DBHead} from "../../../../../lib/db/DBHead" -import {Watcher} from "../Watcher" -import {DataErrors} from "../../../../../lib/common-libs/errors" -import {ProtocolIndexesStream} from "./ProtocolIndexesStream" -import { Directory } from '../../../../../lib/system/directory' - -const constants = require('../../constants') - -let sync_expires: number[] = [] -let sync_bindex: any [] = [] -let sync_iindex: any[] = [] -let sync_mindex: any[] = [] -let sync_cindex: any[] = [] -let sync_nextExpiring = 0 -let sync_bindexSize = 0 -let txCount = 0 -let logger = NewLogger() - -const sync_memoryWallets: any = {} -const sync_memoryDAL:AccountsGarbagingDAL = { - getWallet: (conditions: string) => Promise.resolve(sync_memoryWallets[conditions] || { conditions, balance: 0 }), + SindexEntry, +} from "../../../../../lib/indexer"; +import { ConfDTO, CurrencyConfDTO } from "../../../../../lib/dto/ConfDTO"; +import { FileDAL } from "../../../../../lib/dal/fileDAL"; +import { + DuniterBlockchain, + requiredBindexSizeForTail, +} from "../../../../../lib/blockchain/DuniterBlockchain"; +import { BlockDTO } from "../../../../../lib/dto/BlockDTO"; +import { Underscore } from 
"../../../../../lib/common-libs/underscore"; +import { MonitorExecutionTime } from "../../../../../lib/debug/MonitorExecutionTime"; +import { Wot } from "duniteroxyde"; +import { NewLogger } from "../../../../../lib/logger"; +import { CommonConstants } from "../../../../../lib/common-libs/constants"; +import { DBBlock } from "../../../../../lib/db/DBBlock"; +import { AbstractSynchronizer } from "../AbstractSynchronizer"; +import { cliprogram } from "../../../../../lib/common-libs/programOptions"; +import { DBHead } from "../../../../../lib/db/DBHead"; +import { Watcher } from "../Watcher"; +import { DataErrors } from "../../../../../lib/common-libs/errors"; +import { ProtocolIndexesStream } from "./ProtocolIndexesStream"; +import { Directory } from "../../../../../lib/system/directory"; + +const constants = require("../../constants"); + +let sync_expires: number[] = []; +let sync_bindex: any[] = []; +let sync_iindex: any[] = []; +let sync_mindex: any[] = []; +let sync_cindex: any[] = []; +let sync_nextExpiring = 0; +let sync_bindexSize = 0; +let txCount = 0; +let logger = NewLogger(); + +const sync_memoryWallets: any = {}; +const sync_memoryDAL: AccountsGarbagingDAL = { + getWallet: (conditions: string) => + Promise.resolve( + sync_memoryWallets[conditions] || { conditions, balance: 0 } + ), saveWallet: async (wallet: any) => { // Make a copy sync_memoryWallets[wallet.conditions] = { conditions: wallet.conditions, - balance: wallet.balance - } + balance: wallet.balance, + }; }, sindexDAL: { - getAvailableForConditions: (conditions:string) => Promise.resolve([]) - } -} + getAvailableForConditions: (conditions: string) => Promise.resolve([]), + }, +}; export interface GDataProtocolIndexesStream { - mindex: MindexEntry[] - iindex: IindexEntry[] - sindex: SindexEntry[] - cindex: CindexEntry[] + mindex: MindexEntry[]; + iindex: IindexEntry[]; + sindex: SindexEntry[]; + cindex: CindexEntry[]; } interface GindexData { - block: BlockDTO - head: DBHead - lindex: GDataProtocolIndexesStream - gindex: GDataProtocolIndexesStream + block: BlockDTO; + head: DBHead; + lindex: GDataProtocolIndexesStream; + gindex: GDataProtocolIndexesStream; } export class GlobalIndexStream extends Duplex { - private sync_currConf: CurrencyConfDTO; private memoryOnly: boolean; @@ -78,42 +83,46 @@ export class GlobalIndexStream extends Duplex { private wotbFilePath: string; - private memSyncInjection: Promise<void> + private memSyncInjection: Promise<void>; - private currentChunkNumber = 0 - private numberOfChunksToDownload:number - private memToCopyDone = false + private currentChunkNumber = 0; + private numberOfChunksToDownload: number; + private memToCopyDone = false; - private mapInjection: { [k: string]: any } = {} + private mapInjection: { [k: string]: any } = {}; - constructor(private conf: ConfDTO, - private dal:FileDAL, - private to: number, - private localNumber:number, - private cautious: boolean, - private syncStrategy: AbstractSynchronizer, - private watcher:Watcher, - ) { - super({ objectMode: true }) - this.memoryOnly = dal.fs.isMemoryOnly() - this.wotbMem = dal.wotb + constructor( + private conf: ConfDTO, + private dal: FileDAL, + private to: number, + private localNumber: number, + private cautious: boolean, + private syncStrategy: AbstractSynchronizer, + private watcher: Watcher + ) { + super({ objectMode: true }); + this.memoryOnly = dal.fs.isMemoryOnly(); + this.wotbMem = dal.wotb; if (!this.memoryOnly) { this.wotbFilePath = Directory.getWotbFilePathSync(dal.rootPath); } - const nbBlocksToDownload = 
Math.max(0, to - localNumber) - this.numberOfChunksToDownload = Math.ceil(nbBlocksToDownload / syncStrategy.chunkSize) + const nbBlocksToDownload = Math.max(0, to - localNumber); + this.numberOfChunksToDownload = Math.ceil( + nbBlocksToDownload / syncStrategy.chunkSize + ); - this.readChunk(this.currentChunkNumber) + this.readChunk(this.currentChunkNumber); sync_memoryDAL.sindexDAL = { - getAvailableForConditions: (conditions:string) => this.dal.sindexDAL.getAvailableForConditions(conditions) - } + getAvailableForConditions: (conditions: string) => + this.dal.sindexDAL.getAvailableForConditions(conditions), + }; this.memSyncInjection = (async () => { // Disabled function - })() + })(); } private async injectLoki<T, K extends keyof T>(dal: T, f: K, obj: T[K]) { @@ -122,28 +131,35 @@ export class GlobalIndexStream extends Duplex { // await (obj as any).triggerInit() } - readChunk(i: number) { - } + readChunk(i: number) {} _read(size: number) { - this.push(null) + this.push(null); } - _write(dataArray: ProtocolIndexesStream[]|undefined, encoding: any, callback: (err: any) => void) { - + _write( + dataArray: ProtocolIndexesStream[] | undefined, + encoding: any, + callback: (err: any) => void + ) { (async () => { - - await this.memSyncInjection + await this.memSyncInjection; if (!dataArray) { - return callback(null) + return callback(null); } - await this.transform(dataArray) - this.watcher.appliedPercent(Math.round(dataArray[0].block.number / this.syncStrategy.chunkSize / this.numberOfChunksToDownload * 100)) - callback(null) - - })() + await this.transform(dataArray); + this.watcher.appliedPercent( + Math.round( + (dataArray[0].block.number / + this.syncStrategy.chunkSize / + this.numberOfChunksToDownload) * + 100 + ) + ); + callback(null); + })(); } /** @@ -152,15 +168,15 @@ export class GlobalIndexStream extends Duplex { * @returns {Promise<GindexData[]>} */ @MonitorExecutionTime() - private async transform(dataArray:ProtocolIndexesStream[]): Promise<GindexData[]> { - - await this.beforeBlocks(dataArray.map(d => d.block)) + private async transform( + dataArray: ProtocolIndexesStream[] + ): Promise<GindexData[]> { + await this.beforeBlocks(dataArray.map((d) => d.block)); - const gindex: GindexData[] = [] + const gindex: GindexData[] = []; for (const data of dataArray) { - - const block = data.block + const block = data.block; const gData: GindexData = { lindex: { @@ -177,54 +193,76 @@ export class GlobalIndexStream extends Duplex { }, block, head: null as any, - } + }; // VERY FIRST: parameters, otherwise we compute wrong variables such as UDTime if (block.number == 0) { - this.sync_currConf = BlockDTO.getConf(block) - await DuniterBlockchain.saveParametersForRoot(block, this.conf, this.dal) + this.sync_currConf = BlockDTO.getConf(block); + await DuniterBlockchain.saveParametersForRoot( + block, + this.conf, + this.dal + ); } - const bindexSize = requiredBindexSizeForTail(block, this.conf) - if ((block.number <= this.to - bindexSize - 1 || cliprogram.noSources) && !this.cautious) { // If we require nosources option, this blockchain can't be valid so we don't make checks - - const HEAD = await Indexer.quickCompleteGlobalScope(block, this.sync_currConf, sync_bindex, data.iindex, data.mindex, data.cindex, this.dal) - sync_bindex.push(HEAD) + const bindexSize = requiredBindexSizeForTail(block, this.conf); + if ( + (block.number <= this.to - bindexSize - 1 || cliprogram.noSources) && + !this.cautious + ) { + // If we require nosources option, this blockchain can't be valid so we don't make checks 
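+        // Fast path: while this block is more than one bindex window below
+        // `to` (or --nosources is set) and sync is not cautious, indexing
+        // uses Indexer.quickCompleteGlobalScope on the trimmed sync_bindex
+        // tail instead of the full validation rules; the cautious, fully
+        // checked path only resumes for the last blocks near the chain head.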
+ + const HEAD = await Indexer.quickCompleteGlobalScope( + block, + this.sync_currConf, + sync_bindex, + data.iindex, + data.mindex, + data.cindex, + this.dal + ); + sync_bindex.push(HEAD); // GINDEX - gData.head = HEAD + gData.head = HEAD; // Remember expiration dates for (const entry of data.cindex) { if (entry.expires_on) { - sync_expires.push(entry.expires_on) + sync_expires.push(entry.expires_on); } } for (const entry of data.mindex) { if (entry.expires_on) { - sync_expires.push(entry.expires_on) + sync_expires.push(entry.expires_on); } } for (const entry of data.mindex) { if (entry.revokes_on) { - sync_expires.push(entry.revokes_on) + sync_expires.push(entry.revokes_on); } } if (data.iindex.length) { - await DuniterBlockchain.createNewcomers(data.iindex, this.dal, NewLogger(), this.wotbMem) + await DuniterBlockchain.createNewcomers( + data.iindex, + this.dal, + NewLogger(), + this.wotbMem + ); } - if ((block.dividend && !cliprogram.noSources) - || block.joiners.length - || block.actives.length - || block.revoked.length - || block.excluded.length - || block.certifications.length - || (block.transactions.length && !cliprogram.noSources) - || block.medianTime >= sync_nextExpiring) { - - const nextExpiringChanged = block.medianTime >= sync_nextExpiring + if ( + (block.dividend && !cliprogram.noSources) || + block.joiners.length || + block.actives.length || + block.revoked.length || + block.excluded.length || + block.certifications.length || + (block.transactions.length && !cliprogram.noSources) || + block.medianTime >= sync_nextExpiring + ) { + const nextExpiringChanged = block.medianTime >= sync_nextExpiring; for (let i = 0; i < sync_expires.length; i++) { let expire = sync_expires[i]; @@ -233,47 +271,86 @@ export class GlobalIndexStream extends Duplex { i--; } } - sync_nextExpiring = sync_expires.reduce((max, value) => max ? Math.min(max, value) : value, 9007199254740991); // Far far away date + sync_nextExpiring = sync_expires.reduce( + (max, value) => (max ? 
Math.min(max, value) : value), + 9007199254740991 + ); // Far far away date if (!cliprogram.noSources) { - if (data.sindex.length) { - await this.blockFillTxSourcesConditions(data.sindex) + await this.blockFillTxSourcesConditions(data.sindex); } // Dividends and account garbaging - let dividends: SimpleUdEntryForWallet[] = [] + let dividends: SimpleUdEntryForWallet[] = []; if (HEAD.new_dividend) { - dividends = await Indexer.ruleIndexGenDividend(HEAD, data.iindex, this.dal) + dividends = await Indexer.ruleIndexGenDividend( + HEAD, + data.iindex, + this.dal + ); } else { for (const newcomer of data.iindex) { - await this.dal.dividendDAL.createMember(newcomer.pub) + await this.dal.dividendDAL.createMember(newcomer.pub); } } if (block.transactions.length) { - data.sindex = data.sindex.concat(await Indexer.ruleIndexGarbageSmallAccounts(HEAD, data.sindex, dividends, sync_memoryDAL)); + data.sindex = data.sindex.concat( + await Indexer.ruleIndexGarbageSmallAccounts( + HEAD, + data.sindex, + dividends, + sync_memoryDAL + ) + ); } if (data.sindex.length) { - gData.gindex.sindex = data.sindex - await this.flushSindex(data.sindex) + gData.gindex.sindex = data.sindex; + await this.flushSindex(data.sindex); } if (data.sindex.length || dividends.length) { - await DuniterBlockchain.updateWallets(data.sindex, dividends, sync_memoryDAL, false, block.number) + await DuniterBlockchain.updateWallets( + data.sindex, + dividends, + sync_memoryDAL, + false, + block.number + ); } } if (data.mindex.length || data.iindex.length || data.cindex.length) { - await this.flushMicIndexes(data.mindex, data.iindex, data.cindex) + await this.flushMicIndexes(data.mindex, data.iindex, data.cindex); } if (nextExpiringChanged) { - sync_cindex = sync_cindex.concat(await Indexer.ruleIndexGenCertificationExpiry(HEAD, this.dal)); - sync_mindex = sync_mindex.concat(await Indexer.ruleIndexGenMembershipExpiry(HEAD, this.dal)); - sync_iindex = sync_iindex.concat(await Indexer.ruleIndexGenExclusionByMembership(HEAD, sync_mindex, this.dal)); - sync_iindex = sync_iindex.concat(await Indexer.ruleIndexGenExclusionByCertificatons(HEAD, sync_cindex, data.iindex, this.conf, this.dal)); - sync_mindex = sync_mindex.concat(await Indexer.ruleIndexGenImplicitRevocation(HEAD, this.dal)); + sync_cindex = sync_cindex.concat( + await Indexer.ruleIndexGenCertificationExpiry(HEAD, this.dal) + ); + sync_mindex = sync_mindex.concat( + await Indexer.ruleIndexGenMembershipExpiry(HEAD, this.dal) + ); + sync_iindex = sync_iindex.concat( + await Indexer.ruleIndexGenExclusionByMembership( + HEAD, + sync_mindex, + this.dal + ) + ); + sync_iindex = sync_iindex.concat( + await Indexer.ruleIndexGenExclusionByCertificatons( + HEAD, + sync_cindex, + data.iindex, + this.conf, + this.dal + ) + ); + sync_mindex = sync_mindex.concat( + await Indexer.ruleIndexGenImplicitRevocation(HEAD, this.dal) + ); } if (sync_mindex.length || sync_iindex.length || sync_cindex.length) { @@ -283,75 +360,90 @@ export class GlobalIndexStream extends Duplex { iindex: sync_iindex, sindex: [], cindex: sync_cindex, - }) + }); } if (sync_cindex.length) { - await this.updateWotbLinks(sync_cindex) + await this.updateWotbLinks(sync_cindex); } - gData.gindex.iindex = sync_iindex - gData.gindex.mindex = sync_mindex - gData.gindex.cindex = sync_cindex + gData.gindex.iindex = sync_iindex; + gData.gindex.mindex = sync_mindex; + gData.gindex.cindex = sync_cindex; sync_iindex = []; sync_mindex = []; sync_cindex = []; // TODO GINDEX - if (block.joiners.length || block.revoked.length || 
block.excluded.length) { - await this.updateMembers(block) + if ( + block.joiners.length || + block.revoked.length || + block.excluded.length + ) { + await this.updateMembers(block); } - } else { // Concat the results to the pending data sync_iindex = sync_iindex.concat(data.iindex); sync_cindex = sync_cindex.concat(data.cindex); sync_mindex = sync_mindex.concat(data.mindex); - gData.gindex.iindex = data.iindex - gData.gindex.cindex = data.cindex - gData.gindex.mindex = data.mindex + gData.gindex.iindex = data.iindex; + gData.gindex.cindex = data.cindex; + gData.gindex.mindex = data.mindex; } // Trim the bindex - sync_bindexSize = this.conf.forksize + [ - block.issuersCount, - block.issuersFrame, - this.conf.medianTimeBlocks, - this.conf.dtDiffEval, - dataArray.length - ].reduce((max, value) => { - return Math.max(max, value); - }, 0); + sync_bindexSize = + this.conf.forksize + + [ + block.issuersCount, + block.issuersFrame, + this.conf.medianTimeBlocks, + this.conf.dtDiffEval, + dataArray.length, + ].reduce((max, value) => { + return Math.max(max, value); + }, 0); if (sync_bindexSize && sync_bindex.length >= 2 * sync_bindexSize) { // We trim it, not necessary to store it all (we already store the full blocks) sync_bindex.splice(0, sync_bindexSize); // TODO GINDEX - await this.doTrimming() + await this.doTrimming(); } } else if (block.number <= this.to) { - const dto = BlockDTO.fromJSONObject(block) - await this.finalizeSync(block, dto) + const dto = BlockDTO.fromJSONObject(block); + await this.finalizeSync(block, dto); } - gindex.push(gData) + gindex.push(gData); } - return gindex + return gindex; } @MonitorExecutionTime() - private async beforeBlocks(blocks:BlockDTO[]) { - await this.dal.blockDAL.insertBatch(blocks.map(b => { - txCount += b.transactions.length - const block = DBBlock.fromBlockDTO(b) - block.fork = false - return block - })) + private async beforeBlocks(blocks: BlockDTO[]) { + await this.dal.blockDAL.insertBatch( + blocks.map((b) => { + txCount += b.transactions.length; + const block = DBBlock.fromBlockDTO(b); + block.fork = false; + return block; + }) + ); if (this.conf.storage && this.conf.storage.transactions) { - await Promise.all(blocks.map(block => this.dal.saveTxsInFiles(block.transactions, block.number, block.medianTime))) + await Promise.all( + blocks.map((block) => + this.dal.saveTxsInFiles( + block.transactions, + block.number, + block.medianTime + ) + ) + ); } - logger.debug('Total tx count: %s', txCount) + logger.debug("Total tx count: %s", txCount); } @MonitorExecutionTime() @@ -361,50 +453,67 @@ export class GlobalIndexStream extends Duplex { iindex: [], cindex: [], sindex: local_sindex, - }) + }); } @MonitorExecutionTime() - private async flushMicIndexes(local_mindex: MindexEntry[], local_iindex: IindexEntry[], local_cindex: CindexEntry[]) { + private async flushMicIndexes( + local_mindex: MindexEntry[], + local_iindex: IindexEntry[], + local_cindex: CindexEntry[] + ) { // Flush the INDEX (not bindex, which is particular) await this.dal.flushIndexes({ mindex: sync_mindex, iindex: sync_iindex, sindex: [], cindex: sync_cindex, - }) - sync_iindex = local_iindex - sync_cindex = local_cindex - sync_mindex = local_mindex + }); + sync_iindex = local_iindex; + sync_cindex = local_cindex; + sync_mindex = local_mindex; } @MonitorExecutionTime() private async blockFillTxSourcesConditions(local_sindex: SindexEntry[]) { // Fills in correctly the SINDEX - await Promise.all(Underscore.where(local_sindex, {op: 'UPDATE'}).map(async entry => { - if (!entry.conditions) { - 
if (entry.srcType === 'D') { - entry.conditions = 'SIG(' + entry.identifier + ')' - } else { - // First: have a look locally, but only chained transactions would have `localSrc` matching (giving conditions) - const localSrc = local_sindex.filter(s => s.identifier === entry.identifier && s.pos === entry.pos && s.conditions)[0] - const src = localSrc || (await this.dal.getSource(entry.identifier, entry.pos, false)) as FullSindexEntry - entry.conditions = src.conditions + await Promise.all( + Underscore.where(local_sindex, { op: "UPDATE" }).map(async (entry) => { + if (!entry.conditions) { + if (entry.srcType === "D") { + entry.conditions = "SIG(" + entry.identifier + ")"; + } else { + // First: have a look locally, but only chained transactions would have `localSrc` matching (giving conditions) + const localSrc = local_sindex.filter( + (s) => + s.identifier === entry.identifier && + s.pos === entry.pos && + s.conditions + )[0]; + const src = + localSrc || + ((await this.dal.getSource( + entry.identifier, + entry.pos, + false + )) as FullSindexEntry); + entry.conditions = src.conditions; + } } - } - })) + }) + ); } @MonitorExecutionTime() private async updateWotbLinks(links: CindexEntry[]) { // --> Update links - await this.dal.updateWotbLinks(links, this.wotbMem) + await this.dal.updateWotbLinks(links, this.wotbMem); } @MonitorExecutionTime() private async updateMembers(block: BlockDTO) { // Create/Update nodes in wotb - await DuniterBlockchain.updateMembers(block, this.dal, this.wotbMem) + await DuniterBlockchain.updateMembers(block, this.dal, this.wotbMem); } @MonitorExecutionTime() @@ -422,28 +531,36 @@ export class GlobalIndexStream extends Duplex { iindex: sync_iindex, sindex: [], cindex: sync_cindex, - }) + }); if (!this.memToCopyDone) { - // Save the intermediary table of wallets - const conditions = Underscore.keys(sync_memoryWallets) - const nonEmptyKeys = Underscore.filter(conditions, (k: any) => sync_memoryWallets[k] && sync_memoryWallets[k].balance > 0) - const walletsToRecord = nonEmptyKeys.map((k: any) => sync_memoryWallets[k]) - await this.dal.walletDAL.insertBatch(walletsToRecord) + const conditions = Underscore.keys(sync_memoryWallets); + const nonEmptyKeys = Underscore.filter( + conditions, + (k: any) => sync_memoryWallets[k] && sync_memoryWallets[k].balance > 0 + ); + const walletsToRecord = nonEmptyKeys.map( + (k: any) => sync_memoryWallets[k] + ); + await this.dal.walletDAL.insertBatch(walletsToRecord); for (const cond of conditions) { - delete sync_memoryWallets[cond] + delete sync_memoryWallets[cond]; } - NewLogger().info('Mem2File [wotb]...') + NewLogger().info("Mem2File [wotb]..."); // Persist the memory wotb if (!this.memoryOnly) { - this.wotbMem.writeInFile(this.wotbFilePath) + this.wotbMem.writeInFile(this.wotbFilePath); } - + // Disabled for now //const that = this - async function inject<T, K extends keyof T, R, S extends T[K]>(fileDal: T, field: K, getRows: () => Promise<R[]>) { + async function inject<T, K extends keyof T, R, S extends T[K]>( + fileDal: T, + field: K, + getRows: () => Promise<R[]> + ) { // const dao = that.mapInjection[field] // if (dao) { // NewLogger().info(`Mem2File [${field}]...`) @@ -456,16 +573,28 @@ export class GlobalIndexStream extends Duplex { // } } - this.memToCopyDone = true + this.memToCopyDone = true; } if (block.number === 0) { - await DuniterBlockchain.saveParametersForRoot(block, this.conf, this.dal) + await DuniterBlockchain.saveParametersForRoot(block, this.conf, this.dal); } // Last block: cautious mode to trigger all the 
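The condition-filling logic above resolves each consumed source in two steps: dividend sources are always locked by their owner's signature, while transaction sources are looked up first among chained transactions of the same chunk and only then in the database. A simplified sketch, with a reduced source type (the real `SindexEntry` carries many more fields) and the DAL call abstracted into a parameter:

```ts
interface SrcEntry {
  identifier: string; // pubkey for dividends, tx hash for transactions
  pos: number;
  srcType?: "D" | "T";
  conditions?: string;
}

async function fillCondition(
  entry: SrcEntry,
  localSindex: SrcEntry[],
  getSource: (identifier: string, pos: number) => Promise<SrcEntry>
): Promise<void> {
  if (entry.conditions) return;
  if (entry.srcType === "D") {
    // A dividend source is always locked by its owner's signature
    entry.conditions = "SIG(" + entry.identifier + ")";
  } else {
    // 1) a chained transaction in the same chunk may already carry the
    //    conditions; 2) otherwise fall back to the database
    const localSrc = localSindex.find(
      (s) =>
        s.identifier === entry.identifier &&
        s.pos === entry.pos &&
        !!s.conditions
    );
    const src = localSrc || (await getSource(entry.identifier, entry.pos));
    entry.conditions = src.conditions;
  }
}
```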
INDEX expiry mechanisms - const { index, HEAD } = await DuniterBlockchain.checkBlock(dto, constants.WITH_SIGNATURES_AND_POW, this.conf, this.dal) - await DuniterBlockchain.pushTheBlock(dto, index, HEAD, this.conf, this.dal, NewLogger()) + const { index, HEAD } = await DuniterBlockchain.checkBlock( + dto, + constants.WITH_SIGNATURES_AND_POW, + this.conf, + this.dal + ); + await DuniterBlockchain.pushTheBlock( + dto, + index, + HEAD, + this.conf, + this.dal, + NewLogger() + ); // Clean temporary variables sync_bindex = []; @@ -476,5 +605,4 @@ export class GlobalIndexStream extends Duplex { sync_expires = []; sync_nextExpiring = 0; } - } diff --git a/app/modules/crawler/lib/sync/v2/LocalIndexStream.ts b/app/modules/crawler/lib/sync/v2/LocalIndexStream.ts index 28d67154b1923fb93a3b1b2294ec1cb33419beb8..0c69bd56bc94662c569e3ccc0d5640808b865163 100644 --- a/app/modules/crawler/lib/sync/v2/LocalIndexStream.ts +++ b/app/modules/crawler/lib/sync/v2/LocalIndexStream.ts @@ -1,39 +1,39 @@ -import {Transform} from 'stream' -import {Indexer} from "../../../../../lib/indexer" -import {cliprogram} from "../../../../../lib/common-libs/programOptions" -import {BlockDTO} from "../../../../../lib/dto/BlockDTO" -import {CurrencyConfDTO} from "../../../../../lib/dto/ConfDTO" -import {ProtocolIndexesStream} from "./ProtocolIndexesStream" +import { Transform } from "stream"; +import { Indexer } from "../../../../../lib/indexer"; +import { cliprogram } from "../../../../../lib/common-libs/programOptions"; +import { BlockDTO } from "../../../../../lib/dto/BlockDTO"; +import { CurrencyConfDTO } from "../../../../../lib/dto/ConfDTO"; +import { ProtocolIndexesStream } from "./ProtocolIndexesStream"; export class LocalIndexStream extends Transform { - private sync_currConf: CurrencyConfDTO; - private currentChunkNumber = 0 + private currentChunkNumber = 0; constructor() { - super({ objectMode: true }) + super({ objectMode: true }); } - _transform(blocks: BlockDTO[]|undefined, encoding: any, callback: (err: any, data: ProtocolIndexesStream[]|undefined) => void) { - + _transform( + blocks: BlockDTO[] | undefined, + encoding: any, + callback: (err: any, data: ProtocolIndexesStream[] | undefined) => void + ) { (async (): Promise<any> => { - if (!blocks) { - return setTimeout(() => callback(null, undefined), 1) + return setTimeout(() => callback(null, undefined), 1); } - const result: ProtocolIndexesStream[] = [] + const result: ProtocolIndexesStream[] = []; for (const block of blocks) { - // The new kind of object stored - const dto = BlockDTO.fromJSONObject(block) + const dto = BlockDTO.fromJSONObject(block); if (block.number == 0) { - this.sync_currConf = BlockDTO.getConf(block) + this.sync_currConf = BlockDTO.getConf(block); } - const index:any = Indexer.localIndex(dto, this.sync_currConf) + const index: any = Indexer.localIndex(dto, this.sync_currConf); result.push({ block, @@ -41,14 +41,13 @@ export class LocalIndexStream extends Transform { cindex: Indexer.cindex(index), sindex: cliprogram.noSources ? 
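`LocalIndexStream._transform` above follows a recurring pattern in this sync pipeline: an object-mode `Transform` whose `_transform` wraps async work in an immediately-invoked async function and reports completion through the stream callback. A generic sketch of that pattern (class name and `map` parameter are illustrative):

```ts
import { Transform, TransformCallback } from "stream";

class AsyncMapStream<I, O> extends Transform {
  constructor(private map: (chunk: I) => Promise<O>) {
    super({ objectMode: true }); // stream JS objects, not Buffers
  }

  _transform(chunk: I, _encoding: string, callback: TransformCallback) {
    // Wrap the async body and funnel both results and errors into the
    // callback, as the streams above do.
    (async () => {
      try {
        callback(null, await this.map(chunk));
      } catch (err) {
        callback(err as Error);
      }
    })();
  }
}
```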
[] : Indexer.sindex(index), mindex: Indexer.mindex(index), - }) + }); } - this.currentChunkNumber++ + this.currentChunkNumber++; // Done for this chunk - callback(null, result) - })() + callback(null, result); + })(); } - } diff --git a/app/modules/crawler/lib/sync/v2/ProtocolIndexesStream.ts b/app/modules/crawler/lib/sync/v2/ProtocolIndexesStream.ts index 35a58cc7d4069fe6305cb4809f6d51f31d4cc392..f79c92d77552df88f1f7f35d334f33613d24c8eb 100644 --- a/app/modules/crawler/lib/sync/v2/ProtocolIndexesStream.ts +++ b/app/modules/crawler/lib/sync/v2/ProtocolIndexesStream.ts @@ -1,10 +1,15 @@ -import {CindexEntry, IindexEntry, MindexEntry, SindexEntry} from "../../../../../lib/indexer" -import {BlockDTO} from "../../../../../lib/dto/BlockDTO" +import { + CindexEntry, + IindexEntry, + MindexEntry, + SindexEntry, +} from "../../../../../lib/indexer"; +import { BlockDTO } from "../../../../../lib/dto/BlockDTO"; export interface ProtocolIndexesStream { - block: BlockDTO - mindex: MindexEntry[] - iindex: IindexEntry[] - sindex: SindexEntry[] - cindex: CindexEntry[] + block: BlockDTO; + mindex: MindexEntry[]; + iindex: IindexEntry[]; + sindex: SindexEntry[]; + cindex: CindexEntry[]; } diff --git a/app/modules/crawler/lib/sync/v2/ValidatorStream.ts b/app/modules/crawler/lib/sync/v2/ValidatorStream.ts index a082a0782993e4e2f14ca6497661cdc9f2150020..9f2802eb55758c66ab9b701df1e3ab7a867924f4 100644 --- a/app/modules/crawler/lib/sync/v2/ValidatorStream.ts +++ b/app/modules/crawler/lib/sync/v2/ValidatorStream.ts @@ -1,119 +1,131 @@ -import {Readable} from 'stream' -import {AbstractSynchronizer} from "../AbstractSynchronizer" -import {BlockDTO} from "../../../../../lib/dto/BlockDTO" -import {Querable, querablep} from "../../../../../lib/common-libs/querable" -import {DataErrors} from "../../../../../lib/common-libs/errors" -import {NewLogger} from "../../../../../lib/logger" -import {ISyncDownloader} from "../ISyncDownloader" -import {Watcher} from "../Watcher" -import {ExitCodes} from "../../../../../lib/common-libs/exit-codes" - +import { Readable } from "stream"; +import { AbstractSynchronizer } from "../AbstractSynchronizer"; +import { BlockDTO } from "../../../../../lib/dto/BlockDTO"; +import { Querable, querablep } from "../../../../../lib/common-libs/querable"; +import { DataErrors } from "../../../../../lib/common-libs/errors"; +import { NewLogger } from "../../../../../lib/logger"; +import { ISyncDownloader } from "../ISyncDownloader"; +import { Watcher } from "../Watcher"; +import { ExitCodes } from "../../../../../lib/common-libs/exit-codes"; export class ValidatorStream extends Readable { - - private fsSynchronizer: ISyncDownloader - private numberOfChunksToDownload:number - private currentChunkNumber = 0 - private chunks: BlockDTO[] - private dowloading: Querable<BlockDTO>[] - private cacheLevelValidationPromise: Promise<number> - private bestDownloaded = -1 + private fsSynchronizer: ISyncDownloader; + private numberOfChunksToDownload: number; + private currentChunkNumber = 0; + private chunks: BlockDTO[]; + private dowloading: Querable<BlockDTO>[]; + private cacheLevelValidationPromise: Promise<number>; + private bestDownloaded = -1; constructor( - private localNumber:number, - private to:number, - private toHash:string, + private localNumber: number, + private to: number, + private toHash: string, private syncStrategy: AbstractSynchronizer, - private watcher:Watcher, + private watcher: Watcher ) { - super({objectMode: true}) - const nbBlocksToDownload = Math.max(0, to - localNumber) - 
this.numberOfChunksToDownload = Math.ceil(nbBlocksToDownload / syncStrategy.chunkSize) + super({ objectMode: true }); + const nbBlocksToDownload = Math.max(0, to - localNumber); + this.numberOfChunksToDownload = Math.ceil( + nbBlocksToDownload / syncStrategy.chunkSize + ); - this.chunks = Array.from({ length: this.numberOfChunksToDownload }) - this.dowloading = Array.from({ length: this.numberOfChunksToDownload }) + this.chunks = Array.from({ length: this.numberOfChunksToDownload }); + this.dowloading = Array.from({ length: this.numberOfChunksToDownload }); - this.fsSynchronizer = syncStrategy.fsDownloader() + this.fsSynchronizer = syncStrategy.fsDownloader(); - this.downloadBlock(0) + this.downloadBlock(0); } private async downloadBlock(i: number, forceDownload = false) { - const maximumCacheNumber = forceDownload ? -1 : await this.validateCacheLevel() + const maximumCacheNumber = forceDownload + ? -1 + : await this.validateCacheLevel(); if (i + 1 > this.numberOfChunksToDownload) { - return Promise.resolve() + return Promise.resolve(); } - if (!this.dowloading[i] && !this.chunks[i]) { - this.dowloading[i] = querablep((async (): Promise<BlockDTO> => { - let failures = 0 - let block: BlockDTO|null - do { - try { - const bNumber = Math.min(this.to, (i + 1) * this.syncStrategy.chunkSize - 1) - if (bNumber > maximumCacheNumber) { - block = await this.syncStrategy.getMilestone(bNumber) - } else { - block = await this.getBlockFromCache(bNumber) - } - if (!forceDownload && i > this.bestDownloaded) { - this.watcher.storagePercent(Math.round((i + 1) / this.numberOfChunksToDownload * 100)) - this.bestDownloaded = i - } - if (!block) { - throw Error(DataErrors[DataErrors.CANNOT_GET_VALIDATION_BLOCK_FROM_REMOTE]) + if (!this.dowloading[i] && !this.chunks[i]) { + this.dowloading[i] = querablep( + (async (): Promise<BlockDTO> => { + let failures = 0; + let block: BlockDTO | null; + do { + try { + const bNumber = Math.min( + this.to, + (i + 1) * this.syncStrategy.chunkSize - 1 + ); + if (bNumber > maximumCacheNumber) { + block = await this.syncStrategy.getMilestone(bNumber); + } else { + block = await this.getBlockFromCache(bNumber); + } + if (!forceDownload && i > this.bestDownloaded) { + this.watcher.storagePercent( + Math.round(((i + 1) / this.numberOfChunksToDownload) * 100) + ); + this.bestDownloaded = i; + } + if (!block) { + throw Error( + DataErrors[DataErrors.CANNOT_GET_VALIDATION_BLOCK_FROM_REMOTE] + ); + } + } catch (e) { + failures++; + await new Promise((res) => setTimeout(res, 3000)); + if (failures >= 15) { + NewLogger().error( + "Could not get a validation from remote blockchain after %s trials. Stopping sync.", + failures + ); + process.exit(ExitCodes.SYNC_FAIL); + } + block = null; } - } - catch (e) { - failures++ - await new Promise((res) => setTimeout(res, 3000)) - if (failures >= 15) { - NewLogger().error('Could not get a validation from remote blockchain after %s trials. 
Stopping sync.', failures) - process.exit(ExitCodes.SYNC_FAIL) - } - block = null - } - } while (!block) - return block - })()) + } while (!block); + return block; + })() + ); this.dowloading[i] - .then(chunk => { - this.chunks[i] = chunk - delete this.dowloading[i] + .then((chunk) => { + this.chunks[i] = chunk; + delete this.dowloading[i]; // this.push(chunk) }) - .catch(err => { - throw err - }) - return this.dowloading[i] || this.chunks[i] + .catch((err) => { + throw err; + }); + return this.dowloading[i] || this.chunks[i]; } - return this.dowloading[i] || this.chunks[i] + return this.dowloading[i] || this.chunks[i]; } - private async getBlockFromCache(bNumber: number): Promise<BlockDTO|null> { - return this.fsSynchronizer.getBlock(bNumber) + private async getBlockFromCache(bNumber: number): Promise<BlockDTO | null> { + return this.fsSynchronizer.getBlock(bNumber); } _read(size: number) { if (this.currentChunkNumber == this.numberOfChunksToDownload) { - this.push(null) + this.push(null); } else { // Asks for next chunk: do we have it? if (this.chunks[this.currentChunkNumber]) { - this.push(this.chunks[this.currentChunkNumber]) - delete this.chunks[this.currentChunkNumber] + this.push(this.chunks[this.currentChunkNumber]); + delete this.chunks[this.currentChunkNumber]; // Let's start the download of next chunk - this.currentChunkNumber++ + this.currentChunkNumber++; this.downloadBlock(this.currentChunkNumber) .then(() => this.downloadBlock(this.currentChunkNumber + 1)) .then(() => this.downloadBlock(this.currentChunkNumber + 2)) .then(() => this.downloadBlock(this.currentChunkNumber + 3)) .then(() => this.downloadBlock(this.currentChunkNumber + 4)) .then(() => this.downloadBlock(this.currentChunkNumber + 5)) - .then(() => this.downloadBlock(this.currentChunkNumber + 6)) - } - else { + .then(() => this.downloadBlock(this.currentChunkNumber + 6)); + } else { // We don't have it yet - this.push(undefined) + this.push(undefined); } } } @@ -122,30 +134,40 @@ export class ValidatorStream extends Readable { if (!this.cacheLevelValidationPromise) { this.cacheLevelValidationPromise = (async (): Promise<number> => { // Find the best common chunk with remote - let topChunk = this.numberOfChunksToDownload - 1 // We ignore the top chunk, which is special (most unlikely to be full) - let botChunk = -1 // In the worst case, this is the good index + let topChunk = this.numberOfChunksToDownload - 1; // We ignore the top chunk, which is special (most unlikely to be full) + let botChunk = -1; // In the worst case, this is the good index let current; do { - current = topChunk - ((topChunk - botChunk) % 2 == 0 ? (topChunk - botChunk) / 2 : ((topChunk - botChunk + 1) / 2) - 1) + current = + topChunk - + ((topChunk - botChunk) % 2 == 0 + ? (topChunk - botChunk) / 2 + : (topChunk - botChunk + 1) / 2 - 1); if (current === 0) { // we have no compliant cache - return -1 + return -1; } - const bNumber = current * this.syncStrategy.chunkSize - 1 - const remoteBlock = await this.downloadBlock(current - 1, true) - const localBlock = await this.fsSynchronizer.getBlock(bNumber) - if (remoteBlock && localBlock && remoteBlock.hash === localBlock.hash) { + const bNumber = current * this.syncStrategy.chunkSize - 1; + const remoteBlock = await this.downloadBlock(current - 1, true); + const localBlock = await this.fsSynchronizer.getBlock(bNumber); + if ( + remoteBlock && + localBlock && + remoteBlock.hash === localBlock.hash + ) { // Success! 
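The loop at the start of this hunk is a bounded retry: wait 3 seconds after each failure and give up after 15 attempts. The original exits the process on give-up, since a dead remote makes the whole sync pointless; the generic sketch below throws instead, leaving that decision to the caller:

```ts
async function withRetries<T>(
  attempt: () => Promise<T>,
  maxFailures = 15,
  delayMs = 3000
): Promise<T> {
  let failures = 0;
  for (;;) {
    try {
      return await attempt();
    } catch (err) {
      failures++;
      if (failures >= maxFailures) throw err; // original: process.exit(SYNC_FAIL)
      await new Promise((res) => setTimeout(res, delayMs));
    }
  }
}
```

Note also the prefetch in `_read` above: once a chunk is pushed, the next seven downloads are kicked off in sequence, keeping the pipeline fed while validation proceeds.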
Let's look forward - botChunk = current + botChunk = current; } else { // Fail: let's look backward - topChunk = current - 1 + topChunk = current - 1; } - } while (botChunk !== topChunk) + } while (botChunk !== topChunk); // return topChunk or botChunk, it is the same - return topChunk === -1 ? -1 : (topChunk * this.syncStrategy.chunkSize) - 1 - })() + return topChunk === -1 + ? -1 + : topChunk * this.syncStrategy.chunkSize - 1; + })(); } - return this.cacheLevelValidationPromise + return this.cacheLevelValidationPromise; } } diff --git a/app/modules/crawler/lib/tx_cleaner.ts b/app/modules/crawler/lib/tx_cleaner.ts index 354e210a7f896e7f440ea3530183316b48040176..84046475ca34d5f4d4755f9abca8e7c7a6e87a70 100644 --- a/app/modules/crawler/lib/tx_cleaner.ts +++ b/app/modules/crawler/lib/tx_cleaner.ts @@ -11,12 +11,11 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -export const tx_cleaner = (txs:any) => - + // Remove unused signatories - see https://github.com/duniter/duniter/issues/494 - txs.forEach((tx:any) => { + txs.forEach((tx: any) => { if (tx.signatories) { - delete tx.signatories + delete tx.signatories; } - return tx - }) + return tx; + }); diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts index 8d14bdba73a3d4c38f582d8a67f25934d69027b6..7604e6043c8ae898edeaca2af200896205cbc53f 100644 --- a/app/modules/daemon.ts +++ b/app/modules/daemon.ts @@ -11,170 +11,215 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" -import {Directory, RealFS} from "../lib/system/directory" -import {ExitCodes} from "../lib/common-libs/exit-codes" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; +import { Directory, RealFS } from "../lib/system/directory"; +import { ExitCodes } from "../lib/common-libs/exit-codes"; -const constants = require('../lib/constants'); -const Tail = require("tail").Tail +const constants = require("../lib/constants"); +const Tail = require("tail").Tail; module.exports = { duniter: { - cliOptions: [ - { value: '--loglevel <level>', desc: 'Logs level, either [error,warning,info,debug,trace]. default to `info`.' }, - { value: '--sql-traces', desc: 'Will log every SQL query that is executed. Requires --loglevel \'trace\'.' } + { + value: "--loglevel <level>", + desc: + "Log level, either [error,warning,info,debug,trace]. Defaults to `info`.", + }, + { + value: "--sql-traces", + desc: + "Will log every SQL query that is executed.
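The `do`/`while` that just concluded in `validateCacheLevel` is a binary search over chunk boundaries: assuming cache validity is monotone (local chunks match the remote chain up to some point, then stop matching), it finds the highest chunk whose terminal block hash agrees with the remote. The same logic in a generic "last index where the predicate holds" form, ignoring the original's special-casing of chunk 0; the `bot`/`top` names mirror `botChunk`/`topChunk`:

```ts
// Largest i in [0, n) with pred(i) true, for a monotone pred
// (true ... true, false ... false). Returns -1 when pred holds nowhere.
async function lastTrueIndex(
  n: number,
  pred: (i: number) => Promise<boolean>
): Promise<number> {
  let bot = -1; // highest index known (or assumed) to hold
  let top = n - 1; // highest candidate
  while (bot < top) {
    const mid = top - Math.floor((top - bot) / 2); // upper middle => progress guaranteed
    if (await pred(mid)) {
      bot = mid; // success: look forward
    } else {
      top = mid - 1; // fail: look backward
    }
  }
  return top; // === bot; -1 means no compliant cache at all
}
```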
Requires --loglevel 'trace'.", + }, ], service: { - process: (server:Server) => ServerService(server) + process: (server: Server) => ServerService(server), }, config: { - /***** * Tries to load a specific parameter `conf.loglevel` */ - onLoading: async (conf:ConfDTO, program:any) => { - conf.loglevel = program.loglevel || conf.loglevel || 'info' - } + onLoading: async (conf: ConfDTO, program: any) => { + conf.loglevel = program.loglevel || conf.loglevel || "info"; + }, }, - cli: [{ - - name: 'start', - desc: 'Starts Duniter as a daemon (background task).', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - await server.checkConfig() - const daemon = server.getDaemon('direct_start', 'start') - await startDaemon(daemon) - } - }, { - - name: 'stop', - desc: 'Stops Duniter daemon if it is running.', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const daemon = server.getDaemon() - await stopDaemon(daemon) - } - }, { - - name: 'restart', - desc: 'Stops Duniter daemon and restart it.', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - await server.checkConfig() - const daemon = server.getDaemon('direct_start', 'restart') - await stopDaemon(daemon) - await startDaemon(daemon) - } - }, { - - name: 'status', - desc: 'Get Duniter daemon status.', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - await server.checkConfig() - const pid = server.getDaemon().status() - if (pid) { - console.log('Duniter is running using PID %s.', pid) - process.exit(ExitCodes.OK) - } else { - console.log('Duniter is not running.') - process.exit(ExitCodes.DUNITER_NOT_RUNNING) - } - } - }, { - - name: 'logs', - desc: 'Follow duniter logs.', - logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - printTailAndWatchFile(Directory.INSTANCE_HOMELOG_FILE, constants.NB_INITIAL_LINES_TO_SHOW) - // Never ending command - return new Promise(res => null) - } - }, { - - name: 'direct_start', - desc: 'Start Duniter node with direct output, non-daemonized.', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any, startServices:any) => { - const logger = server.logger; - - logger.info(">> Server starting..."); - - // Log NodeJS version - logger.info('NodeJS version: ' + process.version); - - await server.checkConfig(); - // Add signing & public key functions to PeeringService - logger.info('Node version: ' + server.version); - logger.info('Node pubkey: ' + server.conf.pair.pub); - - // Services - await startServices(); - - logger.info('>> Server ready!'); - - return new Promise(() => null); // Never ending - } - }] - } + cli: [ + { + name: "start", + desc: "Starts Duniter as a daemon (background task).", + logs: false, + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + await server.checkConfig(); + const daemon = server.getDaemon("direct_start", "start"); + await startDaemon(daemon); + }, + }, + { + name: "stop", + desc: "Stops Duniter daemon if it is running.", + logs: false, + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const daemon = server.getDaemon(); + await stopDaemon(daemon); + }, + }, + { + name: "restart", + desc: "Stops Duniter daemon and restart it.", + logs: false, + onConfiguredExecute: async ( + server: Server, 
+ conf: ConfDTO, + program: any, + params: any + ) => { + await server.checkConfig(); + const daemon = server.getDaemon("direct_start", "restart"); + await stopDaemon(daemon); + await startDaemon(daemon); + }, + }, + { + name: "status", + desc: "Get Duniter daemon status.", + logs: false, + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + await server.checkConfig(); + const pid = server.getDaemon().status(); + if (pid) { + console.log("Duniter is running using PID %s.", pid); + process.exit(ExitCodes.OK); + } else { + console.log("Duniter is not running."); + process.exit(ExitCodes.DUNITER_NOT_RUNNING); + } + }, + }, + { + name: "logs", + desc: "Follow duniter logs.", + logs: false, + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + printTailAndWatchFile( + Directory.INSTANCE_HOMELOG_FILE, + constants.NB_INITIAL_LINES_TO_SHOW + ); + // Never ending command + return new Promise((res) => null); + }, + }, + { + name: "direct_start", + desc: "Start Duniter node with direct output, non-daemonized.", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any, + startServices: any + ) => { + const logger = server.logger; + + logger.info(">> Server starting..."); + + // Log NodeJS version + logger.info("NodeJS version: " + process.version); + + await server.checkConfig(); + // Add signing & public key functions to PeeringService + logger.info("Node version: " + server.version); + logger.info("Node pubkey: " + server.conf.pair.pub); + + // Services + await startServices(); + + logger.info(">> Server ready!"); + + return new Promise(() => null); // Never ending + }, + }, + ], + }, }; -function ServerService(server:Server) { +function ServerService(server: Server) { server.startService = () => Promise.resolve(); server.stopService = () => Promise.resolve(); return server; } -function startDaemon(daemon:any) { - return new Promise((resolve, reject) => daemon.start((err:any) => { - if (err) return reject(err) - resolve() - })) +function startDaemon(daemon: any) { + return new Promise((resolve, reject) => + daemon.start((err: any) => { + if (err) return reject(err); + resolve(); + }) + ); } -function stopDaemon(daemon:any) { - return new Promise((resolve, reject) => daemon.stop((err:any) => { - err && console.error(err); - if (err) return reject(err) - resolve() - })) +function stopDaemon(daemon: any) { + return new Promise((resolve, reject) => + daemon.stop((err: any) => { + err && console.error(err); + if (err) return reject(err); + resolve(); + }) + ); } -async function printTailAndWatchFile(file:any, tailSize:number) { - const fs = RealFS() - if (await fs.fsExists(file)) { - const content = await fs.fsReadFile(file) - const lines = content.split('\n') - const from = Math.max(0, lines.length - tailSize) - const lastLines = lines.slice(from).join('\n') - console.log(lastLines) - } - watchFile(file) +async function printTailAndWatchFile(file: any, tailSize: number) { + const fs = RealFS(); + if (await fs.fsExists(file)) { + const content = await fs.fsReadFile(file); + const lines = content.split("\n"); + const from = Math.max(0, lines.length - tailSize); + const lastLines = lines.slice(from).join("\n"); + console.log(lastLines); + } + watchFile(file); } -function watchFile(file:any) { +function watchFile(file: any) { const tail = new Tail(file); // Specific errors handling - process.on('uncaughtException', (err:any) => { + process.on("uncaughtException", 
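`startDaemon`/`stopDaemon` above hand-roll the callback-to-promise bridge. For reference, `util.promisify` expresses the same thing, assuming the daemon object follows the standard Node `(err) => void` callback convention; the `DaemonLike` shape here is a sketch, not the library's actual type:

```ts
import { promisify } from "util";

interface DaemonLike {
  start(cb: (err: any) => void): void;
  stop(cb: (err: any) => void): void;
}

// bind() keeps `this` pointing at the daemon inside the promisified call
const startDaemonP = (daemon: DaemonLike) =>
  promisify(daemon.start.bind(daemon))();
const stopDaemonP = (daemon: DaemonLike) =>
  promisify(daemon.stop.bind(daemon))();
```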
(err: any) => { if (err.code === "ENOENT") { - console.error('EXCEPTION: ', err.message); - setTimeout(() => watchFile(file), 1000) // Wait a second + console.error("EXCEPTION: ", err.message); + setTimeout(() => watchFile(file), 1000); // Wait a second } }); // On new line - tail.on("line", function(data:any) { + tail.on("line", function (data: any) { console.log(data); }); - tail.on("error", function(error:any) { - console.error('ERROR: ', error); + tail.on("error", function (error: any) { + console.error("ERROR: ", error); }); } diff --git a/app/modules/dump.ts b/app/modules/dump.ts index 5b3d4cfe4a2ac859a2be69ba70c773f354c54b13..d89ebb7475c5bb579d1ef7b587f26e85ed959653 100644 --- a/app/modules/dump.ts +++ b/app/modules/dump.ts @@ -11,362 +11,466 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {exec} from "child_process" -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" -import {moment} from "../lib/common-libs/moment" -import {DBBlock} from "../lib/db/DBBlock" -import {FullIindexEntry, IindexEntry, SindexEntry} from "../lib/indexer" -import {BlockDTO} from "../lib/dto/BlockDTO" -import {Underscore} from "../lib/common-libs/underscore" -import {dumpWotWizard} from "./dump/wotwizard/wotwizard.dump" -import {OtherConstants} from "../lib/other_constants" -import {Querable, querablep} from "../lib/common-libs/querable" -import {dumpBlocks, dumpForks} from "./dump/blocks/dump.blocks" -import {newResolveTimeoutPromise} from "../lib/common-libs/timeout-promise" -import {LevelDBIindex} from "../lib/dal/indexDAL/leveldb/LevelDBIindex" -import {dumpBindex, dumpCindex, dumpCindexPretty, dumpIindex, dumpMindex, dumpSindex} from "../lib/debug/dump" -import {readFileSync} from "fs" -import {IdentityDTO} from "../lib/dto/IdentityDTO" -import {CertificationDTO, ShortCertificationDTO} from "../lib/dto/CertificationDTO" -import {MembershipDTO} from "../lib/dto/MembershipDTO" -import {RevocationDTO, ShortRevocation} from "../lib/dto/RevocationDTO" - -const Table = require('cli-table') +import { exec } from "child_process"; +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; +import { moment } from "../lib/common-libs/moment"; +import { DBBlock } from "../lib/db/DBBlock"; +import { FullIindexEntry, IindexEntry, SindexEntry } from "../lib/indexer"; +import { BlockDTO } from "../lib/dto/BlockDTO"; +import { Underscore } from "../lib/common-libs/underscore"; +import { dumpWotWizard } from "./dump/wotwizard/wotwizard.dump"; +import { OtherConstants } from "../lib/other_constants"; +import { Querable, querablep } from "../lib/common-libs/querable"; +import { dumpBlocks, dumpForks } from "./dump/blocks/dump.blocks"; +import { newResolveTimeoutPromise } from "../lib/common-libs/timeout-promise"; +import { LevelDBIindex } from "../lib/dal/indexDAL/leveldb/LevelDBIindex"; +import { + dumpBindex, + dumpCindex, + dumpCindexPretty, + dumpIindex, + dumpMindex, + dumpSindex, +} from "../lib/debug/dump"; +import { readFileSync } from "fs"; +import { IdentityDTO } from "../lib/dto/IdentityDTO"; +import { + CertificationDTO, + ShortCertificationDTO, +} from "../lib/dto/CertificationDTO"; +import { MembershipDTO } from "../lib/dto/MembershipDTO"; +import { RevocationDTO, ShortRevocation } from "../lib/dto/RevocationDTO"; + +const Table = require("cli-table"); module.exports = { duniter: { - service: { - neutral: (server:Server, conf:ConfDTO) => { + neutral: (server: Server, 
conf: ConfDTO) => { return { startService: () => { if (conf.storage && conf.storage.wotwizard) { - let fifo: Querable<any> = querablep(Promise.resolve()) - server - .on('bcEvent', (e) => { - if ((e.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || e.bcEvent === OtherConstants.BC_EVENT.SWITCHED) && fifo.isFulfilled()) { - fifo = querablep(fifo.then(async () => { + let fifo: Querable<any> = querablep(Promise.resolve()); + server.on("bcEvent", (e) => { + if ( + (e.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || + e.bcEvent === OtherConstants.BC_EVENT.SWITCHED) && + fifo.isFulfilled() + ) { + fifo = querablep( + fifo.then(async () => { try { - await dumpWotWizard(server) + await dumpWotWizard(server); } catch (e) {} - })) - } - }) + }) + ); + } + }); } }, stopService: () => { // Never stops, just wait for blocks - } - } - } + }, + }; + }, }, - cli: [{ - name: 'current', - desc: 'Shows current block\'s blockstamp', - logs: false, - preventIfRunning: true, - - onDatabaseExecute: async (server:Server) => { - const current = await server.dal.getCurrentBlockOrNull() - if (!current) { - return console.log('None') - } - const blockstamp = `${current.number}-${current.hash}` - console.log(blockstamp) - // Save DB - await server.disconnect(); - } - }, { - name: 'trim-indexes', - desc: 'Force trimming of indexes', - logs: true, - preventIfRunning: true, - - onConfiguredExecute: async (server:Server) => { - await server.dal.init(server.conf) - await server.BlockchainService.trimIndexes() - // Save DB - await server.disconnect(); - } - }, { - name: 'dump [what] [name] [cond]', - desc: 'Dumps data of the blockchain.', - logs: false, - preventIfRunning: true, - - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const what: string = params[0] || '' - const name: string = params[1] || '' - const cond: string = params[2] || '' - - await server.dal.init(server.conf) - - try { - - switch (what) { - - case 'current': - await dumpCurrent(server) - break - - case 'blocks': - await dumpBlocks(server, name) - break - - case 'forks': - await dumpForks(server, name) - break - - case 'volumes': - await dumpVolumes(server) - break - - case 'table': - await dumpTable(server, name, cond) - break - - case 'wot': - await dumpWot(server) - break - - case 'history': - await dumpHistory(server, name) - break - - case 'wotwizard': - await dumpWotWizard(server) - break - - default: - console.error(`Unknown dump ${what}`) - break - } - } catch (e) { - console.error(e) - } - // Save DB - await server.disconnect(); - } - }, { - name: 'search [pattern]', - desc: 'Dumps data of the blockchain matching given pattern.', - logs: false, - preventIfRunning: true, - - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const pattern: string = params[0] || '' - - try { - - const files: string[] = await new Promise<string[]>((res, rej) => exec(`grep -r ${pattern} ${server.home}/${server.conf.currency} -l | grep .json`, (err, stdout) => { - if (err) return rej(err) - console.log(stdout) - res(stdout.split('\n').filter(l => l)) - })) - - const blocks = Underscore.sortBy(await findBlocksMatching(pattern, files), b => b.number) - - const events: { b: BlockDTO, event: (IdentityDTO|ShortCertificationDTO|MembershipDTO|ShortRevocation|{ type: 'exclusion', pub: string }) }[] = [] - for (const b of blocks) { - b.identities.filter(i => i.includes(pattern)).forEach(i => { - events.push({ b, event: IdentityDTO.fromInline(i) }) - }) - b.certifications.filter(c => 
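The `fifo` variable at the start of this hunk implements a promise-chain FIFO: each wotwizard dump is appended to the tail of a promise, so dumps never overlap, and the `isFulfilled()` guard (via `querablep`) additionally drops blockchain events that arrive while a dump is still running. A standalone version of the chaining part, without the querable wrapper:

```ts
let fifo: Promise<void> = Promise.resolve();

function enqueueDump(job: () => Promise<void>): Promise<void> {
  fifo = fifo.then(async () => {
    try {
      await job();
    } catch (e) {
      // like the original: a failing dump must not break the chain
    }
  });
  return fifo;
}
```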
c.includes(pattern)).forEach(c => { - events.push({ b, event: CertificationDTO.fromInline(c) }) - }) - b.joiners.concat(b.actives).concat(b.leavers).filter(m => m.includes(pattern)).forEach(m => { - events.push({ b, event: MembershipDTO.fromInline(m) }) - }) - b.revoked.filter(m => m.includes(pattern)).forEach(r => { - events.push({ b, event: RevocationDTO.fromInline(r) }) - }) - b.excluded.filter(m => m.includes(pattern)).forEach(r => { - events.push({ b, event: { type: 'exclusion', pub: r } }) - }) + cli: [ + { + name: "current", + desc: "Shows current block's blockstamp", + logs: false, + preventIfRunning: true, + + onDatabaseExecute: async (server: Server) => { + const current = await server.dal.getCurrentBlockOrNull(); + if (!current) { + return console.log("None"); } - - for (const e of events) { - if ((e.event as IdentityDTO).uid) { - const date = await getDateForBlock(e.b) - const idty = e.event as IdentityDTO - console.log('%s: new identity %s (created on %s)', date, idty.uid, await getDateFor(server, idty.buid as string)) - } - if ((e.event as { type: 'exclusion', pub: string }).type === 'exclusion') { - const date = await getDateForBlock(e.b) - console.log('%s: excluded', date) + const blockstamp = `${current.number}-${current.hash}`; + console.log(blockstamp); + // Save DB + await server.disconnect(); + }, + }, + { + name: "trim-indexes", + desc: "Force trimming of indexes", + logs: true, + preventIfRunning: true, + + onConfiguredExecute: async (server: Server) => { + await server.dal.init(server.conf); + await server.BlockchainService.trimIndexes(); + // Save DB + await server.disconnect(); + }, + }, + { + name: "dump [what] [name] [cond]", + desc: "Dumps data of the blockchain.", + logs: false, + preventIfRunning: true, + + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const what: string = params[0] || ""; + const name: string = params[1] || ""; + const cond: string = params[2] || ""; + + await server.dal.init(server.conf); + + try { + switch (what) { + case "current": + await dumpCurrent(server); + break; + + case "blocks": + await dumpBlocks(server, name); + break; + + case "forks": + await dumpForks(server, name); + break; + + case "volumes": + await dumpVolumes(server); + break; + + case "table": + await dumpTable(server, name, cond); + break; + + case "wot": + await dumpWot(server); + break; + + case "history": + await dumpHistory(server, name); + break; + + case "wotwizard": + await dumpWotWizard(server); + break; + + default: + console.error(`Unknown dump ${what}`); + break; } + } catch (e) { + console.error(e); } + // Save DB + await server.disconnect(); + }, + }, + { + name: "search [pattern]", + desc: "Dumps data of the blockchain matching given pattern.", + logs: false, + preventIfRunning: true, + + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const pattern: string = params[0] || ""; + + try { + const files: string[] = await new Promise<string[]>((res, rej) => + exec( + `grep -r ${pattern} ${server.home}/${server.conf.currency} -l | grep .json`, + (err, stdout) => { + if (err) return rej(err); + console.log(stdout); + res(stdout.split("\n").filter((l) => l)); + } + ) + ); + + const blocks = Underscore.sortBy( + await findBlocksMatching(pattern, files), + (b) => b.number + ); + + const events: { + b: BlockDTO; + event: + | IdentityDTO + | ShortCertificationDTO + | MembershipDTO + | ShortRevocation + | { type: "exclusion"; pub: string }; + }[] = []; + 
for (const b of blocks) { + b.identities + .filter((i) => i.includes(pattern)) + .forEach((i) => { + events.push({ b, event: IdentityDTO.fromInline(i) }); + }); + b.certifications + .filter((c) => c.includes(pattern)) + .forEach((c) => { + events.push({ b, event: CertificationDTO.fromInline(c) }); + }); + b.joiners + .concat(b.actives) + .concat(b.leavers) + .filter((m) => m.includes(pattern)) + .forEach((m) => { + events.push({ b, event: MembershipDTO.fromInline(m) }); + }); + b.revoked + .filter((m) => m.includes(pattern)) + .forEach((r) => { + events.push({ b, event: RevocationDTO.fromInline(r) }); + }); + b.excluded + .filter((m) => m.includes(pattern)) + .forEach((r) => { + events.push({ b, event: { type: "exclusion", pub: r } }); + }); + } - console.log(events.map(e => e.event)) + for (const e of events) { + if ((e.event as IdentityDTO).uid) { + const date = await getDateForBlock(e.b); + const idty = e.event as IdentityDTO; + console.log( + "%s: new identity %s (created on %s)", + date, + idty.uid, + await getDateFor(server, idty.buid as string) + ); + } + if ( + (e.event as { type: "exclusion"; pub: string }).type === + "exclusion" + ) { + const date = await getDateForBlock(e.b); + console.log("%s: excluded", date); + } + } - } catch (e) { - console.error(e) - } - // Save DB - await server.disconnect(); - } - }, { - name: 'dump-ww', - desc: 'Dumps WotWizard export.', - logs: true, - preventIfRunning: true, - onDatabaseExecute: async (server:Server) => dumpWotWizard(server) - }] - } -} + console.log(events.map((e) => e.event)); + } catch (e) { + console.error(e); + } + // Save DB + await server.disconnect(); + }, + }, + { + name: "dump-ww", + desc: "Dumps WotWizard export.", + logs: true, + preventIfRunning: true, + onDatabaseExecute: async (server: Server) => dumpWotWizard(server), + }, + ], + }, +}; async function findBlocksMatching(pattern: string, files: string[]) { - const matchingBlocks: BlockDTO[] = [] + const matchingBlocks: BlockDTO[] = []; for (const f of files) { - const blocks: any[] = JSON.parse(await readFileSync(f, 'utf8')).blocks + const blocks: any[] = JSON.parse(await readFileSync(f, "utf8")).blocks; for (const jsonBlock of blocks) { - const b = BlockDTO.fromJSONObject(jsonBlock) - const raw = b.getRawSigned() + const b = BlockDTO.fromJSONObject(jsonBlock); + const raw = b.getRawSigned(); if (raw.includes(pattern)) { - matchingBlocks.push(b) + matchingBlocks.push(b); } } } - return matchingBlocks + return matchingBlocks; } async function dumpCurrent(server: Server) { - const current = await server.dal.getCurrentBlockOrNull() + const current = await server.dal.getCurrentBlockOrNull(); if (!current) { - console.log('') - } - else { - console.log(BlockDTO.fromJSONObject(current).getRawSigned()) + console.log(""); + } else { + console.log(BlockDTO.fromJSONObject(current).getRawSigned()); } } async function dumpVolumes(server: Server) { - const nbUdo = await server.dal.dividendDAL.count() - const nbTxo = await server.dal.sindexDAL.count() - const iindex = await server.dal.iindexDAL.count() - const mindex = await server.dal.mindexDAL.count() - const cindex = await server.dal.cindexDAL.count() - - console.log('Sindex : %s (%s UD, %s TX)', nbTxo + nbUdo, nbUdo, nbTxo) - console.log('Iindex : %s', iindex) - console.log('Mindex : %s', mindex) - console.log('Cindex : %s', cindex) + const nbUdo = await server.dal.dividendDAL.count(); + const nbTxo = await server.dal.sindexDAL.count(); + const iindex = await server.dal.iindexDAL.count(); + const mindex = await 
server.dal.mindexDAL.count(); + const cindex = await server.dal.cindexDAL.count(); + + console.log("Sindex : %s (%s UD, %s TX)", nbTxo + nbUdo, nbUdo, nbTxo); + console.log("Iindex : %s", iindex); + console.log("Mindex : %s", mindex); + console.log("Cindex : %s", cindex); } async function dumpTable(server: Server, name: string, condition?: string) { - const criterion: any = {} - const filters = condition && condition.split(',') || [] + const criterion: any = {}; + const filters = (condition && condition.split(",")) || []; for (const f of filters) { - const k = f.split('=')[0] - const v = f.split('=')[1] - if (v === 'true' || v === 'false') { - criterion[k] = v === 'true' ? true : 0 - } else if (v === 'NULL') { - criterion[k] = null + const k = f.split("=")[0]; + const v = f.split("=")[1]; + if (v === "true" || v === "false") { + criterion[k] = v === "true" ? true : 0; + } else if (v === "NULL") { + criterion[k] = null; } else if (v.match(/^\d+$/)) { - criterion[k] = parseInt(v) + criterion[k] = parseInt(v); } else { - criterion[k] = v + criterion[k] = v; } } - let rows: any[] + let rows: any[]; switch (name) { - case 'b_index': - rows = await server.dal.bindexDAL.findRawWithOrder(criterion, [['number', false]]) - return dumpBindex(rows) + case "b_index": + rows = await server.dal.bindexDAL.findRawWithOrder(criterion, [ + ["number", false], + ]); + return dumpBindex(rows); /** * Dumps issuers visible in current bindex */ - case 'issuers': - rows = await server.dal.bindexDAL.findRawWithOrder(criterion, [['number', false]]) - const identites = await Promise.all(Underscore.uniq(rows.map(b => b.issuer)).map(i => server.dal.iindexDAL.getFullFromPubkey(i))) - console.log(identites.map(i => i.uid)) - break - - case 'i_index': - rows = await server.dal.iindexDAL.findRawWithOrder(criterion, [['writtenOn', false], ['wotb_id', false]]) - return dumpIindex(rows) - case 'm_index': - rows = await server.dal.mindexDAL.findRawWithOrder(criterion, [['writtenOn', false], ['pub', false]]) - return dumpMindex(rows) - case 'c_index': - rows = await server.dal.cindexDAL.findRawWithOrder(criterion, [['writtenOn', false], ['issuer', false], ['receiver', false]]) - return dumpCindex(rows) - break - case 's_index': - const rowsTX = await server.dal.sindexDAL.findRawWithOrder(criterion, [['writtenOn', false], ['identifier', false], ['pos', false]]) - const rowsUD = await server.dal.dividendDAL.findForDump(criterion) - rows = rowsTX.concat(rowsUD) - sortSindex(rows) - return dumpSindex(rows) - - case 'c_index_pretty': - rows = await server.dal.cindexDAL.findRawWithOrder(criterion, [['writtenOn', false], ['issuer', false], ['receiver', false]]) - rows = rows.filter((row: any) => Object.entries(criterion).reduce((ok, crit: any) => ok && row[crit[0]] === crit[1], true)) + case "issuers": + rows = await server.dal.bindexDAL.findRawWithOrder(criterion, [ + ["number", false], + ]); + const identites = await Promise.all( + Underscore.uniq(rows.map((b) => b.issuer)).map((i) => + server.dal.iindexDAL.getFullFromPubkey(i) + ) + ); + console.log(identites.map((i) => i.uid)); + break; + + case "i_index": + rows = await server.dal.iindexDAL.findRawWithOrder(criterion, [ + ["writtenOn", false], + ["wotb_id", false], + ]); + return dumpIindex(rows); + case "m_index": + rows = await server.dal.mindexDAL.findRawWithOrder(criterion, [ + ["writtenOn", false], + ["pub", false], + ]); + return dumpMindex(rows); + case "c_index": + rows = await server.dal.cindexDAL.findRawWithOrder(criterion, [ + ["writtenOn", false], + ["issuer", false], 
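The loop at the top of `dumpTable` parses a `k=v,k2=v2` condition string into a typed criterion object. A standalone version, mirroring the original faithfully, including its quirk of mapping `"false"` to `0` rather than `false`:

```ts
function parseCriterion(condition?: string): { [k: string]: any } {
  const criterion: { [k: string]: any } = {};
  for (const f of (condition && condition.split(",")) || []) {
    const [k, v] = f.split("=");
    if (v === "true" || v === "false") {
      criterion[k] = v === "true" ? true : 0; // quirk kept on purpose
    } else if (v === "NULL") {
      criterion[k] = null;
    } else if (v.match(/^\d+$/)) {
      criterion[k] = parseInt(v);
    } else {
      criterion[k] = v;
    }
  }
  return criterion;
}

// parseCriterion("member=true,writtenOn=42") → { member: true, writtenOn: 42 }
```

While reading this switch, note that the `c_index_pretty` case below appears to have no `break` after `dumpCindexPretty`, so it falls through into the `default` branch and logs `Unknown dump table` even on success.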
+ ["receiver", false], + ]); + return dumpCindex(rows); + break; + case "s_index": + const rowsTX = await server.dal.sindexDAL.findRawWithOrder(criterion, [ + ["writtenOn", false], + ["identifier", false], + ["pos", false], + ]); + const rowsUD = await server.dal.dividendDAL.findForDump(criterion); + rows = rowsTX.concat(rowsUD); + sortSindex(rows); + return dumpSindex(rows); + + case "c_index_pretty": + rows = await server.dal.cindexDAL.findRawWithOrder(criterion, [ + ["writtenOn", false], + ["issuer", false], + ["receiver", false], + ]); + rows = rows.filter((row: any) => + Object.entries(criterion).reduce( + (ok, crit: any) => ok && row[crit[0]] === crit[1], + true + ) + ); await dumpCindexPretty(rows, async (pub) => { - const iindexEntry = await server.dal.getWrittenIdtyByPubkey(pub) - return (iindexEntry as IindexEntry).uid as string - }) + const iindexEntry = await server.dal.getWrittenIdtyByPubkey(pub); + return (iindexEntry as IindexEntry).uid as string; + }); default: - console.error(`Unknown dump table ${name}`) - break + console.error(`Unknown dump table ${name}`); + break; } } async function dumpHistory(server: Server, pub: string) { - const irows = (await server.dal.iindexDAL.findRawWithOrder({ pub }, [['writtenOn', false]])).filter(r => pub ? r.pub === pub : true) - const mrows = (await server.dal.mindexDAL.findRawWithOrder({ pub }, [['writtenOn', false]])).filter(r => pub ? r.pub === pub : true) - const crows = (await server.dal.cindexDAL.findRawWithOrder({ pub }, [['writtenOn', false]])).filter(r => pub ? r.issuer === pub || r.receiver === pub: true) - console.log('----- IDENTITY -----') + const irows = ( + await server.dal.iindexDAL.findRawWithOrder({ pub }, [["writtenOn", false]]) + ).filter((r) => (pub ? r.pub === pub : true)); + const mrows = ( + await server.dal.mindexDAL.findRawWithOrder({ pub }, [["writtenOn", false]]) + ).filter((r) => (pub ? r.pub === pub : true)); + const crows = ( + await server.dal.cindexDAL.findRawWithOrder({ pub }, [["writtenOn", false]]) + ).filter((r) => (pub ? r.issuer === pub || r.receiver === pub : true)); + console.log("----- IDENTITY -----"); for (const e of irows) { - const date = await getDateFor(server, e.written_on) + const date = await getDateFor(server, e.written_on); if (e.uid) { - console.log('%s: new identity %s (created on %s)', date, e.uid, await getDateFor(server, e.created_on as string)) + console.log( + "%s: new identity %s (created on %s)", + date, + e.uid, + await getDateFor(server, e.created_on as string) + ); } else if (e.member) { - console.log('%s: comeback', date) + console.log("%s: comeback", date); } else if (e.kick) { // console.log('%s: being kicked... 
(either)', date) } else if (e.member === false) { - console.log('%s: excluded', date) + console.log("%s: excluded", date); } else { - console.log('Non displayable IINDEX entry') + console.log("Non displayable IINDEX entry"); } } - console.log('----- MEMBERSHIP -----') + console.log("----- MEMBERSHIP -----"); for (const e of mrows) { - const date = await getDateFor(server, e.written_on) + const date = await getDateFor(server, e.written_on); if (e.chainable_on) { - console.log('%s: join/renew', date) + console.log("%s: join/renew", date); } else if (e.expired_on) { - console.log('%s: expired', date) + console.log("%s: expired", date); } else if (e.revoked_on) { - console.log('%s: revoked', date) + console.log("%s: revoked", date); } else { - console.log('Non displayable MINDEX entry') + console.log("Non displayable MINDEX entry"); } } - console.log('----- CERTIFICATION -----') - crows.forEach(crow => { - console.log(JSON.stringify(crow)) - }) + console.log("----- CERTIFICATION -----"); + crows.forEach((crow) => { + console.log(JSON.stringify(crow)); + }); for (const e of crows) { - const dateW = await getDateFor(server, e.written_on) - const dateC = await getDateForBlockNumber(server, e.created_on) + const dateW = await getDateFor(server, e.written_on); + const dateC = await getDateForBlockNumber(server, e.created_on); if (e.receiver === pub) { - const issuer = await server.dal.getWrittenIdtyByPubkey(e.issuer) as FullIindexEntry - if (e.op === 'UPDATE') { - console.log('%s : %s: from %s (update)', dateC, dateW, issuer.uid) + const issuer = (await server.dal.getWrittenIdtyByPubkey( + e.issuer + )) as FullIindexEntry; + if (e.op === "UPDATE") { + console.log("%s : %s: from %s (update)", dateC, dateW, issuer.uid); + } else { + console.log("%s : %s: from %s", dateC, dateW, issuer.uid); } - else { - console.log('%s : %s: from %s', dateC, dateW, issuer.uid) - } - // } else if (e.issuer === pub) { - // const receiver = await server.dal.getWrittenIdtyByPubkey(e.receiver) as FullIindexEntry - // console.log('%s: to ', date, receiver.uid) + // } else if (e.issuer === pub) { + // const receiver = await server.dal.getWrittenIdtyByPubkey(e.receiver) as FullIindexEntry + // console.log('%s: to ', date, receiver.uid) } else { // console.log('Non displayable CINDEX entry') } @@ -374,33 +478,35 @@ async function dumpHistory(server: Server, pub: string) { } async function dumpWot(server: Server) { - const data = server.dal.wotb.dump() - console.log(data) - await newResolveTimeoutPromise(1000, null) + const data = server.dal.wotb.dump(); + console.log(data); + await newResolveTimeoutPromise(1000, null); } async function getDateFor(server: Server, blockstamp: string) { - const b = (await server.dal.getAbsoluteBlockByBlockstamp(blockstamp)) as DBBlock - const s = " " + b.number - const bnumberPadded = s.substr(s.length - 6) - return formatTimestamp(b.medianTime) + ' (#' + bnumberPadded + ')' + const b = (await server.dal.getAbsoluteBlockByBlockstamp( + blockstamp + )) as DBBlock; + const s = " " + b.number; + const bnumberPadded = s.substr(s.length - 6); + return formatTimestamp(b.medianTime) + " (#" + bnumberPadded + ")"; } async function getDateForBlockNumber(server: Server, number: number) { - const b = (await server.dal.getBlock(number)) as DBBlock - const s = " " + b.number - const bnumberPadded = s.substr(s.length - 6) - return formatTimestamp(b.medianTime) + ' (#' + bnumberPadded + ')' + const b = (await server.dal.getBlock(number)) as DBBlock; + const s = " " + b.number; + const bnumberPadded = 
s.substr(s.length - 6); + return formatTimestamp(b.medianTime) + " (#" + bnumberPadded + ")"; } async function getDateForBlock(b: BlockDTO) { - const s = " " + b.number - const bnumberPadded = s.substr(s.length - 6) - return formatTimestamp(b.medianTime) + ' (#' + bnumberPadded + ')' + const s = " " + b.number; + const bnumberPadded = s.substr(s.length - 6); + return formatTimestamp(b.medianTime) + " (#" + bnumberPadded + ")"; } function formatTimestamp(ts: number) { - return moment(ts * 1000).format('YYYY-MM-DD hh:mm:ss') + return moment(ts * 1000).format("YYYY-MM-DD hh:mm:ss"); } function sortSindex(rows: SindexEntry[]) { @@ -409,17 +515,19 @@ function sortSindex(rows: SindexEntry[]) { if (a.writtenOn === b.writtenOn) { if (a.identifier === b.identifier) { if (a.pos === b.pos) { - return a.op === 'CREATE' && b.op === 'UPDATE' ? -1 : (a.op === 'UPDATE' && b.op === 'CREATE' ? 1 : 0) + return a.op === "CREATE" && b.op === "UPDATE" + ? -1 + : a.op === "UPDATE" && b.op === "CREATE" + ? 1 + : 0; } else { - return a.pos < b.pos ? -1 : 1 + return a.pos < b.pos ? -1 : 1; } + } else { + return a.identifier < b.identifier ? -1 : 1; } - else { - return a.identifier < b.identifier ? -1 : 1 - } - } - else { - return a.writtenOn < b.writtenOn ? -1 : 1 + } else { + return a.writtenOn < b.writtenOn ? -1 : 1; } - }) -} \ No newline at end of file + }); +} diff --git a/app/modules/dump/blocks/dump.blocks.ts b/app/modules/dump/blocks/dump.blocks.ts index 6c9412bd146653fc730c74387b2b44a1d0523c67..f0967048486f6adacec248205fe03dc2a10d3b31 100644 --- a/app/modules/dump/blocks/dump.blocks.ts +++ b/app/modules/dump/blocks/dump.blocks.ts @@ -1,26 +1,32 @@ -import {Server} from "../../../../server" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {DBBlock} from "../../../lib/db/DBBlock" +import { Server } from "../../../../server"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { DBBlock } from "../../../lib/db/DBBlock"; export async function dumpForks(server: Server, blocks: string) { - return dumpBlocks(server, blocks, false) + return dumpBlocks(server, blocks, false); } -export async function dumpBlocks(server: Server, blocks: string, showMainBcOnly = true) { - const patterns = blocks.split(',') +export async function dumpBlocks( + server: Server, + blocks: string, + showMainBcOnly = true +) { + const patterns = blocks.split(","); for (const p of patterns) { // Single block to dump if (p.match(/^\d+$/)) { - const bNumber = parseInt(p) + const bNumber = parseInt(p); if (showMainBcOnly) { - dumpBlockIfDefined(await server.dal.getBlock(bNumber)) + dumpBlockIfDefined(await server.dal.getBlock(bNumber)); } else { - (await server.dal.getPotentialForkBlocks(bNumber, 0, bNumber)).forEach(dumpBlockIfDefined) + (await server.dal.getPotentialForkBlocks(bNumber, 0, bNumber)).forEach( + dumpBlockIfDefined + ); } } } } -export function dumpBlockIfDefined(b: DBBlock|undefined|null) { - console.log(BlockDTO.fromJSONObject(b).getRawSigned()) +export function dumpBlockIfDefined(b: DBBlock | undefined | null) { + console.log(BlockDTO.fromJSONObject(b).getRawSigned()); } diff --git a/app/modules/dump/wotwizard/wotwizard.constants.ts b/app/modules/dump/wotwizard/wotwizard.constants.ts index 97f98be1a39c1518c201b8a33b86ac84a3366d82..e707684c98428fa646ea3383a8e6ecff0a59f243 100644 --- a/app/modules/dump/wotwizard/wotwizard.constants.ts +++ b/app/modules/dump/wotwizard/wotwizard.constants.ts @@ -1,8 +1,7 @@ export const WotWizardConstants = { - - DB_NAME_0: 'wotwizard-export_0.db', - DB_NAME: 
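The `getDateFor*` helpers above right-align a block number by prepending spaces and keeping the last six characters (assuming the collapsed string literal is a run of six spaces). `String.prototype.padStart` states the same intent directly; one behavioral difference worth knowing is that the `substr` form silently truncates numbers longer than six digits, while `padStart` does not:

```ts
function padBlockNumberLegacy(n: number): string {
  const s = "      " + n; // six spaces, as in the original
  return s.substr(s.length - 6);
}

function padBlockNumber(n: number): string {
  return String(n).padStart(6, " ");
}

// padBlockNumber(123)           → "   123"
// padBlockNumberLegacy(1234567) → "234567" (truncated!)
// padBlockNumber(1234567)       → "1234567"
```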
'wotwizard-export.db', - FILE_UPDATING: 'updating.txt', + DB_NAME_0: "wotwizard-export_0.db", + DB_NAME: "wotwizard-export.db", + FILE_UPDATING: "updating.txt", BLOCKS_SAVE_BATCH_SIZE: 10, DELAY_FOR_UPDATING: 15 * 1000, // in milliseconds -} \ No newline at end of file +}; diff --git a/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts b/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts index 90be83d2afd3652ac7016aa7e64af35fa4329348..491ff7c28102936325ad7ebed1684d15297c6828 100644 --- a/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts +++ b/app/modules/dump/wotwizard/wotwizard.copy.mempool.ts @@ -1,25 +1,37 @@ -import {WotWizardDAL} from "./wotwizard.init.structure" -import {Server} from "../../../../server" -import {DBBlock} from "../../../lib/db/DBBlock" -import {Underscore} from "../../../lib/common-libs/underscore" -import {NewLogger} from "../../../lib/logger" +import { WotWizardDAL } from "./wotwizard.init.structure"; +import { Server } from "../../../../server"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { Underscore } from "../../../lib/common-libs/underscore"; +import { NewLogger } from "../../../lib/logger"; export async function copyMemPool(server: Server, wwDAL: WotWizardDAL) { + const logger = NewLogger(); - const logger = NewLogger() - - const identities = await server.dal.idtyDAL.sqlListAll() + const identities = await server.dal.idtyDAL.sqlListAll(); // Blocks on which are based identities - const blocks = await Promise.all(identities.map(async idty => returnBlockIfPresentInServerButNotInWW(idty.buid, server, wwDAL))) + const blocks = await Promise.all( + identities.map(async (idty) => + returnBlockIfPresentInServerButNotInWW(idty.buid, server, wwDAL) + ) + ); - const toPersist: DBBlock[] = Underscore.uniq(blocks.filter(b => b) as DBBlock[], false, b => [b.number, b.hash].join('-')) + const toPersist: DBBlock[] = Underscore.uniq( + blocks.filter((b) => b) as DBBlock[], + false, + (b) => [b.number, b.hash].join("-") + ); - logger.debug('Persisting %s blocks for identities...', toPersist.length) - await wwDAL.blockDao.insertBatch(toPersist.map(b => { (b as any).legacy = true; return b })) - await wwDAL.idtyDao.insertBatch(identities) - await wwDAL.certDao.insertBatch(await server.dal.certDAL.sqlListAll()) - await wwDAL.msDao.insertBatch(await server.dal.msDAL.sqlListAll()) + logger.debug("Persisting %s blocks for identities...", toPersist.length); + await wwDAL.blockDao.insertBatch( + toPersist.map((b) => { + (b as any).legacy = true; + return b; + }) + ); + await wwDAL.idtyDao.insertBatch(identities); + await wwDAL.certDao.insertBatch(await server.dal.certDAL.sqlListAll()); + await wwDAL.msDao.insertBatch(await server.dal.msDAL.sqlListAll()); } /** @@ -28,14 +40,18 @@ export async function copyMemPool(server: Server, wwDAL: WotWizardDAL) { * @param server * @param wwDAL */ -async function returnBlockIfPresentInServerButNotInWW(blockstamp: string, server: Server, wwDAL: WotWizardDAL) { - let b = await server.dal.getAbsoluteBlockByBlockstamp(blockstamp) +async function returnBlockIfPresentInServerButNotInWW( + blockstamp: string, + server: Server, + wwDAL: WotWizardDAL +) { + let b = await server.dal.getAbsoluteBlockByBlockstamp(blockstamp); if (b) { if (!(await wwHasBlock(wwDAL, b))) { - return b + return b; } } - return null + return null; } /** @@ -43,7 +59,10 @@ async function returnBlockIfPresentInServerButNotInWW(blockstamp: string, server * @param wwDAL * @param b */ -export async function wwHasBlock(wwDAL: WotWizardDAL, b: { number: number, 
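`copyMemPool` above de-duplicates blocks with `Underscore.uniq` keyed on the `number-hash` stamp. A plain-TypeScript equivalent using a `Map`, keeping the first occurrence per key as `uniq` does; the `BlockRef` shape is reduced for the sketch:

```ts
interface BlockRef {
  number: number;
  hash: string;
}

function uniqByStamp<T extends BlockRef>(blocks: (T | null)[]): T[] {
  const seen = new Map<string, T>();
  for (const b of blocks) {
    if (!b) continue; // drop the nulls returned for already-known blocks
    const stamp = `${b.number}-${b.hash}`;
    if (!seen.has(stamp)) seen.set(stamp, b); // keep first occurrence
  }
  return Array.from(seen.values());
}
```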
hash: string}) { - const wwBlock = await wwDAL.blockDao.getAbsoluteBlock(b.number, b.hash) - return !!wwBlock +export async function wwHasBlock( + wwDAL: WotWizardDAL, + b: { number: number; hash: string } +) { + const wwBlock = await wwDAL.blockDao.getAbsoluteBlock(b.number, b.hash); + return !!wwBlock; } diff --git a/app/modules/dump/wotwizard/wotwizard.delete.ts b/app/modules/dump/wotwizard/wotwizard.delete.ts index 5c3bf33b382bc5f661cfdd7e886c9717bb1b7063..9be50388e1ea3f9a81c37ba137c202d6dbd94985 100644 --- a/app/modules/dump/wotwizard/wotwizard.delete.ts +++ b/app/modules/dump/wotwizard/wotwizard.delete.ts @@ -1,10 +1,9 @@ -import {WotWizardDAL} from "./wotwizard.init.structure" +import { WotWizardDAL } from "./wotwizard.init.structure"; export async function deleteNonLegacy(wwDAL: WotWizardDAL) { - - await wwDAL.iindexDao.exec('DELETE FROM i_index WHERE NOT legacy') - await wwDAL.blockDao.exec('DELETE FROM block WHERE NOT legacy') - await wwDAL.idtyDao.sqlDeleteAll() - await wwDAL.certDao.sqlDeleteAll() - await wwDAL.msDao.sqlDeleteAll() -} \ No newline at end of file + await wwDAL.iindexDao.exec("DELETE FROM i_index WHERE NOT legacy"); + await wwDAL.blockDao.exec("DELETE FROM block WHERE NOT legacy"); + await wwDAL.idtyDao.sqlDeleteAll(); + await wwDAL.certDao.sqlDeleteAll(); + await wwDAL.msDao.sqlDeleteAll(); +} diff --git a/app/modules/dump/wotwizard/wotwizard.dump.ts b/app/modules/dump/wotwizard/wotwizard.dump.ts index cf619fecc8b9fd06277123d8e47c1241c25146b5..35ce2cf4fdef6c38767df176c9eaee8fe315750b 100644 --- a/app/modules/dump/wotwizard/wotwizard.dump.ts +++ b/app/modules/dump/wotwizard/wotwizard.dump.ts @@ -1,51 +1,58 @@ -import * as fs from "fs" -import {Server} from "../../../../server" -import {createExportStructure} from "./wotwizard.init.structure" -import {WotWizardConstants} from "./wotwizard.constants" -import {addLegacyBlocks} from "./wotwizard.legacy.blocks" -import {addNewBlocks} from "./wotwizard.new.blocks" -import {deleteNonLegacy} from "./wotwizard.delete" -import {copyMemPool} from "./wotwizard.copy.mempool" -import {Directory} from "../../../lib/system/directory" +import * as fs from "fs"; +import { Server } from "../../../../server"; +import { createExportStructure } from "./wotwizard.init.structure"; +import { WotWizardConstants } from "./wotwizard.constants"; +import { addLegacyBlocks } from "./wotwizard.legacy.blocks"; +import { addNewBlocks } from "./wotwizard.new.blocks"; +import { deleteNonLegacy } from "./wotwizard.delete"; +import { copyMemPool } from "./wotwizard.copy.mempool"; +import { Directory } from "../../../lib/system/directory"; export async function dumpWotWizard(server: Server) { - // 1. Create dump structure if it does not exist - const wwDAL = await createExportStructure(WotWizardConstants.DB_NAME_0) + const wwDAL = await createExportStructure(WotWizardConstants.DB_NAME_0); // 2. Integrate legacy blocks (= non-forkable) - await addLegacyBlocks(server, wwDAL) + await addLegacyBlocks(server, wwDAL); // 3. Delete non-legacy data - await deleteNonLegacy(wwDAL) + await deleteNonLegacy(wwDAL); // 4. Integrate new blocks (= forkable) - await addNewBlocks(server, wwDAL) + await addNewBlocks(server, wwDAL); // 5. Copy mempool - await copyMemPool(server, wwDAL) + await copyMemPool(server, wwDAL); // 6. 
   // 6. Close SQL connections
-  await Promise.all([
-    wwDAL.blockDao,
-    wwDAL.iindexDao,
-    wwDAL.idtyDao,
-    wwDAL.certDao,
-    wwDAL.msDao,
-  ].map(dao => dao.close()))
+  await Promise.all(
+    [
+      wwDAL.blockDao,
+      wwDAL.iindexDao,
+      wwDAL.idtyDao,
+      wwDAL.certDao,
+      wwDAL.msDao,
+    ].map((dao) => dao.close())
+  );
 
   // 7. Copy
-  let lastCopyIsOldEnough = false
-  const updatingFile = Directory.GET_FILE_PATH(WotWizardConstants.FILE_UPDATING)
+  let lastCopyIsOldEnough = false;
+  const updatingFile = Directory.GET_FILE_PATH(
+    WotWizardConstants.FILE_UPDATING
+  );
   if (fs.existsSync(updatingFile)) {
-    const content = parseInt(fs.readFileSync(updatingFile, 'utf8'))
-    lastCopyIsOldEnough = Date.now() - content > WotWizardConstants.DELAY_FOR_UPDATING
+    const content = parseInt(fs.readFileSync(updatingFile, "utf8"));
+    lastCopyIsOldEnough =
+      Date.now() - content > WotWizardConstants.DELAY_FOR_UPDATING;
   } else {
     // Never done
-    lastCopyIsOldEnough = true
+    lastCopyIsOldEnough = true;
   }
   if (lastCopyIsOldEnough) {
-    fs.copyFileSync(Directory.GET_FILE_PATH(WotWizardConstants.DB_NAME_0), Directory.GET_FILE_PATH(WotWizardConstants.DB_NAME))
-    fs.writeFileSync(updatingFile, Date.now())
+    fs.copyFileSync(
+      Directory.GET_FILE_PATH(WotWizardConstants.DB_NAME_0),
+      Directory.GET_FILE_PATH(WotWizardConstants.DB_NAME)
+    );
+    fs.writeFileSync(updatingFile, String(Date.now()));
   }
 }
diff --git a/app/modules/dump/wotwizard/wotwizard.init.structure.ts b/app/modules/dump/wotwizard/wotwizard.init.structure.ts
index c0867dc374b7ac31adba73a2b8b624c0b43f0e84..40e55db01882aac4d530b7de2089e1eff1edc738 100644
--- a/app/modules/dump/wotwizard/wotwizard.init.structure.ts
+++ b/app/modules/dump/wotwizard/wotwizard.init.structure.ts
@@ -1,46 +1,50 @@
-import {Directory} from "../../../lib/system/directory"
-import {IdentityDAL} from "../../../lib/dal/sqliteDAL/IdentityDAL"
-import {MembershipDAL} from "../../../lib/dal/sqliteDAL/MembershipDAL"
-import {CertDAL} from "../../../lib/dal/sqliteDAL/CertDAL"
-import {BlockDAL} from "../../../lib/dal/sqliteDAL/BlockDAL"
-import {IIndexDAL} from "../../../lib/dal/sqliteDAL/index/IIndexDAL"
+import { Directory } from "../../../lib/system/directory";
+import { IdentityDAL } from "../../../lib/dal/sqliteDAL/IdentityDAL";
+import { MembershipDAL } from "../../../lib/dal/sqliteDAL/MembershipDAL";
+import { CertDAL } from "../../../lib/dal/sqliteDAL/CertDAL";
+import { BlockDAL } from "../../../lib/dal/sqliteDAL/BlockDAL";
+import { IIndexDAL } from "../../../lib/dal/sqliteDAL/index/IIndexDAL";
 
 export interface WotWizardDAL {
-  idtyDao: IdentityDAL
-  certDao: CertDAL
-  msDao: MembershipDAL
-  blockDao: BlockDAL
-  iindexDao: IIndexDAL
+  idtyDao: IdentityDAL;
+  certDao: CertDAL;
+  msDao: MembershipDAL;
+  blockDao: BlockDAL;
+  iindexDao: IIndexDAL;
 }
 
-export async function createExportStructure(dbName: string): Promise<WotWizardDAL> {
-  const driver = await Directory.getHomeDB(false, dbName)
+export async function createExportStructure(
+  dbName: string
+): Promise<WotWizardDAL> {
+  const driver = await Directory.getHomeDB(false, dbName);
 
   // DAOs
-  const idtyDao = new IdentityDAL(driver)
-  const certDao = new CertDAL(driver)
-  const msDao = new MembershipDAL(driver)
-  const blockDao = new BlockDAL(driver)
-  const iindexDao = new IIndexDAL(driver)
+  const idtyDao = new IdentityDAL(driver);
+  const certDao = new CertDAL(driver);
+  const msDao = new MembershipDAL(driver);
+  const blockDao = new BlockDAL(driver);
+  const iindexDao = new IIndexDAL(driver);
 
   // Create tables
-  await idtyDao.init()
-  await certDao.init()
-  await msDao.init()
-  await blockDao.init()
-  await iindexDao.init()
+  await idtyDao.init();
+  await certDao.init();
+  await msDao.init();
+  await blockDao.init();
+  await iindexDao.init();
 
-  const data = await blockDao.query('SELECT COUNT(*) as count FROM block')
-  const blocksCount = parseInt(String((data[0] as any).count))
+  const data = await blockDao.query("SELECT COUNT(*) as count FROM block");
+  const blocksCount = parseInt(String((data[0] as any).count));
 
   // If first DB initialization
   if (blocksCount === 0) {
     // Manual updates (which are normally present in MetaDAL)
-    await idtyDao.exec('ALTER TABLE idty ADD COLUMN expired INTEGER NULL')
-    await idtyDao.exec('ALTER TABLE idty ADD COLUMN revoked_on INTEGER NULL')
-    await idtyDao.exec('ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0')
-    await certDao.exec('ALTER TABLE cert ADD COLUMN expired INTEGER NULL')
-    await msDao.exec('ALTER TABLE membership ADD COLUMN expired INTEGER NULL')
+    await idtyDao.exec("ALTER TABLE idty ADD COLUMN expired INTEGER NULL");
+    await idtyDao.exec("ALTER TABLE idty ADD COLUMN revoked_on INTEGER NULL");
+    await idtyDao.exec(
+      "ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0"
+    );
+    await certDao.exec("ALTER TABLE cert ADD COLUMN expired INTEGER NULL");
+    await msDao.exec("ALTER TABLE membership ADD COLUMN expired INTEGER NULL");
   }
 
   return {
@@ -49,5 +53,5 @@ export async function createExportStructure(dbName: string): Promise<WotWizardDA
     msDao,
     blockDao,
     iindexDao,
-  }
+  };
 }
diff --git a/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts b/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
index 8aa7070a26d6529d29e8e21bd0460c12f3ae6791..86025ac294165a0044643e20b188c5a362a365b7 100644
--- a/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
+++ b/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
@@ -1,42 +1,54 @@
-import {WotWizardDAL} from "./wotwizard.init.structure"
-import {Server} from "../../../../server"
-import {CommonConstants} from "../../../lib/common-libs/constants"
-import {DBBlock} from "../../../lib/db/DBBlock"
-import {NewLogger} from "../../../lib/logger"
+import { WotWizardDAL } from "./wotwizard.init.structure";
+import { Server } from "../../../../server";
+import { CommonConstants } from "../../../lib/common-libs/constants";
+import { DBBlock } from "../../../lib/db/DBBlock";
+import { NewLogger } from "../../../lib/logger";
 
 export async function addLegacyBlocks(server: Server, wwDAL: WotWizardDAL) {
+  const logger = NewLogger();
-  const logger = NewLogger()
+  const currentWW = await wwDAL.blockDao.getCurrent();
+  const current = await server.dal.blockDAL.getCurrent();
+  const start = (currentWW && currentWW.number + 1) || 0;
+  const end = (current && Math.max(-1, current.number - 100)) || -1;
-  const currentWW = await wwDAL.blockDao.getCurrent()
-  const current = await server.dal.blockDAL.getCurrent()
-  const start = currentWW && currentWW.number + 1 || 0
-  const end = current && Math.max(-1, current.number - 100) || -1
-
-  let blocksSaved: DBBlock[] = []
-  logger.debug('Reading blocks...')
+  let blocksSaved: DBBlock[] = [];
+  logger.debug("Reading blocks...");
 
   // We loop to work in flow mode (avoid big memory consumption)
   for (let i = start; i <= end; i += CommonConstants.BLOCKS_IN_MEMORY_MAX) {
-    const blocks = await server.dal.getBlocksBetween(i, Math.min(end, i + CommonConstants.BLOCKS_IN_MEMORY_MAX) - 1)
-    const legacies = blocks.map(f => { (f as any).legacy = true; return f })
-    legacies.forEach(l => blocksSaved.push(l))
+    const blocks = await server.dal.getBlocksBetween(
+      i,
+      Math.min(end, i + CommonConstants.BLOCKS_IN_MEMORY_MAX) - 1
+    );
+    const legacies = blocks.map((f) => {
+      (f as any).legacy = true;
+      return f;
+    });
+    legacies.forEach((l) => blocksSaved.push(l));
     if (i % 25000 === 0) {
-      logger.debug('Saving 25 blocks... (%s yet stored)', i)
-      await wwDAL.blockDao.insertBatch(blocksSaved)
-      blocksSaved = []
+      logger.debug("Saving blocks... (%s read so far)", i);
+      await wwDAL.blockDao.insertBatch(blocksSaved);
+      blocksSaved = [];
     }
   }
 
-  logger.debug('Saving blocks...')
-  await wwDAL.blockDao.insertBatch(blocksSaved)
-
-  await Promise.all(blocksSaved)
-
-  const iindexRows = (await server.dal.iindexDAL.findRawWithOrder({}, [['writtenOn', false], ['wotb_id', false]]))
-    .filter(r => r.hash && r.writtenOn >= start && r.writtenOn <= end)
-
-  logger.debug('Saving %s iindex rows...', iindexRows.length)
-  const legacies = iindexRows.map(f => { (f as any).legacy = true; return f })
-  await wwDAL.iindexDao.insertBatch(legacies)
-}
\ No newline at end of file
+  logger.debug("Saving blocks...");
+  await wwDAL.blockDao.insertBatch(blocksSaved);
+
+  await Promise.all(blocksSaved);
+
+  const iindexRows = (
+    await server.dal.iindexDAL.findRawWithOrder({}, [
+      ["writtenOn", false],
+      ["wotb_id", false],
+    ])
+  ).filter((r) => r.hash && r.writtenOn >= start && r.writtenOn <= end);
+
+  logger.debug("Saving %s iindex rows...", iindexRows.length);
+  const legacies = iindexRows.map((f) => {
+    (f as any).legacy = true;
+    return f;
+  });
+  await wwDAL.iindexDao.insertBatch(legacies);
+}
diff --git a/app/modules/dump/wotwizard/wotwizard.new.blocks.ts b/app/modules/dump/wotwizard/wotwizard.new.blocks.ts
index a8fce50ee76c985ad8418c7ef21387c334c310e4..ab914cc066f54e551d4db25a96dad764615aeb62 100644
--- a/app/modules/dump/wotwizard/wotwizard.new.blocks.ts
+++ b/app/modules/dump/wotwizard/wotwizard.new.blocks.ts
@@ -1,38 +1,47 @@
-import {WotWizardDAL} from "./wotwizard.init.structure"
-import {Server} from "../../../../server"
-import {CommonConstants} from "../../../lib/common-libs/constants"
-import {NewLogger} from "../../../lib/logger"
+import { WotWizardDAL } from "./wotwizard.init.structure";
+import { Server } from "../../../../server";
+import { CommonConstants } from "../../../lib/common-libs/constants";
+import { NewLogger } from "../../../lib/logger";
 
 export async function addNewBlocks(server: Server, wwDAL: WotWizardDAL) {
+  const logger = NewLogger();
-  const logger = NewLogger()
+  wwDAL.blockDao.cleanCache();
-  wwDAL.blockDao.cleanCache()
+  const currentWW = await wwDAL.blockDao.getCurrent();
+  const current = await server.dal.blockDAL.getCurrent();
+  const start = (currentWW && currentWW.number + 1) || 0;
+  const end = (current && current.number) || -1;
-  const currentWW = await wwDAL.blockDao.getCurrent()
-  const current = await server.dal.blockDAL.getCurrent()
-  const start = currentWW && currentWW.number + 1 || 0
-  const end = current && current.number || -1
-
-  const blocksSaved: Promise<any>[] = []
+  const blocksSaved: Promise<any>[] = [];
 
   // We loop to work in flow mode (avoid big memory consumption)
   for (let i = start; i <= end; i += CommonConstants.BLOCKS_IN_MEMORY_MAX) {
-    const beginAt = i
-    const endAt = Math.min(end, i + CommonConstants.BLOCKS_IN_MEMORY_MAX) - 1
-    const blocks = await server.dal.getBlocksBetween(beginAt, endAt)
-    const forks = await server.dal.getPotentialForkBlocks(beginAt, 0, endAt)
-    const all = blocks.concat(forks).map(f => { (f as any).legacy = false; return f })
-    logger.debug('Saving %s pending blocks...', all.length)
-    blocksSaved.push(wwDAL.blockDao.insertBatch(all))
+    const beginAt = i;
+    const endAt = Math.min(end, i + CommonConstants.BLOCKS_IN_MEMORY_MAX) - 1;
+    const blocks = await server.dal.getBlocksBetween(beginAt, endAt);
+    const forks = await server.dal.getPotentialForkBlocks(beginAt, 0, endAt);
+    const all = blocks.concat(forks).map((f) => {
+      (f as any).legacy = false;
+      return f;
+    });
+    logger.debug("Saving %s pending blocks...", all.length);
+    blocksSaved.push(wwDAL.blockDao.insertBatch(all));
   }
 
-  await Promise.all(blocksSaved)
-
-  const iindexRows = (await server.dal.iindexDAL.findRawWithOrder({}, [['writtenOn', false], ['wotb_id', false]]))
-    .filter(r => r.writtenOn >= start && r.uid)
-
-  logger.debug('Saving %s iindex rows...', iindexRows.length)
-  const legacies = iindexRows.map(f => { (f as any).legacy = false; return f })
-  await wwDAL.iindexDao.insertBatch(legacies)
-}
\ No newline at end of file
+  await Promise.all(blocksSaved);
+
+  const iindexRows = (
+    await server.dal.iindexDAL.findRawWithOrder({}, [
+      ["writtenOn", false],
+      ["wotb_id", false],
+    ])
+  ).filter((r) => r.writtenOn >= start && r.uid);
+
+  logger.debug("Saving %s iindex rows...", iindexRows.length);
+  const legacies = iindexRows.map((f) => {
+    (f as any).legacy = false;
+    return f;
+  });
+  await wwDAL.iindexDao.insertBatch(legacies);
+}
diff --git a/app/modules/export-bc.ts b/app/modules/export-bc.ts
index 1a0de70cbdefa1b29ac33335d5680eb11eebc1c1..5d3b91098c1b6126cd993ebef388815dc6960bf2 100644
--- a/app/modules/export-bc.ts
+++ b/app/modules/export-bc.ts
@@ -11,53 +11,67 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {ConfDTO} from "../lib/dto/ConfDTO"
-import {Server} from "../../server"
-import {BlockDTO} from "../lib/dto/BlockDTO"
+import { ConfDTO } from "../lib/dto/ConfDTO";
+import { Server } from "../../server";
+import { BlockDTO } from "../lib/dto/BlockDTO";
 
 module.exports = {
   duniter: {
-    cli: [{
-      name: 'export-bc [upto]',
-      desc: 'Exports the whole blockchain as JSON array, up to [upto] block number (excluded).',
-      logs: false,
-      onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        const upto = params[0];
-        const logger = server.logger;
-        try {
-          let CHUNK_SIZE = 500;
-          let jsoned:any = [];
-          let current = await server.dal.getCurrentBlockOrNull();
-          let lastNumber = current ? current.number + 1 : -1;
-          if (upto !== undefined && upto.match(/\d+/)) {
-            lastNumber = Math.min(parseInt(upto), lastNumber);
+    cli: [
+      {
+        name: "export-bc [upto]",
+        desc:
+          "Exports the whole blockchain as JSON array, up to [upto] block number (excluded).",
+        logs: false,
+        onDatabaseExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          const upto = params[0];
+          const logger = server.logger;
+          try {
+            let CHUNK_SIZE = 500;
+            let jsoned: any = [];
+            let current = await server.dal.getCurrentBlockOrNull();
+            let lastNumber = current ? current.number + 1 : -1;
+            if (upto !== undefined && upto.match(/\d+/)) {
+              lastNumber = Math.min(parseInt(upto), lastNumber);
+            }
+            let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
+            let chunks = [];
+            // Max-size chunks
+            for (let i = 0, len = chunksCount; i < len; i++) {
+              chunks.push({
+                start: i * CHUNK_SIZE,
+                to: i * CHUNK_SIZE + CHUNK_SIZE - 1,
+              });
+            }
+            // A last chunk
+            if (lastNumber > chunksCount * CHUNK_SIZE) {
+              chunks.push({ start: chunksCount * CHUNK_SIZE, to: lastNumber });
+            }
+            for (const chunk of chunks) {
+              let blocks = await server.dal.getBlocksBetween(
+                chunk.start,
+                chunk.to
+              );
+              blocks.forEach(function (block: any) {
+                jsoned.push(BlockDTO.fromJSONObject(block).json());
+              });
+            }
+            if (!program.nostdout) {
+              console.log(JSON.stringify(jsoned, null, " "));
+            }
+            await server.disconnect();
+            return jsoned;
+          } catch (err) {
+            logger.warn(err.message || err);
+            await server.disconnect();
          }
-          let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
-          let chunks = [];
-          // Max-size chunks
-          for (let i = 0, len = chunksCount; i < len; i++) {
-            chunks.push({start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1});
-          }
-          // A last chunk
-          if (lastNumber > chunksCount * CHUNK_SIZE) {
-            chunks.push({start: chunksCount * CHUNK_SIZE, to: lastNumber});
-          }
-          for (const chunk of chunks) {
-            let blocks = await server.dal.getBlocksBetween(chunk.start, chunk.to);
-            blocks.forEach(function (block:any) {
-              jsoned.push(BlockDTO.fromJSONObject(block).json())
-            });
-          }
-          if (!program.nostdout) {
-            console.log(JSON.stringify(jsoned, null, " "));
-          }
-          await server.disconnect();
-          return jsoned;
-        } catch(err) {
-          logger.warn(err.message || err);
-          await server.disconnect();
-        }
-      }
-    }]
-  }
-}
+        },
+      },
+    ],
+  },
+};
diff --git a/app/modules/keypair/index.ts b/app/modules/keypair/index.ts
index d91d508959dbb74961f0e451336e41b4dd6f62e2..053b2ba6d5ffb8ddea6313021c3e97fb6607bf17 100644
--- a/app/modules/keypair/index.ts
+++ b/app/modules/keypair/index.ts
@@ -11,86 +11,116 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {randomKey} from "../../lib/common-libs/crypto/keyring"
-import {ConfDTO, KeypairConfDTO} from "../../lib/dto/ConfDTO"
-import {Server} from "../../../server"
-import {Scrypt} from "./lib/scrypt"
+import { randomKey } from "../../lib/common-libs/crypto/keyring";
+import { ConfDTO, KeypairConfDTO } from "../../lib/dto/ConfDTO";
+import { Server } from "../../../server";
+import { Scrypt } from "./lib/scrypt";
 
-const inquirer = require('inquirer');
-const fs = require('fs');
-const yaml = require('js-yaml');
+const inquirer = require("inquirer");
+const fs = require("fs");
+const yaml = require("js-yaml");
 
 export const KeypairDependency = {
-
   duniter: {
-
     methods: {
-      scrypt: Scrypt
+      scrypt: Scrypt,
     },
     cliOptions: [
-      { value: '--salt <salt>', desc: 'Salt to generate the keypair' },
-      { value: '--passwd <password>', desc: 'Password to generate the keypair' },
-      { value: '--keyN <N>', desc: 'Scrypt `N` parameter. Defaults to 4096.', parser: parseInt },
-      { value: '--keyr <r>', desc: 'Scrypt `N` parameter. Defaults to 16.', parser: parseInt },
-      { value: '--keyp <p>', desc: 'Scrypt `N` parameter. Defaults to 1.', parser: parseInt },
-      { value: '--keyprompt', desc: 'Force to use the keypair given by user prompt.' },
-      { value: '--keyfile <filepath>', desc: 'Force to use the keypair of the given YAML file. File must contain `pub:` and `sec:` fields.' }
+      { value: "--salt <salt>", desc: "Salt to generate the keypair" },
+      {
+        value: "--passwd <password>",
+        desc: "Password to generate the keypair",
+      },
+      {
+        value: "--keyN <N>",
+        desc: "Scrypt `N` parameter. Defaults to 4096.",
+        parser: parseInt,
+      },
+      {
+        value: "--keyr <r>",
+        desc: "Scrypt `r` parameter. Defaults to 16.",
+        parser: parseInt,
+      },
+      {
+        value: "--keyp <p>",
+        desc: "Scrypt `p` parameter. Defaults to 1.",
+        parser: parseInt,
+      },
+      {
+        value: "--keyprompt",
+        desc: "Force to use the keypair given by user prompt.",
+      },
+      {
+        value: "--keyfile <filepath>",
+        desc:
+          "Force to use the keypair of the given YAML file. File must contain `pub:` and `sec:` fields.",
+      },
     ],
     wizard: {
-
-      'key': promptKey
-
+      key: promptKey,
     },
     onReset: {
-      config: (conf:ConfDTO, program:any, logger:any, confDAL:any) => confDAL.coreFS.remove('keyring.yml')
+      config: (conf: ConfDTO, program: any, logger: any, confDAL: any) =>
+        confDAL.coreFS.remove("keyring.yml"),
     },
-    cli: [{
-      name: 'pub',
-      desc: 'Shows the node public key',
-      logs: false,
-      onConfiguredExecute: async (server:Server, conf:ConfDTO) => {
-        console.log(conf.pair.pub)
-      }
-    }, {
-      name: 'sec',
-      desc: 'Shows the node secret key',
-      logs: false,
-      onConfiguredExecute: async (server:Server, conf:ConfDTO) => {
-        console.log(conf.pair.sec)
-      }
-    }],
+    cli: [
+      {
+        name: "pub",
+        desc: "Shows the node public key",
+        logs: false,
+        onConfiguredExecute: async (server: Server, conf: ConfDTO) => {
+          console.log(conf.pair.pub);
+        },
+      },
+      {
+        name: "sec",
+        desc: "Shows the node secret key",
+        logs: false,
+        onConfiguredExecute: async (server: Server, conf: ConfDTO) => {
+          console.log(conf.pair.sec);
+        },
+      },
+    ],
     config: {
-
       /*****
       * Tries to load a specific parameter `conf.pair`
       */
-      onLoading: async (conf:KeypairConfDTO, program:any, logger:any, confDAL:any) => {
-
-        if ((program.keyN || program.keyr || program.keyp) && !(program.salt && program.passwd)) {
-          throw Error('Missing --salt and --passwd options along with --keyN|keyr|keyp option');
+      onLoading: async (
+        conf: KeypairConfDTO,
+        program: any,
+        logger: any,
+        confDAL: any
+      ) => {
+        if (
+          (program.keyN || program.keyr || program.keyp) &&
+          !(program.salt && program.passwd)
+        ) {
+          throw Error(
+            "Missing --salt and --passwd options along with --keyN|keyr|keyp option"
+          );
        }
 
        // If we have salt and password, convert it to keypair
        if (program.salt || program.passwd) {
-          const salt = program.salt || '';
-          const key = program.passwd || '';
+          const salt = program.salt || "";
+          const key = program.passwd || "";
          conf.pair = await Scrypt(salt, key);
        }
 
        // If no keypair has been loaded, try the default .yml file
        if (!conf.pair || !conf.pair.pub || !conf.pair.sec) {
-          const ymlContent = await confDAL.coreFS.read('keyring.yml')
+          const ymlContent = await confDAL.coreFS.read("keyring.yml");
          conf.pair = yaml.safeLoad(ymlContent);
        }
 
        // If no keypair has been loaded or derived from salt/key, generate a random one
        if (!conf.pair || !conf.pair.pub || !conf.pair.sec) {
-          conf.pair = randomKey().json()
+          conf.pair = randomKey().json();
        }
 
        // With the --keyprompt option, temporarily use a keypair given from CLI prompt (it won't be stored)
@@ -98,7 +128,7 @@ export const KeypairDependency = {
          // Backup of the current pair
          conf.oldPair = {
            pub: conf.pair.pub,
-            sec: conf.pair.sec
+            sec: conf.pair.sec,
          };
          // Ask the for the session key
          await promptKey(conf, program);
@@ -109,77 +139,87 @@ export const KeypairDependency = {
          // Backup of the current pair
          conf.oldPair = {
            pub: conf.pair.pub,
-            sec: conf.pair.sec
+            sec: conf.pair.sec,
          };
          // Load file content
-          const doc = yaml.safeLoad(fs.readFileSync(program.keyfile, 'utf8'));
+          const doc = yaml.safeLoad(fs.readFileSync(program.keyfile, "utf8"));
          if (!doc || !doc.pub || !doc.sec) {
-            throw 'Could not load full keyring from file';
+            throw "Could not load full keyring from file";
          }
          conf.pair = {
            pub: doc.pub,
-            sec: doc.sec
-          }
+            sec: doc.sec,
+          };
        }
-
      },
-      beforeSave: async (conf:KeypairConfDTO, program:any, logger:any, confDAL:any) => {
-
+      beforeSave: async (
+        conf: KeypairConfDTO,
+        program: any,
+        logger: any,
+        confDAL: any
+      ) => {
        if ((program.keyprompt || program.keyfile) && conf.oldPair) {
          // Don't store the given key, but only the default/saved one
          conf.pair = {
            pub: conf.oldPair.pub,
-            sec: conf.oldPair.sec
+            sec: conf.oldPair.sec,
          };
        }
        delete conf.oldPair;
 
        // We save the key in a separate file
-        const keyring = 'pub: "' + conf.pair.pub + '"\n' +
-          'sec: "' + conf.pair.sec + '"'
-        await confDAL.coreFS.write('keyring.yml', keyring)
+        const keyring =
+          'pub: "' + conf.pair.pub + '"\n' + 'sec: "' + conf.pair.sec + '"';
+        await confDAL.coreFS.write("keyring.yml", keyring);
 
        // We never want to store salt, password or keypair in the conf.json file
        delete conf.salt;
        delete conf.passwd;
        delete conf.pair;
-      }
-    }
-  }
+      },
+    },
+  },
 };
 
-async function promptKey (conf:KeypairConfDTO, program:any) {
-
+async function promptKey(conf: KeypairConfDTO, program: any) {
  const changeKeypair = !conf.pair || !conf.pair.pub || !conf.pair.sec;
 
-  const answersWantToChange = await inquirer.prompt([{
-    type: "confirm",
-    name: "change",
-    message: "Modify your keypair?",
-    default: changeKeypair
-  }]);
+  const answersWantToChange = await inquirer.prompt([
+    {
+      type: "confirm",
+      name: "change",
+      message: "Modify your keypair?",
+      default: changeKeypair,
+    },
+  ]);
 
  if (answersWantToChange.change) {
-    const obfuscatedSalt = (program.salt || "").replace(/./g, '*');
-    const answersSalt = await inquirer.prompt([{
-      type: "password",
-      name: "salt",
-      message: "Key's salt",
-      default: obfuscatedSalt || undefined
-    }]);
-    const obfuscatedPasswd = (program.passwd || "").replace(/./g, '*');
-    const answersPasswd = await inquirer.prompt([{
-      type: "password",
-      name: "passwd",
-      message: "Key\'s password",
-      default: obfuscatedPasswd || undefined
-    }]);
-
-    const keepOldSalt = obfuscatedSalt.length > 0 && obfuscatedSalt == answersSalt.salt;
-    const keepOldPasswd = obfuscatedPasswd.length > 0 && obfuscatedPasswd == answersPasswd.passwd;
-    const salt = keepOldSalt ? program.salt : answersSalt.salt;
+    const obfuscatedSalt = (program.salt || "").replace(/./g, "*");
+    const answersSalt = await inquirer.prompt([
+      {
+        type: "password",
+        name: "salt",
+        message: "Key's salt",
+        default: obfuscatedSalt || undefined,
+      },
+    ]);
+    const obfuscatedPasswd = (program.passwd || "").replace(/./g, "*");
+    const answersPasswd = await inquirer.prompt([
+      {
+        type: "password",
+        name: "passwd",
+        message: "Key's password",
+        default: obfuscatedPasswd || undefined,
+      },
+    ]);
+
+    const keepOldSalt =
+      obfuscatedSalt.length > 0 && obfuscatedSalt == answersSalt.salt;
+    const keepOldPasswd =
+      obfuscatedPasswd.length > 0 && obfuscatedPasswd == answersPasswd.passwd;
+    const salt = keepOldSalt ? program.salt : answersSalt.salt;
    const passwd = keepOldPasswd ? program.passwd : answersPasswd.passwd;
-    conf.pair = await Scrypt(salt, passwd)
+    conf.pair = await Scrypt(salt, passwd);
  }
 }
diff --git a/app/modules/keypair/lib/scrypt.ts b/app/modules/keypair/lib/scrypt.ts
index 0a348ad01a3d1d48f1d8baca568d30f9d5693f25..d66eba7fa7e74e2f464523b29103dcf5d4784a61 100644
--- a/app/modules/keypair/lib/scrypt.ts
+++ b/app/modules/keypair/lib/scrypt.ts
@@ -11,8 +11,8 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import * as crypto from 'crypto'
-import { KeyPairBuilder, seedToSecretKey } from 'duniteroxyde'
+import * as crypto from "crypto";
+import { KeyPairBuilder, seedToSecretKey } from "duniteroxyde";
 
 const SEED_LENGTH = 32; // Length of the key
@@ -25,17 +25,31 @@ const SEED_LENGTH = 32; // Length of the key
 * @param p Scrypt parameter p. Defaults to 1.
 * @return keyPair An object containing the public and private keys, base58 encoded.
 */
-export const Scrypt = async (salt:string, key:string, N = 4096, r = 16, p = 1) => {
-  const res: { pub: string, sec: string } = await new Promise((resolve, reject) => {
-    crypto.scrypt(key, salt, SEED_LENGTH, { N, r, p }, (err:any, seed:Buffer) => {
-      if (err) return reject(err)
-      const pair = KeyPairBuilder.fromSeed(seed);
-      resolve({
-        pub: pair.getPublicKey(),
-        sec: seedToSecretKey(seed)
-      })
-    })
-  })
+export const Scrypt = async (
+  salt: string,
+  key: string,
+  N = 4096,
+  r = 16,
+  p = 1
+) => {
+  const res: { pub: string; sec: string } = await new Promise(
+    (resolve, reject) => {
+      crypto.scrypt(
+        key,
+        salt,
+        SEED_LENGTH,
+        { N, r, p },
+        (err: any, seed: Buffer) => {
+          if (err) return reject(err);
+          const pair = KeyPairBuilder.fromSeed(seed);
+          resolve({
+            pub: pair.getPublicKey(),
+            sec: seedToSecretKey(seed),
+          });
+        }
+      );
+    }
+  );
  return res;
-}
+};
diff --git a/app/modules/peersignal.ts b/app/modules/peersignal.ts
index 6a7c5d0f412541974072bcf9d60bbec7e5e1ce3f..d4f650a7d1dbec7d06d017b74978c6799b633936 100644
--- a/app/modules/peersignal.ts
+++ b/app/modules/peersignal.ts
@@ -12,69 +12,76 @@
 // GNU Affero General Public License for more details.
 
 "use strict";
-import {ConfDTO} from "../lib/dto/ConfDTO"
-import {Server} from "../../server"
+import { ConfDTO } from "../lib/dto/ConfDTO";
+import { Server } from "../../server";
 
-const async = require('async');
-const constants = require('../lib/constants');
+const async = require("async");
+const constants = require("../lib/constants");
 
 module.exports = {
  duniter: {
    service: {
-      neutral: (server:Server, conf:ConfDTO) => {
+      neutral: (server: Server, conf: ConfDTO) => {
        for (const ep of conf.endpoints || []) {
-          server.addEndpointsDefinitions(async () => ep)
+          server.addEndpointsDefinitions(async () => ep);
        }
-        return new PeerSignalEmitter(server, conf)
-      }
-    }
-  }
-}
+        return new PeerSignalEmitter(server, conf);
+      },
+    },
+  },
+};
 
 /**
 * Service which triggers the server's peering generation (actualization of the Peer document).
 * @constructor
 */
class PeerSignalEmitter {
-
-  INTERVAL:NodeJS.Timer|null = null
-  peerFifo = async.queue(function (task:any, callback:any) {
+  INTERVAL: NodeJS.Timer | null = null;
+  peerFifo = async.queue(function (task: any, callback: any) {
    task(callback);
-  }, 1)
+  }, 1);
 
-  constructor(private server:Server, private conf:ConfDTO) {
-  }
+  constructor(private server: Server, private conf: ConfDTO) {}
 
  async startService() {
-
    // The interval duration
-    const SIGNAL_INTERVAL = 1000 * this.conf.avgGenTime * constants.NETWORK.STATUS_INTERVAL.UPDATE;
-    const SIGNAL_INITIAL_DELAY = 1000 * 60
+    const SIGNAL_INTERVAL =
+      1000 * this.conf.avgGenTime * constants.NETWORK.STATUS_INTERVAL.UPDATE;
+    const SIGNAL_INITIAL_DELAY = 1000 * 60;
 
    // We eventually clean an existing interval
-    if (this.INTERVAL)
-      clearInterval(this.INTERVAL);
+    if (this.INTERVAL) clearInterval(this.INTERVAL);
 
    // Create the new regular algorithm
    this.INTERVAL = setInterval(() => {
-      this.peerFifo.push(async (done:any) => {
+      this.peerFifo.push(async (done: any) => {
        try {
-          await this.server.PeeringService.generateSelfPeer(this.conf, SIGNAL_INTERVAL)
+          await this.server.PeeringService.generateSelfPeer(
+            this.conf,
+            SIGNAL_INTERVAL
+          );
          done();
        } catch (e) {
          done(e);
        }
-      })
-    }, SIGNAL_INTERVAL)
+      });
+    }, SIGNAL_INTERVAL);
 
    // Launches it a first time few seconds after startup
-    setTimeout(() => this.server.PeeringService.generateSelfPeer(this.conf, SIGNAL_INTERVAL - SIGNAL_INITIAL_DELAY), 0)
+    setTimeout(
+      () =>
+        this.server.PeeringService.generateSelfPeer(
+          this.conf,
+          SIGNAL_INTERVAL - SIGNAL_INITIAL_DELAY
+        ),
+      0
+    );
  }
 
  stopService() {
    // Stop the interval
    if (this.INTERVAL) {
-      clearInterval(this.INTERVAL)
+      clearInterval(this.INTERVAL);
    }
    // Empty the fifo
    this.peerFifo.kill();
diff --git a/app/modules/plugin.ts b/app/modules/plugin.ts
index 0fd7849c94a783793315311986a0e7bdbff361ce..06c60ff584a88e0ec9faf5dbb398fa2c16e12bae 100644
--- a/app/modules/plugin.ts
+++ b/app/modules/plugin.ts
@@ -11,151 +11,180 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {ConfDTO} from "../lib/dto/ConfDTO"
-import {Server} from "../../server"
+import { ConfDTO } from "../lib/dto/ConfDTO";
+import { Server } from "../../server";
 
 "use strict";
 
-const fs = require('fs');
-const path = require('path');
-const spawn = require('child_process').spawn;
+const fs = require("fs");
+const path = require("path");
+const spawn = require("child_process").spawn;
 
 module.exports = {
  duniter: {
-
    methods: {
      canWrite: getNPMAccess,
      npmInstall,
-      npmRemove
+      npmRemove,
    },
-    cli: [{
-      name: 'plug [what]',
-      desc: 'Plugs in a duniter module to this Duniter codebase, making it available for the node.',
-      logs: false,
-      onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        const what = params[0];
-        try {
-          console.log('Trying to install module "%s"...', what)
-          await checkNPMAccess()
-          await npmInstall(what)
-          console.log('Module successfully installed.')
-        } catch (err) {
-          console.error('Error during installation of the plugin:', err);
-        }
-        // Close the DB connection properly
-        return server && server.disconnect()
-      }
-    }, {
-      name: 'unplug [what]',
-      desc: 'Plugs in a duniter module to this Duniter codebase, making it available for the node.',
-      logs: false,
-      onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        const what = params[0];
-        try {
-          console.log('Trying to remove module "%s"...', what)
-          await checkNPMAccess()
-          await npmRemove(what)
-          console.log('Module successfully uninstalled.')
-        } catch (err) {
-          console.error('Error during installation of the plugin:', err);
-        }
-        // Close the DB connection properly
-        return server && server.disconnect()
-      }
-    }]
-  }
-}
-
-function npmInstall(what:string, npm:string|null = null, cwd:string|null = null) {
+    cli: [
+      {
+        name: "plug [what]",
+        desc:
+          "Plugs in a duniter module to this Duniter codebase, making it available for the node.",
+        logs: false,
+        onDatabaseExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          const what = params[0];
+          try {
+            console.log('Trying to install module "%s"...', what);
+            await checkNPMAccess();
+            await npmInstall(what);
+            console.log("Module successfully installed.");
+          } catch (err) {
+            console.error("Error during installation of the plugin:", err);
+          }
+          // Close the DB connection properly
+          return server && server.disconnect();
+        },
+      },
+      {
+        name: "unplug [what]",
+        desc:
+          "Unplugs a duniter module from this Duniter codebase, removing it from the node.",
+        logs: false,
+        onDatabaseExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          const what = params[0];
+          try {
+            console.log('Trying to remove module "%s"...', what);
+            await checkNPMAccess();
+            await npmRemove(what);
+            console.log("Module successfully uninstalled.");
+          } catch (err) {
+            console.error("Error during uninstallation of the plugin:", err);
+          }
+          // Close the DB connection properly
+          return server && server.disconnect();
+        },
+      },
+    ],
+  },
+};
+
+function npmInstall(
+  what: string,
+  npm: string | null = null,
+  cwd: string | null = null
+) {
  return new Promise((res, rej) => {
-    const node = getNode()
-    npm = npm || getNPM()
-    cwd = cwd || getCWD()
-    const install = spawn(node, [npm, 'i', '--save', what], { cwd })
+    const node = getNode();
+    npm = npm || getNPM();
+    cwd = cwd || getCWD();
+    const install = spawn(node, [npm, "i", "--save", what], { cwd });
 
-    install.stdout.pipe(process.stdout)
-    install.stderr.pipe(process.stderr)
+    install.stdout.pipe(process.stdout);
+    install.stderr.pipe(process.stderr);
 
-    install.stderr.on('data', (data:any) => {
+    install.stderr.on("data", (data: any) => {
      if (data.toString().match(/ERR!/)) {
        setTimeout(() => {
-          install.kill('SIGINT')
-        }, 100)
+          install.kill("SIGINT");
+        }, 100);
      }
    });
 
-    install.on('close', (code:number|null) => {
+    install.on("close", (code: number | null) => {
      if (code === null || code > 0) {
-        return rej('could not retrieve or install the plugin')
+        return rej("could not retrieve or install the plugin");
      }
-      res()
+      res();
    });
-  })
+  });
 }
 
-
-function npmRemove(what:string, npm:string|null = null, cwd:string|null = null) {
+function npmRemove(
+  what: string,
+  npm: string | null = null,
+  cwd: string | null = null
+) {
  return new Promise((res, rej) => {
-    const node = getNode()
-    npm = npm || getNPM()
-    cwd = cwd || getCWD()
-    const uninstall = spawn(node, [npm, 'remove', '--save', what], { cwd })
+    const node = getNode();
+    npm = npm || getNPM();
+    cwd = cwd || getCWD();
+    const uninstall = spawn(node, [npm, "remove", "--save", what], { cwd });
 
-    uninstall.stdout.pipe(process.stdout)
-    uninstall.stderr.pipe(process.stderr)
+    uninstall.stdout.pipe(process.stdout);
+    uninstall.stderr.pipe(process.stderr);
 
-    uninstall.stderr.on('data', (data:any) => {
+    uninstall.stderr.on("data", (data: any) => {
      if (data.toString().match(/ERR!/)) {
        setTimeout(() => {
-          uninstall.kill('SIGINT')
-        }, 100)
+          uninstall.kill("SIGINT");
+        }, 100);
      }
    });
 
-    uninstall.on('close', (code:number|null) => {
+    uninstall.on("close", (code: number | null) => {
      if (code === null || code > 0) {
-        return rej('error during the uninstallation of the plugin')
+        return rej("error during the uninstallation of the plugin");
      }
-      res()
+      res();
    });
-  })
+  });
 }
 
 function getNode() {
  return process.argv[0]
-    .replace(/(node|nw)$/, 'node')
-    .replace(/(node|nw)\.exe$/, 'nodejs\\node.exe')
+    .replace(/(node|nw)$/, "node")
+    .replace(/(node|nw)\.exe$/, "nodejs\\node.exe");
 }
 
 function getNPM() {
  return process.argv[0]
-    .replace(/(node|nw)$/, 'npm')
-    .replace(/(node|nw)\.exe$/, 'nodejs\\node_modules\\npm\\bin\\npm-cli.js')
+    .replace(/(node|nw)$/, "npm")
+    .replace(/(node|nw)\.exe$/, "nodejs\\node_modules\\npm\\bin\\npm-cli.js");
 }
 
 function getCWD() {
-  return process.argv[1].replace(/bin\/duniter$/, '')
+  return process.argv[1].replace(/bin\/duniter$/, "");
 }
 
 async function checkNPMAccess() {
-  const hasReadWriteAccess = await getNPMAccess()
-  if (!hasReadWriteAccess) {
-    throw 'no write access on disk'
-  }
+  const hasReadWriteAccess = await getNPMAccess();
+  if (!hasReadWriteAccess) {
+    throw "no write access on disk";
+  }
 }
 
 async function getNPMAccess() {
-  const hasAccessToPackageJSON = await new Promise((res) => {
-    fs.access(path.join(__dirname, '/../../package.json'), fs.constants.R_OK | fs.constants.W_OK, (err:any) => {
-      res(!err)
-    })
-  })
-  const hasAccessToNodeModules = await new Promise((res) => {
-    fs.access(path.join(__dirname, '/../../node_modules'), fs.constants.R_OK | fs.constants.W_OK, (err:any) => {
-      res(!err)
-    })
-  })
-  console.log(hasAccessToPackageJSON, hasAccessToNodeModules)
-  return hasAccessToPackageJSON && hasAccessToNodeModules
+  const hasAccessToPackageJSON = await new Promise((res) => {
+    fs.access(
+      path.join(__dirname, "/../../package.json"),
+      fs.constants.R_OK | fs.constants.W_OK,
+      (err: any) => {
+        res(!err);
+      }
+    );
+  });
+  const hasAccessToNodeModules = await new Promise((res) => {
+    fs.access(
+      path.join(__dirname, "/../../node_modules"),
+      fs.constants.R_OK | fs.constants.W_OK,
+      (err: any) => {
+        res(!err);
+      }
+    );
+  });
+  console.log(hasAccessToPackageJSON, hasAccessToNodeModules);
+  return hasAccessToPackageJSON && hasAccessToNodeModules;
 }
diff --git a/app/modules/prover/index.ts b/app/modules/prover/index.ts
index 2aaa7a10f00fe237b900f9c93af3902b2bbcd00d..4ca5f1a495ef276c1ca9f8adf586d2b1a2c1d7fa 100644
--- a/app/modules/prover/index.ts
+++ b/app/modules/prover/index.ts
@@ -11,34 +11,38 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {ConfDTO} from "../../lib/dto/ConfDTO"
-import {BlockGenerator, BlockGeneratorWhichProves} from "./lib/blockGenerator"
-import {ProverConstants} from "./lib/constants"
-import {BlockProver} from "./lib/blockProver"
-import {Prover} from "./lib/prover"
-import {Contacter} from "../crawler/lib/contacter"
-import {parsers} from "../../lib/common-libs/parsers/index"
-import {PeerDTO} from "../../lib/dto/PeerDTO"
-import {Server} from "../../../server"
-import {BlockDTO} from "../../lib/dto/BlockDTO"
-import {DBIdentity} from "../../lib/dal/sqliteDAL/IdentityDAL"
+import { ConfDTO } from "../../lib/dto/ConfDTO";
+import {
+  BlockGenerator,
+  BlockGeneratorWhichProves,
+} from "./lib/blockGenerator";
+import { ProverConstants } from "./lib/constants";
+import { BlockProver } from "./lib/blockProver";
+import { Prover } from "./lib/prover";
+import { Contacter } from "../crawler/lib/contacter";
+import { parsers } from "../../lib/common-libs/parsers/index";
+import { PeerDTO } from "../../lib/dto/PeerDTO";
+import { Server } from "../../../server";
+import { BlockDTO } from "../../lib/dto/BlockDTO";
+import { DBIdentity } from "../../lib/dal/sqliteDAL/IdentityDAL";
 
-const async = require('async');
+const async = require("async");
 
 export const ProverDependency = {
-
  duniter: {
-
    /*********** Permanent prover **************/
    config: {
-      onLoading: async (conf:ConfDTO) => {
+      onLoading: async (conf: ConfDTO) => {
        if (conf.cpu === null || conf.cpu === undefined) {
          conf.cpu = ProverConstants.DEFAULT_CPU;
        }
        if (conf.nbCores === null || conf.nbCores === undefined) {
-          conf.nbCores = Math.min(ProverConstants.CORES_MAXIMUM_USE_IN_PARALLEL, require('os').cpus().length)
+          conf.nbCores = Math.min(
+            ProverConstants.CORES_MAXIMUM_USE_IN_PARALLEL,
+            require("os").cpus().length
+          );
        } else if (conf.nbCores <= 0) {
-          conf.nbCores = 1
+          conf.nbCores = 1;
        }
        if (conf.prefix === null || conf.prefix === undefined) {
          conf.prefix = ProverConstants.DEFAULT_PEER_ID;
@@ -46,195 +50,317 @@ export const ProverDependency = {
        conf.powSecurityRetryDelay = ProverConstants.POW_SECURITY_RETRY_DELAY;
        conf.powMaxHandicap = ProverConstants.POW_MAXIMUM_ACCEPTABLE_HANDICAP;
      },
-      beforeSave: async (conf:ConfDTO) => {
+      beforeSave: async (conf: ConfDTO) => {
        delete conf.powSecurityRetryDelay;
        delete conf.powMaxHandicap;
-      }
+      },
    },
    service: {
-      output: (server:Server) => {
+      output: (server: Server) => {
        const generator = new BlockGenerator(server);
-        server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator)
-        server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator)
-        server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator)
-        return new Prover(server)
-      }
+        server.generatorGetJoinData = generator.getSinglePreJoinData.bind(
+          generator
+        );
+        server.generatorComputeNewCerts = generator.computeNewCerts.bind(
+          generator
+        );
+        server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(
+          generator
+        );
+        return new Prover(server);
+      },
    },
    methods: {
-      hookServer: (server:Server) => {
+      hookServer: (server: Server) => {
        const generator = new BlockGenerator(server);
-        server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator)
-        server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator)
-        server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator)
+        server.generatorGetJoinData = generator.getSinglePreJoinData.bind(
+          generator
+        );
+        server.generatorComputeNewCerts = generator.computeNewCerts.bind(
+          generator
+        );
+        server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(
+          generator
+        );
      },
-      prover: (server:Server) => new Prover(server),
-      blockGenerator: (server:Server, prover:any) => new BlockGeneratorWhichProves(server, prover),
-      generateTheNextBlock: async (server:Server, manualValues:any) => {
+      prover: (server: Server) => new Prover(server),
+      blockGenerator: (server: Server, prover: any) =>
+        new BlockGeneratorWhichProves(server, prover),
+      generateTheNextBlock: async (server: Server, manualValues: any) => {
        const prover = new BlockProver(server);
        const generator = new BlockGeneratorWhichProves(server, prover);
        return generator.nextBlock(manualValues);
      },
-      generateAndProveTheNext: async (server:Server, block:any, trial:any, manualValues:any) => {
+      generateAndProveTheNext: async (
+        server: Server,
+        block: any,
+        trial: any,
+        manualValues: any
+      ) => {
        const prover = new BlockProver(server);
        const generator = new BlockGeneratorWhichProves(server, prover);
        let res = await generator.makeNextBlock(block, trial, manualValues);
-        return res
-      }
+        return res;
+      },
    },
    /*********** CLI gen-next + gen-root **************/
    cliOptions: [
-      {value: '--show', desc: 'With gen-* commands, displays the generated block.'},
-      {value: '--check', desc: 'With gen-* commands: just check validity of generated block.'},
-      {value: '--submit-local', desc: 'With gen-* commands: the generated block is submitted to this node only.'},
-      {value: '--submit-host <host>', desc: 'With gen-* commands: the generated block is submitted to `submit-host` node.'},
-      {value: '--submit-port <port>', desc: 'With gen-* commands: the generated block is submitted to `submit-host` node with port `submit-port`.'},
-      {value: '--at <medianTime>', desc: 'With gen-next --show --check: allows to try in a future time.', parser: parseInt }
+      {
+        value: "--show",
+        desc: "With gen-* commands, displays the generated block.",
+      },
+      {
+        value: "--check",
+        desc: "With gen-* commands: just check validity of generated block.",
+      },
+      {
+        value: "--submit-local",
+        desc:
+          "With gen-* commands: the generated block is submitted to this node only.",
+      },
+      {
+        value: "--submit-host <host>",
+        desc:
+          "With gen-* commands: the generated block is submitted to `submit-host` node.",
+      },
+      {
+        value: "--submit-port <port>",
+        desc:
+          "With gen-* commands: the generated block is submitted to `submit-host` node with port `submit-port`.",
+      },
+      {
+        value: "--at <medianTime>",
+        desc: "With gen-next --show --check: allows to try in a future time.",
+        parser: parseInt,
+      },
    ],
-    cli: [{
-      name: 'gen-next [difficulty]',
-      desc: 'Tries to generate the next block of the blockchain.',
-      onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        const difficulty = params[0]
-        const generator = new BlockGeneratorWhichProves(server, null);
-        return generateAndSend(program, difficulty, server, () => () => generator.nextBlock())
-      }
-    }, {
-      name: 'bc-resolve',
-      desc: 'Tries to resolve next blocks or forks.',
-      onDatabaseExecute: async () => {}
-    }, {
-      name: 'gen-root [difficulty]',
-      desc: 'Tries to generate the next block of the blockchain.',
-      preventIfRunning: true,
-      onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        const difficulty = params[0]
-        const generator = new BlockGeneratorWhichProves(server, null);
-        let toDelete:DBIdentity[] = [], catched = true;
-        do {
-          try {
-            await generateAndSend(program, difficulty, server, () => () => generator.nextBlock())
-            catched = false;
-          } catch (e) {
-            toDelete = await server.dal.idtyDAL.query('SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)');
-            console.log('Deleting', toDelete.map((i:any) => i.pubkey));
-            await server.dal.idtyDAL.exec('DELETE FROM idty WHERE pubkey IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')');
-            await server.dal.idtyDAL.exec('DELETE FROM cert WHERE `to` IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')');
-            await server.dal.idtyDAL.exec('DELETE FROM cert WHERE `from` IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')');
-          }
-        } while (catched && toDelete.length);
-        console.log('Done');
-      }
-    }, {
-      name: 'gen-root-choose [difficulty]',
-      desc: 'Tries to generate root block, with choice of root members.',
-      preventIfRunning: true,
-      onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => {
-        const difficulty = params[0]
-        if (!difficulty) {
-          throw 'Difficulty is required.';
-        }
-        const generator = new BlockGenerator(server);
-        return generateAndSend(program, difficulty, server, () => (): Promise<BlockDTO> => generator.manualRoot())
-      }
-    }]
-  }
-}
+    cli: [
+      {
+        name: "gen-next [difficulty]",
+        desc: "Tries to generate the next block of the blockchain.",
+        onDatabaseExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          const difficulty = params[0];
+          const generator = new BlockGeneratorWhichProves(server, null);
+          return generateAndSend(program, difficulty, server, () => () =>
+            generator.nextBlock()
+          );
+        },
+      },
+      {
+        name: "bc-resolve",
+        desc: "Tries to resolve next blocks or forks.",
+        onDatabaseExecute: async () => {},
+      },
+      {
+        name: "gen-root [difficulty]",
+        desc: "Tries to generate the next block of the blockchain.",
+        preventIfRunning: true,
+        onDatabaseExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          const difficulty = params[0];
+          const generator = new BlockGeneratorWhichProves(server, null);
+          let toDelete: DBIdentity[] = [],
+            catched = true;
+          do {
+            try {
+              await generateAndSend(program, difficulty, server, () => () =>
+                generator.nextBlock()
+              );
+              catched = false;
+            } catch (e) {
+              toDelete = await server.dal.idtyDAL.query(
+                "SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)"
+              );
+              console.log(
+                "Deleting",
+                toDelete.map((i: any) => i.pubkey)
+              );
+              await server.dal.idtyDAL.exec(
+                "DELETE FROM idty WHERE pubkey IN (" +
+                  toDelete.map((i: any) => "'" + i.pubkey + "'").join(",") +
+                  ")"
+              );
+              await server.dal.idtyDAL.exec(
+                "DELETE FROM cert WHERE `to` IN (" +
+                  toDelete.map((i: any) => "'" + i.pubkey + "'").join(",") +
+                  ")"
+              );
+              await server.dal.idtyDAL.exec(
+                "DELETE FROM cert WHERE `from` IN (" +
+                  toDelete.map((i: any) => "'" + i.pubkey + "'").join(",") +
+                  ")"
+              );
+            }
+          } while (catched && toDelete.length);
+          console.log("Done");
+        },
+      },
+      {
+        name: "gen-root-choose [difficulty]",
+        desc: "Tries to generate root block, with choice of root members.",
+        preventIfRunning: true,
+        onDatabaseExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          const difficulty = params[0];
+          if (!difficulty) {
+            throw "Difficulty is required.";
+          }
+          const generator = new BlockGenerator(server);
+          return generateAndSend(program, difficulty, server, () => (): Promise<
+            BlockDTO
+          > => generator.manualRoot());
+        },
+      },
+    ],
+  },
+};
 
-function generateAndSend(program:any, difficulty:string, server:Server, getGenerationMethod:any) {
+function generateAndSend(
+  program: any,
+  difficulty: string,
+  server: Server,
+  getGenerationMethod: any
+) {
  const logger = server.logger;
  return new Promise((resolve, reject) => {
    if (!program.submitLocal) {
      if (!program.submitHost) {
-        throw 'Option --submit-host is required.'
+        throw "Option --submit-host is required.";
      }
      if (!program.submitPort) {
-        throw 'Option --submit-port is required.'
+        throw "Option --submit-port is required.";
      }
      if (isNaN(parseInt(program.submitPort))) {
-        throw 'Option --submit-port must be a number.'
+        throw "Option --submit-port must be a number.";
      }
    }
-    async.waterfall([
-      function (next:any) {
-        const method = getGenerationMethod(server);
-        (async() => {
-          const simulationValues:any = {}
-          if (program.show && program.check) {
-            if (program.at && !isNaN(program.at)) {
-              simulationValues.medianTime = program.at
-            }
-          }
-          const block = await method(null, simulationValues);
-          next(null, block);
-        })()
-      },
-      function (block:any, next:any) {
-        if (program.check) {
-          block.time = block.medianTime;
-          program.show && console.log(block.getRawSigned());
-          (async() => {
-            try {
-              const parsed = parsers.parseBlock.syncWrite(block.getRawSigned());
-              await server.BlockchainService.checkBlock(parsed, false);
-              logger.info('Acceptable block');
-              next();
-            } catch (e) {
-              next(e);
-            }
-          })()
-        }
-        else {
-          logger.debug('Block to be sent: %s', block.getRawInnerPart());
-          async.waterfall([
-            function (subNext:any) {
-              proveAndSend(program, server, block, server.conf.pair.pub, parseInt(difficulty), subNext);
-            }
-          ], next);
-        }
-      }
-    ], (err:any, data:any) => {
-      err && reject(err);
-      !err && resolve(data);
-    });
+    async.waterfall(
+      [
+        function (next: any) {
+          const method = getGenerationMethod(server);
+          (async () => {
+            const simulationValues: any = {};
+            if (program.show && program.check) {
+              if (program.at && !isNaN(program.at)) {
+                simulationValues.medianTime = program.at;
+              }
+            }
+            const block = await method(null, simulationValues);
+            next(null, block);
+          })();
+        },
+        function (block: any, next: any) {
+          if (program.check) {
+            block.time = block.medianTime;
+            program.show && console.log(block.getRawSigned());
+            (async () => {
+              try {
+                const parsed = parsers.parseBlock.syncWrite(
+                  block.getRawSigned()
+                );
+                await server.BlockchainService.checkBlock(parsed, false);
+                logger.info("Acceptable block");
+                next();
+              } catch (e) {
+                next(e);
+              }
+            })();
+          } else {
+            logger.debug("Block to be sent: %s", block.getRawInnerPart());
+            async.waterfall(
+              [
+                function (subNext: any) {
+                  proveAndSend(
+                    program,
+                    server,
+                    block,
+                    server.conf.pair.pub,
+                    parseInt(difficulty),
+                    subNext
+                  );
+                },
+              ],
+              next
+            );
+          }
+        },
+      ],
+      (err: any, data: any) => {
+        err && reject(err);
+        !err && resolve(data);
+      }
+    );
  });
 }
 
-function proveAndSend(program:any, server:Server, block:any, issuer:any, difficulty:any, done:any) {
+function proveAndSend(
+  program: any,
+  server: Server,
+  block: any,
+  issuer: any,
+  difficulty: any,
+  done: any
+) {
  const logger = server.logger;
-  async.waterfall([
-    function (next:any) {
-      block.issuer = issuer;
-      program.show && console.log(block.getRawSigned());
-      (async () => {
-        try {
-          const host:string = program.submitHost
-          const port:string = program.submitPort
-          const trialLevel = isNaN(difficulty) ? await server.getBcContext().getIssuerPersonalizedDifficulty(server.PeeringService.selfPubkey) : difficulty
-          const prover = new BlockProver(server);
-          const proven = await prover.prove(block, trialLevel);
-          if (program.submitLocal) {
-            await server.writeBlock(proven)
-            next()
-          } else {
-            const peer = PeerDTO.fromJSONObject({
-              endpoints: [['BASIC_MERKLED_API', host, port].join(' ')]
-            });
-            program.show && console.log(proven.getRawSigned());
-            logger.info('Posted block ' + proven.getRawSigned());
-            const p = PeerDTO.fromJSONObject(peer);
-            const contact = new Contacter(p.getHostPreferDNS(), p.getPort() as number);
-            await contact.postBlock(proven.getRawSigned());
-            next()
-          }
-        } catch(e) {
-          next(e);
-        }
-      })()
-    }
-  ], done);
+  async.waterfall(
+    [
+      function (next: any) {
+        block.issuer = issuer;
+        program.show && console.log(block.getRawSigned());
+        (async () => {
+          try {
+            const host: string = program.submitHost;
+            const port: string = program.submitPort;
+            const trialLevel = isNaN(difficulty)
+              ? await server
+                  .getBcContext()
+                  .getIssuerPersonalizedDifficulty(
+                    server.PeeringService.selfPubkey
+                  )
+              : difficulty;
+            const prover = new BlockProver(server);
+            const proven = await prover.prove(block, trialLevel);
+            if (program.submitLocal) {
+              await server.writeBlock(proven);
+              next();
+            } else {
+              const peer = PeerDTO.fromJSONObject({
+                endpoints: [["BASIC_MERKLED_API", host, port].join(" ")],
+              });
+              program.show && console.log(proven.getRawSigned());
+              logger.info("Posted block " + proven.getRawSigned());
+              const p = PeerDTO.fromJSONObject(peer);
+              const contact = new Contacter(
+                p.getHostPreferDNS(),
+                p.getPort() as number
+              );
+              await contact.postBlock(proven.getRawSigned());
+              next();
+            }
+          } catch (e) {
+            next(e);
+          }
+        })();
+      },
+    ],
+    done
+  );
 }
diff --git a/app/modules/prover/lib/PowWorker.ts b/app/modules/prover/lib/PowWorker.ts
index 4c6df8ce83143d23cafb5e64c2b8b822d32e246a..f8cbfea9bcdae27ca3f9a65372acdf49b28ed242 100644
--- a/app/modules/prover/lib/PowWorker.ts
+++ b/app/modules/prover/lib/PowWorker.ts
@@ -11,9 +11,9 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {Querable} from "../../../lib/common-libs/querable"
+import { Querable } from "../../../lib/common-libs/querable";
 
-const querablep = require('querablep')
+const querablep = require("querablep");
 
 /*********
  *
@@ -31,78 +31,88 @@ const querablep = require('querablep')
  ********/
 
 export class PowWorker {
+  private onlinePromise: Promise<void>;
+  private onlineResolver: () => void;
-  private onlinePromise:Promise<void>
-  private onlineResolver:()=>void
+  private exitPromise: Promise<void>;
+  private exitResolver: () => void;
-  private exitPromise:Promise<void>
-  private exitResolver:()=>void
+  private proofPromise: Querable<{ message: { answer: any } } | null>;
+  private proofResolver: (proof: { message: { answer: any } } | null) => void;
-  private proofPromise:Querable<{ message: { answer:any }}|null>
-  private proofResolver:(proof:{ message: { answer:any }}|null)=>void
-
-  private messageHandler:((worker:any, msg:any)=>void)
+  private messageHandler: (worker: any, msg: any) => void;
 
   constructor(
-    private nodejsWorker:any,
-    private onPowMessage:(message:any)=>void,
-    private onlineHandler:()=>void,
-    private exitHandler:(code:any, signal:any)=>void) {
-
+    private nodejsWorker: any,
+    private onPowMessage: (message: any) => void,
+    private onlineHandler: () => void,
+    private exitHandler: (code: any, signal: any) => void
+  ) {
     // Handle "online" promise
-    this.onlinePromise = new Promise(res => this.onlineResolver = res)
-    nodejsWorker.on('online', () => {
-      this.onlineHandler()
-      this.onlineResolver()
-    })
+    this.onlinePromise = new Promise((res) => (this.onlineResolver = res));
+    nodejsWorker.on("online", () => {
+      this.onlineHandler();
+      this.onlineResolver();
+    });
 
     // Handle "exit" promise
-    this.exitPromise = new Promise(res => this.exitResolver = res)
-    nodejsWorker.on('exit', (code:any, signal:any) => {
-      this.exitHandler(code, signal)
-      this.exitResolver()
-    })
+    this.exitPromise = new Promise((res) => (this.exitResolver = res));
+    nodejsWorker.on("exit", (code: any, signal: any) => {
+      this.exitHandler(code, signal);
+      this.exitResolver();
+    });
 
-    nodejsWorker.on('message', (message:any) => {
+    nodejsWorker.on("message", (message: any) => {
       if (message) {
-        this.onPowMessage(message)
+        this.onPowMessage(message);
       }
-      if (this.proofPromise && message.uuid && !this.proofPromise.isResolved() && this.proofResolver) {
-        const result:{ message: { answer:any }}|null = message ? { message } : null
-        this.proofResolver(result)
+      if (
+        this.proofPromise &&
+        message.uuid &&
+        !this.proofPromise.isResolved() &&
+        this.proofResolver
+      ) {
+        const result: { message: { answer: any } } | null = message
+          ? { message }
+          : null;
+        this.proofResolver(result);
       }
-    })
+    });
   }
 
   get online() {
-    return this.onlinePromise
+    return this.onlinePromise;
   }
 
   get exited() {
-    return this.exitPromise
+    return this.exitPromise;
   }
 
   get pid() {
-    return this.nodejsWorker.process.pid
+    return this.nodejsWorker.process.pid;
   }
 
-  askProof(commandMessage:{ uuid:string, command:string, value:any }) {
-    this.proofPromise = querablep(new Promise<{ message: { answer:any }}|null>(res => this.proofResolver = res))
-    this.nodejsWorker.send(commandMessage)
-    return this.proofPromise
+  askProof(commandMessage: { uuid: string; command: string; value: any }) {
+    this.proofPromise = querablep(
+      new Promise<{ message: { answer: any } } | null>(
+        (res) => (this.proofResolver = res)
+      )
+    );
+    this.nodejsWorker.send(commandMessage);
+    return this.proofPromise;
   }
 
-  sendConf(confMessage:{ rootPath: string, command:string, value:any }) {
-    this.nodejsWorker.send(confMessage)
+  sendConf(confMessage: { rootPath: string; command: string; value: any }) {
+    this.nodejsWorker.send(confMessage);
   }
 
   sendCancel() {
     this.nodejsWorker.send({
-      command: 'cancel'
-    })
+      command: "cancel",
+    });
   }
 
   kill() {
-    this.nodejsWorker.kill()
+    this.nodejsWorker.kill();
   }
-}
\ No newline at end of file
+}
diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts
index 92dbd81e321ced5b8bebb3cb93e2264dc70bd1c5..58917e2f1c99ed937c82f8b61a94979e0fe7051c 100644
--- a/app/modules/prover/lib/blockGenerator.ts
+++ b/app/modules/prover/lib/blockGenerator.ts
@@ -11,86 +11,89 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import * as moment from "moment"
-import {Server} from "../../../../server"
-import {BlockchainContext} from "../../../lib/computation/BlockchainContext"
-import {TransactionDTO} from "../../../lib/dto/TransactionDTO"
-import {GLOBAL_RULES_HELPERS} from "../../../lib/rules/global_rules"
-import {LOCAL_RULES_HELPERS} from "../../../lib/rules/local_rules"
-import {Indexer} from "../../../lib/indexer"
-import {DBBlock} from "../../../lib/db/DBBlock"
-import {verify} from "duniteroxyde"
-import {rawer} from "../../../lib/common-libs/index"
-import {hashf} from "../../../lib/common"
-import {CommonConstants} from "../../../lib/common-libs/constants"
-import {IdentityDTO} from "../../../lib/dto/IdentityDTO"
-import {CertificationDTO} from "../../../lib/dto/CertificationDTO"
-import {MembershipDTO} from "../../../lib/dto/MembershipDTO"
-import {BlockDTO} from "../../../lib/dto/BlockDTO"
-import {ConfDTO} from "../../../lib/dto/ConfDTO"
-import {FileDAL} from "../../../lib/dal/fileDAL"
-import {DataErrors} from "../../../lib/common-libs/errors"
-import {Underscore} from "../../../lib/common-libs/underscore"
-import {DBCert} from "../../../lib/dal/sqliteDAL/CertDAL"
-import {Map} from "../../../lib/common-libs/crypto/map"
-
-const inquirer = require('inquirer');
-
-const constants = CommonConstants
+import * as moment from "moment";
+import { Server } from "../../../../server";
+import { BlockchainContext } from "../../../lib/computation/BlockchainContext";
+import { TransactionDTO } from "../../../lib/dto/TransactionDTO";
+import { GLOBAL_RULES_HELPERS } from "../../../lib/rules/global_rules";
+import { LOCAL_RULES_HELPERS } from "../../../lib/rules/local_rules";
+import { Indexer } from "../../../lib/indexer";
+import { DBBlock } from "../../../lib/db/DBBlock";
+import { verify } from "duniteroxyde";
+import { rawer } from "../../../lib/common-libs/index";
+import { hashf } from "../../../lib/common";
+import { CommonConstants } from "../../../lib/common-libs/constants";
+import { IdentityDTO } from "../../../lib/dto/IdentityDTO";
+import { CertificationDTO } from "../../../lib/dto/CertificationDTO";
+import { MembershipDTO } from "../../../lib/dto/MembershipDTO";
+import { BlockDTO } from "../../../lib/dto/BlockDTO";
+import { ConfDTO } from "../../../lib/dto/ConfDTO";
+import { FileDAL } from "../../../lib/dal/fileDAL";
+import { DataErrors } from "../../../lib/common-libs/errors";
+import { Underscore } from "../../../lib/common-libs/underscore";
+import { DBCert } from "../../../lib/dal/sqliteDAL/CertDAL";
+import { Map } from "../../../lib/common-libs/crypto/map";
+
+const inquirer = require("inquirer");
+
+const constants = CommonConstants;
 
 export interface PreJoin {
   identity: {
-    pubkey: string
-    uid: string
-    buid: string
-    sig: string
-    member: boolean
-    wasMember: boolean
-    revoked: boolean
-  }
-  key: null
-  idHash: string
-  certs: DBCert[]
-  ms: any
+    pubkey: string;
+    uid: string;
+    buid: string;
+    sig: string;
+    member: boolean;
+    wasMember: boolean;
+    revoked: boolean;
+  };
+  key: null;
+  idHash: string;
+  certs: DBCert[];
+  ms: any;
 }
 
 interface LeaveData {
   identity: {
-    member: boolean
-  } | null
-  ms: any
-  key: any
-  idHash: string
+    member: boolean;
+  } | null;
+  ms: any;
+  key: any;
+  idHash: string;
 }
 
 export class BlockGenerator {
+  mainContext: BlockchainContext;
+  selfPubkey: string;
+  logger: any;
-  mainContext:BlockchainContext
-  selfPubkey:string
-  logger:any
-
-  constructor(private server:Server) {
+  constructor(private server: Server) {
     this.mainContext = server.BlockchainService.getContext();
-    this.selfPubkey = (this.conf.pair && this.conf.pair.pub) || ''
+    this.selfPubkey = (this.conf.pair && this.conf.pair.pub) || "";
     this.logger = server.logger;
   }
 
   get conf(): ConfDTO {
-    return this.server.conf
+    return this.server.conf;
   }
 
   get dal(): FileDAL {
-    return this.server.dal
+    return this.server.dal;
  }
 
-  nextBlock(manualValues:any = {}, simulationValues:any = {}) {
-    return this.generateNextBlock(new NextBlockGenerator(this.mainContext, this.server, this.logger), manualValues, simulationValues)
+  nextBlock(manualValues: any = {}, simulationValues: any = {}) {
+    return this.generateNextBlock(
+      new NextBlockGenerator(this.mainContext, this.server, this.logger),
+      manualValues,
+      simulationValues
+    );
   }
 
   async manualRoot() {
-    let current = await this.dal.getCurrentBlockOrNull()
+    let current = await this.dal.getCurrentBlockOrNull();
     if (current) {
-      throw 'Cannot generate root block: it already exists.';
+      throw "Cannot generate root block: it already exists.";
     }
     return this.generateNextBlock(new ManualRootGenerator());
   }
@@ -98,67 +101,115 @@ export class BlockGenerator {
   /**
    * Generate next block, gathering both updates & newcomers
    */
-  private async generateNextBlock(generator:BlockGeneratorInterface, manualValues:any = null, simulationValues:any = null) {
-    const vHEAD_1 = await this.mainContext.getvHEAD_1()
+  private async generateNextBlock(
+    generator: BlockGeneratorInterface,
+    manualValues: any = null,
+    simulationValues: any = null
+  ) {
+    const vHEAD_1 = await this.mainContext.getvHEAD_1();
     if (simulationValues && simulationValues.medianTime) {
-      vHEAD_1.medianTime = simulationValues.medianTime
+      vHEAD_1.medianTime = simulationValues.medianTime;
     }
     const current = await this.dal.getCurrentBlockOrNull();
-    const blockVersion = (manualValues && manualValues.version) || (await LOCAL_RULES_HELPERS.getMaxPossibleVersionNumber(current, this.dal))
+    const blockVersion =
+      (manualValues && manualValues.version) ||
+      (await LOCAL_RULES_HELPERS.getMaxPossibleVersionNumber(
+        current,
+        this.dal
+      ));
     const revocations = await this.dal.getRevocatingMembers();
     const exclusions = await this.dal.getToBeKickedPubkeys();
     const wereExcludeds = await this.dal.getRevokedPubkeys();
     const newCertsFromWoT = await generator.findNewCertsFromWoT(current);
-    const newcomers = await this.findNewcomers(current, joinersData => generator.filterJoiners(joinersData))
-    const leavers = await this.findLeavers(current)
+    const newcomers = await this.findNewcomers(current, (joinersData) =>
+      generator.filterJoiners(joinersData)
+    );
+    const leavers = await this.findLeavers(current);
     const transactions = await this.findTransactions(current, manualValues);
-    const certifiersOfNewcomers = Underscore.uniq(Underscore.keys(newcomers).reduce((theCertifiers, newcomer:string) => {
-      return theCertifiers.concat(Underscore.pluck(newcomers[newcomer].certs, 'from'));
-    }, <string[]>[]))
+    const certifiersOfNewcomers = Underscore.uniq(
+      Underscore.keys(newcomers).reduce((theCertifiers, newcomer: string) => {
+        return theCertifiers.concat(
+          Underscore.pluck(newcomers[newcomer].certs, "from")
+        );
+      }, <string[]>[])
+    );
     // Merges updates
-    Underscore.keys(newCertsFromWoT).forEach(function(certified:string){
-      newCertsFromWoT[certified] = newCertsFromWoT[certified].filter((cert:any) => {
-        // Must not certify a newcomer, since it would mean multiple certifications at same time from one member
-        const isCertifier = certifiersOfNewcomers.indexOf(cert.from) != -1;
-        if (!isCertifier) {
-          certifiersOfNewcomers.push(cert.from);
+    Underscore.keys(newCertsFromWoT).forEach(function (certified: string) {
+      newCertsFromWoT[certified] = newCertsFromWoT[certified].filter(
+        (cert: any) => {
+          // Must not certify a newcomer, since it would mean multiple certifications at same time from one member
+          const isCertifier = certifiersOfNewcomers.indexOf(cert.from) != -1;
+          if (!isCertifier) {
+            certifiersOfNewcomers.push(cert.from);
+          }
+          return !isCertifier;
         }
-        return !isCertifier;
-      });
+      );
     });
     // Create the block
-    return this.createBlock(blockVersion, current, newcomers, leavers, newCertsFromWoT, revocations, exclusions, wereExcludeds, transactions, manualValues);
+    return this.createBlock(
+      blockVersion,
+      current,
+      newcomers,
+      leavers,
+      newCertsFromWoT,
+      revocations,
+      exclusions,
+      wereExcludeds,
+      transactions,
+      manualValues
+    );
   }
 
-  private async findTransactions(current:DBBlock|null, options:{ dontCareAboutChaining?:boolean }) {
+  private async findTransactions(
+    current: DBBlock | null,
+    options: { dontCareAboutChaining?: boolean }
+  ) {
     if (!current) {
-      return []
+      return [];
     }
-    const versionMin = current ? Math.min(CommonConstants.LAST_VERSION_FOR_TX, current.version) : CommonConstants.DOCUMENTS_VERSION;
+    const versionMin = current
+      ? Math.min(CommonConstants.LAST_VERSION_FOR_TX, current.version)
+      : CommonConstants.DOCUMENTS_VERSION;
     const txs = await this.dal.getTransactionsPending(versionMin);
     const transactions = [];
-    const passingTxs:any[] = [];
-    const medianTime = current ? current.medianTime : 0
+    const passingTxs: any[] = [];
+    const medianTime = current ? current.medianTime : 0;
     for (const obj of txs) {
-      obj.currency = this.conf.currency
+      obj.currency = this.conf.currency;
       const tx = TransactionDTO.fromJSONObject(obj);
       try {
-        await LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), this.conf, medianTime, options)
+        await LOCAL_RULES_HELPERS.checkBunchOfTransactions(
+          passingTxs.concat(tx),
+          this.conf,
+          medianTime,
+          options
+        );
         const fakeTimeVariation = current.medianTime + 1;
-        await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, current.version, fakeTimeVariation, this.conf, this.dal, async (txHash:string) => {
-          return Underscore.findWhere(passingTxs, { hash: txHash }) || null
-        });
+        await GLOBAL_RULES_HELPERS.checkSingleTransaction(
+          tx,
+          current.version,
+          fakeTimeVariation,
+          this.conf,
+          this.dal,
+          async (txHash: string) => {
+            return Underscore.findWhere(passingTxs, { hash: txHash }) || null;
+          }
+        );
        await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal);
        transactions.push(tx);
        passingTxs.push(tx);
-        this.logger.info('Transaction %s added to block', tx.hash);
+        this.logger.info("Transaction %s added to block", tx.hash);
       } catch (err) {
         this.logger.error(err);
         const currentNumber = (current && current.number) || 0;
-        const blockstamp = tx.blockstamp || (currentNumber + '-');
-        const txBlockNumber = parseInt(blockstamp.split('-')[0]);
+        const blockstamp = tx.blockstamp || currentNumber + "-";
+        const txBlockNumber = parseInt(blockstamp.split("-")[0]);
         // X blocks before removing the transaction
-        if (currentNumber - txBlockNumber + 1 >= CommonConstants.TRANSACTION_MAX_TRIES) {
+        if (
+          currentNumber - txBlockNumber + 1 >=
+          CommonConstants.TRANSACTION_MAX_TRIES
+        ) {
           await this.dal.removeTxByHash(tx.hash);
         }
       }
@@ -166,25 +217,53 @@ export class BlockGenerator {
     return transactions;
   }
 
-  private async findLeavers(current:DBBlock|null) {
-    const leaveData: { [pub:string]: { identity: { member:boolean }|null, ms: any, key: any, idHash: string } } = {};
-    const memberships = await this.dal.findLeavers((current && current.medianTime) || 0)
-    const leavers:string[] = [];
-    memberships.forEach((ms:any) => leavers.push(ms.issuer));
+  private async findLeavers(current: DBBlock | null) {
+    const leaveData: {
+      [pub: string]: {
+        identity: { member: boolean } | null;
+        ms: any;
+        key: any;
+        idHash: string;
+      };
+    } = {};
+    const memberships = await this.dal.findLeavers(
+      (current && current.medianTime) || 0
+    );
+    const leavers: string[] = [];
+    memberships.forEach((ms: any) => leavers.push(ms.issuer));
     for (const ms of memberships) {
-      const leave: { identity: { member:boolean }|null, ms: any, key: any, idHash: string } = { identity: null, ms: ms, key: null, idHash: '' };
-      leave.idHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase();
+      const leave: {
+        identity: { member: boolean } | null;
+        ms: any;
+        key: any;
+        idHash: string;
+      } = { identity: null, ms: ms, key: null, idHash: "" };
+      leave.idHash = (
+        hashf(ms.userid + ms.certts + ms.issuer) + ""
+      ).toUpperCase();
       let block;
       if (current) {
-        block = await this.dal.getAbsoluteValidBlockInForkWindowByBlockstamp(ms.block)
-      }
-      else {
+        block = await this.dal.getAbsoluteValidBlockInForkWindowByBlockstamp(
+          ms.block
+        );
+      } else {
         block = {};
       }
-      const identity = await this.dal.getGlobalIdentityByHashForIsMember(leave.idHash)
-      const currentMembership = await this.dal.mindexDAL.getReducedMSForImplicitRevocation(ms.issuer);
-      const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
-      if (identity && block && currentMSN < leave.ms.number && identity.member) {
+      const identity = await this.dal.getGlobalIdentityByHashForIsMember(
+        leave.idHash
+      );
+      const currentMembership = await this.dal.mindexDAL.getReducedMSForImplicitRevocation(
+        ms.issuer
+      );
+      const currentMSN = currentMembership
+        ? parseInt(currentMembership.created_on)
+        : -1;
+      if (
+        identity &&
+        block &&
+        currentMSN < leave.ms.number &&
+        identity.member
+      ) {
         // MS + matching cert are found
         leave.identity = identity;
         leaveData[identity.pub] = leave;
@@ -193,34 +272,51 @@ export class BlockGenerator {
     return leaveData;
   }
 
-  private async findNewcomers(current:DBBlock|null, filteringFunc: (joinData: Map<PreJoin>) => Promise<Map<PreJoin>>) {
+  private async findNewcomers(
+    current: DBBlock | null,
+    filteringFunc: (joinData: Map<PreJoin>) => Promise<Map<PreJoin>>
+  ) {
     const preJoinData = await this.getPreJoinData(current);
     const joinData = await filteringFunc(preJoinData);
     const members = await this.dal.getMembers();
-    const wotMembers = Underscore.pluck(members, 'pubkey');
+    const wotMembers = Underscore.pluck(members, "pubkey");
     // Checking step
-    let newcomers = Underscore.keys(joinData).map(String)
-    newcomers = Underscore.shuffle(newcomers)
+    let newcomers = Underscore.keys(joinData).map(String);
+    newcomers = Underscore.shuffle(newcomers);
     const nextBlockNumber = current ? current.number + 1 : 0;
     try {
-      const realNewcomers = await this.iteratedChecking(newcomers, async (someNewcomers:string[]) => {
-        const nextBlock = {
-          number: nextBlockNumber,
-          joiners: someNewcomers,
-          identities: Underscore.where(newcomers.map((pub:string) => joinData[pub].identity), { wasMember: false }).map((idty:any) => idty.pubkey)
-        };
-        const theNewLinks = await this.computeNewLinks(nextBlockNumber, someNewcomers, joinData)
-        await this.checkWoTConstraints(nextBlock, theNewLinks, current);
-      })
-      const newLinks = await this.computeNewLinks(nextBlockNumber, realNewcomers, joinData)
+      const realNewcomers = await this.iteratedChecking(
+        newcomers,
+        async (someNewcomers: string[]) => {
+          const nextBlock = {
+            number: nextBlockNumber,
+            joiners: someNewcomers,
+            identities: Underscore.where(
+              newcomers.map((pub: string) => joinData[pub].identity),
+              { wasMember: false }
+            ).map((idty: any) => idty.pubkey),
+          };
+          const theNewLinks = await this.computeNewLinks(
+            nextBlockNumber,
+            someNewcomers,
+            joinData
+          );
+          await this.checkWoTConstraints(nextBlock, theNewLinks, current);
+        }
+      );
+      const newLinks = await this.computeNewLinks(
+        nextBlockNumber,
+        realNewcomers,
+        joinData
+      );
       const newWoT = wotMembers.concat(realNewcomers);
-      const finalJoinData: { [pub:string]: PreJoin } = {};
-      realNewcomers.forEach((newcomer:string) => {
+      const finalJoinData: { [pub: string]: PreJoin } = {};
+      realNewcomers.forEach((newcomer: string) => {
         // Only keep membership of selected newcomers
         finalJoinData[newcomer] = joinData[newcomer];
         // Only keep certifications from final members
-        const keptCerts:any[] = [];
-        joinData[newcomer].certs.forEach((cert:any) => {
+        const keptCerts: any[] = [];
+        joinData[newcomer].certs.forEach((cert: any) => {
           const issuer = cert.from;
           if (~newWoT.indexOf(issuer) && ~newLinks[cert.to].indexOf(issuer)) {
             keptCerts.push(cert);
@@ -228,18 +324,24 @@ export class BlockGenerator {
         });
         joinData[newcomer].certs = keptCerts;
       });
-      return finalJoinData
-    } catch(err) {
+      return finalJoinData;
+    } catch (err) {
       this.logger.error(err);
       throw err;
     }
   }
 
-  private async checkWoTConstraints(block:{ number:number, joiners:string[], identities:string[] }, newLinks:any, current:DBBlock|null) {
+  private async checkWoTConstraints(
+    block: { number: number; joiners: string[]; identities: string[] },
+    newLinks: any,
+    current: DBBlock | null
+  ) {
     if (block.number < 0) {
-      throw 'Cannot compute WoT constraint for negative block number';
+      throw "Cannot compute WoT constraint for negative block number";
     }
-    const newcomers = block.joiners.map((inlineMS:string) => inlineMS.split(':')[0]);
+    const newcomers = block.joiners.map(
+      (inlineMS: string) => inlineMS.split(":")[0]
+    );
     const realNewcomers = block.identities;
     for (const newcomer of newcomers) {
       if (block.number > 0) {
@@ -247,9 +349,18 @@ export class BlockGenerator {
           // Will throw an error if not enough links
           await this.mainContext.checkHaveEnoughLinks(newcomer, newLinks);
           // This one does not throw but returns a boolean
-          const isOut = await GLOBAL_RULES_HELPERS.isOver3Hops(newcomer, newLinks, realNewcomers, current, this.conf, this.dal);
+          const isOut = await GLOBAL_RULES_HELPERS.isOver3Hops(
+            newcomer,
+            newLinks,
+            realNewcomers,
+            current,
+            this.conf,
+            this.dal
+          );
           if (isOut) {
-            throw 'Key ' + newcomer + ' is not recognized by the WoT for this block';
+            throw (
+              "Key " + newcomer + " is not recognized by the WoT for this block"
+            );
           }
         } catch (e) {
           this.logger.debug(e);
@@ -259,8 +370,11 @@ export class BlockGenerator {
     }
   }
 
-  private async iteratedChecking(newcomers:string[], checkWoTForNewcomers: (someNewcomers:string[]) => Promise<void>): Promise<string[]> {
-    const passingNewcomers:string[] = []
+  private async iteratedChecking(
+    newcomers: string[],
+    checkWoTForNewcomers: (someNewcomers: string[]) => Promise<void>
+  ): Promise<string[]> {
+    const passingNewcomers: string[] = [];
     let hadError = false;
     for (const newcomer of newcomers) {
       try {
@@ -271,39 +385,62 @@ export class BlockGenerator {
       }
     }
     if (hadError) {
-      return await this.iteratedChecking(passingNewcomers, checkWoTForNewcomers);
+      return await this.iteratedChecking(
+        passingNewcomers,
+        checkWoTForNewcomers
+      );
     } else {
       return passingNewcomers;
     }
   }
 
-  private async getPreJoinData(current:DBBlock|null) {
-    const preJoinData:{ [k:string]: PreJoin } = {}
-    const memberships = await this.dal.findNewcomers((current && current.medianTime) || 0)
-    const joiners:string[] = [];
-    memberships.forEach((ms:any) => joiners.push(ms.issuer));
+  private async getPreJoinData(current: DBBlock | null) {
+    const preJoinData: { [k: string]: PreJoin } = {};
+    const memberships = await this.dal.findNewcomers(
+      (current && current.medianTime) || 0
+    );
+    const joiners: string[] = [];
+    memberships.forEach((ms: any) => joiners.push(ms.issuer));
     for (const ms of memberships) {
       try {
         if (ms.block !== CommonConstants.SPECIAL_BLOCK) {
-          let msBasedBlock = await this.dal.getAbsoluteValidBlockInForkWindow(ms.blockNumber, ms.blockHash)
+          let msBasedBlock = await this.dal.getAbsoluteValidBlockInForkWindow(
+            ms.blockNumber,
+            ms.blockHash
+          );
          if (!msBasedBlock) {
            throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK;
          }
          if (!current) {
-            throw Error(DataErrors[DataErrors.CANNOT_DETERMINATE_MEMBERSHIP_AGE])
+            throw Error(
+              DataErrors[DataErrors.CANNOT_DETERMINATE_MEMBERSHIP_AGE]
+            );
          }
          let age = current.medianTime - msBasedBlock.medianTime;
          if (age > this.conf.msWindow) {
            throw constants.ERRORS.TOO_OLD_MEMBERSHIP;
          }
        }
-        const idtyHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase();
-        const join = await this.getSinglePreJoinData(current, idtyHash, joiners);
+        const idtyHash = (
+          hashf(ms.userid + ms.certts + ms.issuer) + ""
+        ).toUpperCase();
+        const join = await this.getSinglePreJoinData(
+          current,
+          idtyHash,
+          joiners
+        );
        join.ms = ms;
-        const currentMembership = await this.dal.mindexDAL.getReducedMSForImplicitRevocation(ms.issuer);
-        const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
+        const currentMembership = await this.dal.mindexDAL.getReducedMSForImplicitRevocation(
+          ms.issuer
+        );
+        const currentMSN = currentMembership
+          ? parseInt(currentMembership.created_on)
+          : -1;
        if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) {
-          if (!preJoinData[join.identity.pubkey] || preJoinData[join.identity.pubkey].certs.length < join.certs.length) {
+          if (
+            !preJoinData[join.identity.pubkey] ||
+            preJoinData[join.identity.pubkey].certs.length < join.certs.length
+          ) {
            preJoinData[join.identity.pubkey] = join;
          }
        }
@@ -316,22 +453,31 @@ export class BlockGenerator {
     return preJoinData;
   }
 
-  private async computeNewLinks(forBlock:number, theNewcomers:any, joinData:Map<PreJoin>) {
+  private async computeNewLinks(
+    forBlock: number,
+    theNewcomers: any,
+    joinData: Map<PreJoin>
+  ) {
     let newCerts = await this.computeNewCerts(forBlock, theNewcomers, joinData);
     return this.newCertsToLinks(newCerts);
   }
 
-  newCertsToLinks(newCerts:Map<DBCert[]>) {
-    let newLinks: Map<string[]> = {}
+  newCertsToLinks(newCerts: Map<DBCert[]>) {
+    let newLinks: Map<string[]> = {};
     for (const pubkey of Underscore.keys(newCerts)) {
-      newLinks[pubkey] = Underscore.pluck(newCerts[pubkey], 'from')
+      newLinks[pubkey] = Underscore.pluck(newCerts[pubkey], "from");
     }
-    return newLinks
+    return newLinks;
   }
 
-  async computeNewCerts(forBlock:number, theNewcomers:any, joinData:Map<PreJoin>) {
-    const newCerts:Map<DBCert[]> = {}, certifiers:string[] = []
-    const certsByKey = Underscore.mapObjectByProp(joinData, 'certs')
+  async computeNewCerts(
+    forBlock: number,
+    theNewcomers: any,
+    joinData: Map<PreJoin>
+  ) {
+    const newCerts: Map<DBCert[]> = {},
+      certifiers: string[] = [];
+    const certsByKey = Underscore.mapObjectByProp(joinData, "certs");
     for (const newcomer of theNewcomers) {
       // New array of certifiers
       newCerts[newcomer] = newCerts[newcomer] || [];
@@ -344,7 +490,7 @@ export class BlockGenerator {
           newCerts[newcomer].push(cert);
           certifiers.push(cert.from);
         } else {
-          let isMember = await this.dal.isMember(cert.from)
+          let isMember = await this.dal.isMember(cert.from);
           // Member to newcomer => valid link
           if (isMember) {
             newCerts[newcomer].push(cert);
@@ -354,23 +500,35 @@ export class BlockGenerator {
         }
       }
     }
-    return newCerts
+    return newCerts;
   }
 
-  async getSinglePreJoinData(current:DBBlock|null, idHash:string, joiners:string[]): Promise<PreJoin> {
-    const identity = await this.dal.getGlobalIdentityByHashForJoining(idHash)
+  async getSinglePreJoinData(
+    current: DBBlock | null,
+    idHash: string,
+    joiners: string[]
+  ): Promise<PreJoin> {
+    const identity = await this.dal.getGlobalIdentityByHashForJoining(idHash);
     let foundCerts = [];
     const vHEAD_1 = await this.mainContext.getvHEAD_1();
     if (!identity) {
-      throw 'Identity with hash \'' + idHash + '\' not found';
+      throw "Identity with hash '" + idHash + "' not found";
    }
-    if (current && identity.buid == CommonConstants.SPECIAL_BLOCK && !identity.wasMember) {
+    if (
+      current &&
+      identity.buid == CommonConstants.SPECIAL_BLOCK &&
+      !identity.wasMember
+    ) {
      throw constants.ERRORS.TOO_OLD_IDENTITY;
-    }
-    else if (!identity.wasMember && identity.buid != CommonConstants.SPECIAL_BLOCK) {
-      const idtyBasedBlock = await this.dal.getTristampOf(parseInt(identity.buid.split('-')[0]))
+    } else if (
+      !identity.wasMember &&
+      identity.buid != CommonConstants.SPECIAL_BLOCK
+    ) {
+      const idtyBasedBlock = await this.dal.getTristampOf(
+        parseInt(identity.buid.split("-")[0])
+      );
      if (!current || !idtyBasedBlock) {
-        throw Error(DataErrors[DataErrors.CANNOT_DETERMINATE_IDENTITY_AGE])
+        throw Error(DataErrors[DataErrors.CANNOT_DETERMINATE_IDENTITY_AGE]);
      }
      const age = current.medianTime - idtyBasedBlock.medianTime;
      if (age > this.conf.idtyWindow) {
@@ -389,9 +547,9 @@ export class BlockGenerator {
     if (!current) {
       // Look for certifications from initial joiners
       const certs = await this.dal.certsNotLinkedToTarget(idHash);
-      foundCerts = Underscore.filter(certs, (cert:any) => {
+      foundCerts = Underscore.filter(certs, (cert: any) => {
         // Add 'joiners && ': special case when block#0 not written and not joiner yet (avoid undefined error)
-        return !!(joiners && ~joiners.indexOf(cert.from))
+        return !!(joiners && ~joiners.indexOf(cert.from));
       });
     } else {
       // Look for certifications from WoT members
@@ -399,35 +557,50 @@ export class BlockGenerator {
       const certifiers = [];
       for (const cert of certs) {
         try {
-          const basedBlock = await this.dal.getTristampOf(cert.block_number)
+          const basedBlock = await this.dal.getTristampOf(cert.block_number);
           if (!basedBlock) {
-            throw 'Unknown timestamp block for identity';
+            throw "Unknown timestamp block for identity";
           }
           if (current) {
             const age = current.medianTime - basedBlock.medianTime;
             if (age > this.conf.sigWindow || age > this.conf.sigValidity) {
-              throw 'Too old certification';
+              throw "Too old certification";
            }
          }
          // Already exists a link not replayable yet?
-          let exists = await this.dal.existsNonReplayableLink(cert.from, cert.to, current.medianTime, current.version)
+          let exists = await this.dal.existsNonReplayableLink(
+            cert.from,
+            cert.to,
+            current.medianTime,
+            current.version
+          );
          if (exists) {
-            throw 'It already exists a similar certification written, which is not replayable yet';
+            throw "It already exists a similar certification written, which is not replayable yet";
          }
          // Already exists a link not chainable yet?
-          exists = await this.dal.existsNonChainableLink(cert.from, vHEAD_1, this.conf.sigStock);
+          exists = await this.dal.existsNonChainableLink(
+            cert.from,
+            vHEAD_1,
+            this.conf.sigStock
+          );
          if (exists) {
-            throw 'It already exists a written certification from ' + cert.from + ' which is not chainable yet';
+            throw (
+              "It already exists a written certification from " +
+              cert.from +
+              " which is not chainable yet"
+            );
          }
          const isMember = await this.dal.isMember(cert.from);
-          const doubleSignature = !!(~certifiers.indexOf(cert.from))
+          const doubleSignature = !!~certifiers.indexOf(cert.from);
          if (isMember && !doubleSignature) {
            const isValid = await GLOBAL_RULES_HELPERS.checkCertificationIsValidForBlock(
              cert,
              { number: current.number + 1, currency: current.currency },
-              async () => this.dal.getGlobalIdentityByHashForHashingAndSig(idHash),
+              async () =>
+                this.dal.getGlobalIdentityByHashForHashingAndSig(idHash),
              this.conf,
-              this.dal)
+              this.dal
+            );
            if (isValid) {
              certifiers.push(cert.from);
              foundCerts.push(cert);
@@ -440,28 +613,28 @@ export class BlockGenerator {
        }
      }
    }
-    const ms:any = null // TODO: refactor
+    const ms: any = null; // TODO: refactor
    return {
      identity: identity,
      key: null,
      idHash: idHash,
      certs: foundCerts,
-      ms
+      ms,
    };
  }

  private async createBlock(
    blockVersion: number,
-    current:DBBlock|null,
-    joinData:{ [pub:string]: PreJoin },
-    leaveData:{ [pub:string]: LeaveData },
-    updates:any,
-    revocations:any,
-    exclusions:any,
-    wereExcluded:any,
-    transactions:any,
-    manualValues:ForcedBlockValues) {
-
+    current: DBBlock | null,
+    joinData: { [pub: string]: PreJoin },
+    leaveData: { [pub: string]: LeaveData },
+    updates: any,
+    revocations: any,
+    exclusions: any,
+    wereExcluded: any,
+    transactions: any,
+    manualValues: ForcedBlockValues
+  ) {
    if (manualValues && manualValues.excluded) {
      exclusions = manualValues.excluded;
    }
@@ -474,22 +647,22 @@ export class BlockGenerator {
    const maxLenOfBlock = Indexer.DUP_HELPERS.getMaxBlockSize(vHEAD);
    let blockLen = 0;
    // Revocations have an impact on exclusions
-    revocations.forEach((idty:any) => exclusions.push(idty.pubkey));
+    revocations.forEach((idty: any) => exclusions.push(idty.pubkey));
    // Prevent writing joins/updates for members who will be excluded
    exclusions = Underscore.uniq(exclusions);
-    exclusions.forEach((excluded:any) => {
+    exclusions.forEach((excluded: any) => {
      delete updates[excluded];
      delete joinData[excluded];
      delete leaveData[excluded];
    });
    // Prevent writing joins/updates for excluded members
    wereExcluded = Underscore.uniq(wereExcluded);
-    wereExcluded.forEach((excluded:any) => {
+    wereExcluded.forEach((excluded: any) => {
      delete updates[excluded];
      delete joinData[excluded];
      delete leaveData[excluded];
    });
-    Underscore.keys(leaveData).forEach((leaver:any) => {
+    Underscore.keys(leaveData).forEach((leaver: any) => {
      delete updates[leaver];
      delete joinData[leaver];
    });
@@ -498,12 +671,11 @@ export class BlockGenerator {
    // Compute the new MedianTime
    if (block.number == 0) {
      block.medianTime = moment.utc().unix() - this.conf.rootoffset;
-    }
-    else {
+    } else {
      block.medianTime = vHEAD.medianTime;
    }
    // Choose the version
-    block.version = blockVersion
+    block.version = blockVersion;
    block.currency = current ? current.currency : this.conf.currency;
    block.nonce = 0;
    if (!this.conf.dtReeval) {
@@ -515,24 +687,39 @@ export class BlockGenerator {
    if (!this.conf.udReevalTime0) {
      this.conf.udReevalTime0 = block.medianTime + this.conf.dtReeval;
    }
-    block.parameters = block.number > 0 ? '' : [
-      this.conf.c, this.conf.dt, this.conf.ud0,
-      this.conf.sigPeriod, this.conf.sigStock, this.conf.sigWindow, this.conf.sigValidity,
-      this.conf.sigQty, this.conf.idtyWindow, this.conf.msWindow, this.conf.xpercent, this.conf.msValidity,
-      this.conf.stepMax, this.conf.medianTimeBlocks, this.conf.avgGenTime, this.conf.dtDiffEval,
-      (this.conf.percentRot == 1 ? "1.0" : this.conf.percentRot),
-      this.conf.udTime0,
-      this.conf.udReevalTime0,
-      this.conf.dtReeval
-    ].join(':');
+    block.parameters =
+      block.number > 0
+        ? ""
+        : [
+            this.conf.c,
+            this.conf.dt,
+            this.conf.ud0,
+            this.conf.sigPeriod,
+            this.conf.sigStock,
+            this.conf.sigWindow,
+            this.conf.sigValidity,
+            this.conf.sigQty,
+            this.conf.idtyWindow,
+            this.conf.msWindow,
+            this.conf.xpercent,
+            this.conf.msValidity,
+            this.conf.stepMax,
+            this.conf.medianTimeBlocks,
+            this.conf.avgGenTime,
+            this.conf.dtDiffEval,
+            this.conf.percentRot == 1 ? "1.0" : this.conf.percentRot,
+            this.conf.udTime0,
+            this.conf.udReevalTime0,
+            this.conf.dtReeval,
+          ].join(":");
    block.previousHash = current ? current.hash : "";
    block.previousIssuer = current ? current.issuer : "";
    if (this.selfPubkey) {
-      block.issuer = this.selfPubkey
+      block.issuer = this.selfPubkey;
    }
    // Members merkle
-    const joiners = Underscore.keys(joinData)
-    joiners.sort()
+    const joiners = Underscore.keys(joinData);
+    joiners.sort();
    const previousCount = current ? current.membersCount : 0;
    if (joiners.length == 0 && !current) {
      throw constants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS;
@@ -545,17 +732,19 @@ export class BlockGenerator {
     * Priority 1: keep the WoT sane
     */
    // Certifications from the WoT, to the WoT
-    Underscore.keys(updates).forEach((certifiedMember:any) => {
+    Underscore.keys(updates).forEach((certifiedMember: any) => {
      const certs = updates[certifiedMember] || [];
-      certs.forEach((cert:any) => {
+      certs.forEach((cert: any) => {
        if (blockLen < maxLenOfBlock) {
-          block.certifications.push(CertificationDTO.fromJSONObject(cert).inline());
+          block.certifications.push(
+            CertificationDTO.fromJSONObject(cert).inline()
+          );
          blockLen++;
        }
      });
    });
    // Renewed
-    joiners.forEach((joiner:any) => {
+    joiners.forEach((joiner: any) => {
      const data = joinData[joiner];
      // Join only for non-members
      if (data.identity.member) {
@@ -566,8 +755,8 @@ export class BlockGenerator {
      }
    });
    // Leavers
-    const leavers = Underscore.keys(leaveData)
-    leavers.forEach((leaver:any) => {
+    const leavers = Underscore.keys(leaveData);
+    leavers.forEach((leaver: any) => {
      const data = leaveData[leaver];
      // Join only for non-members
      if (data.identity && data.identity.member) {
@@ -581,9 +770,9 @@ export class BlockGenerator {
    /*****
     * Priority 2: revoked identities
     */
-    revocations.forEach((idty:any) => {
+    revocations.forEach((idty: any) => {
      if (blockLen < maxLenOfBlock) {
-        block.revoked.push([idty.pubkey, idty.revocation_sig].join(':'));
+        block.revoked.push([idty.pubkey, idty.revocation_sig].join(":"));
        blockLen++;
      }
    });
@@ -594,28 +783,32 @@ export class BlockGenerator {
    let countOfCertsToNewcomers = 0;
    // Newcomers
    // Newcomers + back people
-    joiners.forEach((joiner:any) => {
+    joiners.forEach((joiner: any) => {
      const data = joinData[joiner];
      // Identities only for never-have-been members
      if (!data.identity.member && !data.identity.wasMember) {
-        block.identities.push(IdentityDTO.fromJSONObject(data.identity).inline());
+        block.identities.push(
+          IdentityDTO.fromJSONObject(data.identity).inline()
+        );
      }
      // Join only for non-members
      if (!data.identity.member) {
        block.joiners.push(MembershipDTO.fromJSONObject(data.ms).inline());
      }
    });
-    block.identities = Underscore.sortBy(block.identities, (line:string) => {
-      const sp = line.split(':');
+    block.identities = Underscore.sortBy(block.identities, (line: string) => {
+      const sp = line.split(":");
      return sp[2] + sp[3];
    });
    // Certifications from the WoT, to newcomers
-    joiners.forEach((joiner:any) => {
+    joiners.forEach((joiner: any) => {
      const data = joinData[joiner] || [];
-      data.certs.forEach((cert:any) => {
+      data.certs.forEach((cert: any) => {
        countOfCertsToNewcomers++;
-        block.certifications.push(CertificationDTO.fromJSONObject(cert).inline());
+        block.certifications.push(
+          CertificationDTO.fromJSONObject(cert).inline()
+        );
      });
    });
 
@@ -632,16 +825,21 @@ export class BlockGenerator {
    // Forced joiners (by tests)
    if (manualValues && manualValues.joiners) {
-      block.joiners = block.joiners.concat(manualValues.joiners.map(j => j.inline()))
+      block.joiners = block.joiners.concat(
+        manualValues.joiners.map((j) => j.inline())
+      );
    }
    // Forced certifications (by tests)
    if (manualValues && manualValues.certifications) {
-      block.certifications = block.certifications.concat(manualValues.certifications.map(c => c.inline()))
+      block.certifications = block.certifications.concat(
+        manualValues.certifications.map((c) => c.inline())
+      );
    }
    // Final number of members
-    block.membersCount = previousCount + block.joiners.length - block.excluded.length;
+    block.membersCount =
+      previousCount + block.joiners.length - block.excluded.length;
 
    vHEAD.membersCount = block.membersCount;
 
@@ -651,10 +849,14 @@ export class BlockGenerator {
    block.transactions = [];
    blockLen = BlockDTO.getLen(block);
    if (blockLen < maxLenOfBlock) {
-      transactions.forEach((tx:any) => {
-        const txDTO = TransactionDTO.fromJSONObject(tx)
-        const txLen = txDTO.getLen()
-        if (txLen <= CommonConstants.MAXIMUM_LEN_OF_COMPACT_TX && blockLen + txLen < maxLenOfBlock && tx.version == CommonConstants.TRANSACTION_VERSION) {
+      transactions.forEach((tx: any) => {
+        const txDTO = TransactionDTO.fromJSONObject(tx);
+        const txLen = txDTO.getLen();
+        if (
+          txLen <= CommonConstants.MAXIMUM_LEN_OF_COMPACT_TX &&
+          blockLen + txLen < maxLenOfBlock &&
+          tx.version == CommonConstants.TRANSACTION_VERSION
+        ) {
          block.transactions.push(txDTO);
        }
        blockLen += txLen;
@@ -668,12 +870,11 @@ export class BlockGenerator {
    // Universal Dividend
    if (vHEAD.new_dividend) {
-
      // BR_G13
      // Recompute according to block.membersCount
-      Indexer.prepareDividend(vHEAD, vHEAD_1, this.conf)
+      Indexer.prepareDividend(vHEAD, vHEAD_1, this.conf);
      // BR_G14
-      Indexer.prepareUnitBase(vHEAD)
+      Indexer.prepareUnitBase(vHEAD);
      // Fix BR_G14 double call
      vHEAD.unitBase = Math.min(vHEAD_1.unitBase + 1, vHEAD.unitBase);
@@ -689,7 +890,10 @@ export class BlockGenerator {
    block.issuersFrameVar = vHEAD.issuersFrameVar;
    // Manual values before hashing
    if (manualValues) {
-      Underscore.extend(block, Underscore.omit(manualValues, 'time', 'certifications', 'joiners'));
+      Underscore.extend(
+        block,
+        Underscore.omit(manualValues, "time", "certifications", "joiners")
+      );
    }
    // InnerHash
    block.time = block.medianTime;
@@ -699,21 +903,30 @@ export class BlockGenerator {
 }
 
 export class BlockGeneratorWhichProves extends BlockGenerator {
-
-  constructor(server:Server, private prover:any) {
-    super(server)
+  constructor(server: Server, private prover: any) {
+    super(server);
  }
 
-  async makeNextBlock(block:DBBlock|null, trial?:number|null, manualValues:any = null) {
-    const unsignedBlock = block || (await this.nextBlock(manualValues))
-    const trialLevel = trial || (await this.mainContext.getIssuerPersonalizedDifficulty(this.selfPubkey))
-    return this.prover.prove(unsignedBlock, trialLevel, (manualValues && manualValues.time) || null);
+  async makeNextBlock(
+    block: DBBlock | null,
+    trial?: number | null,
+    manualValues: any = null
+  ) {
+    const unsignedBlock = block || (await this.nextBlock(manualValues));
+    const trialLevel =
+      trial ||
+      (await this.mainContext.getIssuerPersonalizedDifficulty(this.selfPubkey));
+    return this.prover.prove(
+      unsignedBlock,
+      trialLevel,
+      (manualValues && manualValues.time) || null
+    );
  }
 }
 
 interface BlockGeneratorInterface {
-  findNewCertsFromWoT(current:DBBlock|null): Promise<any>
-  filterJoiners(preJoinData:any): Promise<any>
+  findNewCertsFromWoT(current: DBBlock | null): Promise<any>;
+  filterJoiners(preJoinData: any): Promise<any>;
 }
 
 /**
@@ -721,62 +934,79 @@ interface BlockGeneratorInterface {
 * @constructor
 */
 class NextBlockGenerator implements BlockGeneratorInterface {
-
  constructor(
-    private mainContext:BlockchainContext,
-    private server:Server,
-    private logger:any) {
-  }
+    private mainContext: BlockchainContext,
+    private server: Server,
+    private logger: any
+  ) {}
 
  get conf() {
-    return this.server.conf
+    return this.server.conf;
  }
 
  get dal() {
-    return this.server.dal
+    return this.server.dal;
  }
 
-  async findNewCertsFromWoT(current:DBBlock|null) {
-    const updates:any = {};
-    const updatesToFrom:any = {};
+  async findNewCertsFromWoT(current: DBBlock | null) {
+    const updates: any = {};
+    const updatesToFrom: any = {};
    const certs = await this.dal.certsFindNew();
    const vHEAD_1 = await this.mainContext.getvHEAD_1();
    for (const cert of certs) {
-      const targetIdty = await this.dal.getGlobalIdentityByHashForHashingAndSig(cert.target)
+      const targetIdty = await this.dal.getGlobalIdentityByHashForHashingAndSig(
+        cert.target
+      );
      // The identity must be known
      if (targetIdty) {
        const certSig = cert.sig;
        // Do not rely on certification block UID, prefer using the known hash of the block by its given number
-        const targetBlock = await this.dal.getTristampOf(cert.block_number)
+        const targetBlock = await this.dal.getTristampOf(cert.block_number);
        // Check if writable
-        let duration = current && targetBlock ? current.medianTime - targetBlock.medianTime : 0;
+        let duration =
+          current && targetBlock
+            ? current.medianTime - targetBlock.medianTime
+            : 0;
        if (targetBlock && duration <= this.conf.sigWindow) {
          const rawCert = CertificationDTO.fromJSONObject({
-            sig: '',
+            sig: "",
            currency: this.conf.currency,
            issuer: cert.from,
            idty_issuer: targetIdty.pubkey,
            idty_uid: targetIdty.uid,
            idty_buid: targetIdty.buid,
            idty_sig: targetIdty.sig,
-            buid: current ? [cert.block_number, targetBlock.hash].join('-') : CommonConstants.SPECIAL_BLOCK,
+            buid: current
+              ? [cert.block_number, targetBlock.hash].join("-")
+              : CommonConstants.SPECIAL_BLOCK,
          }).getRawUnSigned();
          if (verify(rawCert, certSig, cert.from)) {
            cert.sig = certSig;
            let exists = false;
            if (current) {
              // Already exists a link not replayable yet?
-              exists = await this.dal.existsNonReplayableLink(cert.from, cert.to, current.medianTime, current.version)
+              exists = await this.dal.existsNonReplayableLink(
+                cert.from,
+                cert.to,
+                current.medianTime,
+                current.version
+              );
            }
            if (!exists) {
              // Already exists a link not chainable yet?
              // No chainability block means absolutely nobody can issue certifications yet
-              exists = await this.dal.existsNonChainableLink(cert.from, vHEAD_1, this.conf.sigStock);
+              exists = await this.dal.existsNonChainableLink(
+                cert.from,
+                vHEAD_1,
+                this.conf.sigStock
+              );
              if (!exists) {
                // It does NOT already exist a similar certification written, which is not replayable yet
                // Signatory must be a member
                const isSignatoryAMember = await this.dal.isMember(cert.from);
-                const isCertifiedANonLeavingMember = isSignatoryAMember && (await this.dal.isMemberAndNonLeaver(cert.to));
+                const isCertifiedANonLeavingMember =
+                  isSignatoryAMember &&
+                  (await this.dal.isMemberAndNonLeaver(cert.to));
                // Certified must be a member and non-leaver
                if (isSignatoryAMember && isCertifiedANonLeavingMember) {
                  updatesToFrom[cert.to] = updatesToFrom[cert.to] || [];
@@ -795,28 +1025,32 @@ class NextBlockGenerator implements BlockGeneratorInterface {
    return updates;
  }
 
-  async filterJoiners(preJoinData:any) {
-    const filtered:any = {};
-    const filterings:any = [];
-    const filter = async (pubkey:string) => {
+  async filterJoiners(preJoinData: any) {
+    const filtered: any = {};
+    const filterings: any = [];
+    const filter = async (pubkey: string) => {
      try {
        // No manual filtering, takes all BUT already used UID or pubkey
-        let exists = await GLOBAL_RULES_HELPERS.checkExistsUserID(preJoinData[pubkey].identity.uid, this.dal);
+        let exists = await GLOBAL_RULES_HELPERS.checkExistsUserID(
+          preJoinData[pubkey].identity.uid,
+          this.dal
+        );
        if (exists && !preJoinData[pubkey].identity.wasMember) {
-          throw 'UID already taken';
+          throw "UID already taken";
        }
        exists = await GLOBAL_RULES_HELPERS.checkExistsPubkey(pubkey, this.dal);
        if (exists && !preJoinData[pubkey].identity.wasMember) {
-          throw 'Pubkey already taken';
+          throw "Pubkey already taken";
        }
        filtered[pubkey] = preJoinData[pubkey];
-      }
-      catch (err) {
+      } catch (err) {
        this.logger.warn(err);
      }
-    }
-    Underscore.keys(preJoinData).forEach( (joinPubkey:any) => filterings.push(filter(joinPubkey)));
-    await Promise.all(filterings)
+    };
+    Underscore.keys(preJoinData).forEach((joinPubkey: any) =>
+      filterings.push(filter(joinPubkey))
+    );
+    await Promise.all(filterings);
    return filtered;
  }
 }
@@ -826,44 +1060,46 @@ class NextBlockGenerator implements BlockGeneratorInterface {
 * @constructor
 */
 class ManualRootGenerator implements BlockGeneratorInterface {
-
  findNewCertsFromWoT() {
-    return Promise.resolve({})
+    return Promise.resolve({});
  }
 
-  async filterJoiners(preJoinData:any) {
-    const filtered:any = {};
-    const newcomers = Underscore.keys(preJoinData)
-    const uids:string[] = [];
-    newcomers.forEach((newcomer:string) => uids.push(preJoinData[newcomer].ms.userid));
+  async filterJoiners(preJoinData: any) {
+    const filtered: any = {};
+    const newcomers = Underscore.keys(preJoinData);
+    const uids: string[] = [];
+    newcomers.forEach((newcomer: string) =>
+      uids.push(preJoinData[newcomer].ms.userid)
+    );
 
    if (newcomers.length > 0) {
-      const answers = await inquirer.prompt([{
-        type: "checkbox",
-        name: "uids",
-        message: "Newcomers to add",
-        choices: uids,
-        default: uids[0]
-      }]);
-      newcomers.forEach((newcomer:string) => {
+      const answers = await inquirer.prompt([
+        {
+          type: "checkbox",
+          name: "uids",
+          message: "Newcomers to add",
+          choices: uids,
+          default: uids[0],
+        },
+      ]);
+      newcomers.forEach((newcomer: string) => {
        if (~answers.uids.indexOf(preJoinData[newcomer].ms.userid))
          filtered[newcomer] = preJoinData[newcomer];
      });
-      if (answers.uids.length == 0)
-        throw 'No newcomer selected';
-      return filtered
+      if (answers.uids.length == 0) throw "No newcomer selected";
+      return filtered;
    } else {
-      throw 'No newcomer found';
+      throw "No newcomer found";
    }
  }
 }
 
 export interface ForcedBlockValues {
-  time?: number
-  version?: number
-  medianTime?: number
-  excluded?: string[]
-  revoked?: string[]
-  certifications?: CertificationDTO[]
-  joiners?: MembershipDTO[]
-}
\ No newline at end of file
+  time?: number;
+  version?: number;
+  medianTime?: number;
+  excluded?: string[];
+  revoked?: string[];
+  certifications?: CertificationDTO[];
+  joiners?: MembershipDTO[];
+}
diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts
index 9b3b3953012d311179393d4cbccb1831229d9f45..8a9733dc6a64f7471cecbdb8f7feb528a653dc55 100644
--- a/app/modules/prover/lib/blockProver.ts
+++ b/app/modules/prover/lib/blockProver.ts
@@ -11,82 +11,84 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {ProverConstants} from "./constants"
-import {Server} from "../../../../server"
-import {PowEngine} from "./engine"
-import {DBBlock} from "../../../lib/db/DBBlock"
-import {CommonConstants} from "../../../lib/common-libs/constants"
-import {BlockDTO} from "../../../lib/dto/BlockDTO"
-import {ConfDTO, Keypair} from "../../../lib/dto/ConfDTO"
+import { ProverConstants } from "./constants";
+import { Server } from "../../../../server";
+import { PowEngine } from "./engine";
+import { DBBlock } from "../../../lib/db/DBBlock";
+import { CommonConstants } from "../../../lib/common-libs/constants";
+import { BlockDTO } from "../../../lib/dto/BlockDTO";
+import { ConfDTO, Keypair } from "../../../lib/dto/ConfDTO";
 
-const os = require('os')
-const querablep = require('querablep')
+const os = require("os");
+const querablep = require("querablep");
 
 const POW_FOUND = true;
 const POW_NOT_FOUND_YET = false;
 
 export class WorkerFarm {
+  private theEngine: PowEngine;
+  private onAlmostPoW: any = null;
+  private powPromise: any = null;
+  private stopPromise: any = null;
+  private checkPoWandNotify: any = null;
-  private theEngine:PowEngine
-  private onAlmostPoW:any = null
-  private powPromise:any = null
-  private stopPromise:any = null
-  private checkPoWandNotify:any = null
-
-  constructor(private server:Server, private logger:any) {
-
-    this.theEngine = new PowEngine(server.conf, server.logger, server.dal)
+  constructor(private server: Server, private logger: any) {
+    this.theEngine = new PowEngine(server.conf, server.logger, server.dal);
 
    // A utility method to filter the PoW notifications
-    this.checkPoWandNotify = (hash:string, block:DBBlock, found:boolean) => {
+    this.checkPoWandNotify = (hash: string, block: DBBlock, found: boolean) => {
      const matches = hash.match(/^(0{2,})[^0]/);
      if (matches && this.onAlmostPoW) {
        this.onAlmostPoW(hash, matches, block, found);
      }
-    }
+    };
 
    // Keep track of PoW advancement
-    this.theEngine.setOnInfoMessage((message:any) => {
+    this.theEngine.setOnInfoMessage((message: any) => {
      if (message.error) {
-        this.logger.error('Error in engine#%s:', this.theEngine.id, message.error)
+        this.logger.error(
+          "Error in engine#%s:",
+          this.theEngine.id,
+          message.error
+        );
      } else if (message.pow) {
        // A message about the PoW
-        const msg = message.pow
-        this.checkPoWandNotify(msg.pow, msg.block, POW_NOT_FOUND_YET)
+        const msg = message.pow;
+        this.checkPoWandNotify(msg.pow, msg.block, POW_NOT_FOUND_YET);
      }
-    })
+    });
  }
 
  get nbWorkers() {
-    return this.theEngine.getNbWorkers()
+    return this.theEngine.getNbWorkers();
  }
 
-  changeCPU(cpu:any) {
-    return this.theEngine.setConf({ cpu })
+  changeCPU(cpu: any) {
+    return this.theEngine.setConf({ cpu });
  }
 
-  changePoWPrefix(prefix:any) {
-    return this.theEngine.setConf({ prefix })
+  changePoWPrefix(prefix: any) {
+    return this.theEngine.setConf({ prefix });
  }
 
  isComputing() {
-    return this.powPromise !== null && !this.powPromise.isResolved()
+    return this.powPromise !== null && !this.powPromise.isResolved();
  }
 
  isStopping() {
-    return this.stopPromise !== null && !this.stopPromise.isResolved()
+    return this.stopPromise !== null && !this.stopPromise.isResolved();
  }
 
  /**
   * Eventually stops the engine PoW if one was computing
   */
  stopPoW() {
-    this.stopPromise = querablep(this.theEngine.cancel())
+    this.stopPromise = querablep(this.theEngine.cancel());
    return this.stopPromise;
  }
 
  shutDownEngine() {
-    return this.theEngine.shutDown()
+    return this.theEngine.shutDown();
  }
 
  /**
@@ -95,57 +97,56 @@
   */
  async askNewProof(stuff: ProofAsk) {
    // Starts the PoW
-    this.powPromise = querablep(this.theEngine.prove(stuff))
-    const res = await this.powPromise
+    this.powPromise = querablep(this.theEngine.prove(stuff));
+    const res = await this.powPromise;
    if (res) {
      this.checkPoWandNotify(res.pow.pow, res.pow.block, POW_FOUND);
    }
-    return res && res.pow
+    return res && res.pow;
  }
 
-  setOnAlmostPoW(onPoW:any) {
-    this.onAlmostPoW = onPoW
+  setOnAlmostPoW(onPoW: any) {
+    this.onAlmostPoW = onPoW;
  }
 }
 
 export class BlockProver {
+  logger: any;
+  waitResolve: any;
+  workerFarmPromise: any;
-  logger:any
-  waitResolve:any
-  workerFarmPromise:any
+  constructor(private server: Server) {
+    this.logger = server.logger;
 
-  constructor(private server:Server) {
-    this.logger = server.logger
-
-    const debug = process.execArgv.toString().indexOf('--debug') !== -1;
-    if(debug) {
+    const debug = process.execArgv.toString().indexOf("--debug") !== -1;
+    if (debug) {
      //Set an unused port number.
      process.execArgv = [];
    }
  }
 
-  get conf():ConfDTO {
-    return this.server.conf
+  get conf(): ConfDTO {
+    return this.server.conf;
  }
 
-  get pair(): Keypair|null {
-    return this.conf.pair
+  get pair(): Keypair | null {
+    return this.conf.pair;
  }
 
  getWorker(): Promise<WorkerFarm> {
    if (!this.workerFarmPromise) {
      this.workerFarmPromise = (async () => {
-        return new WorkerFarm(this.server, this.logger)
-      })()
+        return new WorkerFarm(this.server, this.logger);
+      })();
    }
-    return this.workerFarmPromise
+    return this.workerFarmPromise;
  }
 
  async cancel() {
    // If no farm was instantiated, there is nothing to do yet
    if (this.workerFarmPromise) {
      let farm = await this.getWorker();
-      await farm.stopPoW()
+      await farm.stopPoW();
      if (this.waitResolve) {
        this.waitResolve();
        this.waitResolve = null;
@@ -153,8 +154,7 @@
    }
  }
 
-  prove(block:any, difficulty:any, forcedTime:any = null) {
-
+  prove(block: any, difficulty: any, forcedTime: any = null) {
    if (this.waitResolve) {
      this.waitResolve();
      this.waitResolve = null;
@@ -163,10 +163,10 @@
    const remainder = difficulty % 16;
    const nbZeros = (difficulty - remainder) / 16;
    const highMark = CommonConstants.PROOF_OF_WORK.UPPER_BOUND[remainder];
-    const notifyVersionJumpReady: boolean = (block.version + 1) === CommonConstants.DUBP_NEXT_VERSION;
+    const notifyVersionJumpReady: boolean =
+      block.version + 1 === CommonConstants.DUBP_NEXT_VERSION;
 
    return (async () => {
-
      let powFarm = await this.getWorker();
 
      if (block.number == 0) {
@@ -175,83 +175,126 @@
      }
 
      // Start
-      powFarm.setOnAlmostPoW((pow:any, matches:any, aBlock:any, found:boolean) => {
-        this.powEvent(found, pow);
-        if (matches && matches[1].length >= ProverConstants.MINIMAL_ZEROS_TO_SHOW_IN_LOGS) {
-          this.logger.info('Matched %s zeros %s with Nonce = %s for block#%s by %s', matches[1].length, pow, aBlock.nonce, aBlock.number, aBlock.issuer.slice(0,6));
+      powFarm.setOnAlmostPoW(
+        (pow: any, matches: any, aBlock: any, found: boolean) => {
+          this.powEvent(found, pow);
+          if (
+            matches &&
+            matches[1].length >= ProverConstants.MINIMAL_ZEROS_TO_SHOW_IN_LOGS
+          ) {
+            this.logger.info(
+              "Matched %s zeros %s with Nonce = %s for block#%s by %s",
+              matches[1].length,
+              pow,
+              aBlock.nonce,
+              aBlock.number,
+              aBlock.issuer.slice(0, 6)
+            );
+          }
        }
-      });
+      );
 
      block.nonce = 0;
-      this.logger.info('Generating proof-of-work with %s leading zeros followed by [0-' + highMark + ']... (CPU usage set to %s%) for block#%s', nbZeros, (this.conf.cpu * 100).toFixed(0), block.number, block.issuer.slice(0,6));
+      this.logger.info(
+        "Generating proof-of-work with %s leading zeros followed by [0-" +
+          highMark +
+          "]... (CPU usage set to %s%) for block#%s",
+        nbZeros,
+        (this.conf.cpu * 100).toFixed(0),
+        block.number,
+        block.issuer.slice(0, 6)
+      );
      const start = Date.now();
      let result = await powFarm.askNewProof({
        newPoW: {
          conf: {
            powNoSecurity: this.conf.powNoSecurity,
            cpu: this.conf.cpu,
-            prefix: this.conf.prefix ? String(this.conf.prefix) : '',
+            prefix: this.conf.prefix ? String(this.conf.prefix) : "",
            avgGenTime: this.conf.avgGenTime,
-            medianTimeBlocks: this.conf.medianTimeBlocks
+            medianTimeBlocks: this.conf.medianTimeBlocks,
          },
          block: block,
          zeros: nbZeros,
          highMark: highMark,
          forcedTime: forcedTime,
-          pair: this.pair
+          pair: this.pair,
        },
-        specialNonce: notifyVersionJumpReady ? 999 * (ProverConstants.NONCE_RANGE / 1000) : 0,
+        specialNonce: notifyVersionJumpReady
+          ? 999 * (ProverConstants.NONCE_RANGE / 1000)
+          : 0,
      });
      if (!result) {
-        this.logger.info('GIVEN proof-of-work for block#%s with %s leading zeros followed by [0-' + highMark + ']! stop PoW for %s', block.number, nbZeros, this.pair && this.pair.pub.slice(0,6));
-        throw 'Proof-of-work computation canceled because block received';
+        this.logger.info(
+          "GIVEN proof-of-work for block#%s with %s leading zeros followed by [0-" +
+            highMark +
+            "]! stop PoW for %s",
+          block.number,
+          nbZeros,
+          this.pair && this.pair.pub.slice(0, 6)
+        );
+        throw "Proof-of-work computation canceled because block received";
      } else {
        const proof = result.block;
-        const testsCount = result.testsCount * powFarm.nbWorkers
-        const duration = (Date.now() - start);
-        const testsPerSecond = testsCount / (duration / 1000)
-        this.logger.info('Done: #%s, %s in %ss (~%s tests, ~%s tests/s, using %s cores, CPU %s%)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond.toFixed(2), powFarm.nbWorkers, Math.floor(100*this.conf.cpu))
-        this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros);
-        return BlockDTO.fromJSONObject(proof)
+        const testsCount = result.testsCount * powFarm.nbWorkers;
+        const duration = Date.now() - start;
+        const testsPerSecond = testsCount / (duration / 1000);
+        this.logger.info(
+          "Done: #%s, %s in %ss (~%s tests, ~%s tests/s, using %s cores, CPU %s%)",
+          block.number,
+          proof.hash,
+          (duration / 1000).toFixed(2),
+          testsCount,
+          testsPerSecond.toFixed(2),
+          powFarm.nbWorkers,
+          Math.floor(100 * this.conf.cpu)
+        );
+        this.logger.info(
+          "FOUND proof-of-work with %s leading zeros followed by [0-" +
+            highMark +
+            "]!",
+          nbZeros
+        );
+        return BlockDTO.fromJSONObject(proof);
      }
-    })()
-  };
+    })();
+  }
 
-  async changeCPU(cpu:number) {
+  async changeCPU(cpu: number) {
    this.conf.cpu = cpu;
-    const farm = await this.getWorker()
-    return farm.changeCPU(cpu)
+    const farm = await this.getWorker();
+    return farm.changeCPU(cpu);
  }
 
-  async changePoWPrefix(prefix:any) {
-    this.conf.prefix = prefix
-    const farm = await this.getWorker()
-    return farm.changePoWPrefix(prefix)
+  async changePoWPrefix(prefix: any) {
+    this.conf.prefix = prefix;
+    const farm = await this.getWorker();
+    return farm.changePoWPrefix(prefix);
  }
 
-  private powEvent(found:boolean, hash:string) {
+  private powEvent(found: boolean, hash: string) {
    this.server && this.server.push({ pow: { found, hash } });
  }
 }
 
 export interface ProofAsk {
-  initialTestsPerRound?: number
-  maxDuration?: number
-  specialNonce?: number
+  initialTestsPerRound?: number;
+  maxDuration?: number;
+  specialNonce?: number;
  newPoW: {
    conf: {
-      powNoSecurity?: boolean
-      cpu?: number
-      prefix?: string
-      nbCores?: number
-      avgGenTime: number
-      medianTimeBlocks: number
-    },
-    block: any,
-    zeros: number
-    highMark: string
-    forcedTime?: number
-    pair: Keypair|null
-    turnDuration?: number
-  }
+      powNoSecurity?: boolean;
+      cpu?: number;
+      prefix?: string;
+      nbCores?: number;
+      avgGenTime: number;
+      medianTimeBlocks: number;
+    };
+    block: any;
+    zeros: number;
+    highMark: string;
+    forcedTime?: number;
+    pair: Keypair | null;
+    turnDuration?: number;
+  };
 }
diff --git a/app/modules/prover/lib/constants.ts b/app/modules/prover/lib/constants.ts
index 286ecbd9d355d927d96e6dd018e2eaec351b71e9..1add261859d5d71b67ec8009ae4821a0583ba98c 100644
--- a/app/modules/prover/lib/constants.ts
+++ b/app/modules/prover/lib/constants.ts
@@ -12,7 +12,6 @@
 // GNU Affero General Public License for more details.
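
// Editor's note — as blockProver.ts above shows, a personalized difficulty D
// is split as D = 16 * nbZeros + remainder: a valid proof hash must start with
// nbZeros zeros, followed by a hex digit within [0-highMark], where highMark
// is looked up as PROOF_OF_WORK.UPPER_BOUND[remainder]. A standalone sketch of
// that split (the helper name is illustrative, not part of this patch):

function splitDifficulty(
  difficulty: number
): { nbZeros: number; highMarkIndex: number } {
  const remainder = difficulty % 16; // indexes the allowed upper-bound digit
  const nbZeros = (difficulty - remainder) / 16; // mandatory leading zeros
  return { nbZeros, highMarkIndex: remainder };
}

console.log(splitDifficulty(70)); // { nbZeros: 4, highMarkIndex: 6 }
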
 export const ProverConstants = {
-
  CORES_MAXIMUM_USE_IN_PARALLEL: 8,
 
  MINIMAL_ZEROS_TO_SHOW_IN_LOGS: 3,
@@ -29,5 +28,5 @@ export const ProverConstants = {
  POW_NB_PAUSES_PER_ROUND: 10,
 
  // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds.
-  POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000
-}
+  POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000,
+};
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts
index 54b0b4c17516f26fb33ea96b4548d17adb1d481e..93afb37bafc452dd60ef4db1a0e6e69851aad3a6 100644
--- a/app/modules/prover/lib/engine.ts
+++ b/app/modules/prover/lib/engine.ts
@@ -11,60 +11,58 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
 
-import {Master as PowCluster} from "./powCluster"
-import {ConfDTO} from "../../../lib/dto/ConfDTO"
-import {FileDAL} from "../../../lib/dal/fileDAL"
-import {ProofAsk} from "./blockProver"
+import { Master as PowCluster } from "./powCluster";
+import { ConfDTO } from "../../../lib/dto/ConfDTO";
+import { FileDAL } from "../../../lib/dal/fileDAL";
+import { ProofAsk } from "./blockProver";
 
-const os = require('os')
+const os = require("os");
 
 // Super important for Node.js debugging
-const debug = process.execArgv.toString().indexOf('--debug') !== -1;
-if(debug) {
+const debug = process.execArgv.toString().indexOf("--debug") !== -1;
+if (debug) {
  //Set an unused port number.
  process.execArgv = [];
 }
 
 export class PowEngine {
+  private nbWorkers: number;
+  private cluster: PowCluster;
+  readonly id: number;
-  private nbWorkers:number
-  private cluster:PowCluster
-  readonly id:number
-
-  constructor(private conf:ConfDTO, logger:any, private dal?:FileDAL) {
-
+  constructor(private conf: ConfDTO, logger: any, private dal?: FileDAL) {
    // We use as many cores as available, but not more than CORES_MAXIMUM_USE_IN_PARALLEL
-    this.nbWorkers = conf.nbCores
-    this.cluster = new PowCluster(this.nbWorkers, logger, dal)
-    this.id = this.cluster.clusterId
+    this.nbWorkers = conf.nbCores;
+    this.cluster = new PowCluster(this.nbWorkers, logger, dal);
+    this.id = this.cluster.clusterId;
  }
 
  getNbWorkers() {
-    return this.cluster.nbWorkers
+    return this.cluster.nbWorkers;
  }
 
  forceInit() {
-    return this.cluster.initCluster()
+    return this.cluster.initCluster();
  }
 
  async prove(stuff: ProofAsk) {
-    await this.cluster.cancelWork()
-    return await this.cluster.proveByWorkers(stuff)
+    await this.cluster.cancelWork();
+    return await this.cluster.proveByWorkers(stuff);
  }
 
  cancel() {
-    return this.cluster.cancelWork()
+    return this.cluster.cancelWork();
  }
 
-  setConf(value:any) {
-    return this.cluster.changeConf(value)
+  setConf(value: any) {
+    return this.cluster.changeConf(value);
  }
 
-  setOnInfoMessage(callback:any) {
-    return this.cluster.onInfoMessage = callback
+  setOnInfoMessage(callback: any) {
+    return (this.cluster.onInfoMessage = callback);
  }
 
  async shutDown() {
-    return this.cluster.shutDownWorkers()
+    return this.cluster.shutDownWorkers();
  }
 }
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index 7347aac00ea85194004af7da636979391f3452f3..bf52ee7d7cc2d7a77565b9c49aa3912497c22f2e 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -11,60 +11,60 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
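
// Editor's note — several classes in this patch (WorkerFarm, PermanentProver)
// rely on `querablep`, which wraps a promise so its settlement can be checked
// synchronously via isResolved()/isFulfilled(). A simplified re-implementation
// for illustration only; the real library also tracks rejection separately:

function querable<T>(
  promise: Promise<T>
): Promise<T> & { isResolved: () => boolean } {
  let settled = false;
  const q = promise.then((value) => {
    settled = true; // flips once the underlying promise resolves
    return value;
  }) as Promise<T> & { isResolved: () => boolean };
  q.isResolved = () => settled;
  return q;
}
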
-import {BlockGeneratorWhichProves} from "./blockGenerator" -import {ConfDTO} from "../../../lib/dto/ConfDTO" -import {BlockProver} from "./blockProver" -import {DBBlock} from "../../../lib/db/DBBlock" -import {dos2unix} from "../../../lib/common-libs/dos2unix" -import {parsers} from "../../../lib/common-libs/parsers/index" +import { BlockGeneratorWhichProves } from "./blockGenerator"; +import { ConfDTO } from "../../../lib/dto/ConfDTO"; +import { BlockProver } from "./blockProver"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { dos2unix } from "../../../lib/common-libs/dos2unix"; +import { parsers } from "../../../lib/common-libs/parsers/index"; -import {Server} from "../../../../server" -import {Querable, querablep} from "../../../lib/common-libs/querable" +import { Server } from "../../../../server"; +import { Querable, querablep } from "../../../lib/common-libs/querable"; export class PermanentProver { + logger: any; + conf: ConfDTO; + prover: BlockProver; + generator: BlockGeneratorWhichProves; + loops: number; - logger:any - conf:ConfDTO - prover:BlockProver - generator:BlockGeneratorWhichProves - loops:number + private permanencePromise: Querable<void> | null = null; - private permanencePromise:Querable<void>|null = null + private blockchainChangedResolver: any = null; + private promiseOfWaitingBetween2BlocksOfOurs: any = null; + private lastComputedBlock: any = null; + private resolveContinuePromise: any = null; + private continuePromise: any = null; - private blockchainChangedResolver:any = null - private promiseOfWaitingBetween2BlocksOfOurs:any = null - private lastComputedBlock:any = null - private resolveContinuePromise:any = null - private continuePromise:any = null - - constructor(private server:Server) { + constructor(private server: Server) { this.logger = server.logger; this.conf = server.conf; - this.prover = new BlockProver(server) - this.generator = new BlockGeneratorWhichProves(server, this.prover) + this.prover = new BlockProver(server); + this.generator = new BlockGeneratorWhichProves(server, this.prover); // Promises triggering the proving loop this.resolveContinuePromise = null; - this.continuePromise = new Promise((resolve) => this.resolveContinuePromise = resolve); + this.continuePromise = new Promise( + (resolve) => (this.resolveContinuePromise = resolve) + ); this.loops = 0; - - } allowedToStart() { if (!this.permanencePromise || this.permanencePromise.isFulfilled()) { - this.startPermanence() + this.startPermanence(); } this.resolveContinuePromise(true); } async startPermanence() { - - let permanenceResolve = () => {} - this.permanencePromise = querablep(new Promise(res => { - permanenceResolve = res - })) + let permanenceResolve = () => {}; + this.permanencePromise = querablep( + new Promise((res) => { + permanenceResolve = res; + }) + ); /****************** * Main proof loop @@ -82,23 +82,31 @@ export class PermanentProver { const dal = this.server.dal; const theConf = this.server.conf; if (!selfPubkey) { - throw 'No self pubkey found.'; + throw "No self pubkey found."; } let current; const isMember = await dal.isMember(selfPubkey); if (!isMember) { - throw 'Local node is not a member. Waiting to be a member before computing a block.'; + throw "Local node is not a member.
Waiting to be a member before computing a block."; } current = await dal.getCurrentBlockOrNull(); if (!current) { - throw 'Waiting for a root block before computing new blocks'; + throw "Waiting for a root block before computing new blocks"; } - const trial = await this.server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); + const trial = await this.server + .getBcContext() + .getIssuerPersonalizedDifficulty(selfPubkey); this.checkTrialIsNotTooHigh(trial, current, selfPubkey); const lastIssuedByUs = current.issuer == selfPubkey; if (lastIssuedByUs && !this.promiseOfWaitingBetween2BlocksOfOurs) { - this.promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => setTimeout(resolve, theConf.powDelay)); - this.logger.warn('Waiting ' + theConf.powDelay + 'ms before starting to compute next block...'); + this.promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => + setTimeout(resolve, theConf.powDelay) + ); + this.logger.warn( + "Waiting " + + theConf.powDelay + + "ms before starting to compute next block..." + ); } else { // We have waited enough this.promiseOfWaitingBetween2BlocksOfOurs = null; @@ -110,58 +118,77 @@ export class PermanentProver { } if (doProof) { - /******************* * COMPUTING A BLOCK ******************/ try { - let cancelAlreadyTriggered = false; // The canceller (async () => { // If the blockchain changes - await new Promise((resolve) => this.blockchainChangedResolver = resolve); - cancelAlreadyTriggered = true + await new Promise( + (resolve) => (this.blockchainChangedResolver = resolve) + ); + cancelAlreadyTriggered = true; // Then cancel the generation await this.prover.cancel(); - })() + })(); - let unsignedBlock = null, trial2 = 0 + let unsignedBlock = null, + trial2 = 0; if (!cancelAlreadyTriggered) { // The pushFIFO is here to get the difficulty level while excluding any new block to be resolved. // Without it, a new block could be added meanwhile and would make the difficulty wrongly computed. 
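To make the comment above concrete: pushFIFO serializes the difficulty read and the block generation behind any pending block write, so the two can never interleave. A generic sketch of that pattern, independent of duniter's actual pushFIFO implementation:

// A promise FIFO: tasks run strictly one after another.
class PromiseFifo {
  private tail: Promise<unknown> = Promise.resolve();
  push<T>(task: () => Promise<T>): Promise<T> {
    const next = this.tail.then(task, task); // start only once the previous task settled
    this.tail = next.catch(() => undefined); // keep the chain alive after a failure
    return next;
  }
}

// Usage sketch: the difficulty read can never observe a half-written block.
const fifo = new PromiseFifo();
fifo.push(async () => {
  /* write the incoming block */
});
fifo.push(async () => {
  /* read difficulty, then generate the next block */
});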
- await this.server.BlockchainService.pushFIFO('generatingNextBlock', async () => { - const current = (await this.server.dal.getCurrentBlockOrNull()) as DBBlock - const selfPubkey = this.server.keyPair.publicKey; - if (!cancelAlreadyTriggered) { - trial2 = await this.server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey) - } - this.checkTrialIsNotTooHigh(trial2, current, selfPubkey); - if (!cancelAlreadyTriggered) { - unsignedBlock = await this.generator.nextBlock() + await this.server.BlockchainService.pushFIFO( + "generatingNextBlock", + async () => { + const current = (await this.server.dal.getCurrentBlockOrNull()) as DBBlock; + const selfPubkey = this.server.keyPair.publicKey; + if (!cancelAlreadyTriggered) { + trial2 = await this.server + .getBcContext() + .getIssuerPersonalizedDifficulty(selfPubkey); + } + this.checkTrialIsNotTooHigh(trial2, current, selfPubkey); + if (!cancelAlreadyTriggered) { + unsignedBlock = await this.generator.nextBlock(); + } } - }); + ); if (!cancelAlreadyTriggered) { - this.lastComputedBlock = await this.prover.prove(unsignedBlock, trial2, null) + this.lastComputedBlock = await this.prover.prove( + unsignedBlock, + trial2, + null + ); } try { - const obj = parsers.parseBlock.syncWrite(dos2unix(this.lastComputedBlock.getRawSigned())); - await this.server.writeBlock(obj) - await new Promise(res => { - this.server.once('bcEvent', () => res()) - }) + const obj = parsers.parseBlock.syncWrite( + dos2unix(this.lastComputedBlock.getRawSigned()) + ); + await this.server.writeBlock(obj); + await new Promise((res) => { + this.server.once("bcEvent", () => res()); + }); } catch (err) { - this.logger.warn('Proof-of-work self-submission: %s', err.message || err); + this.logger.warn( + "Proof-of-work self-submission: %s", + err.message || err + ); } } } catch (e) { - this.logger.warn('The proof-of-work generation was canceled: %s', (e && e.message) || (e && e.uerr && e.uerr.message) || e || 'unkonwn reason'); + this.logger.warn( + "The proof-of-work generation was canceled: %s", + (e && e.message) || + (e && e.uerr && e.uerr.message) || + e || + "unknown reason" + ); } - } else { - /******************* * OR WAITING PHASE ******************/ @@ -171,22 +198,30 @@ export class PermanentProver { let raceDone = false; - await Promise.race(waitingRaces.concat([ - - // The blockchain has changed! We or someone else found a proof, we must make a gnu one - new Promise((resolve) => this.blockchainChangedResolver = () => { - this.logger.warn('Blockchain changed!'); - resolve(); - }), - - // Security: if nothing happens for a while, trigger the whole process again - new Promise((resolve) => setTimeout(() => { - if (!raceDone) { - this.logger.warn('Security trigger: proof-of-work process seems stuck'); - resolve(); - } - }, this.conf.powSecurityRetryDelay)) - ])); + await Promise.race( + waitingRaces.concat([ + // The blockchain has changed!
We or someone else found a proof, we must make a new one + new Promise( + (resolve) => + (this.blockchainChangedResolver = () => { + this.logger.warn("Blockchain changed!"); + resolve(); + }) + ), + + // Security: if nothing happens for a while, trigger the whole process again + new Promise((resolve) => + setTimeout(() => { + if (!raceDone) { + this.logger.warn( + "Security trigger: proof-of-work process seems stuck" + ); + resolve(); + } + }, this.conf.powSecurityRetryDelay) + ), + ]) + ); raceDone = true; } @@ -196,16 +231,21 @@ export class PermanentProver { this.loops++; // Informative variable - this.logger.trace('PoW loops = %s', this.loops); + this.logger.trace("PoW loops = %s", this.loops); } - permanenceResolve() + permanenceResolve(); } - async blockchainChanged(gottenBlock?:any) { - if (this.server && (!gottenBlock || !this.lastComputedBlock || gottenBlock.hash !== this.lastComputedBlock.hash)) { + async blockchainChanged(gottenBlock?: any) { + if ( + this.server && + (!gottenBlock || + !this.lastComputedBlock || + gottenBlock.hash !== this.lastComputedBlock.hash) + ) { // Cancel any processing proof - await this.prover.cancel() + await this.prover.cancel(); // If we were waiting, stop it and process the continuous generation this.blockchainChangedResolver && this.blockchainChangedResolver(); } @@ -213,21 +253,31 @@ export class PermanentProver { async stopEveryting() { // First: avoid continuing the main loop - this.resolveContinuePromise(true) - this.continuePromise = new Promise((resolve) => this.resolveContinuePromise = resolve); + this.resolveContinuePromise(true); + this.continuePromise = new Promise( + (resolve) => (this.resolveContinuePromise = resolve) + ); // Second: stop any started proof await this.prover.cancel(); // If we were waiting, stop it and process the continuous generation this.blockchainChangedResolver && this.blockchainChangedResolver(); - const farm = await this.prover.getWorker() - await farm.shutDownEngine() + const farm = await this.prover.getWorker(); + await farm.shutDownEngine(); } - private checkTrialIsNotTooHigh(trial:number, current:DBBlock, selfPubkey:string) { - if (trial > (current.powMin + this.conf.powMaxHandicap)) { - this.logger.debug('Trial = %s, powMin = %s, pubkey = %s', trial, current.powMin, selfPubkey.slice(0, 6)); - throw 'Too high difficulty: waiting for other members to write next block'; + private checkTrialIsNotTooHigh( + trial: number, + current: DBBlock, + selfPubkey: string + ) { + if (trial > current.powMin + this.conf.powMaxHandicap) { + this.logger.debug( + "Trial = %s, powMin = %s, pubkey = %s", + trial, + current.powMin, + selfPubkey.slice(0, 6) + ); + throw "Too high difficulty: waiting for other members to write next block"; } } } - diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts index 207b55f12ff04c06781a44c6a025da34d04af9f7..f8d99a629d980d93c0c13b245498cbd366ed3037 100644 --- a/app/modules/prover/lib/powCluster.ts +++ b/app/modules/prover/lib/powCluster.ts @@ -11,72 +11,82 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details.
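The waiting phase just above races an external wake-up (the blockchain changed) against a safety timeout. A condensed, standalone sketch of that watchdog pattern (names are mine, not duniter's):

// Sketch: wait for an external event, but never longer than a safety delay.
function waitWithWatchdog(delayMs: number, log: (msg: string) => void) {
  let raceDone = false;
  let wakeUp: () => void = () => {};
  const externalEvent = new Promise<void>((resolve) => (wakeUp = resolve));
  const watchdog = new Promise<void>((resolve) =>
    setTimeout(() => {
      if (!raceDone) {
        log("Security trigger: process seems stuck");
        resolve();
      }
    }, delayMs)
  );
  const done = Promise.race([externalEvent, watchdog]).then(() => {
    raceDone = true;
  });
  return { done, wakeUp }; // call wakeUp() to release the wait early
}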
-import {ConfDTO} from "../../../lib/dto/ConfDTO" -import {ProverConstants} from "./constants" -import {createPowWorker} from "./proof" -import {PowWorker} from "./PowWorker" -import {FileDAL} from "../../../lib/dal/fileDAL" -import {Underscore} from "../../../lib/common-libs/underscore" -import {ProofAsk} from "./blockProver" -import {ExitCodes} from "../../../lib/common-libs/exit-codes" - -const nuuid = require('node-uuid'); -const cluster = require('cluster') -const querablep = require('querablep') - -let clusterId = 0 -cluster.setMaxListeners(3) +import { ConfDTO } from "../../../lib/dto/ConfDTO"; +import { ProverConstants } from "./constants"; +import { createPowWorker } from "./proof"; +import { PowWorker } from "./PowWorker"; +import { FileDAL } from "../../../lib/dal/fileDAL"; +import { Underscore } from "../../../lib/common-libs/underscore"; +import { ProofAsk } from "./blockProver"; +import { ExitCodes } from "../../../lib/common-libs/exit-codes"; + +const nuuid = require("node-uuid"); +const cluster = require("cluster"); +const querablep = require("querablep"); + +let clusterId = 0; +cluster.setMaxListeners(3); export interface SlaveWorker { - worker:PowWorker, - index:number, - online:Promise<void>, - nonceBeginning:number + worker: PowWorker; + index: number; + online: Promise<void>; + nonceBeginning: number; } /** * Cluster controller, handles the messages between the main program and the PoW cluster. */ export class Master { - - nbCancels = 0 - - clusterId:number - currentPromise:any|null = null - slaves:SlaveWorker[] = [] - slavesMap:{ - [k:number]: SlaveWorker|null - } = {} - conf:any = {} - logger:any - onInfoCallback:any - workersOnline:Promise<any>[] - - constructor(private nbCores:number|null|undefined, logger:any, private dal?:FileDAL) { - this.clusterId = clusterId++ - this.logger = logger || Master.defaultLogger() - this.onInfoMessage = (message:any) => { - this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`) - } + nbCancels = 0; + + clusterId: number; + currentPromise: any | null = null; + slaves: SlaveWorker[] = []; + slavesMap: { + [k: number]: SlaveWorker | null; + } = {}; + conf: any = {}; + logger: any; + onInfoCallback: any; + workersOnline: Promise<any>[]; + + constructor( + private nbCores: number | null | undefined, + logger: any, + private dal?: FileDAL + ) { + this.clusterId = clusterId++; + this.logger = logger || Master.defaultLogger(); + this.onInfoMessage = (message: any) => { + this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`); + }; } get nbWorkers() { - return this.slaves.length + return this.slaves.length; } - set onInfoMessage(callback:any) { - this.onInfoCallback = callback + set onInfoMessage(callback: any) { + this.onInfoCallback = callback; } - onWorkerMessage(workerIndex:number, message:any) { + onWorkerMessage(workerIndex: number, message: any) { // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`) if (message && message.pow) { - this.onInfoCallback && this.onInfoCallback(message) + this.onInfoCallback && this.onInfoCallback(message); } - if (this.currentPromise && message.uuid && !this.currentPromise.isResolved() && message.answer) { - this.logger.info(`ENGINE c#${this.clusterId}#${workerIndex} HAS FOUND A PROOF #${message.answer.pow.pow}`) + if ( + this.currentPromise && + message.uuid && + !this.currentPromise.isResolved() && + message.answer + ) { + this.logger.info( + `ENGINE c#${this.clusterId}#${workerIndex} HAS FOUND A PROOF #${message.answer.pow.pow}` 
+ ); } else if (message.canceled) { - this.nbCancels++ + this.nbCancels++; } // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message) } @@ -89,28 +99,37 @@ export class Master { // Setup master cluster.setupMaster({ exec: __filename, - execArgv: [] // Do not try to debug forks - }) + execArgv: [], // Do not try to debug forks + }); - const nbCores = this.nbCores !== undefined && this.nbCores !== null ? this.nbCores : 1 + const nbCores = + this.nbCores !== undefined && this.nbCores !== null ? this.nbCores : 1; this.slaves = Array.from({ length: nbCores }).map((value, index) => { - const nodejsWorker = cluster.fork() - const worker = new PowWorker(nodejsWorker, message => { - this.onWorkerMessage(index, message) - }, () => { - this.logger.info(`[online] worker c#${this.clusterId}#w#${index}`) - worker.sendConf({ - rootPath: this.dal ? this.dal.rootPath : '', - command: 'conf', - value: this.conf - }) - }, (code:any, signal:any) => { - this.logger.info(`worker ${worker.pid} died with code ${code} and signal ${signal}`) - }) + const nodejsWorker = cluster.fork(); + const worker = new PowWorker( + nodejsWorker, + (message) => { + this.onWorkerMessage(index, message); + }, + () => { + this.logger.info(`[online] worker c#${this.clusterId}#w#${index}`); + worker.sendConf({ + rootPath: this.dal ? this.dal.rootPath : "", + command: "conf", + value: this.conf, + }); + }, + (code: any, signal: any) => { + this.logger.info( + `worker ${worker.pid} died with code ${code} and signal ${signal}` + ); + } + ); - this.logger.info(`Creating worker c#${this.clusterId}#w#${nodejsWorker.id}`) + this.logger.info( + `Creating worker c#${this.clusterId}#w#${nodejsWorker.id}` + ); const slave = { - // The Node.js worker worker, @@ -121,151 +140,162 @@ export class Master { online: worker.online, // Each worker has his own chunk of possible nonces - nonceBeginning: this.nbCores === 1 ? 0 : (index + 1) * ProverConstants.NONCE_RANGE - } - this.slavesMap[nodejsWorker.id] = slave - return slave - }) - - this.workersOnline = this.slaves.map((s) => s.online) - return Promise.all(this.workersOnline) + nonceBeginning: + this.nbCores === 1 ? 0 : (index + 1) * ProverConstants.NONCE_RANGE, + }; + this.slavesMap[nodejsWorker.id] = slave; + return slave; + }); + + this.workersOnline = this.slaves.map((s) => s.online); + return Promise.all(this.workersOnline); } - changeConf(conf:ConfDTO) { - this.logger.info(`Changing conf to: ${JSON.stringify(conf)} on PoW cluster`) - this.conf.cpu = conf.cpu || this.conf.cpu - this.conf.prefix = this.conf.prefix || conf.prefix - this.slaves.forEach(s => { + changeConf(conf: ConfDTO) { + this.logger.info( + `Changing conf to: ${JSON.stringify(conf)} on PoW cluster` + ); + this.conf.cpu = conf.cpu || this.conf.cpu; + this.conf.prefix = this.conf.prefix || conf.prefix; + this.slaves.forEach((s) => { s.worker.sendConf({ - rootPath: '', - command: 'conf', - value: this.conf - }) - }) - return Promise.resolve(Underscore.clone(conf)) + rootPath: "", + command: "conf", + value: this.conf, + }); + }); + return Promise.resolve(Underscore.clone(conf)); } private cancelWorkersWork() { - this.slaves.forEach(s => { - s.worker.sendCancel() - }) + this.slaves.forEach((s) => { + s.worker.sendCancel(); + }); if (this.dal) { - this.dal.powDAL.writeCurrent("") + this.dal.powDAL.writeCurrent(""); } } async cancelWork() { - const workEnded = this.currentPromise + const workEnded = this.currentPromise; // Don't await the cancellation! 
- this.cancelWorkersWork() + this.cancelWorkersWork(); // Current promise is done - this.currentPromise = null - return await workEnded + this.currentPromise = null; + return await workEnded; } async shutDownWorkers() { if (this.workersOnline) { - await Promise.all(this.workersOnline) - await Promise.all(this.slaves.map(async (s) => { - s.worker.kill() - })) + await Promise.all(this.workersOnline); + await Promise.all( + this.slaves.map(async (s) => { + s.worker.kill(); + }) + ); } - this.slaves = [] + this.slaves = []; } async proveByWorkers(stuff: ProofAsk) { - // Eventually spawn the workers if (this.slaves.length === 0) { - this.initCluster() + this.initCluster(); } if (this.dal) { - await this.dal.powDAL.writeCurrent([stuff.newPoW.block.number - 1, stuff.newPoW.block.previousHash].join('-')) + await this.dal.powDAL.writeCurrent( + [stuff.newPoW.block.number - 1, stuff.newPoW.block.previousHash].join( + "-" + ) + ); } // Register the new proof uuid - const uuid = nuuid.v4() - this.currentPromise = querablep((async () => { - await Promise.all(this.workersOnline) - - if (!this.currentPromise) { - this.logger.info(`Proof canceled during workers' initialization`) - return null - } - - // Start the salves' job - const asks = this.slaves.map(async (s, index) => { - const nonceBeginning = stuff.specialNonce || s.nonceBeginning - const proof = await s.worker.askProof({ uuid, - command: 'newPoW', - value: { - rootPath: this.dal ? this.dal.rootPath : '', - initialTestsPerRound: stuff.initialTestsPerRound, - maxDuration: stuff.maxDuration, - block: stuff.newPoW.block, - nonceBeginning, - zeros: stuff.newPoW.zeros, - highMark: stuff.newPoW.highMark, - pair: Underscore.clone(stuff.newPoW.pair), - forcedTime: stuff.newPoW.forcedTime, - conf: { - powNoSecurity: stuff.newPoW.conf.powNoSecurity, - medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks, - avgGenTime: stuff.newPoW.conf.avgGenTime, - cpu: stuff.newPoW.conf.cpu, - prefix: stuff.newPoW.conf.prefix - } - } - }) - this.logger.info(`[done] worker c#${this.clusterId}#w#${index}`) - return { - workerID: index, - proof + const uuid = nuuid.v4(); + this.currentPromise = querablep( + (async () => { + await Promise.all(this.workersOnline); + + if (!this.currentPromise) { + this.logger.info(`Proof canceled during workers' initialization`); + return null; } - }) - - // Find a proof - const result = await Promise.race(asks) - // Don't await the cancellation! - this.cancelWorkersWork() - // Wait for all workers to have stopped looking for a proof - await Promise.all(asks) - - if (!result.proof || !result.proof.message.answer) { - this.logger.info('No engine found the proof. It was probably cancelled.') - return null - } else { - this.logger.info(`ENGINE c#${this.clusterId}#${result.workerID} HAS FOUND A PROOF #${result.proof.message.answer.pow.pow}`) - return result.proof.message.answer - } - })()) - - return this.currentPromise + + // Start the slaves' job + const asks = this.slaves.map(async (s, index) => { + const nonceBeginning = stuff.specialNonce || s.nonceBeginning; + const proof = await s.worker.askProof({ + uuid, + command: "newPoW", + value: { + rootPath: this.dal ?
this.dal.rootPath : "", + initialTestsPerRound: stuff.initialTestsPerRound, + maxDuration: stuff.maxDuration, + block: stuff.newPoW.block, + nonceBeginning, + zeros: stuff.newPoW.zeros, + highMark: stuff.newPoW.highMark, + pair: Underscore.clone(stuff.newPoW.pair), + forcedTime: stuff.newPoW.forcedTime, + conf: { + powNoSecurity: stuff.newPoW.conf.powNoSecurity, + medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks, + avgGenTime: stuff.newPoW.conf.avgGenTime, + cpu: stuff.newPoW.conf.cpu, + prefix: stuff.newPoW.conf.prefix, + }, + }, + }); + this.logger.info(`[done] worker c#${this.clusterId}#w#${index}`); + return { + workerID: index, + proof, + }; + }); + + // Find a proof + const result = await Promise.race(asks); + // Don't await the cancellation! + this.cancelWorkersWork(); + // Wait for all workers to have stopped looking for a proof + await Promise.all(asks); + + if (!result.proof || !result.proof.message.answer) { + this.logger.info( + "No engine found the proof. It was probably cancelled." + ); + return null; + } else { + this.logger.info( + `ENGINE c#${this.clusterId}#${result.workerID} HAS FOUND A PROOF #${result.proof.message.answer.pow.pow}` + ); + return result.proof.message.answer; + } + })() + ); + + return this.currentPromise; } static defaultLogger() { return { - info: (message:any) => {} - } + info: (message: any) => {}, + }; } } if (cluster.isMaster) { - // Super important for Node.js debugging - const debug = process.execArgv.toString().indexOf('--debug') !== -1; - if(debug) { + const debug = process.execArgv.toString().indexOf("--debug") !== -1; + if (debug) { //Set an unused port number. process.execArgv = []; } - } else { - - process.on("SIGTERM", function() { - process.exit(ExitCodes.OK) + process.on("SIGTERM", function () { + process.exit(ExitCodes.OK); }); - createPowWorker() + createPowWorker(); } diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts index f99d53aba5d1a751ff209bca6226bf4b22752fb0..2f5e68d1cfa9a818f67e8a88bf63ae518889a709 100644 --- a/app/modules/prover/lib/proof.ts +++ b/app/modules/prover/lib/proof.ts @@ -11,54 +11,53 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
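proveByWorkers above implements a race-and-cancel scheme: every worker searches a disjoint nonce window (the final nonce being composed as prefix + nonceBeginning + nonce), the first answer wins, and the losers are told to stop. A self-contained sketch of that scheme, with a simulated worker API and a placeholder NONCE_RANGE standing in for ProverConstants.NONCE_RANGE:

const NONCE_RANGE = 1000 * 1000; // placeholder, not the real constant's value

interface WorkerLike {
  askProof(nonceBeginning: number): Promise<number | null>;
  cancel(): void;
}

async function raceWorkers(workers: WorkerLike[]): Promise<number | null> {
  const asks = workers.map((w, index) =>
    // Worker #index starts at (index + 1) * NONCE_RANGE, like nonceBeginning above
    w.askProof(workers.length === 1 ? 0 : (index + 1) * NONCE_RANGE)
  );
  const first = await Promise.race(asks); // first settled answer wins
  workers.forEach((w) => w.cancel()); // tell the losers to stop searching
  await Promise.all(asks); // wait until every worker has actually stopped
  return first;
}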
-import * as moment from "moment" -import {LOCAL_RULES_HELPERS} from "../../../lib/rules/local_rules" -import {hashf} from "../../../lib/common" -import {DBBlock} from "../../../lib/db/DBBlock" -import {ConfDTO} from "../../../lib/dto/ConfDTO" -import {ProverConstants} from "./constants" -import {Ed25519Signator, KeyPairBuilder} from "duniteroxyde" -import {dos2unix} from "../../../lib/common-libs/dos2unix" -import {rawer} from "../../../lib/common-libs/index" -import {ProcessCpuProfiler} from "../../../ProcessCpuProfiler" -import {PowDAL} from "../../../lib/dal/fileDALs/PowDAL"; -import {Directory} from "../../../lib/system/directory" -import {ExitCodes} from "../../../lib/common-libs/exit-codes" - -const querablep = require('querablep'); +import * as moment from "moment"; +import { LOCAL_RULES_HELPERS } from "../../../lib/rules/local_rules"; +import { hashf } from "../../../lib/common"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { ConfDTO } from "../../../lib/dto/ConfDTO"; +import { ProverConstants } from "./constants"; +import { Ed25519Signator, KeyPairBuilder } from "duniteroxyde"; +import { dos2unix } from "../../../lib/common-libs/dos2unix"; +import { rawer } from "../../../lib/common-libs/index"; +import { ProcessCpuProfiler } from "../../../ProcessCpuProfiler"; +import { PowDAL } from "../../../lib/dal/fileDALs/PowDAL"; +import { Directory } from "../../../lib/system/directory"; +import { ExitCodes } from "../../../lib/common-libs/exit-codes"; + +const querablep = require("querablep"); export function createPowWorker() { - - let powDAL:PowDAL|null = null + let powDAL: PowDAL | null = null; let computing = querablep(Promise.resolve(null)); let askedStop = false; -// By default, we do not prefix the PoW by any number + // By default, we do not prefix the PoW by any number let prefix = 0; - let sigFuncSaved: (msg:string) => string; - let lastSecret:any, lastVersion: number, currentCPU:number = 1; + let sigFuncSaved: (msg: string) => string; + let lastSecret: any, + lastVersion: number, + currentCPU: number = 1; - process.on('uncaughtException', (err:any) => { - console.error(err.stack || Error(err)) + process.on("uncaughtException", (err: any) => { + console.error(err.stack || Error(err)); if (process.send) { - process.send({error: err}); + process.send({ error: err }); } else { - throw Error('process.send() is not defined') + throw Error("process.send() is not defined"); } }); - process.on('unhandledRejection', () => { - process.exit(ExitCodes.OK) - }) - - process.on('message', async (message) => { + process.on("unhandledRejection", () => { + process.exit(ExitCodes.OK); + }); + process.on("message", async (message) => { switch (message.command) { - - case 'newPoW': + case "newPoW": (async () => { - askedStop = true + askedStop = true; // Very important: do not await if the computation is already done, to keep the lock on JS engine if (!computing.isFulfilled()) { @@ -66,282 +65,332 @@ export function createPowWorker() { } if (message.value.rootPath) { - const params = await Directory.getHomeFS(false, message.value.rootPath, false) - powDAL = new PowDAL(message.value.rootPath, params.fs) + const params = await Directory.getHomeFS( + false, + message.value.rootPath, + false + ); + powDAL = new PowDAL(message.value.rootPath, params.fs); } const res = await beginNewProofOfWork(message.value); answer(message, res); - })() + })(); break; - case 'cancel': + case "cancel": if (!computing.isFulfilled()) { askedStop = true; } break; - case 'conf': + case "conf": if 
(message.value.cpu !== undefined) { - currentCPU = message.value.cpu + currentCPU = message.value.cpu; } if (message.value.prefix !== undefined) { - prefix = message.value.prefix + prefix = message.value.prefix; } answer(message, { currentCPU, prefix }); break; } + }); - }) - - function beginNewProofOfWork(stuff:any) { + function beginNewProofOfWork(stuff: any) { askedStop = false; - computing = querablep((async () => { - - /***************** - * PREPARE POW STUFF - ****************/ - - let nonce = 0; - const maxDuration = stuff.maxDuration || 1000 - const conf = stuff.conf; - const block = stuff.block; - const nonceBeginning = stuff.nonceBeginning; - const nbZeros = stuff.zeros; - const pair = stuff.pair; - const forcedTime = stuff.forcedTime; - currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU; - prefix = parseInt(conf.prefix || prefix) - if (prefix && prefix < ProverConstants.NONCE_RANGE) { - prefix *= 100 * ProverConstants.NONCE_RANGE - } - const highMark = stuff.highMark; - - // Define sigFunc - const signator = KeyPairBuilder.fromSecretKey(pair.sec); - let sigFunc = null; - if (sigFuncSaved && lastSecret === pair.sec) { - sigFunc = sigFuncSaved; - } else { - lastSecret = pair.sec; - sigFunc = (msg:string) => signator.sign(msg) - } - - /***************** - * GO! - ****************/ - - let pow = "", sig = "", raw = ""; - let pausePeriod = 1; - let testsCount = 0; - let found = false; - let turn = 0; - const profiler = new ProcessCpuProfiler(100) - let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1) - // We limit the number of tests according to CPU usage - let testsPerRound = stuff.initialTestsPerRound || 1 - let turnDuration = 20 // We initially goes quickly to the max speed = 50 reevaluations per second (1000 / 20) - - while (!found && !askedStop) { - + computing = querablep( + (async () => { /***************** - * A TURN ~ 100ms + * PREPARE POW STUFF ****************/ - await Promise.race([ - - // I. Stop the turn if it exceeds `turnDuration` ms - countDown(turnDuration), - - // II. 
Process the turn's PoW - (async () => { - - // Prove - let i = 0; - const thisTurn = turn; - - // Time is updated regularly during the proof - block.time = getBlockTime(block, conf, forcedTime) - if (block.number === 0) { - block.medianTime = block.time - } - block.inner_hash = getBlockInnerHash(block); - - /***************** - * Iterations of a turn - ****************/ - - while(!found && i < testsPerRound && thisTurn === turn && !askedStop) { - - // Nonce change (what makes the PoW change if the time field remains the same) - nonce++ - - /***************** - * A PROOF OF WORK - ****************/ - - // The final nonce is composed of 3 parts - block.nonce = prefix + nonceBeginning + nonce - raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n") - sig = dos2unix(sigFunc(raw)) - pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase() + let nonce = 0; + const maxDuration = stuff.maxDuration || 1000; + const conf = stuff.conf; + const block = stuff.block; + const nonceBeginning = stuff.nonceBeginning; + const nbZeros = stuff.zeros; + const pair = stuff.pair; + const forcedTime = stuff.forcedTime; + currentCPU = conf.cpu || ProverConstants.DEFAULT_CPU; + prefix = parseInt(conf.prefix || prefix); + if (prefix && prefix < ProverConstants.NONCE_RANGE) { + prefix *= 100 * ProverConstants.NONCE_RANGE; + } + const highMark = stuff.highMark; + + // Define sigFunc + const signator = KeyPairBuilder.fromSecretKey(pair.sec); + let sigFunc = null; + if (sigFuncSaved && lastSecret === pair.sec) { + sigFunc = sigFuncSaved; + } else { + lastSecret = pair.sec; + sigFunc = (msg: string) => signator.sign(msg); + } - /***************** - * Check the POW result - ****************/ + /***************** + * GO! + ****************/ - let j = 0, charOK = true; - while (j < nbZeros && charOK) { - charOK = pow[j] === '0'; - j++; - } - if (charOK) { - found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']'))) - } - if (!found && nbZeros > 0 && j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW) { - pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }}); + let pow = "", + sig = "", + raw = ""; + let pausePeriod = 1; + let testsCount = 0; + let found = false; + let turn = 0; + const profiler = new ProcessCpuProfiler(100); + let cpuUsage = profiler.cpuUsageOverLastMilliseconds(1); + // We limit the number of tests according to CPU usage + let testsPerRound = stuff.initialTestsPerRound || 1; + let turnDuration = 20; // We initially go quickly to the max speed = 50 reevaluations per second (1000 / 20) + + while (!found && !askedStop) { + /***************** + * A TURN ~ 100ms + ****************/ + + await Promise.race([ + // I. Stop the turn if it exceeds `turnDuration` ms + countDown(turnDuration), + + // II.
Process the turn's PoW + (async () => { + // Prove + let i = 0; + const thisTurn = turn; + + // Time is updated regularly during the proof + block.time = getBlockTime(block, conf, forcedTime); + if (block.number === 0) { + block.medianTime = block.time; } + block.inner_hash = getBlockInnerHash(block); /***************** - * - Update local vars - * - Allow to receive stop signal + * Iterations of a turn ****************/ - if (!found && !askedStop) { - i++; - testsCount++; - if (i % pausePeriod === 0) { - await countDown(1); // Very low pause, just the time to process eventual end of the turn + while ( + !found && + i < testsPerRound && + thisTurn === turn && + !askedStop + ) { + // Nonce change (what makes the PoW change if the time field remains the same) + nonce++; + + /***************** + * A PROOF OF WORK + ****************/ + + // The final nonce is composed of 3 parts + block.nonce = prefix + nonceBeginning + nonce; + raw = dos2unix( + "InnerHash: " + + block.inner_hash + + "\nNonce: " + + block.nonce + + "\n" + ); + sig = dos2unix(sigFunc(raw)); + pow = hashf( + "InnerHash: " + + block.inner_hash + + "\nNonce: " + + block.nonce + + "\n" + + sig + + "\n" + ).toUpperCase(); + + /***************** + * Check the POW result + ****************/ + + let j = 0, + charOK = true; + while (j < nbZeros && charOK) { + charOK = pow[j] === "0"; + j++; } - } - } + if (charOK) { + found = !!pow[nbZeros].match( + new RegExp("[0-" + highMark + "]") + ); + } + if ( + !found && + nbZeros > 0 && + j - 1 >= ProverConstants.POW_MINIMAL_TO_SHOW + ) { + pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros } }); + } + + /***************** + * - Update local vars + * - Allow to receive stop signal + ****************/ - /***************** - * Check the POW result - ****************/ - if (!found) { - - // CPU speed recording - if (turn > 0) { - cpuUsage = profiler.cpuUsageOverLastMilliseconds(turnDuration) - if (cpuUsage > currentCPU + 0.005 || cpuUsage < currentCPU - 0.005) { - let powVariationFactor - // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses - if (currentCPU > cpuUsage) { - powVariationFactor = 1.01 - testsPerRound = Math.max(1, Math.ceil(testsPerRound * powVariationFactor)) - } else { - powVariationFactor = 0.99 - testsPerRound = Math.max(1, Math.floor(testsPerRound * powVariationFactor)) + if (!found && !askedStop) { + i++; + testsCount++; + if (i % pausePeriod === 0) { + await countDown(1); // Very low pause, just the time to process eventual end of the turn } - pausePeriod = Math.floor(testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND) } } /***************** - * UNLOAD CPU CHARGE FOR THIS TURN + * Check the POW result ****************/ - // We wait for a maximum time of `turnDuration`. - // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script - // just does nothing: this gives of a bit of breath to the CPU. Tthe amount of "breath" depends on the "cpu" - // parameter. 
- await countDown(turnDuration); - } - })() - ]); - - // console.log('W#%s.powDAL = ', process.pid, powDAL) - - if (powDAL && !conf.powNoSecurity) { - const currentProofCheck = await powDAL.getCurrent() - if (currentProofCheck !== null) { - if (currentProofCheck === "") { - askedStop = true - } else { - const [currentNumber, currentHash] = currentProofCheck.split('-') - if (block.number !== parseInt(currentNumber) + 1 || block.previousHash !== currentHash) { - askedStop = true + if (!found) { + // CPU speed recording + if (turn > 0) { + cpuUsage = profiler.cpuUsageOverLastMilliseconds( + turnDuration + ); + if ( + cpuUsage > currentCPU + 0.005 || + cpuUsage < currentCPU - 0.005 + ) { + let powVariationFactor; + // powVariationFactor = currentCPU / (cpuUsage || 0.01) / 5 // divide by 2 to avoid extreme responses + if (currentCPU > cpuUsage) { + powVariationFactor = 1.01; + testsPerRound = Math.max( + 1, + Math.ceil(testsPerRound * powVariationFactor) + ); + } else { + powVariationFactor = 0.99; + testsPerRound = Math.max( + 1, + Math.floor(testsPerRound * powVariationFactor) + ); + } + pausePeriod = Math.floor( + testsPerRound / ProverConstants.POW_NB_PAUSES_PER_ROUND + ); + } + } + + /***************** + * UNLOAD CPU CHARGE FOR THIS TURN + ****************/ + // We wait for a maximum time of `turnDuration`. + // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script + // just does nothing: this gives of a bit of breath to the CPU. Tthe amount of "breath" depends on the "cpu" + // parameter. + await countDown(turnDuration); + } + })(), + ]); + + // console.log('W#%s.powDAL = ', process.pid, powDAL) + + if (powDAL && !conf.powNoSecurity) { + const currentProofCheck = await powDAL.getCurrent(); + if (currentProofCheck !== null) { + if (currentProofCheck === "") { + askedStop = true; + } else { + const [currentNumber, currentHash] = currentProofCheck.split( + "-" + ); + if ( + block.number !== parseInt(currentNumber) + 1 || + block.previousHash !== currentHash + ) { + askedStop = true; + } } } } - } - - // Next turn - turn++ - - turnDuration += 1 - turnDuration = Math.min(turnDuration, maxDuration) // Max 1 second per turn - } - - /***************** - * POW IS OVER - * ----------- - * - * We either have found a valid POW or a stop event has been detected. - ****************/ - if (askedStop) { + // Next turn + turn++; - // PoW stopped - askedStop = false; - pSend({ canceled: true }) - return null + turnDuration += 1; + turnDuration = Math.min(turnDuration, maxDuration); // Max 1 second per turn + } - } else { + /***************** + * POW IS OVER + * ----------- + * + * We either have found a valid POW or a stop event has been detected. 
+ ****************/ - // PoW success - block.hash = pow - block.signature = sig - return { - pow: { - block: block, - testsCount: testsCount, - pow: pow - } + if (askedStop) { + // PoW stopped + askedStop = false; + pSend({ canceled: true }); + return null; + } else { + // PoW success + block.hash = pow; + block.signature = sig; + return { + pow: { + block: block, + testsCount: testsCount, + pow: pow, + }, + }; } - } - })()) + })() + ); return computing; } - function countDown(duration:number) { + function countDown(duration: number) { return new Promise((resolve) => setTimeout(resolve, duration)); } - function getBlockInnerHash(block:DBBlock) { + function getBlockInnerHash(block: DBBlock) { const raw = rawer.getBlockInnerPart(block); - return hashf(raw) + return hashf(raw); } - function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) { + function getBlockTime( + block: DBBlock, + conf: ConfDTO, + forcedTime: number | null + ) { if (forcedTime) { return forcedTime; } const now = moment.utc().unix(); const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf); - const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0; + const timeoffset = + block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0; const medianTime = block.medianTime; - const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset); + const upperBound = + block.number === 0 + ? medianTime + : Math.min(medianTime + maxAcceleration, now - timeoffset); return Math.max(medianTime, upperBound); } - function answer(message:any, theAnswer:any) { + function answer(message: any, theAnswer: any) { return pSend({ uuid: message.uuid, - answer: theAnswer - }) + answer: theAnswer, + }); } - function pSend(stuff:any) { + function pSend(stuff: any) { return new Promise(function (resolve, reject) { if (process.send) { - process.send(stuff, function (error:any) { + process.send(stuff, function (error: any) { !error && resolve(); error && reject(); - }) + }); } else { - reject('process.send() is not defined') + reject("process.send() is not defined"); } }); } diff --git a/app/modules/prover/lib/prover.ts b/app/modules/prover/lib/prover.ts index a16bcc40f0910d52bcb3d6ed6aa8daaa0d7ced21..c1adcbdea0927d6ed8d735e9f892ba128056731b 100644 --- a/app/modules/prover/lib/prover.ts +++ b/app/modules/prover/lib/prover.ts @@ -12,31 +12,33 @@ // GNU Affero General Public License for more details. 
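Worth spelling out: getBlockTime above clamps the new block's time to max(medianTime, min(medianTime + maxAcceleration, now - timeoffset)). A worked example with invented numbers:

// All values below are made up for illustration only.
const medianTime = 1000000; // current median time of the chain
const maxAcceleration = 600; // from LOCAL_RULES_HELPERS.maxAcceleration(conf)
const now = 1000800; // moment.utc().unix()
const timeoffset = 0; // because block.number >= conf.medianTimeBlocks

// upperBound = min(1000000 + 600, 1000800 - 0) = 1000600
const upperBound = Math.min(medianTime + maxAcceleration, now - timeoffset);
// time = max(1000000, 1000600) = 1000600: capped by maxAcceleration, never below medianTime
const blockTime = Math.max(medianTime, upperBound);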
"use strict"; -import {PermanentProver} from "./permanentProver" -import * as stream from "stream" -import {OtherConstants} from "../../../lib/other_constants" -import {Server} from "../../../../server" +import { PermanentProver } from "./permanentProver"; +import * as stream from "stream"; +import { OtherConstants } from "../../../lib/other_constants"; +import { Server } from "../../../../server"; export class Prover extends stream.Transform { + permaProver: PermanentProver; - permaProver:PermanentProver - - constructor(server:Server) { - super({ objectMode: true }) - this.permaProver = new PermanentProver(server) + constructor(server: Server) { + super({ objectMode: true }); + this.permaProver = new PermanentProver(server); } - _write(obj:any, enc:any, done:any) { + _write(obj: any, enc: any, done: any) { // Never close the stream if (obj) { - if (obj.bcEvent && obj.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || obj.bcEvent === OtherConstants.BC_EVENT.SWITCHED) { + if ( + (obj.bcEvent && obj.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED) || + obj.bcEvent === OtherConstants.BC_EVENT.SWITCHED + ) { this.permaProver.blockchainChanged(obj.block); } else if (obj.cpu !== undefined) { this.permaProver.prover.changeCPU(obj.cpu); // We multiply by 10 to give room to computers with < 100 cores } } done && done(); - }; + } async startService() { this.permaProver.allowedToStart(); diff --git a/app/modules/reapply.ts b/app/modules/reapply.ts index 0fcb1b206d56b29ae4c75cd268b73ff161a4c191..873120d5b49a57dc796dea603501850cf1f3c9cf 100644 --- a/app/modules/reapply.ts +++ b/app/modules/reapply.ts @@ -12,28 +12,36 @@ // GNU Affero General Public License for more details. "use strict"; -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; module.exports = { duniter: { - cli: [{ - name: 'reapply-to [number]', - desc: 'Reapply reverted blocks until block #[number] is reached. EXPERIMENTAL', - preventIfRunning: true, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const number = params[0]; - const logger = server.logger; - try { - await server.reapplyTo(number); - } catch (err) { - logger.error('Error during reapply:', err); - } - // Save DB - if (server) { - await server.disconnect(); - } - } - }] - } -} + cli: [ + { + name: "reapply-to [number]", + desc: + "Reapply reverted blocks until block #[number] is reached. EXPERIMENTAL", + preventIfRunning: true, + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const number = params[0]; + const logger = server.logger; + try { + await server.reapplyTo(number); + } catch (err) { + logger.error("Error during reapply:", err); + } + // Save DB + if (server) { + await server.disconnect(); + } + }, + }, + ], + }, +}; diff --git a/app/modules/reset.ts b/app/modules/reset.ts index 96a883304afc7d4cb29c7996dfeca4d071d9c331..b06a65fc8e8ff607f9d1f14bf473fd4e9232bca6 100644 --- a/app/modules/reset.ts +++ b/app/modules/reset.ts @@ -12,52 +12,59 @@ // GNU Affero General Public License for more details. 
"use strict"; -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; -const constants = require('../lib/constants'); -const wizard = require('../lib/wizard'); -const logger = require('../lib/logger').NewLogger('wizard'); +const constants = require("../lib/constants"); +const wizard = require("../lib/wizard"); +const logger = require("../lib/logger").NewLogger("wizard"); module.exports = { duniter: { + cli: [ + { + name: "reset [config|data|peers|tx|stats|all]", + desc: + "Reset configuration, data, peers, transactions or everything in the database", + preventIfRunning: true, - cli: [{ - name: 'reset [config|data|peers|tx|stats|all]', - desc: 'Reset configuration, data, peers, transactions or everything in the database', - preventIfRunning: true, - - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const type = params[0]; - if (type === 'peers') { - // Needs the DAL plugged - await server.initDAL(); - } - switch (type) { - case 'data': - await server.resetData(); - logger.warn('Data successfully reseted.'); - break; - case 'peers': - await server.resetPeers(); - logger.warn('Peers successfully reseted.'); - break; - case 'stats': - await server.resetStats(); - logger.warn('Stats successfully reseted.'); - break; - case 'config': - await server.resetConf(); - logger.warn('Configuration successfully reseted.'); - break; - case 'all': - await server.resetAll(); - logger.warn('Data & Configuration successfully reseted.'); - break; - default: - throw constants.ERRORS.CLI_CALLERR_RESET; - } - } - }] - } + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const type = params[0]; + if (type === "peers") { + // Needs the DAL plugged + await server.initDAL(); + } + switch (type) { + case "data": + await server.resetData(); + logger.warn("Data successfully reseted."); + break; + case "peers": + await server.resetPeers(); + logger.warn("Peers successfully reseted."); + break; + case "stats": + await server.resetStats(); + logger.warn("Stats successfully reseted."); + break; + case "config": + await server.resetConf(); + logger.warn("Configuration successfully reseted."); + break; + case "all": + await server.resetAll(); + logger.warn("Data & Configuration successfully reseted."); + break; + default: + throw constants.ERRORS.CLI_CALLERR_RESET; + } + }, + }, + ], + }, }; diff --git a/app/modules/revert.ts b/app/modules/revert.ts index cc9c890d6c007555211016e301e87dc76cd60045..2c53bcfb2083b5624f79504ade90aa3ad6004471 100644 --- a/app/modules/revert.ts +++ b/app/modules/revert.ts @@ -11,45 +11,60 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; module.exports = { duniter: { - cli: [{ - name: 'revert [count]', - desc: 'Revert (undo + remove) the top [count] blocks from the blockchain. EXPERIMENTAL', - preventIfRunning: true, + cli: [ + { + name: "revert [count]", + desc: + "Revert (undo + remove) the top [count] blocks from the blockchain. 
EXPERIMENTAL", + preventIfRunning: true, - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const count = params[0]; - const logger = server.logger; - try { - for (let i = 0; i < count; i++) { - await server.revert(); + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const count = params[0]; + const logger = server.logger; + try { + for (let i = 0; i < count; i++) { + await server.revert(); + } + } catch (err) { + logger.error("Error during revert:", err); } - } catch (err) { - logger.error('Error during revert:', err); - } - // Save DB - await server.disconnect(); - } - },{ - name: 'revert-to [number]', - desc: 'Revert (undo + remove) top blockchain blocks until block #[number] is reached. EXPERIMENTAL', - onDatabaseExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const number = params[0]; - const logger = server.logger; - try { - await server.revertTo(number); - } catch (err) { - logger.error('Error during revert:', err); - } - // Save DB - if (server) { + // Save DB await server.disconnect(); - } - } - }] - } -} + }, + }, + { + name: "revert-to [number]", + desc: + "Revert (undo + remove) top blockchain blocks until block #[number] is reached. EXPERIMENTAL", + onDatabaseExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const number = params[0]; + const logger = server.logger; + try { + await server.revertTo(number); + } catch (err) { + logger.error("Error during revert:", err); + } + // Save DB + if (server) { + await server.disconnect(); + } + }, + }, + ], + }, +}; diff --git a/app/modules/router.ts b/app/modules/router.ts index da33c528548ae5b525ca30ac650b47b28197278c..22b9fe6755ebeceefea8f70a23e81211807af71b 100644 --- a/app/modules/router.ts +++ b/app/modules/router.ts @@ -12,55 +12,58 @@ // GNU Affero General Public License for more details. "use strict"; -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" -import * as stream from "stream" -import {Multicaster} from "../lib/streams/multicaster" -import {RouterStream} from "../lib/streams/router" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; +import * as stream from "stream"; +import { Multicaster } from "../lib/streams/multicaster"; +import { RouterStream } from "../lib/streams/router"; export const RouterDependency = { duniter: { service: { - output: (server:Server, conf:ConfDTO, logger:any) => new Router(server) + output: (server: Server, conf: ConfDTO, logger: any) => + new Router(server), }, methods: { - routeToNetwork: (server:Server) => { + routeToNetwork: (server: Server) => { const theRouter = new Router(server); theRouter.startService(); server.pipe(theRouter); - } - } - } -} + }, + }, + }, +}; /** * Service which triggers the server's peering generation (actualization of the Peer document). 
* @constructor */ export class Router extends stream.Transform { + theRouter: any; + theMulticaster: Multicaster = new Multicaster(); - theRouter:any - theMulticaster:Multicaster = new Multicaster() - - constructor(private server:Server) { - super({ objectMode: true }) + constructor(private server: Server) { + super({ objectMode: true }); } - _write(obj:any, enc:string, done:any) { + _write(obj: any, enc: string, done: any) { // Never close the stream if (obj) { this.push(obj); } done && done(); - }; + } async startService() { if (this.server.conf.nobma || !this.server.conf.bmaWithCrawler) { // Disable BMA - return Promise.resolve() + return Promise.resolve(); } if (!this.theRouter) { - this.theRouter = new RouterStream(this.server.PeeringService, this.server.dal) + this.theRouter = new RouterStream( + this.server.PeeringService, + this.server.dal + ); } this.theRouter.setActive(true); this.theRouter.setConfDAL(this.server.dal); @@ -71,9 +74,8 @@ export class Router extends stream.Transform { * - The server will eventually be notified of network failures */ // The router asks for multicasting of documents - this - .pipe(this.theRouter) - // The documents get sent to peers + this.pipe(this.theRouter) + // The documents get sent to peers .pipe(this.theMulticaster) // The multicaster may answer 'unreachable peer' .pipe(this.theRouter); @@ -82,7 +84,7 @@ export class Router extends stream.Transform { async stopService() { if (this.server.conf.nobma || !this.server.conf.bmaWithCrawler) { // Disable BMA - return Promise.resolve() + return Promise.resolve(); } this.unpipe(); this.theRouter && this.theRouter.unpipe(); diff --git a/app/modules/upnp-provider.ts b/app/modules/upnp-provider.ts index e9f533fd729f54da78c778fe1978461513c55646..776453de15c99ea4e34140a4926da2146ec1c0a8 100644 --- a/app/modules/upnp-provider.ts +++ b/app/modules/upnp-provider.ts @@ -11,22 +11,21 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
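The piping in startService above forms a small feedback loop: documents flow from the router to the multicaster, and "unreachable peer" notices flow back into the router. A minimal standalone sketch of such a loop with plain Node streams (the document and notice shapes are invented for illustration):

import { Transform, TransformCallback } from "stream";

// "router": forwards documents out, and consumes feedback without re-emitting it.
const router = new Transform({
  objectMode: true,
  transform(obj: any, _enc: string, done: TransformCallback) {
    if (obj.unreachable) {
      console.log("router: peer marked unreachable:", obj.peer);
      done(); // feedback consumed; nothing pushed, so the loop terminates
    } else {
      done(null, obj); // outgoing document, handed to the multicaster
    }
  },
});

// "multicaster": pretends the send failed and answers with a notice.
const multicaster = new Transform({
  objectMode: true,
  transform(doc: any, _enc: string, done: TransformCallback) {
    done(null, { unreachable: true, peer: doc.to });
  },
});

router.pipe(multicaster).pipe(router);
router.write({ to: "peerA", doc: "PEER" });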
-import * as os from "os" -import {Underscore} from "../lib/common-libs/underscore" +import * as os from "os"; +import { Underscore } from "../lib/common-libs/underscore"; -const upnp = require('nat-upnp'); +const upnp = require("nat-upnp"); export interface UPnPBinding { - remotehost:string - host:string - port:number + remotehost: string; + host: string; + port: number; } export class UpnpProvider { - - private currentConfig:UPnPBinding|null - private interval:NodeJS.Timer|null - private client = upnp.createClient() + private currentConfig: UPnPBinding | null; + private interval: NodeJS.Timer | null; + private client = upnp.createClient(); constructor( private portStart: number, @@ -34,37 +33,37 @@ export class UpnpProvider { private identifier: string, private upnpInterval = 300, private ttl = 600, - private logger?:any, - private host = '' + private logger?: any, + private host = "" ) { if (!host) { - this.host = this.getBestHostForUPnP() + this.host = this.getBestHostForUPnP(); } } async checkUPnPisAvailable() { try { await new Promise((resolve, reject) => { - this.client.externalIp((err:any, res:any) => { - if (err || !res) { - reject() + this.client.externalIp((err: any, res: any) => { + if (err || !res) { + reject(); } else { - resolve() + resolve(); } - }) - }) - return true + }); + }); + return true; } catch (err) { - return false + return false; } } getCurrentConfig() { - return this.currentConfig + return this.currentConfig; } getUpnpDescription() { - return 'duniter:' + this.identifier + return "duniter:" + this.identifier; } /** @@ -72,107 +71,123 @@ export class UpnpProvider { * @returns { host:string, port:number } */ openPort() { - return new Promise<{ host:string, port:number }>(async (resolve:any, reject:any) => { - if (!this.currentConfig) { - this.currentConfig = await this.getAvailablePort(this.client) - } - this.logger && this.logger.trace('WS2P: mapping external port %s to local %s using UPnP...', this.currentConfig.port, [this.currentConfig.host, this.currentConfig.port].join(':')) - const client = upnp.createClient() - client.portMapping({ - 'public': this.currentConfig.port, - 'private': { - host: this.currentConfig.host, - port: this.currentConfig.port, - }, - 'ttl': this.ttl, - 'description': this.getUpnpDescription() - }, (err:any) => { - client.close() - if (err) { - this.logger && this.logger.warn(err) - return reject(err) + return new Promise<{ host: string; port: number }>( + async (resolve: any, reject: any) => { + if (!this.currentConfig) { + this.currentConfig = await this.getAvailablePort(this.client); } - resolve(this.currentConfig) - }) - }) + this.logger && + this.logger.trace( + "WS2P: mapping external port %s to local %s using UPnP...", + this.currentConfig.port, + [this.currentConfig.host, this.currentConfig.port].join(":") + ); + const client = upnp.createClient(); + client.portMapping( + { + public: this.currentConfig.port, + private: { + host: this.currentConfig.host, + port: this.currentConfig.port, + }, + ttl: this.ttl, + description: this.getUpnpDescription(), + }, + (err: any) => { + client.close(); + if (err) { + this.logger && this.logger.warn(err); + return reject(err); + } + resolve(this.currentConfig); + } + ); + } + ); } async startRegular() { this.stopRegular(); - const available = await this.checkUPnPisAvailable() + const available = await this.checkUPnPisAvailable(); if (available) { // Update UPnP IGD every INTERVAL seconds - this.interval = setInterval(() => this.openPort(), 1000 * this.upnpInterval) - const { host, port } = 
await this.openPort() - return { host, port, available } + this.interval = setInterval( + () => this.openPort(), + 1000 * this.upnpInterval + ); + const { host, port } = await this.openPort(); + return { host, port, available }; } - return { host: '', port: 0, available: false } + return { host: "", port: 0, available: false }; } stopRegular() { if (this.interval) { - clearInterval(this.interval) + clearInterval(this.interval); } } - static async getLocalIP(client:any) { - return await new Promise<string>((resolve:any, reject:any) => { - client.findGateway((err:any, res:any, localIP:any) => { - if (err) return reject(err) - resolve(localIP) - }) - }) + static async getLocalIP(client: any) { + return await new Promise<string>((resolve: any, reject: any) => { + client.findGateway((err: any, res: any, localIP: any) => { + if (err) return reject(err); + resolve(localIP); + }); + }); } - static async getRemoteIP(client:any): Promise<string> { - return await new Promise<string>((resolve:any, reject:any) => { - client.externalIp((err:any, externalIP:string) => { - if (err) return reject(err) - resolve(externalIP) - }) - }) + static async getRemoteIP(client: any): Promise<string> { + return await new Promise<string>((resolve: any, reject: any) => { + client.externalIp((err: any, externalIP: string) => { + if (err) return reject(err); + resolve(externalIP); + }); + }); } - private async getAvailablePort(client:any) { - const localIP = this.host || await UpnpProvider.getLocalIP(client) - const remoteIP = await UpnpProvider.getRemoteIP(client) - const mappings:{ + private async getAvailablePort(client: any) { + const localIP = this.host || (await UpnpProvider.getLocalIP(client)); + const remoteIP = await UpnpProvider.getRemoteIP(client); + const mappings: { private: { - host:string - } + host: string; + }; public: { - port:number - } - description:string - }[] = await UpnpProvider.getUPnPMappings(client) - const thisDesc = this.getUpnpDescription() - const externalPortsUsed = mappings.filter((m) => m.description !== thisDesc).map((m) => m.public.port) - let availablePort = this.portStart - while (externalPortsUsed.indexOf(availablePort) !== -1 - && availablePort <= this.portEnd) { - availablePort++ + port: number; + }; + description: string; + }[] = await UpnpProvider.getUPnPMappings(client); + const thisDesc = this.getUpnpDescription(); + const externalPortsUsed = mappings + .filter((m) => m.description !== thisDesc) + .map((m) => m.public.port); + let availablePort = this.portStart; + while ( + externalPortsUsed.indexOf(availablePort) !== -1 && + availablePort <= this.portEnd + ) { + availablePort++; } if (availablePort > this.portEnd) { - throw "No port available for UPnP" + throw "No port available for UPnP"; } return { remotehost: remoteIP, host: localIP, - port: availablePort - } + port: availablePort, + }; } - static async getUPnPMappings(client:any): Promise<any> { + static async getUPnPMappings(client: any): Promise<any> { return new Promise((resolve, reject) => { - client.getMappings((err:any, res:any) => { + client.getMappings((err: any, res: any) => { if (err) { - reject(err) + reject(err); + } else { + resolve(res); } - else { - resolve(res) - } - }) - }) + }); + }); } /** @@ -180,18 +195,18 @@ export class UpnpProvider { * Giving the priority to Ethernet seems to fix the problem. 
* @param family */ - private getBestHostForUPnP(family = '') { - let netInterfaces = os.networkInterfaces() - let keys = Underscore.keys(netInterfaces) + private getBestHostForUPnP(family = "") { + let netInterfaces = os.networkInterfaces(); + let keys = Underscore.keys(netInterfaces); let res = []; for (const name of keys) { - let addresses = netInterfaces[name] + let addresses = netInterfaces[name]; for (const addr of addresses) { if (!family || addr.family == family) { res.push({ name: name, - value: addr.address - }) + value: addr.address, + }); } } } @@ -206,15 +221,15 @@ export class UpnpProvider { /^Wi-Fi/, /^lo/, /^Loopback/, - /^None/ - ] - const best = Underscore.sortBy(res, function(entry:any) { + /^None/, + ]; + const best = Underscore.sortBy(res, function (entry: any) { for (let i = 0; i < interfacePriorityRegCatcher.length; i++) { // `i` is the priority (0 is the better, 1 is the second, ...) - if (entry.name.match(interfacePriorityRegCatcher[i])) return i + if (entry.name.match(interfacePriorityRegCatcher[i])) return i; } - return interfacePriorityRegCatcher.length - })[0] - return (best && best.value) || "" + return interfacePriorityRegCatcher.length; + })[0]; + return (best && best.value) || ""; } -} \ No newline at end of file +} diff --git a/app/modules/wizard.ts b/app/modules/wizard.ts index 51c3e38c08469b79195de403073523d4448500fe..414b1dcc5587a59b6f546e92ae5b9c56808de7eb 100644 --- a/app/modules/wizard.ts +++ b/app/modules/wizard.ts @@ -11,41 +11,51 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../lib/dto/ConfDTO" -import {Server} from "../../server" -import {Wizard} from "../lib/wizard" -import {Underscore} from "../lib/common-libs/underscore" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { Server } from "../../server"; +import { Wizard } from "../lib/wizard"; +import { Underscore } from "../lib/common-libs/underscore"; -const logger = require('../lib/logger').NewLogger('wizard'); +const logger = require("../lib/logger").NewLogger("wizard"); module.exports = { duniter: { - wizard: { // The wizard itself also defines its personal tasks - 'currency': (conf:ConfDTO) => Wizard.configCurrency(conf), - 'pow': (conf:ConfDTO) => Wizard.configPoW(conf), - 'parameters': (conf:ConfDTO) => Wizard.configUCP(conf) + currency: (conf: ConfDTO) => Wizard.configCurrency(conf), + pow: (conf: ConfDTO) => Wizard.configPoW(conf), + parameters: (conf: ConfDTO) => Wizard.configUCP(conf), }, - cli: [{ - name: 'wizard [key|network|network-reconfigure|currency|pow|parameters]', - desc: 'Launch the configuration wizard.', + cli: [ + { + name: + "wizard [key|network|network-reconfigure|currency|pow|parameters]", + desc: "Launch the configuration wizard.", - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any, wizardTasks:any) => { - const step = params[0]; - const tasks = step ? [wizardTasks[step]] : Underscore.values(wizardTasks); - for (const task of tasks) { - if (!task) { - throw 'Unknown task'; + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any, + wizardTasks: any + ) => { + const step = params[0]; + const tasks = step + ? 
[wizardTasks[step]] + : Underscore.values(wizardTasks); + for (const task of tasks) { + if (!task) { + throw "Unknown task"; + } + await task(conf, program); } - await task(conf, program) - } - // Check config - await server.checkConfig(); - await server.dal.saveConf(conf); - logger.debug("Configuration saved."); - } - }] - } + // Check config + await server.checkConfig(); + await server.dal.saveConf(conf); + logger.debug("Configuration saved."); + }, + }, + ], + }, }; diff --git a/app/modules/ws2p/index.ts b/app/modules/ws2p/index.ts index 930962bd275c9f3c199fd51d02fe59f0092f087a..d46d3d3bbd0525ea8cbe52fa3cf6703213b0b16f 100644 --- a/app/modules/ws2p/index.ts +++ b/app/modules/ws2p/index.ts @@ -11,106 +11,143 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {WS2PConstants} from './lib/constants'; -import {ConfDTO, WS2PConfDTO} from "../../lib/dto/ConfDTO" -import {Server} from "../../../server" -import * as stream from 'stream'; -import {WS2PCluster} from "./lib/WS2PCluster" -import {CommonConstants} from "../../lib/common-libs/constants" -import {NewLogger} from "../../lib/logger" -import {UpnpProvider} from "../upnp-provider" +import { WS2PConstants } from "./lib/constants"; +import { ConfDTO, WS2PConfDTO } from "../../lib/dto/ConfDTO"; +import { Server } from "../../../server"; +import * as stream from "stream"; +import { WS2PCluster } from "./lib/WS2PCluster"; +import { CommonConstants } from "../../lib/common-libs/constants"; +import { NewLogger } from "../../lib/logger"; +import { UpnpProvider } from "../upnp-provider"; const constants = require("../../lib/constants"); -const logger = NewLogger() -const nuuid = require('node-uuid') +const logger = NewLogger(); +const nuuid = require("node-uuid"); export const WS2PDependency = { duniter: { - cliOptions: [ - { value: '--ws2p-upnp', desc: 'Use UPnP to open remote port.' }, - { value: '--ws2p-noupnp', desc: 'Do not use UPnP to open remote port.' }, - { value: '--ws2p-host <host>', desc: 'Host to listen to.' }, - { value: '--ws2p-port <port>', desc: 'Port to listen to.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-remote-host <address>', desc: 'Availabily host.' }, - { value: '--ws2p-remote-port <port>', desc: 'Availabily port.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-remote-path <path>', desc: 'Availabily web path.' }, - { value: '--ws2p-max-private <count>', desc: 'Maximum private connections count.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-max-public <count>', desc: 'Maximum public connections count.', parser: (val:string) => parseInt(val) }, - { value: '--ws2p-private', desc: 'Enable WS2P Private access.' }, - { value: '--ws2p-public', desc: 'Enable WS2P Public access.' }, - { value: '--ws2p-noprivate', desc: 'Disable WS2P Private access.' }, - { value: '--ws2p-nopublic', desc: 'Disable WS2P Public access.' }, - { value: '--ws2p-sync', desc: 'Enable WS2P SYNC access.' }, - { value: '--ws2p-nosync', desc: 'Disable WS2P SYNC access.' }, - { value: '--ws2p-prefered-add <pubkey>', desc: 'Add a prefered node to connect to through private access.' }, - { value: '--ws2p-prefered-rm <pubkey>', desc: 'Remove prefered node.' }, - { value: '--ws2p-prefered-only <pubkey>', desc: 'Only connect to prefered node.' }, - { value: '--ws2p-privileged-add <pubkey>', desc: 'Add a privileged node to for our public access.' }, - { value: '--ws2p-privileged-rm <pubkey>', desc: 'Remove a privileged.' 
},
-    { value: '--ws2p-privileged-only <pubkey>', desc: 'Accept only connections from a privileged node.' },
+      { value: "--ws2p-upnp", desc: "Use UPnP to open remote port." },
+      { value: "--ws2p-noupnp", desc: "Do not use UPnP to open remote port." },
+      { value: "--ws2p-host <host>", desc: "Host to listen to." },
+      {
+        value: "--ws2p-port <port>",
+        desc: "Port to listen to.",
+        parser: (val: string) => parseInt(val),
+      },
+      { value: "--ws2p-remote-host <address>", desc: "Availability host." },
+      {
+        value: "--ws2p-remote-port <port>",
+        desc: "Availability port.",
+        parser: (val: string) => parseInt(val),
+      },
+      { value: "--ws2p-remote-path <path>", desc: "Availability web path." },
+      {
+        value: "--ws2p-max-private <count>",
+        desc: "Maximum private connections count.",
+        parser: (val: string) => parseInt(val),
+      },
+      {
+        value: "--ws2p-max-public <count>",
+        desc: "Maximum public connections count.",
+        parser: (val: string) => parseInt(val),
+      },
+      { value: "--ws2p-private", desc: "Enable WS2P Private access." },
+      { value: "--ws2p-public", desc: "Enable WS2P Public access." },
+      { value: "--ws2p-noprivate", desc: "Disable WS2P Private access." },
+      { value: "--ws2p-nopublic", desc: "Disable WS2P Public access." },
+      { value: "--ws2p-sync", desc: "Enable WS2P SYNC access." },
+      { value: "--ws2p-nosync", desc: "Disable WS2P SYNC access." },
+      {
+        value: "--ws2p-prefered-add <pubkey>",
+        desc: "Add a preferred node to connect to through private access.",
+      },
+      { value: "--ws2p-prefered-rm <pubkey>", desc: "Remove a preferred node." },
+      {
+        value: "--ws2p-prefered-only <pubkey>",
+        desc: "Only connect to preferred nodes.",
+      },
+      {
+        value: "--ws2p-privileged-add <pubkey>",
+        desc: "Add a privileged node for our public access.",
+      },
+      { value: "--ws2p-privileged-rm <pubkey>", desc: "Remove a privileged node."
}, + { + value: "--ws2p-privileged-only <pubkey>", + desc: "Accept only connections from a privileged node.", + }, ], config: { - - onLoading: async (conf:WS2PConfDTO, program:any, logger:any) => { - + onLoading: async (conf: WS2PConfDTO, program: any, logger: any) => { conf.ws2p = conf.ws2p || { - uuid: nuuid.v4().slice(0,8), + uuid: nuuid.v4().slice(0, 8), privateAccess: true, publicAccess: true, preferedOnly: false, - privilegedOnly: false - } + privilegedOnly: false, + }; // For config with missing value - conf.ws2p.uuid = conf.ws2p.uuid || nuuid.v4().slice(0,8) - if (conf.ws2p.privateAccess === undefined) conf.ws2p.privateAccess = true - if (conf.ws2p.publicAccess === undefined) conf.ws2p.publicAccess = true + conf.ws2p.uuid = conf.ws2p.uuid || nuuid.v4().slice(0, 8); + if (conf.ws2p.privateAccess === undefined) + conf.ws2p.privateAccess = true; + if (conf.ws2p.publicAccess === undefined) conf.ws2p.publicAccess = true; - if (program.ws2pHost !== undefined) conf.ws2p.host = program.ws2pHost - if (program.ws2pPort !== undefined) conf.ws2p.port = parseInt(program.ws2pPort) - if (program.ws2pRemotePort !== undefined) conf.ws2p.remoteport = program.ws2pRemotePort - if (program.ws2pRemoteHost !== undefined) conf.ws2p.remotehost = program.ws2pRemoteHost - if (program.ws2pRemotePath !== undefined) conf.ws2p.remotepath = program.ws2pRemotePath - if (program.ws2pUpnp !== undefined) conf.ws2p.upnp = true - if (program.ws2pNoupnp !== undefined) conf.ws2p.upnp = false - if (program.ws2pMaxPrivate !== undefined) conf.ws2p.maxPrivate = program.ws2pMaxPrivate - if (program.ws2pMaxPublic !== undefined) conf.ws2p.maxPublic = program.ws2pMaxPublic - if (program.ws2pPrivate !== undefined) conf.ws2p.privateAccess = true - if (program.ws2pPublic !== undefined) conf.ws2p.publicAccess = true - if (program.ws2pNoprivate !== undefined) conf.ws2p.privateAccess = false - if (program.ws2pNopublic !== undefined) conf.ws2p.publicAccess = false - if (program.ws2pSync !== undefined) conf.ws2p.sync = true - if (program.ws2pNosync !== undefined) conf.ws2p.sync = false + if (program.ws2pHost !== undefined) conf.ws2p.host = program.ws2pHost; + if (program.ws2pPort !== undefined) + conf.ws2p.port = parseInt(program.ws2pPort); + if (program.ws2pRemotePort !== undefined) + conf.ws2p.remoteport = program.ws2pRemotePort; + if (program.ws2pRemoteHost !== undefined) + conf.ws2p.remotehost = program.ws2pRemoteHost; + if (program.ws2pRemotePath !== undefined) + conf.ws2p.remotepath = program.ws2pRemotePath; + if (program.ws2pUpnp !== undefined) conf.ws2p.upnp = true; + if (program.ws2pNoupnp !== undefined) conf.ws2p.upnp = false; + if (program.ws2pMaxPrivate !== undefined) + conf.ws2p.maxPrivate = program.ws2pMaxPrivate; + if (program.ws2pMaxPublic !== undefined) + conf.ws2p.maxPublic = program.ws2pMaxPublic; + if (program.ws2pPrivate !== undefined) conf.ws2p.privateAccess = true; + if (program.ws2pPublic !== undefined) conf.ws2p.publicAccess = true; + if (program.ws2pNoprivate !== undefined) + conf.ws2p.privateAccess = false; + if (program.ws2pNopublic !== undefined) conf.ws2p.publicAccess = false; + if (program.ws2pSync !== undefined) conf.ws2p.sync = true; + if (program.ws2pNosync !== undefined) conf.ws2p.sync = false; // Prefered nodes if (program.ws2pPreferedAdd !== undefined) { - conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || [] - conf.ws2p.preferedNodes.push(String(program.ws2pPreferedAdd)) + conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || []; + conf.ws2p.preferedNodes.push(String(program.ws2pPreferedAdd)); } if 
(program.ws2pPreferedRm !== undefined) { - conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || [] - const index = conf.ws2p.preferedNodes.indexOf(program.ws2pPreferedRm) + conf.ws2p.preferedNodes = conf.ws2p.preferedNodes || []; + const index = conf.ws2p.preferedNodes.indexOf(program.ws2pPreferedRm); if (index !== -1) { - conf.ws2p.preferedNodes.splice(index, 1) + conf.ws2p.preferedNodes.splice(index, 1); } } - if (program.ws2pPreferedOnly !== undefined) conf.ws2p.preferedOnly = true + if (program.ws2pPreferedOnly !== undefined) + conf.ws2p.preferedOnly = true; // Privileged nodes if (program.ws2pPrivilegedAdd !== undefined) { - conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || [] - conf.ws2p.privilegedNodes.push(String(program.ws2pPrivilegedAdd)) + conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || []; + conf.ws2p.privilegedNodes.push(String(program.ws2pPrivilegedAdd)); } if (program.ws2pPrivilegedRm !== undefined) { - conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || [] - const index = conf.ws2p.privilegedNodes.indexOf(program.ws2pPrivilegedRm) + conf.ws2p.privilegedNodes = conf.ws2p.privilegedNodes || []; + const index = conf.ws2p.privilegedNodes.indexOf( + program.ws2pPrivilegedRm + ); if (index !== -1) { - conf.ws2p.privilegedNodes.splice(index, 1) + conf.ws2p.privilegedNodes.splice(index, 1); } } - if (program.ws2pPrivilegedOnly !== undefined) conf.ws2p.privilegedOnly = true + if (program.ws2pPrivilegedOnly !== undefined) + conf.ws2p.privilegedOnly = true; // Default value if (conf.ws2p.upnp === undefined || conf.ws2p.upnp === null) { @@ -118,139 +155,144 @@ export const WS2PDependency = { } }, - beforeSave: async (conf:WS2PConfDTO) => { - if (conf.ws2p && !conf.ws2p.host) delete conf.ws2p.host - if (conf.ws2p && !conf.ws2p.port) delete conf.ws2p.port - if (conf.ws2p && !conf.ws2p.remoteport) delete conf.ws2p.remoteport - if (conf.ws2p && !conf.ws2p.remotehost) delete conf.ws2p.remotehost - } + beforeSave: async (conf: WS2PConfDTO) => { + if (conf.ws2p && !conf.ws2p.host) delete conf.ws2p.host; + if (conf.ws2p && !conf.ws2p.port) delete conf.ws2p.port; + if (conf.ws2p && !conf.ws2p.remoteport) delete conf.ws2p.remoteport; + if (conf.ws2p && !conf.ws2p.remotehost) delete conf.ws2p.remotehost; + }, }, methods: { bindWS2P: (server: Server) => { - const api = new WS2PAPI(server, server.conf) - server.ws2pCluster = api.getCluster() - server.addEndpointsDefinitions(async () => api.getEndpoint()) - server.addWrongEndpointFilter((endpoints:string[]) => getWrongEndpoints(endpoints, server.conf)) - return api - } + const api = new WS2PAPI(server, server.conf); + server.ws2pCluster = api.getCluster(); + server.addEndpointsDefinitions(async () => api.getEndpoint()); + server.addWrongEndpointFilter((endpoints: string[]) => + getWrongEndpoints(endpoints, server.conf) + ); + return api; + }, }, service: { - input: (server:Server) => { - return WS2PDependency.duniter.methods.bindWS2P(server) - } + input: (server: Server) => { + return WS2PDependency.duniter.methods.bindWS2P(server); + }, }, - cli: [{ - name: 'ws2p [list-prefered|list-privileged|list-nodes|show-conf]', - desc: 'WS2P operations for configuration and diagnosis tasks.', - logs: false, + cli: [ + { + name: "ws2p [list-prefered|list-privileged|list-nodes|show-conf]", + desc: "WS2P operations for configuration and diagnosis tasks.", + logs: false, - onConfiguredExecute: async (server:Server, conf:ConfDTO, program:any, params:any) => { - const subcmd = params[0]; - if (subcmd === 'list-nodes') { - // Needs the DAL 
plugged - await server.initDAL(); - } - switch (subcmd) { - case 'show-conf': - console.log(JSON.stringify(conf.ws2p, null, ' ')) - break; - case 'list-prefered': - for (const p of (conf.ws2p && conf.ws2p.preferedNodes || [])) { - console.log(p) - } - break; - case 'list-privileged': - for (const p of (conf.ws2p && conf.ws2p.privilegedNodes || [])) { - console.log(p) - } - break; - case 'list-nodes': - const peers = await server.dal.getWS2Peers() - for (const p of peers) { - for (const ep of p.endpoints) { - if (ep.match(/^WS2P/)) { - console.log(p.pubkey, ep) + onConfiguredExecute: async ( + server: Server, + conf: ConfDTO, + program: any, + params: any + ) => { + const subcmd = params[0]; + if (subcmd === "list-nodes") { + // Needs the DAL plugged + await server.initDAL(); + } + switch (subcmd) { + case "show-conf": + console.log(JSON.stringify(conf.ws2p, null, " ")); + break; + case "list-prefered": + for (const p of (conf.ws2p && conf.ws2p.preferedNodes) || []) { + console.log(p); + } + break; + case "list-privileged": + for (const p of (conf.ws2p && conf.ws2p.privilegedNodes) || []) { + console.log(p); + } + break; + case "list-nodes": + const peers = await server.dal.getWS2Peers(); + for (const p of peers) { + for (const ep of p.endpoints) { + if (ep.match(/^WS2P/)) { + console.log(p.pubkey, ep); + } } } - } - break; - default: - throw constants.ERRORS.CLI_CALLERR_WS2P; - } - } - }] - } -} + break; + default: + throw constants.ERRORS.CLI_CALLERR_WS2P; + } + }, + }, + ], + }, +}; -async function getWrongEndpoints(endpoints:string[], ws2pConf:WS2PConfDTO) { - return endpoints.filter(ep => { - const match = ep.match(CommonConstants.WS2P_REGEXP) - return ws2pConf.ws2p && match && match[1] === ws2pConf.ws2p.uuid - }) +async function getWrongEndpoints(endpoints: string[], ws2pConf: WS2PConfDTO) { + return endpoints.filter((ep) => { + const match = ep.match(CommonConstants.WS2P_REGEXP); + return ws2pConf.ws2p && match && match[1] === ws2pConf.ws2p.uuid; + }); } export class WS2PAPI extends stream.Transform { - // Public http interface - private cluster:WS2PCluster - private upnpAPI:UpnpProvider|null + private cluster: WS2PCluster; + private upnpAPI: UpnpProvider | null; - constructor( - private server:Server, - private conf:ConfDTO) { - super({ objectMode: true }) - this.cluster = WS2PCluster.plugOn(server) + constructor(private server: Server, private conf: ConfDTO) { + super({ objectMode: true }); + this.cluster = WS2PCluster.plugOn(server); } getCluster() { - return this.cluster + return this.cluster; } startService = async () => { - /*************** * PUBLIC ACCESS **************/ if (this.conf.ws2p && this.conf.ws2p.publicAccess) { - /*************** * MANUAL **************/ - if (this.conf.ws2p - && !this.conf.ws2p.upnp - && this.conf.ws2p.host - && this.conf.ws2p.port) { - await this.cluster.listen(this.conf.ws2p.host, this.conf.ws2p.port) - } - - /*************** - * UPnP - **************/ - else if (!this.conf.ws2p || this.conf.ws2p.upnp !== false) { + if ( + this.conf.ws2p && + !this.conf.ws2p.upnp && + this.conf.ws2p.host && + this.conf.ws2p.port + ) { + await this.cluster.listen(this.conf.ws2p.host, this.conf.ws2p.port); + } else if (!this.conf.ws2p || this.conf.ws2p.upnp !== false) { + /*************** + * UPnP + **************/ if (this.upnpAPI) { this.upnpAPI.stopRegular(); } try { - const uuid = (this.conf.ws2p && this.conf.ws2p.uuid) || "no-uuid-yet" - const suffix = this.conf.pair.pub.substr(0, 6) + ":" + uuid + const uuid = (this.conf.ws2p && this.conf.ws2p.uuid) || 
"no-uuid-yet"; + const suffix = this.conf.pair.pub.substr(0, 6) + ":" + uuid; this.upnpAPI = new UpnpProvider( WS2PConstants.WS2P_PORTS_START, WS2PConstants.WS2P_PORTS_END, - ':ws2p:' + suffix, + ":ws2p:" + suffix, WS2PConstants.WS2P_UPNP_INTERVAL, WS2PConstants.WS2P_UPNP_TTL, logger, - this.conf.ws2p.host) - const { host, port, available } = await this.upnpAPI.startRegular() + this.conf.ws2p.host + ); + const { host, port, available } = await this.upnpAPI.startRegular(); if (available) { // Defaults UPnP to true if not defined and available - this.conf.ws2p.upnp = true - await this.cluster.listen(host, port) - await this.server.PeeringService.generateSelfPeer(this.server.conf) + this.conf.ws2p.upnp = true; + await this.cluster.listen(host, port); + await this.server.PeeringService.generateSelfPeer(this.server.conf); } } catch (e) { logger.warn(e); @@ -263,49 +305,65 @@ export class WS2PAPI extends stream.Transform { **************/ if (!this.conf.ws2p || this.conf.ws2p.privateAccess) { - await this.cluster.startCrawling() + await this.cluster.startCrawling(); } - } + }; stopService = async () => { if (this.cluster) { - await this.cluster.stopCrawling() - await this.cluster.close() + await this.cluster.stopCrawling(); + await this.cluster.close(); } if (this.upnpAPI) { this.upnpAPI.stopRegular(); } - } + }; async getEndpoint() { // If WS2P defined and enabled - if (this.server.conf.ws2p !== undefined && (this.server.conf.ws2p.publicAccess || this.server.conf.ws2p.privateAccess)) - { - let endpointType = "WS2P" + if ( + this.server.conf.ws2p !== undefined && + (this.server.conf.ws2p.publicAccess || + this.server.conf.ws2p.privateAccess) + ) { + let endpointType = "WS2P"; if (this.server.conf.upnp && this.upnpAPI) { - const config = this.upnpAPI.getCurrentConfig() + const config = this.upnpAPI.getCurrentConfig(); if (config) { - if (config.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { endpointType += "TOR"; } - return [endpointType, this.server.conf.ws2p.uuid, config.remotehost, config.port].join(' ') + if (config.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { + endpointType += "TOR"; + } + return [ + endpointType, + this.server.conf.ws2p.uuid, + config.remotehost, + config.port, + ].join(" "); } else { - return '' + return ""; } - } - else if (this.server.conf.ws2p.uuid - && this.server.conf.ws2p.remotehost - && this.server.conf.ws2p.remoteport) { - if (this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { endpointType += "TOR"; } - let ep = [endpointType, - this.server.conf.ws2p.uuid, - this.server.conf.ws2p.remotehost, - this.server.conf.ws2p.remoteport - ].join(' ') - if (this.server.conf.ws2p.remotepath) { - ep += ` ${this.server.conf.ws2p.remotepath}` - } - return ep + } else if ( + this.server.conf.ws2p.uuid && + this.server.conf.ws2p.remotehost && + this.server.conf.ws2p.remoteport + ) { + if ( + this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX) + ) { + endpointType += "TOR"; + } + let ep = [ + endpointType, + this.server.conf.ws2p.uuid, + this.server.conf.ws2p.remotehost, + this.server.conf.ws2p.remoteport, + ].join(" "); + if (this.server.conf.ws2p.remotepath) { + ep += ` ${this.server.conf.ws2p.remotepath}`; + } + return ep; } } - return '' + return ""; } -} \ No newline at end of file +} diff --git a/app/modules/ws2p/lib/WS2PBlockPuller.ts b/app/modules/ws2p/lib/WS2PBlockPuller.ts index 638db4b9381070f3ca4d55274afcbbe8e1008797..9d762372791a9989dde766c3df716e5fe3667121 100644 --- a/app/modules/ws2p/lib/WS2PBlockPuller.ts +++ 
b/app/modules/ws2p/lib/WS2PBlockPuller.ts @@ -11,80 +11,72 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {AbstractDAO} from "../../crawler/lib/pulling" -import {Server} from "../../../../server" -import {DBBlock} from "../../../lib/db/DBBlock" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {CrawlerConstants} from "../../crawler/lib/constants" -import {tx_cleaner} from "../../crawler/lib/tx_cleaner" -import {WS2PConnection} from "./WS2PConnection" -import {WS2PRequester} from "./WS2PRequester" +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { AbstractDAO } from "../../crawler/lib/pulling"; +import { Server } from "../../../../server"; +import { DBBlock } from "../../../lib/db/DBBlock"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { CrawlerConstants } from "../../crawler/lib/constants"; +import { tx_cleaner } from "../../crawler/lib/tx_cleaner"; +import { WS2PConnection } from "./WS2PConnection"; +import { WS2PRequester } from "./WS2PRequester"; export class WS2PBlockPuller { - - constructor( - private server:Server, - private connection:WS2PConnection - ) {} + constructor(private server: Server, private connection: WS2PConnection) {} async pull() { - const requester = WS2PRequester.fromConnection(this.connection) + const requester = WS2PRequester.fromConnection(this.connection); // node.pubkey = p.pubkey; - let dao = new WS2PDao(this.server, requester) - await dao.pull(this.server.conf, this.server.logger) + let dao = new WS2PDao(this.server, requester); + await dao.pull(this.server.conf, this.server.logger); } } interface RemoteNode { - getCurrent: () => Promise<BlockDTO> - getBlock: (number:number) => Promise<BlockDTO> - getBlocks: (count:number, fromNumber:number) => Promise<BlockDTO[]> - pubkey:string + getCurrent: () => Promise<BlockDTO>; + getBlock: (number: number) => Promise<BlockDTO>; + getBlocks: (count: number, fromNumber: number) => Promise<BlockDTO[]>; + pubkey: string; } class WS2PDao extends AbstractDAO { + private node: RemoteNode; + private lastDownloaded: BlockDTO | null; + private nodeCurrent: BlockDTO | null = null; + public newCurrent: BlockDTO | null = null; - private node:RemoteNode - private lastDownloaded:BlockDTO|null - private nodeCurrent:BlockDTO|null = null - public newCurrent:BlockDTO|null = null - - constructor( - private server:Server, - private requester:WS2PRequester - ) { - super() + constructor(private server: Server, private requester: WS2PRequester) { + super(); this.node = { getCurrent: async () => { - return this.requester.getCurrent() + return this.requester.getCurrent(); }, - getBlock: async (number:number) => { - return this.requester.getBlock(number) + getBlock: async (number: number) => { + return this.requester.getBlock(number); }, - getBlocks: async (count:number, fromNumber:number) => { - return this.requester.getBlocks(count, fromNumber) + getBlocks: async (count: number, fromNumber: number) => { + return this.requester.getBlocks(count, fromNumber); }, - pubkey: this.requester.getPubkey() - } + pubkey: this.requester.getPubkey(), + }; } async localCurrent(): Promise<DBBlock | null> { - return this.server.dal.getCurrentBlockOrNull() + return this.server.dal.getCurrentBlockOrNull(); } async remoteCurrent(source: RemoteNode): Promise<BlockDTO | null> { - this.nodeCurrent = await source.getCurrent() - return this.nodeCurrent + this.nodeCurrent = await source.getCurrent(); 
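+    // (nodeCurrent is cached on purpose: applyMainBranch reuses it to check
+    // that an applied block is still within conf.forksize of the remote HEAD
+    // before re-emitting it to the network)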
+ return this.nodeCurrent; } async remotePeers(source?: any): Promise<PeerDTO[]> { - const peer:any = this.node - return Promise.resolve([peer]) + const peer: any = this.node; + return Promise.resolve([peer]); } async getLocalBlock(number: number): Promise<DBBlock> { - return this.server.dal.getBlockWeHaveItForSure(number) + return this.server.dal.getBlockWeHaveItForSure(number); } async getRemoteBlock(thePeer: any, number: number): Promise<BlockDTO> { @@ -101,35 +93,49 @@ class WS2PDao extends AbstractDAO { } async applyMainBranch(block: BlockDTO): Promise<boolean> { - const existing = await this.server.dal.getAbsoluteBlockByNumberAndHash(block.number, block.hash) + const existing = await this.server.dal.getAbsoluteBlockByNumberAndHash( + block.number, + block.hash + ); if (!existing) { - let addedBlock = await this.server.writeBlock(block, false, true) + let addedBlock = await this.server.writeBlock(block, false, true); if (!this.lastDownloaded) { - this.lastDownloaded = await this.remoteCurrent(this.node) + this.lastDownloaded = await this.remoteCurrent(this.node); } - this.server.pullingEvent('applying', {number: block.number, last: this.lastDownloaded && this.lastDownloaded.number}) + this.server.pullingEvent("applying", { + number: block.number, + last: this.lastDownloaded && this.lastDownloaded.number, + }); if (addedBlock) { - this.newCurrent = addedBlock + this.newCurrent = addedBlock; // Emit block events (for sharing with the network) only in forkWindowSize - if (this.nodeCurrent && this.nodeCurrent.number - addedBlock.number < this.server.conf.forksize) { + if ( + this.nodeCurrent && + this.nodeCurrent.number - addedBlock.number < + this.server.conf.forksize + ) { this.server.streamPush(addedBlock); } } } - return true + return true; } async removeForks(): Promise<boolean> { - return true + return true; } async isMemberPeer(thePeer: PeerDTO): Promise<boolean> { - return true + return true; } - async downloadBlocks(thePeer: any, fromNumber: number, count?: number | undefined): Promise<BlockDTO[]> { + async downloadBlocks( + thePeer: any, + fromNumber: number, + count?: number | undefined + ): Promise<BlockDTO[]> { if (!count) { - count = CrawlerConstants.CRAWL_BLOCK_CHUNK + count = CrawlerConstants.CRAWL_BLOCK_CHUNK; } let blocks = await thePeer.getBlocks(count, fromNumber); diff --git a/app/modules/ws2p/lib/WS2PClient.ts b/app/modules/ws2p/lib/WS2PClient.ts index 6f3602cb9a9146a299aeb324954a487720047673..b52ccd37a0f9df822c73143f29615fb7924a91a8 100644 --- a/app/modules/ws2p/lib/WS2PClient.ts +++ b/app/modules/ws2p/lib/WS2PClient.ts @@ -11,69 +11,79 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
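// A minimal sketch of the pull loop that the WS2PDao above plugs into.
// WS2PBlockPuller.pull() delegates to the inherited AbstractDAO.pull(), which
// roughly behaves as below: download blocks in chunks (the real default is
// CrawlerConstants.CRAWL_BLOCK_CHUNK) and feed each one to applyMainBranch().
// Fork resolution is elided; `sketchPull` and `chunkSize` are hypothetical
// names used only for illustration, not APIs of this codebase.
async function sketchPull(
  dao: {
    localCurrent(): Promise<{ number: number } | null>;
    remoteCurrent(node: any): Promise<{ number: number } | null>;
    downloadBlocks(node: any, from: number, count?: number): Promise<any[]>;
    applyMainBranch(block: any): Promise<boolean>;
  },
  node: any,
  chunkSize = 250
) {
  const local = await dao.localCurrent();
  const remote = await dao.remoteCurrent(node);
  if (!remote) return;
  let next = (local ? local.number : -1) + 1;
  while (next <= remote.number) {
    // One WS2P request per chunk, then apply the blocks one by one
    const blocks = await dao.downloadBlocks(node, next, chunkSize);
    if (!blocks.length) break;
    for (const block of blocks) {
      await dao.applyMainBranch(block); // writes the block unless already known
    }
    next += blocks.length;
  }
}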
-import {Server} from "../../../../server" -import {WS2PConnection, WS2PPubkeyLocalAuth, WS2PPubkeyRemoteAuth} from "./WS2PConnection" -import {Key} from "../../../lib/common-libs/crypto/keyring" -import {WS2PMessageHandler} from "./impl/WS2PMessageHandler" -import {WS2PConstants} from "./constants" -import {WS2PStreamer} from "./WS2PStreamer" -import {WS2PSingleWriteStream} from "./WS2PSingleWriteStream" -import {ProxiesConf} from '../../../lib/proxy'; +import { Server } from "../../../../server"; +import { + WS2PConnection, + WS2PPubkeyLocalAuth, + WS2PPubkeyRemoteAuth, +} from "./WS2PConnection"; +import { Key } from "../../../lib/common-libs/crypto/keyring"; +import { WS2PMessageHandler } from "./impl/WS2PMessageHandler"; +import { WS2PConstants } from "./constants"; +import { WS2PStreamer } from "./WS2PStreamer"; +import { WS2PSingleWriteStream } from "./WS2PSingleWriteStream"; +import { ProxiesConf } from "../../../lib/proxy"; export class WS2PClient { - private constructor( - public connection:WS2PConnection, - private streamer:WS2PStreamer) { - } + public connection: WS2PConnection, + private streamer: WS2PStreamer + ) {} disableStream() { - this.streamer.disable() + this.streamer.disable(); } - static async connectTo(server:Server, fullEndpointAddress:string, endpointVersion:number, expectedWS2PUID:string, messageHandler:WS2PMessageHandler, expectedPub:string, allowKey:(pub:string)=>Promise<boolean> ) { - const k2 = new Key(server.conf.pair.pub, server.conf.pair.sec) - const myWs2pId = (server.conf.ws2p && server.conf.ws2p.uuid) ? server.conf.ws2p.uuid:"" + static async connectTo( + server: Server, + fullEndpointAddress: string, + endpointVersion: number, + expectedWS2PUID: string, + messageHandler: WS2PMessageHandler, + expectedPub: string, + allowKey: (pub: string) => Promise<boolean> + ) { + const k2 = new Key(server.conf.pair.pub, server.conf.pair.sec); + const myWs2pId = + server.conf.ws2p && server.conf.ws2p.uuid ? 
server.conf.ws2p.uuid : ""; const c = WS2PConnection.newConnectionToAddress( Math.min(endpointVersion, WS2PConstants.WS2P_API_VERSION), fullEndpointAddress, messageHandler, - new WS2PPubkeyLocalAuth(server.conf.currency , k2, myWs2pId, allowKey), + new WS2PPubkeyLocalAuth(server.conf.currency, k2, myWs2pId, allowKey), new WS2PPubkeyRemoteAuth(server.conf.currency, k2, allowKey), ProxiesConf.wsProxy(fullEndpointAddress, server.conf.proxiesConf), { connectionTimeout: WS2PConstants.REQUEST_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TIMEOUT + requestTimeout: WS2PConstants.REQUEST_TIMEOUT, }, expectedPub, expectedWS2PUID - ) - const singleWriteProtection = new WS2PSingleWriteStream() - const streamer = new WS2PStreamer(c) + ); + const singleWriteProtection = new WS2PSingleWriteStream(); + const streamer = new WS2PStreamer(c); c.connected .then(() => { // Streaming - server - .pipe(singleWriteProtection) - .pipe(streamer) + server.pipe(singleWriteProtection).pipe(streamer); }) .catch(() => { - server.unpipe(singleWriteProtection) - singleWriteProtection.unpipe(streamer) - }) + server.unpipe(singleWriteProtection); + singleWriteProtection.unpipe(streamer); + }); c.closed.then(() => { - server.unpipe(singleWriteProtection) - singleWriteProtection.unpipe(streamer) - }) + server.unpipe(singleWriteProtection); + singleWriteProtection.unpipe(streamer); + }); // Connecting try { - await c.connect() + await c.connect(); } catch (e) { // Immediately close the connection - c.close() - throw e + c.close(); + throw e; } - return new WS2PClient(c, streamer) + return new WS2PClient(c, streamer); } -} \ No newline at end of file +} diff --git a/app/modules/ws2p/lib/WS2PCluster.ts b/app/modules/ws2p/lib/WS2PCluster.ts index 7dad2898b9bb516888386b679e1be5ab56c3c9fa..535eea0191873f3db571ba47b6efd171611c3825 100644 --- a/app/modules/ws2p/lib/WS2PCluster.ts +++ b/app/modules/ws2p/lib/WS2PCluster.ts @@ -11,126 +11,140 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
-import {DEFAULT_ENCODING} from 'crypto'; -import {WS2PServer} from "./WS2PServer" -import {Server} from "../../../../server" -import {WS2PClient} from "./WS2PClient" -import {WS2PConnection} from "./WS2PConnection" -import {randomPick} from "../../../lib/common-libs/randomPick" -import {CrawlerConstants} from "../../crawler/lib/constants" -import {WS2PBlockPuller} from "./WS2PBlockPuller" -import {WS2PDocpoolPuller} from "./WS2PDocpoolPuller" -import {WS2PConstants} from "./constants" -import {PeerDTO, WS2PEndpoint} from '../../../lib/dto/PeerDTO'; -import {GlobalFifoPromise} from "../../../service/GlobalFifoPromise" -import {OtherConstants} from "../../../lib/other_constants" -import {Key} from "../../../lib/common-libs/crypto/keyring" -import {verify} from "duniteroxyde" -import {WS2PServerMessageHandler} from "./interface/WS2PServerMessageHandler" -import {WS2PMessageHandler} from "./impl/WS2PMessageHandler" -import {CommonConstants} from '../../../lib/common-libs/constants'; -import {Package} from "../../../lib/common/package"; -import {ProverConstants} from "../../prover/lib/constants"; -import {ProxiesConf} from '../../../lib/proxy'; -import {Underscore} from "../../../lib/common-libs/underscore" -import {NewLogger} from "../../../lib/logger"; - -const es = require('event-stream') -const nuuid = require('node-uuid') -const logger = NewLogger() +import { DEFAULT_ENCODING } from "crypto"; +import { WS2PServer } from "./WS2PServer"; +import { Server } from "../../../../server"; +import { WS2PClient } from "./WS2PClient"; +import { WS2PConnection } from "./WS2PConnection"; +import { randomPick } from "../../../lib/common-libs/randomPick"; +import { CrawlerConstants } from "../../crawler/lib/constants"; +import { WS2PBlockPuller } from "./WS2PBlockPuller"; +import { WS2PDocpoolPuller } from "./WS2PDocpoolPuller"; +import { WS2PConstants } from "./constants"; +import { PeerDTO, WS2PEndpoint } from "../../../lib/dto/PeerDTO"; +import { GlobalFifoPromise } from "../../../service/GlobalFifoPromise"; +import { OtherConstants } from "../../../lib/other_constants"; +import { Key } from "../../../lib/common-libs/crypto/keyring"; +import { verify } from "duniteroxyde"; +import { WS2PServerMessageHandler } from "./interface/WS2PServerMessageHandler"; +import { WS2PMessageHandler } from "./impl/WS2PMessageHandler"; +import { CommonConstants } from "../../../lib/common-libs/constants"; +import { Package } from "../../../lib/common/package"; +import { ProverConstants } from "../../prover/lib/constants"; +import { ProxiesConf } from "../../../lib/proxy"; +import { Underscore } from "../../../lib/common-libs/underscore"; +import { NewLogger } from "../../../lib/logger"; + +const es = require("event-stream"); +const nuuid = require("node-uuid"); +const logger = NewLogger(); export interface WS2PHead { - message:string - sig:string - messageV2?:string - sigV2?:string - step?:number + message: string; + sig: string; + messageV2?: string; + sigV2?: string; + step?: number; } export interface WS2pHeadCache extends WS2PHead { - blockstamp:string + blockstamp: string; } export class WS2PCluster { - - static getFullAddress(host: string, port: number, path: string|null|undefined = null): string { + static getFullAddress( + host: string, + port: number, + path: string | null | undefined = null + ): string { if (host.match(CommonConstants.IPV6_REGEXP)) { - host = "[" + host + "]" + host = "[" + host + "]"; } // Make the path be a string - path = path || '' + path = path || ""; // delete the space at the beginning 
of the path - if (path.match(/^ /)) - { - path = path.substr(1) + if (path.match(/^ /)) { + path = path.substr(1); } // Check that the path starts well with / (added if not) - if (path.length > 0 && !path.match(/^\//)) - { - path = '/'+path + if (path.length > 0 && !path.match(/^\//)) { + path = "/" + path; } // Choose the web protocol depending on the port - const protocol = port == 443 ? "wss://": "ws://" - return [protocol, host, ':', port, path].join('') + const protocol = port == 443 ? "wss://" : "ws://"; + return [protocol, host, ":", port, path].join(""); } - private ws2pServer:WS2PServer|null = null - private ws2pClients:{[ws2puid:string]:WS2PClient} = {} - private host:string|null = null - private port:number|null = null - private syncBlockInterval:NodeJS.Timer - private syncDocpoolInterval:NodeJS.Timer - private fifo:GlobalFifoPromise = new GlobalFifoPromise() - private maxLevel1Size = WS2PConstants.MAX_LEVEL_1_PEERS - private messageHandler: WS2PServerMessageHandler + private ws2pServer: WS2PServer | null = null; + private ws2pClients: { [ws2puid: string]: WS2PClient } = {}; + private host: string | null = null; + private port: number | null = null; + private syncBlockInterval: NodeJS.Timer; + private syncDocpoolInterval: NodeJS.Timer; + private fifo: GlobalFifoPromise = new GlobalFifoPromise(); + private maxLevel1Size = WS2PConstants.MAX_LEVEL_1_PEERS; + private messageHandler: WS2PServerMessageHandler; // A cache to remember the banned keys - private banned:{ [k:string]: string } = {} + private banned: { [k: string]: string } = {}; // A cache to remember the keys OK for reconnect - private ok4reconnect:{ [k:string]: string } = {} + private ok4reconnect: { [k: string]: string } = {}; // A cache to remember the banned keys for synchronization - private banned4Sync:{ [k:string]: string } = {} + private banned4Sync: { [k: string]: string } = {}; // A cache to know if a block exists or not in the DB - private blockstampsCache:{ [k:string]: number } = {} + private blockstampsCache: { [k: string]: number } = {}; // A cache to know wether a pubkey is a member or not - private memberkeysCache:{ [k:string]: number } = {} + private memberkeysCache: { [k: string]: number } = {}; // A cache of the current HEAD for a given ws2pFullId - private headsCache:{ [ws2pFullId:string]:WS2pHeadCache } = {} + private headsCache: { [ws2pFullId: string]: WS2pHeadCache } = {}; // A buffer of "to be sent" heads - private newHeads:WS2PHead[] = [] + private newHeads: WS2PHead[] = []; // The triggerer of a buffer of heads' sending - private headsTimeout:NodeJS.Timer|null = null + private headsTimeout: NodeJS.Timer | null = null; // A timer to regularly reconnect to the network in case we are below the minimum connections' count - private reconnectionInteval:NodeJS.Timer|null = null + private reconnectionInteval: NodeJS.Timer | null = null; - private constructor(private server:Server) { - this.messageHandler = new WS2PServerMessageHandler(this.server, this) + private constructor(private server: Server) { + this.messageHandler = new WS2PServerMessageHandler(this.server, this); // Conf: max private connections - if (this.server.conf.ws2p && this.server.conf.ws2p.maxPrivate !== undefined) { - this.maxLevel1Size = this.server.conf.ws2p.maxPrivate + if ( + this.server.conf.ws2p && + this.server.conf.ws2p.maxPrivate !== undefined + ) { + this.maxLevel1Size = this.server.conf.ws2p.maxPrivate; } } async getKnownHeads(): Promise<WS2PHead[]> { - const heads:WS2PHead[] = [] - const ws2pId = (this.server.conf.ws2p && 
this.server.conf.ws2p.uuid) || '000000' - const localPub = this.server.conf.pair.pub - const myFullId = [localPub, ws2pId].join('-') + const heads: WS2PHead[] = []; + const ws2pId = + (this.server.conf.ws2p && this.server.conf.ws2p.uuid) || "000000"; + const localPub = this.server.conf.pair.pub; + const myFullId = [localPub, ws2pId].join("-"); if (!this.headsCache[myFullId]) { - const current = await this.server.dal.getCurrentBlockOrNull() + const current = await this.server.dal.getCurrentBlockOrNull(); if (current) { - const myHead = await this.sayHeadChangedTo(current.number, current.hash) - const blockstamp = [current.number, current.hash].join('-') - this.headsCache[myFullId] = { blockstamp, message: myHead.message, sig: myHead.sig, messageV2: myHead.messageV2, sigV2: myHead.sigV2, step:myHead.step } - + const myHead = await this.sayHeadChangedTo( + current.number, + current.hash + ); + const blockstamp = [current.number, current.hash].join("-"); + this.headsCache[myFullId] = { + blockstamp, + message: myHead.message, + sig: myHead.sig, + messageV2: myHead.messageV2, + sigV2: myHead.sigV2, + step: myHead.step, + }; } } for (const ws2pFullId of Object.keys(this.headsCache)) { @@ -139,451 +153,742 @@ export class WS2PCluster { sig: this.headsCache[ws2pFullId].sig, messageV2: this.headsCache[ws2pFullId].messageV2, sigV2: this.headsCache[ws2pFullId].sigV2, - step: this.headsCache[ws2pFullId].step - }) + step: this.headsCache[ws2pFullId].step, + }); } - return heads + return heads; } - async headsReceived(heads:WS2PHead[]) { - await Promise.all(heads.map(async (h:WS2PHead) => { - try { - // HEAD v2 - if (h.messageV2 && h.messageV2.match(WS2PConstants.HEAD_V2_REGEXP)) { - if (!h.sigV2) { - throw "HEAD_MESSAGE_WRONGLY_SIGNED" - } else { - const [,,, pub, blockstamp, ws2pId,,,,,]:string[] = h.messageV2.split(':') - this.headReceived(h, pub, [pub, ws2pId].join('-'), blockstamp) + async headsReceived(heads: WS2PHead[]) { + await Promise.all( + heads.map(async (h: WS2PHead) => { + try { + // HEAD v2 + if (h.messageV2 && h.messageV2.match(WS2PConstants.HEAD_V2_REGEXP)) { + if (!h.sigV2) { + throw "HEAD_MESSAGE_WRONGLY_SIGNED"; + } else { + const [ + , + , + , + pub, + blockstamp, + ws2pId, + , + , + , + , + ]: string[] = h.messageV2.split(":"); + this.headReceived(h, pub, [pub, ws2pId].join("-"), blockstamp); + } } - } - // HEAD v1 and HEAD v0 - else if (h.message && h.sig) { - if (h.message.match(WS2PConstants.HEAD_V1_REGEXP)) { - const [,,, pub, blockstamp, ws2pId,,,]:string[] = h.message.split(':') - await this.headReceived(h, pub, [pub, ws2pId].join('-'), blockstamp) - } else if (h.message.match(WS2PConstants.HEAD_V0_REGEXP)) { - const [,,pub, blockstamp]:string[] = h.message.split(':') - await this.headReceived(h, pub, [pub, "00000000"].join('-'), blockstamp) + // HEAD v1 and HEAD v0 + else if (h.message && h.sig) { + if (h.message.match(WS2PConstants.HEAD_V1_REGEXP)) { + const [ + , + , + , + pub, + blockstamp, + ws2pId, + , + , + ]: string[] = h.message.split(":"); + await this.headReceived( + h, + pub, + [pub, ws2pId].join("-"), + blockstamp + ); + } else if (h.message.match(WS2PConstants.HEAD_V0_REGEXP)) { + const [, , pub, blockstamp]: string[] = h.message.split(":"); + await this.headReceived( + h, + pub, + [pub, "00000000"].join("-"), + blockstamp + ); + } else { + throw "HEAD_WRONG_FORMAT"; + } + } else if (!h.message) { + throw "EMPTY_MESSAGE_FOR_HEAD"; + } else if (!h.sig) { + throw "HEAD_MESSAGE_WRONGLY_SIGNED"; } else { - throw "HEAD_WRONG_FORMAT" + throw "HEAD_WRONG_FORMAT"; } + 
} catch (e) { + this.server.logger.trace(e); } - else if (!h.message) { - throw "EMPTY_MESSAGE_FOR_HEAD" - } else if (!h.sig) { - throw "HEAD_MESSAGE_WRONGLY_SIGNED" - } else { - throw "HEAD_WRONG_FORMAT" - } - } catch (e) { - this.server.logger.trace(e) - } - })) + }) + ); // Cancel a pending "heads" to be spread if (this.headsTimeout) { - clearTimeout(this.headsTimeout) + clearTimeout(this.headsTimeout); } // Reprogram it a few moments later this.headsTimeout = setTimeout(async () => { - const heads = this.newHeads.splice(0, this.newHeads.length) + const heads = this.newHeads.splice(0, this.newHeads.length); if (heads.length) { - await this.spreadNewHeads(heads) + await this.spreadNewHeads(heads); } - }, WS2PConstants.HEADS_SPREAD_TIMEOUT) - + }, WS2PConstants.HEADS_SPREAD_TIMEOUT); + this.server.push({ - ws2p: 'heads', - added: this.newHeads - }) + ws2p: "heads", + added: this.newHeads, + }); } - private async headReceived(h:WS2PHead, pub:string, fullId:string, blockstamp:string) { + private async headReceived( + h: WS2PHead, + pub: string, + fullId: string, + blockstamp: string + ) { try { // Prevent fields injection - if ( (h.message.match(WS2PConstants.HEAD_V1_REGEXP) || h.message.match(WS2PConstants.HEAD_V0_REGEXP)) - && h.sig.match(WS2PConstants.HEAD_SIG_REGEXP) - && (!h.messageV2 || h.messageV2.match(WS2PConstants.HEAD_V2_REGEXP)) - && (!h.sigV2 || h.sigV2.match(WS2PConstants.HEAD_SIG_REGEXP)) - && (!h.step || h.step.toFixed(0).match(/^[0-9]*$/)) + if ( + (h.message.match(WS2PConstants.HEAD_V1_REGEXP) || + h.message.match(WS2PConstants.HEAD_V0_REGEXP)) && + h.sig.match(WS2PConstants.HEAD_SIG_REGEXP) && + (!h.messageV2 || h.messageV2.match(WS2PConstants.HEAD_V2_REGEXP)) && + (!h.sigV2 || h.sigV2.match(WS2PConstants.HEAD_SIG_REGEXP)) && + (!h.step || h.step.toFixed(0).match(/^[0-9]*$/)) ) { - const head:WS2PHead = { message: h.message, sig: h.sig, messageV2: h.messageV2, sigV2: h.sigV2, step: h.step } - - const sigOK = verify(head.message, head.sig, pub) - const sigV2OK = (head.messageV2 !== undefined && head.sigV2 !== undefined) ? verify(head.messageV2, head.sigV2, pub):false + const head: WS2PHead = { + message: h.message, + sig: h.sig, + messageV2: h.messageV2, + sigV2: h.sigV2, + step: h.step, + }; + + const sigOK = verify(head.message, head.sig, pub); + const sigV2OK = + head.messageV2 !== undefined && head.sigV2 !== undefined + ? verify(head.messageV2, head.sigV2, pub) + : false; if ((sigV2OK && sigOK) || sigOK) { // Already known or more recent or closer ? - const step = (this.headsCache[fullId]) ? this.headsCache[fullId].step || 0:0 - if (!this.headsCache[fullId] // unknow head - || parseInt(this.headsCache[fullId].blockstamp) < parseInt(blockstamp) // more recent head - || (head.step !== undefined && head.step < step && this.headsCache[fullId].blockstamp === blockstamp) // closer head + const step = this.headsCache[fullId] + ? 
this.headsCache[fullId].step || 0 + : 0; + if ( + !this.headsCache[fullId] || // unknow head + parseInt(this.headsCache[fullId].blockstamp) < + parseInt(blockstamp) || // more recent head + (head.step !== undefined && + head.step < step && + this.headsCache[fullId].blockstamp === blockstamp) // closer head ) { // Check that issuer is a member and that the block exists - const isAllowed = pub === this.server.conf.pair.pub || this.isConnectedKey(pub) || (await this.isMemberKey(pub)) + const isAllowed = + pub === this.server.conf.pair.pub || + this.isConnectedKey(pub) || + (await this.isMemberKey(pub)); if (isAllowed) { - const exists = await this.existsBlock(blockstamp) + const exists = await this.existsBlock(blockstamp); if (exists) { - this.headsCache[fullId] = { blockstamp, message: head.message, sig: head.sig, messageV2: head.messageV2, sigV2: head.sigV2, step: head.step } - this.newHeads.push(head) + this.headsCache[fullId] = { + blockstamp, + message: head.message, + sig: head.sig, + messageV2: head.messageV2, + sigV2: head.sigV2, + step: head.step, + }; + this.newHeads.push(head); } } } } else { - throw "HEAD_MESSAGE_WRONGLY_SIGNED" + throw "HEAD_MESSAGE_WRONGLY_SIGNED"; } } else { - throw "HEAD_WRONG_FORMAT" + throw "HEAD_WRONG_FORMAT"; } } catch (e) { - this.server.logger.trace(e) + this.server.logger.trace(e); } } - private async isMemberKey(pub:string) { - let isMember = false + private async isMemberKey(pub: string) { + let isMember = false; if (this.memberkeysCache[pub]) { - isMember = true + isMember = true; } if (!isMember) { // Do we have this block in the DB? - isMember = !!(await this.server.dal.isMember(pub)) + isMember = !!(await this.server.dal.isMember(pub)); } if (isMember) { // Update the last time it was checked - this.memberkeysCache[pub] = Date.now() + this.memberkeysCache[pub] = Date.now(); } - return isMember + return isMember; } - private isConnectedKey(pub:string) { - return this.getConnectedPubkeys().indexOf(pub) !== -1 + private isConnectedKey(pub: string) { + return this.getConnectedPubkeys().indexOf(pub) !== -1; } - private async existsBlock(blockstamp:string) { - let exists = false + private async existsBlock(blockstamp: string) { + let exists = false; if (this.blockstampsCache[blockstamp]) { - exists = true + exists = true; } if (!exists) { // Do we have this block in the DB? 
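      // (judging by the DAL method name, only blocks still inside the fork
      // window are found here, so older blockstamps will read as absent)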
- exists = !!(await this.server.dal.getAbsoluteBlockInForkWindowByBlockstamp(blockstamp)) + exists = !!(await this.server.dal.getAbsoluteBlockInForkWindowByBlockstamp( + blockstamp + )); } // Update the last time it was checked - this.blockstampsCache[blockstamp] = Date.now() - return exists + this.blockstampsCache[blockstamp] = Date.now(); + return exists; } - static plugOn(server:Server) { - const cluster = new WS2PCluster(server) - server.ws2pCluster = cluster - return cluster + static plugOn(server: Server) { + const cluster = new WS2PCluster(server); + server.ws2pCluster = cluster; + return cluster; } - set maxLevel1Peers(newValue:number) { - this.maxLevel1Size = Math.max(newValue, 0) || 0 + set maxLevel1Peers(newValue: number) { + this.maxLevel1Size = Math.max(newValue, 0) || 0; } get maxLevel2Peers() { if (this.ws2pServer) { - return this.ws2pServer.maxLevel2Peers || 0 + return this.ws2pServer.maxLevel2Peers || 0; } - return 0 + return 0; } - async listen(host:string, port:number) { + async listen(host: string, port: number) { if (this.ws2pServer) { - await this.ws2pServer.close() + await this.ws2pServer.close(); } - this.ws2pServer = await WS2PServer.bindOn(this.server, host, port, this.fifo, (pubkey:string, isSync: boolean, syncConnectedPubkeys:string[], connectedPubkeys:string[]) => { - return this.acceptPubkey(pubkey, isSync, syncConnectedPubkeys, connectedPubkeys, [], () => this.servedCount(), this.maxLevel2Peers, this.privilegedNodes(), (this.server.conf.ws2p !== undefined && this.server.conf.ws2p.privilegedOnly)) - }, this.keyPriorityLevel, this.messageHandler) - this.host = host - this.port = port - return this.ws2pServer + this.ws2pServer = await WS2PServer.bindOn( + this.server, + host, + port, + this.fifo, + ( + pubkey: string, + isSync: boolean, + syncConnectedPubkeys: string[], + connectedPubkeys: string[] + ) => { + return this.acceptPubkey( + pubkey, + isSync, + syncConnectedPubkeys, + connectedPubkeys, + [], + () => this.servedCount(), + this.maxLevel2Peers, + this.privilegedNodes(), + this.server.conf.ws2p !== undefined && + this.server.conf.ws2p.privilegedOnly + ); + }, + this.keyPriorityLevel, + this.messageHandler + ); + this.host = host; + this.port = port; + return this.ws2pServer; } async close() { if (this.ws2pServer) { - await this.ws2pServer.close() + await this.ws2pServer.close(); } - const connections = this.getAllConnections() - await Promise.all(connections.map(c => c.close())) + const connections = this.getAllConnections(); + await Promise.all(connections.map((c) => c.close())); } clientsCount() { - let count = 0 - let connectedKeys:string[] = [] + let count = 0; + let connectedKeys: string[] = []; for (const ws2pid in this.ws2pClients) { - if (this.ws2pClients[ws2pid].connection.pubkey != this.server.conf.pair.pub - && connectedKeys.indexOf(this.ws2pClients[ws2pid].connection.pubkey) == -1) { - count++ - connectedKeys.push(this.ws2pClients[ws2pid].connection.pubkey) + if ( + this.ws2pClients[ws2pid].connection.pubkey != + this.server.conf.pair.pub && + connectedKeys.indexOf(this.ws2pClients[ws2pid].connection.pubkey) == -1 + ) { + count++; + connectedKeys.push(this.ws2pClients[ws2pid].connection.pubkey); } } - return count + return count; } numberOfConnectedPublicNodesWithSameKey() { - let count = 0 + let count = 0; for (const ws2pid in this.ws2pClients) { - if (this.ws2pClients[ws2pid].connection.pubkey === this.server.conf.pair.pub) { - count++ + if ( + this.ws2pClients[ws2pid].connection.pubkey === this.server.conf.pair.pub + ) { + count++; } } - 
return count + return count; } servedCount() { - return (this.ws2pServer) ? this.ws2pServer.countConnexions():0 + return this.ws2pServer ? this.ws2pServer.countConnexions() : 0; } privilegedNodes() { if (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) { - return this.server.conf.ws2p.privilegedNodes + return this.server.conf.ws2p.privilegedNodes; } else { - return  [] + return []; } } - async connectToRemoteWS(endpointVersion:number, host: string, port: number, path:string, messageHandler:WS2PMessageHandler, expectedPub:string, ws2pEndpointUUID:string = ""): Promise<WS2PClient> { - const uuid = nuuid.v4() - let pub = expectedPub.slice(0, 8) - const api:string = (host.match(WS2PConstants.HOST_ONION_REGEX) !== null) ? 'WS2PTOR':'WS2P' + async connectToRemoteWS( + endpointVersion: number, + host: string, + port: number, + path: string, + messageHandler: WS2PMessageHandler, + expectedPub: string, + ws2pEndpointUUID: string = "" + ): Promise<WS2PClient> { + const uuid = nuuid.v4(); + let pub = expectedPub.slice(0, 8); + const api: string = + host.match(WS2PConstants.HOST_ONION_REGEX) !== null ? "WS2PTOR" : "WS2P"; try { - const fullEndpointAddress = WS2PCluster.getFullAddress(host, port, path) - const ws2pc = await WS2PClient.connectTo(this.server, fullEndpointAddress, endpointVersion, ws2pEndpointUUID, messageHandler, expectedPub, (pub:string) => { - const syncPubkeys: string[] = [] // The connection won't be considered as a SYNC connection, so there is no check to do - const connectedPubkeys = this.getConnectedPubkeys() - const connectedWS2PUID = this.getConnectedWS2PUID() - const preferedNodes = (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) ? this.server.conf.ws2p.preferedNodes:[] - return this.acceptPubkey(expectedPub, false, syncPubkeys, connectedPubkeys, connectedWS2PUID, () => this.clientsCount(), this.maxLevel1Size, preferedNodes, (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || false, ws2pEndpointUUID) - }) - this.ws2pClients[uuid] = ws2pc - pub = ws2pc.connection.pubkey + const fullEndpointAddress = WS2PCluster.getFullAddress(host, port, path); + const ws2pc = await WS2PClient.connectTo( + this.server, + fullEndpointAddress, + endpointVersion, + ws2pEndpointUUID, + messageHandler, + expectedPub, + (pub: string) => { + const syncPubkeys: string[] = []; // The connection won't be considered as a SYNC connection, so there is no check to do + const connectedPubkeys = this.getConnectedPubkeys(); + const connectedWS2PUID = this.getConnectedWS2PUID(); + const preferedNodes = + this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes + ? 
this.server.conf.ws2p.preferedNodes + : []; + return this.acceptPubkey( + expectedPub, + false, + syncPubkeys, + connectedPubkeys, + connectedWS2PUID, + () => this.clientsCount(), + this.maxLevel1Size, + preferedNodes, + (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || + false, + ws2pEndpointUUID + ); + } + ); + this.ws2pClients[uuid] = ws2pc; + pub = ws2pc.connection.pubkey; ws2pc.connection.closed.then(() => { - this.server.logger.info(api+': connection [%s `'+api+' %s %s`] has been closed', pub.slice(0, 8), host, port) + this.server.logger.info( + api + ": connection [%s `" + api + " %s %s`] has been closed", + pub.slice(0, 8), + host, + port + ); this.server.push({ - ws2p: 'disconnected', + ws2p: "disconnected", peer: { - pub: ws2pc.connection.pubkey - } - }) + pub: ws2pc.connection.pubkey, + }, + }); if (this.ws2pClients[uuid]) { - delete this.ws2pClients[uuid] + delete this.ws2pClients[uuid]; } - }) - this.server.logger.info(api+': connected to peer %s using `'+api+' %s %s`!', pub.slice(0, 8), host, port) + }); + this.server.logger.info( + api + ": connected to peer %s using `" + api + " %s %s`!", + pub.slice(0, 8), + host, + port + ); this.server.push({ - ws2p: 'connected', - to: { host, port, pubkey: pub } - }) - await this.server.dal.setPeerUP(pub) - return ws2pc + ws2p: "connected", + to: { host, port, pubkey: pub }, + }); + await this.server.dal.setPeerUP(pub); + return ws2pc; } catch (e) { - this.server.logger.info(api+': Could not connect to peer %s using `'+api+' %s %s: %s`', pub.slice(0, 8), host, port, (e && e.message || e)) - throw e + this.server.logger.info( + api + ": Could not connect to peer %s using `" + api + " %s %s: %s`", + pub.slice(0, 8), + host, + port, + (e && e.message) || e + ); + throw e; } } async connectToWS2Peers() { // If incoming connections quota is full, delete one low-priority connection - if (this.ws2pServer !== null && this.ws2pServer.countConnexions() === this.ws2pServer.maxLevel2Peers) { - const privilegedKeys = ((this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) || []).slice() // Copy - this.ws2pServer.removeLowPriorityConnection(privilegedKeys) - } - const myUUID = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) ? this.server.conf.ws2p.uuid:"" - const potentials = await this.server.dal.getWS2Peers() - const peers:PeerDTO[] = potentials.map((p:any) => PeerDTO.fromJSONObject(p)) - const prefered = ((this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) || []).slice() // Copy + if ( + this.ws2pServer !== null && + this.ws2pServer.countConnexions() === this.ws2pServer.maxLevel2Peers + ) { + const privilegedKeys = ( + (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) || + [] + ).slice(); // Copy + this.ws2pServer.removeLowPriorityConnection(privilegedKeys); + } + const myUUID = + this.server.conf.ws2p && this.server.conf.ws2p.uuid + ? 
this.server.conf.ws2p.uuid + : ""; + const potentials = await this.server.dal.getWS2Peers(); + const peers: PeerDTO[] = potentials.map((p: any) => + PeerDTO.fromJSONObject(p) + ); + const prefered = ( + (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) || + [] + ).slice(); // Copy // Our key is also a prefered one, so we connect to our siblings - const canReachTorEndpoint = ProxiesConf.canReachTorEndpoint(this.server.conf.proxiesConf) - const canReachClearEndpoint = ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf) + const canReachTorEndpoint = ProxiesConf.canReachTorEndpoint( + this.server.conf.proxiesConf + ); + const canReachClearEndpoint = ProxiesConf.canReachClearEndpoint( + this.server.conf.proxiesConf + ); peers.sort((a, b) => { // Top priority at our own nodes - if (a.pubkey === this.server.conf.pair.pub && b.pubkey !== this.server.conf.pair.pub) { - return -1 - } else if (a.pubkey !== this.server.conf.pair.pub && b.pubkey === this.server.conf.pair.pub) { - return 1 + if ( + a.pubkey === this.server.conf.pair.pub && + b.pubkey !== this.server.conf.pair.pub + ) { + return -1; + } else if ( + a.pubkey !== this.server.conf.pair.pub && + b.pubkey === this.server.conf.pair.pub + ) { + return 1; } - const aIsPrefered = prefered.indexOf(a.pubkey) !== -1 - const bIsPrefered = prefered.indexOf(b.pubkey) !== -1 - const aNumberOfFreeRooms = this.numberOfFreeRooms(a, canReachTorEndpoint, canReachClearEndpoint) - const bNumberOfFreeRooms = this.numberOfFreeRooms(b, canReachTorEndpoint, canReachClearEndpoint) + const aIsPrefered = prefered.indexOf(a.pubkey) !== -1; + const bIsPrefered = prefered.indexOf(b.pubkey) !== -1; + const aNumberOfFreeRooms = this.numberOfFreeRooms( + a, + canReachTorEndpoint, + canReachClearEndpoint + ); + const bNumberOfFreeRooms = this.numberOfFreeRooms( + b, + canReachTorEndpoint, + canReachClearEndpoint + ); if (canReachTorEndpoint) { - const aAtWs2pTorEnpoint = a.endpoints.filter(function (element) { return element.match(CommonConstants.WS2PTOR_REGEXP); }).length > 0 - const bAtWs2pTorEnpoint = b.endpoints.filter(function (element) { return element.match(CommonConstants.WS2PTOR_REGEXP); }).length > 0 - - if ( (aAtWs2pTorEnpoint && bAtWs2pTorEnpoint) || (!aAtWs2pTorEnpoint && !bAtWs2pTorEnpoint) ) { - if ((aIsPrefered && bIsPrefered) || (!aIsPrefered && !bIsPrefered)) { + const aAtWs2pTorEnpoint = + a.endpoints.filter(function (element) { + return element.match(CommonConstants.WS2PTOR_REGEXP); + }).length > 0; + const bAtWs2pTorEnpoint = + b.endpoints.filter(function (element) { + return element.match(CommonConstants.WS2PTOR_REGEXP); + }).length > 0; + + if ( + (aAtWs2pTorEnpoint && bAtWs2pTorEnpoint) || + (!aAtWs2pTorEnpoint && !bAtWs2pTorEnpoint) + ) { + if ((aIsPrefered && bIsPrefered) || (!aIsPrefered && !bIsPrefered)) { if (aNumberOfFreeRooms > bNumberOfFreeRooms) { - return -1 + return -1; } else if (aNumberOfFreeRooms < bNumberOfFreeRooms) { - return 1 + return 1; } - return 0 + return 0; } else if (aIsPrefered) { - return -1 + return -1; } - return 1 + return 1; } else { if (aAtWs2pTorEnpoint) { - return -1 + return -1; } - return 1 + return 1; } } else { - if ((aIsPrefered && bIsPrefered) || (!aIsPrefered && !bIsPrefered)) { + if ((aIsPrefered && bIsPrefered) || (!aIsPrefered && !bIsPrefered)) { if (aNumberOfFreeRooms > bNumberOfFreeRooms) { - return -1 + return -1; } else if (aNumberOfFreeRooms < bNumberOfFreeRooms) { - return 1 + return 1; } - return 0 + return 0; } else if (aIsPrefered) { - return -1 + return -1; } - return 1 + 
return 1; } - }) - let i = 0 - let countPublicNodesWithSameKey:number = 1 // Necessary if maxPrivate = 0 - let endpointsNodesWithSameKey:WS2PEndpoint[] = [] + }); + let i = 0; + let countPublicNodesWithSameKey: number = 1; // Necessary if maxPrivate = 0 + let endpointsNodesWithSameKey: WS2PEndpoint[] = []; // Group the peers by bunches - const bunchsOfPeers = peers.reduce((bundles:PeerDTO[][], p:PeerDTO) => { - let bundleIndex = (bundles.length || 1) - 1 + const bunchsOfPeers = peers.reduce((bundles: PeerDTO[][], p: PeerDTO) => { + let bundleIndex = (bundles.length || 1) - 1; // Maximum size of a bundle of peers - if (bundles[bundleIndex] && bundles[bundleIndex].length >= WS2PConstants.INITIAL_CONNECTION_PEERS_BUNDLE_SIZE) { - bundleIndex++ + if ( + bundles[bundleIndex] && + bundles[bundleIndex].length >= + WS2PConstants.INITIAL_CONNECTION_PEERS_BUNDLE_SIZE + ) { + bundleIndex++; } // We create the bundle if it doesn't exist yet if (!bundles[bundleIndex]) { - bundles[bundleIndex] = [] + bundles[bundleIndex] = []; } // We feed it with this peer - bundles[bundleIndex].push(p) - return bundles - }, []) - while (i < bunchsOfPeers.length && (this.clientsCount() < this.maxLevel1Size || this.numberOfConnectedPublicNodesWithSameKey() < countPublicNodesWithSameKey) ) { - this.server.logger.info("WS2P: init: bundle of peers %s/%s", i+1, bunchsOfPeers.length) - await Promise.all(bunchsOfPeers[i].map(async p => { - if (p.pubkey === this.server.conf.pair.pub) { - endpointsNodesWithSameKey = p.getAllWS2PEndpoints(canReachTorEndpoint, canReachClearEndpoint, myUUID) - countPublicNodesWithSameKey = endpointsNodesWithSameKey.length - for (const api of endpointsNodesWithSameKey) { - try { - // We do not connect to local host - if (api.uuid !== myUUID) { - await this.connectToRemoteWS(api.version, api.host, api.port, api.path, this.messageHandler, p.pubkey, api.uuid) + bundles[bundleIndex].push(p); + return bundles; + }, []); + while ( + i < bunchsOfPeers.length && + (this.clientsCount() < this.maxLevel1Size || + this.numberOfConnectedPublicNodesWithSameKey() < + countPublicNodesWithSameKey) + ) { + this.server.logger.info( + "WS2P: init: bundle of peers %s/%s", + i + 1, + bunchsOfPeers.length + ); + await Promise.all( + bunchsOfPeers[i].map(async (p) => { + if (p.pubkey === this.server.conf.pair.pub) { + endpointsNodesWithSameKey = p.getAllWS2PEndpoints( + canReachTorEndpoint, + canReachClearEndpoint, + myUUID + ); + countPublicNodesWithSameKey = endpointsNodesWithSameKey.length; + for (const api of endpointsNodesWithSameKey) { + try { + // We do not connect to local host + if (api.uuid !== myUUID) { + await this.connectToRemoteWS( + api.version, + api.host, + api.port, + api.path, + this.messageHandler, + p.pubkey, + api.uuid + ); + } + } catch (e) { + this.server.logger.debug("WS2P: init: failed connection"); } - } catch (e) { - this.server.logger.debug('WS2P: init: failed connection') } - } - } else { - const api = p.getOnceWS2PEndpoint(canReachTorEndpoint, canReachClearEndpoint) - if (api) { - try { - // We do not connect to local host - await this.connectToRemoteWS(api.version, api.host, api.port, api.path, this.messageHandler, p.pubkey, api.uuid) - } catch (e) { - this.server.logger.debug('WS2P: init: failed connection') + } else { + const api = p.getOnceWS2PEndpoint( + canReachTorEndpoint, + canReachClearEndpoint + ); + if (api) { + try { + // We do not connect to local host + await this.connectToRemoteWS( + api.version, + api.host, + api.port, + api.path, + this.messageHandler, + p.pubkey, + api.uuid 
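The reduce just above groups the candidate peers into fixed-size bundles, so that connection attempts go out in successive waves rather than all at once. A minimal standalone sketch of the same pattern, where BUNDLE_SIZE is only a stand-in for WS2PConstants.INITIAL_CONNECTION_PEERS_BUNDLE_SIZE (defined elsewhere):

```ts
// Sketch: generic version of the bundling reduce above.
const BUNDLE_SIZE = 10; // assumed value, for illustration only

function groupIntoBundles<T>(items: T[], size = BUNDLE_SIZE): T[][] {
  return items.reduce((bundles: T[][], item: T) => {
    let i = (bundles.length || 1) - 1; // index of the last bundle
    if (bundles[i] && bundles[i].length >= size) {
      i++; // current bundle is full: move on to a fresh one
    }
    if (!bundles[i]) {
      bundles[i] = []; // create the bundle if it doesn't exist yet
    }
    bundles[i].push(item);
    return bundles;
  }, []);
}

// groupIntoBundles([1, 2, 3, 4, 5], 2) => [[1, 2], [3, 4], [5]]
```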
+ ); + } catch (e) { + this.server.logger.debug("WS2P: init: failed connection"); + } } } - } - })) - i++ + }) + ); + i++; // Trim the eventual extra connections - setTimeout(() => this.removeLowPriorityConnections(prefered), WS2PConstants.CONNEXION_TIMEOUT) + setTimeout( + () => this.removeLowPriorityConnections(prefered), + WS2PConstants.CONNEXION_TIMEOUT + ); } } - private numberOfFreeRooms(p:PeerDTO, canReachTorEndpoint:boolean, canReachClearEndpoint:boolean) { - const api = p.getOnceWS2PEndpoint(canReachTorEndpoint, canReachClearEndpoint) + private numberOfFreeRooms( + p: PeerDTO, + canReachTorEndpoint: boolean, + canReachClearEndpoint: boolean + ) { + const api = p.getOnceWS2PEndpoint( + canReachTorEndpoint, + canReachClearEndpoint + ); if (api) { for (const ws2pFullId in this.headsCache) { if (ws2pFullId.slice(0, 8) == api.uuid) { - const messageV2 = this.headsCache[ws2pFullId].messageV2 + const messageV2 = this.headsCache[ws2pFullId].messageV2; if (messageV2 !== undefined) { - const [,,, pub, blockstamp, ws2pId,,,,freeMemberRoom,freeMirorRoom]:string[] = messageV2.split(':') - return (this.server.dal.isMember(this.server.conf.pair.pub)) ? freeMemberRoom:freeMirorRoom + const [ + , + , + , + pub, + blockstamp, + ws2pId, + , + , + , + freeMemberRoom, + freeMirorRoom, + ]: string[] = messageV2.split(":"); + return this.server.dal.isMember(this.server.conf.pair.pub) + ? freeMemberRoom + : freeMirorRoom; } } } } - return 0 + return 0; } listenServerFlow() { - let connectingToNodesByFlow = false + let connectingToNodesByFlow = false; // Also listen for network updates, and connect to new nodes - this.server.pipe(es.mapSync((data:any) => { - - (async () => { - // New peer - if (data.endpoints) { - const peer = PeerDTO.fromJSONObject(data) - const ws2pEnpoint = peer.getOnceWS2PEndpoint(ProxiesConf.canReachTorEndpoint(this.server.conf.proxiesConf), ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf)) - if (ws2pEnpoint) { - // Check if already connected to the pubkey (in any way: server or client) - const syncPubkeys: string[] = [] // The connection won't be considered as a SYNC connection, so there is no check to do - const connectedPubkeys = this.getConnectedPubkeys() - const connectedWS2PUID = this.getConnectedWS2PUID() - const preferedKeys = (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) ? 
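numberOfFreeRooms() above recovers the free-room counters purely by position from the colon-separated v2 HEAD message. A sketch of that destructuring in isolation; the field layout follows the messageV2 template built in sayHeadChangedTo() further down, and the sample values are invented:

```ts
// Sketch: positional parsing of a WS2P v2 HEAD message.
function parseFreeRooms(messageV2: string) {
  const [
    /* api */, /* "HEAD" */, /* "2" */,
    pub, blockstamp, ws2pId,
    /* software */, /* softVersion */, /* prefix */,
    freeMemberRoom, freeMirorRoom,
  ] = messageV2.split(":");
  return { pub, blockstamp, ws2pId, freeMemberRoom, freeMirorRoom };
}

const sample =
  "WS2POCAIC:HEAD:2:BASE58PUBKEY:42-00AAFF:abcd1234:duniter:1.8.0:1:7:12";
console.log(parseFreeRooms(sample));
// => { pub: "BASE58PUBKEY", blockstamp: "42-00AAFF", ws2pId: "abcd1234",
//      freeMemberRoom: "7", freeMirorRoom: "12" }
```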
this.server.conf.ws2p.preferedNodes:[] - const shouldAccept = await this.acceptPubkey(peer.pubkey, false, syncPubkeys, connectedPubkeys, connectedWS2PUID, () => this.clientsCount(), this.maxLevel1Size, preferedKeys, (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || false, ws2pEnpoint.uuid) - if (shouldAccept && (!this.server.conf.ws2p || ws2pEnpoint.uuid !== this.server.conf.ws2p.uuid || peer.pubkey !== this.server.conf.pair.pub)) { - await this.connectToRemoteWS(ws2pEnpoint.version, ws2pEnpoint.host, ws2pEnpoint.port, ws2pEnpoint.path, this.messageHandler, peer.pubkey, ws2pEnpoint.uuid) - await this.removeLowPriorityConnections(preferedKeys) + this.server.pipe( + es.mapSync((data: any) => { + (async () => { + // New peer + if (data.endpoints) { + const peer = PeerDTO.fromJSONObject(data); + const ws2pEnpoint = peer.getOnceWS2PEndpoint( + ProxiesConf.canReachTorEndpoint(this.server.conf.proxiesConf), + ProxiesConf.canReachClearEndpoint(this.server.conf.proxiesConf) + ); + if (ws2pEnpoint) { + // Check if already connected to the pubkey (in any way: server or client) + const syncPubkeys: string[] = []; // The connection won't be considered as a SYNC connection, so there is no check to do + const connectedPubkeys = this.getConnectedPubkeys(); + const connectedWS2PUID = this.getConnectedWS2PUID(); + const preferedKeys = + this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes + ? this.server.conf.ws2p.preferedNodes + : []; + const shouldAccept = await this.acceptPubkey( + peer.pubkey, + false, + syncPubkeys, + connectedPubkeys, + connectedWS2PUID, + () => this.clientsCount(), + this.maxLevel1Size, + preferedKeys, + (this.server.conf.ws2p && this.server.conf.ws2p.preferedOnly) || + false, + ws2pEnpoint.uuid + ); + if ( + shouldAccept && + (!this.server.conf.ws2p || + ws2pEnpoint.uuid !== this.server.conf.ws2p.uuid || + peer.pubkey !== this.server.conf.pair.pub) + ) { + await this.connectToRemoteWS( + ws2pEnpoint.version, + ws2pEnpoint.host, + ws2pEnpoint.port, + ws2pEnpoint.path, + this.messageHandler, + peer.pubkey, + ws2pEnpoint.uuid + ); + await this.removeLowPriorityConnections(preferedKeys); + } } } - } - // Block received - else if (data.joiners) { - // Update the cache - this.blockstampsCache[[data.number, data.hash].join('-')] = Date.now() - } + // Block received + else if (data.joiners) { + // Update the cache + this.blockstampsCache[ + [data.number, data.hash].join("-") + ] = Date.now(); + } - // HEAD changed - else if (data.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || data.bcEvent === OtherConstants.BC_EVENT.SWITCHED) { - // Propagate this change to the network - const myHead = await this.sayHeadChangedTo(data.block.number, data.block.hash) - try { - await this.broadcastHead(myHead) - } catch (e) { - this.server.logger.warn(e) + // HEAD changed + else if ( + data.bcEvent === OtherConstants.BC_EVENT.HEAD_CHANGED || + data.bcEvent === OtherConstants.BC_EVENT.SWITCHED + ) { + // Propagate this change to the network + const myHead = await this.sayHeadChangedTo( + data.block.number, + data.block.hash + ); + try { + await this.broadcastHead(myHead); + } catch (e) { + this.server.logger.warn(e); + } } - } - })() + })(); - return data - })) + return data; + }) + ); } - private async broadcastHead(head:WS2PHead) { - await this.headsReceived([head]) - return this.spreadNewHeads([head]) + private async broadcastHead(head: WS2PHead) { + await this.headsReceived([head]); + return this.spreadNewHeads([head]); } - private async spreadNewHeads(heads:WS2PHead[]) { - heads 
= this.incrementHeadsStep(heads) - const connexions = this.getAllConnections() - return Promise.all(connexions.map(async (c) => { - try { - await c.pushHeads(heads) - } catch (e) { - this.server.logger.warn('Could not spread new HEAD info to %s WS2PID %s', c.pubkey, c.uuid) - } - })) + private async spreadNewHeads(heads: WS2PHead[]) { + heads = this.incrementHeadsStep(heads); + const connexions = this.getAllConnections(); + return Promise.all( + connexions.map(async (c) => { + try { + await c.pushHeads(heads); + } catch (e) { + this.server.logger.warn( + "Could not spread new HEAD info to %s WS2PID %s", + c.pubkey, + c.uuid + ); + } + }) + ); } - private incrementHeadsStep(heads_:WS2PHead[]) { - let heads:WS2PHead[] = [] + private incrementHeadsStep(heads_: WS2PHead[]) { + let heads: WS2PHead[] = []; for (let head of heads_) { if (head.step !== undefined) { - head.step++ + head.step++; } // Prevent injections heads.push({ @@ -591,395 +896,501 @@ export class WS2PCluster { sig: head.sig, messageV2: head.messageV2, sigV2: head.sigV2, - step: head.step - }) + step: head.step, + }); } - return heads - } - - private async sayHeadChangedTo(number:number, hash:string) { - const api = this.getApi() - const key = new Key(this.server.conf.pair.pub, this.server.conf.pair.sec) - const software = 'duniter' - const softVersion = Package.getInstance().version - const ws2pId = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) || '00000000' - const prefix = this.server.conf.prefix || ProverConstants.DEFAULT_PEER_ID - const { freeMemberRoom , freeMirorRoom } = await this.countFreeRooms() - const message = `${api}:HEAD:1:${key.publicKey}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}` - const sig = key.signSync(message) - const messageV2 = `${api}:HEAD:2:${key.publicKey}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}:${freeMemberRoom}:${freeMirorRoom}` - const sigV2 = key.signSync(messageV2) - - const myHead:WS2PHead = { + return heads; + } + + private async sayHeadChangedTo(number: number, hash: string) { + const api = this.getApi(); + const key = new Key(this.server.conf.pair.pub, this.server.conf.pair.sec); + const software = "duniter"; + const softVersion = Package.getInstance().version; + const ws2pId = + (this.server.conf.ws2p && this.server.conf.ws2p.uuid) || "00000000"; + const prefix = this.server.conf.prefix || ProverConstants.DEFAULT_PEER_ID; + const { freeMemberRoom, freeMirorRoom } = await this.countFreeRooms(); + const message = `${api}:HEAD:1:${key.publicKey}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}`; + const sig = key.signSync(message); + const messageV2 = `${api}:HEAD:2:${key.publicKey}:${number}-${hash}:${ws2pId}:${software}:${softVersion}:${prefix}:${freeMemberRoom}:${freeMirorRoom}`; + const sigV2 = key.signSync(messageV2); + + const myHead: WS2PHead = { message, sig, messageV2, sigV2, - step: 0 } + step: 0, + }; - return myHead + return myHead; } private getApi() { - let api = 'WS2P' + let api = "WS2P"; let network = { in: WS2PConstants.NETWORK.INCOMING.DEFAULT, out: WS2PConstants.NETWORK.OUTCOMING.DEFAULT, - } - let ws2pPrivate = '' - let ws2pPublic = '' + }; + let ws2pPrivate = ""; + let ws2pPublic = ""; if (this.server.conf.ws2p) { - if (this.server.conf.ws2p.publicAccess && - (this.server.conf.ws2p.remotehost && this.server.conf.ws2p.remoteport) - || - (this.server.conf.ws2p.upnp && this.server.conf.upnp) - ) - { - ws2pPublic = 'I' + if ( + (this.server.conf.ws2p.publicAccess && + this.server.conf.ws2p.remotehost && + 
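For reference, the v1 HEAD message assembled by sayHeadChangedTo() above is a plain colon-separated string. Its shape, with invented values standing in for the configuration and current-block data:

```ts
// Sketch: the pieces that make up a v1 HEAD message (illustrative values).
const api = "WS2POCAIC"; // computed by getApi() below
const pub = "BASE58PUBKEY"; // this node's public key
const blockstamp = "42-00AAFF"; // `${number}-${hash}` of the new HEAD block
const ws2pId = "abcd1234"; // conf.ws2p.uuid, or "00000000"
const software = "duniter";
const softVersion = "1.8.0"; // Package.getInstance().version
const prefix = 1; // conf.prefix, or ProverConstants.DEFAULT_PEER_ID

const message = `${api}:HEAD:1:${pub}:${blockstamp}:${ws2pId}:${software}:${softVersion}:${prefix}`;
// The v2 variant appends `:${freeMemberRoom}:${freeMirorRoom}` and is signed separately.
```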
this.server.conf.ws2p.remoteport) || + (this.server.conf.ws2p.upnp && this.server.conf.upnp) + ) { + ws2pPublic = "I"; // Determine the network layer - if (this.server.conf.ws2p.remotehost && this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { - network.in = WS2PConstants.NETWORK.INCOMING.TOR + if ( + this.server.conf.ws2p.remotehost && + this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX) + ) { + network.in = WS2PConstants.NETWORK.INCOMING.TOR; } // Apply the network layer switch (network.in) { - case WS2PConstants.NETWORK.INCOMING.TOR: ws2pPublic += 'T'; break; - default: ws2pPublic += 'C'; break; + case WS2PConstants.NETWORK.INCOMING.TOR: + ws2pPublic += "T"; + break; + default: + ws2pPublic += "C"; + break; } } if (this.server.conf.ws2p.privateAccess) { - ws2pPrivate = 'O' + ws2pPrivate = "O"; // Determine the network layer - if (this.server.conf.proxiesConf && (this.server.conf.proxiesConf.proxyTorAddress || this.server.conf.proxiesConf.forceTor)) { - network.out = WS2PConstants.NETWORK.OUTCOMING.TOR + if ( + this.server.conf.proxiesConf && + (this.server.conf.proxiesConf.proxyTorAddress || + this.server.conf.proxiesConf.forceTor) + ) { + network.out = WS2PConstants.NETWORK.OUTCOMING.TOR; } // Apply the network layer switch (network.out) { - case WS2PConstants.NETWORK.OUTCOMING.TOR: ws2pPrivate += 'T'; - if (this.server.conf.proxiesConf && this.server.conf.proxiesConf.reachingClearEp) { + case WS2PConstants.NETWORK.OUTCOMING.TOR: + ws2pPrivate += "T"; + if ( + this.server.conf.proxiesConf && + this.server.conf.proxiesConf.reachingClearEp + ) { switch (this.server.conf.proxiesConf.reachingClearEp) { - case 'none': ws2pPrivate += 'S'; break; - case 'tor': ws2pPrivate += 'A'; break; - default: ws2pPrivate += 'M'; break; + case "none": + ws2pPrivate += "S"; + break; + case "tor": + ws2pPrivate += "A"; + break; + default: + ws2pPrivate += "M"; + break; } } - break; - default: ws2pPrivate += 'CA'; break; + break; + default: + ws2pPrivate += "CA"; + break; } } } - - api += ws2pPrivate + ws2pPublic - return api + api += ws2pPrivate + ws2pPublic; + return api; } private async countFreeRooms() { if (!this.ws2pServer) { return { freeMemberRoom: 0, - freeMirorRoom: 0 - } + freeMirorRoom: 0, + }; } - let freeMirorRoom = this.maxLevel2Peers - this.ws2pServer.countConnexions() - let freeMemberRoom = freeMirorRoom - const privilegedNodes = (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) ? this.server.conf.ws2p.privilegedNodes:[] + let freeMirorRoom = this.maxLevel2Peers - this.ws2pServer.countConnexions(); + let freeMemberRoom = freeMirorRoom; + const privilegedNodes = + this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes + ? 
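The label returned by getApi() encodes connectivity as single-letter flags appended to "WS2P". Reading the branches above, and assuming both private and public access are enabled: the private (outgoing) part is "O" followed by "CA" (clearnet) or "T" plus one of "S"/"A"/"M" (Tor, depending on how clear endpoints are reached), and the public (incoming) part is "I" followed by "C" (clearnet) or "T" (onion):

```ts
// Sketch: some labels the flag concatenation above can produce.
const examples = [
  "WS2P" + "OCA" + "IC", // clear out, clear in        => "WS2POCAIC"
  "WS2P" + "OTM" + "IC", // Tor out (mixed), clear in  => "WS2POTMIC"
  "WS2P" + "OTS" + "IT", // Tor-only out, onion in     => "WS2POTSIT"
];
```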
this.server.conf.ws2p.privilegedNodes + : []; for (const c of this.ws2pServer.getConnexions()) { - const connexionPriority = await this.keyPriorityLevel(c.pubkey, privilegedNodes) - if (connexionPriority < WS2PConstants.CONNECTIONS_PRIORITY.MEMBER_KEY_LEVEL) { - freeMemberRoom++ + const connexionPriority = await this.keyPriorityLevel( + c.pubkey, + privilegedNodes + ); + if ( + connexionPriority < WS2PConstants.CONNECTIONS_PRIORITY.MEMBER_KEY_LEVEL + ) { + freeMemberRoom++; } } return { freeMemberRoom, - freeMirorRoom - } + freeMirorRoom, + }; } async trimServerConnections() { if (this.ws2pServer) { - await this.ws2pServer.removeExcessIncomingConnections() + await this.ws2pServer.removeExcessIncomingConnections(); } } - async removeLowPriorityConnections(preferedKeys:string[]) { - let serverPubkeys:string[] = [] + async removeLowPriorityConnections(preferedKeys: string[]) { + let serverPubkeys: string[] = []; if (this.ws2pServer) { - serverPubkeys = this.ws2pServer.getConnexions().map(c => c.pubkey) + serverPubkeys = this.ws2pServer.getConnexions().map((c) => c.pubkey); } // Disconnect Private connexions already present under Public - let uuids = Object.keys(this.ws2pClients) - uuids = Underscore.shuffle(uuids) + let uuids = Object.keys(this.ws2pClients); + uuids = Underscore.shuffle(uuids); for (const uuid of uuids) { - const client = this.ws2pClients[uuid] - const pub = client.connection.pubkey - const isNotOurself = pub !== this.server.conf.pair.pub - const isAlreadyInPublic = serverPubkeys.indexOf(pub) !== -1 + const client = this.ws2pClients[uuid]; + const pub = client.connection.pubkey; + const isNotOurself = pub !== this.server.conf.pair.pub; + const isAlreadyInPublic = serverPubkeys.indexOf(pub) !== -1; if (isNotOurself && isAlreadyInPublic) { - client.connection.close() - await client.connection.closed + client.connection.close(); + await client.connection.closed; if (this.ws2pClients[uuid]) { - delete this.ws2pClients[uuid] + delete this.ws2pClients[uuid]; } } } // Disconnect Private connexions until the maximum size is respected while (this.clientsCount() > this.maxLevel1Size) { - let uuids = Object.keys(this.ws2pClients) - uuids = Underscore.shuffle(uuids) - let lowPriorityConnectionUUID:string = uuids[0] - let minPriorityLevel = await this.keyPriorityLevel(this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, preferedKeys) + let uuids = Object.keys(this.ws2pClients); + uuids = Underscore.shuffle(uuids); + let lowPriorityConnectionUUID: string = uuids[0]; + let minPriorityLevel = await this.keyPriorityLevel( + this.ws2pClients[lowPriorityConnectionUUID].connection.pubkey, + preferedKeys + ); for (const uuid of uuids) { - const client = this.ws2pClients[uuid] - if (uuid !== lowPriorityConnectionUUID) { - let uuidPriorityLevel = await this.keyPriorityLevel(client.connection.pubkey, preferedKeys) - if (uuidPriorityLevel < minPriorityLevel) { - lowPriorityConnectionUUID = uuid - minPriorityLevel = uuidPriorityLevel - } + const client = this.ws2pClients[uuid]; + if (uuid !== lowPriorityConnectionUUID) { + let uuidPriorityLevel = await this.keyPriorityLevel( + client.connection.pubkey, + preferedKeys + ); + if (uuidPriorityLevel < minPriorityLevel) { + lowPriorityConnectionUUID = uuid; + minPriorityLevel = uuidPriorityLevel; } + } } - this.ws2pClients[lowPriorityConnectionUUID].connection.close() - await this.ws2pClients[lowPriorityConnectionUUID].connection.closed - delete this.ws2pClients[lowPriorityConnectionUUID] + 
this.ws2pClients[lowPriorityConnectionUUID].connection.close(); + await this.ws2pClients[lowPriorityConnectionUUID].connection.closed; + delete this.ws2pClients[lowPriorityConnectionUUID]; } } - async keyPriorityLevel(pubkey:string, preferedOrPrivilegedKeys:string[]) { - const isMember = await this.server.dal.isMember(pubkey) - let priorityLevel = (isMember) ? WS2PConstants.CONNECTIONS_PRIORITY.MEMBER_KEY_LEVEL:0 - priorityLevel += (preferedOrPrivilegedKeys.indexOf(pubkey) !== -1) ? WS2PConstants.CONNECTIONS_PRIORITY.PREFERED_PRIVILEGED_KEY_LEVEL:0 - priorityLevel += (this.server.conf.pair.pub === pubkey) ? WS2PConstants.CONNECTIONS_PRIORITY.SELF_KEY_LEVEL:0 - return priorityLevel + async keyPriorityLevel(pubkey: string, preferedOrPrivilegedKeys: string[]) { + const isMember = await this.server.dal.isMember(pubkey); + let priorityLevel = isMember + ? WS2PConstants.CONNECTIONS_PRIORITY.MEMBER_KEY_LEVEL + : 0; + priorityLevel += + preferedOrPrivilegedKeys.indexOf(pubkey) !== -1 + ? WS2PConstants.CONNECTIONS_PRIORITY.PREFERED_PRIVILEGED_KEY_LEVEL + : 0; + priorityLevel += + this.server.conf.pair.pub === pubkey + ? WS2PConstants.CONNECTIONS_PRIORITY.SELF_KEY_LEVEL + : 0; + return priorityLevel; } private getPreferedNodes(): string[] { - return (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) || [] + return (this.server.conf.ws2p && this.server.conf.ws2p.preferedNodes) || []; } protected async acceptPubkey( - pub:string, + pub: string, isSync: boolean, - syncConnectedPubkeys:string[], - connectedPubkeys:string[], - connectedWS2PUID:string[], - getConcurrentConnexionsCount:()=>number, - maxConcurrentConnexionsSize:number, - priorityKeys:string[], - priorityKeysOnly:boolean, + syncConnectedPubkeys: string[], + connectedPubkeys: string[], + connectedWS2PUID: string[], + getConcurrentConnexionsCount: () => number, + maxConcurrentConnexionsSize: number, + priorityKeys: string[], + priorityKeysOnly: boolean, targetWS2PUID = "" ) { - // Sync case is specific if (isSync) { // OK for reconnection period of time if (this.ok4reconnect[pub]) { - return true + return true; } if (this.banned4Sync[pub]) { - return false + return false; } - const limit = (this.server.conf.ws2p && this.server.conf.ws2p.syncLimit) || WS2PConstants.WS2P_SYNC_LIMIT - const ok = syncConnectedPubkeys.length < limit + const limit = + (this.server.conf.ws2p && this.server.conf.ws2p.syncLimit) || + WS2PConstants.WS2P_SYNC_LIMIT; + const ok = syncConnectedPubkeys.length < limit; if (ok) { // The connection will be OK: we prepare the ban right now to give room for future users - this.rememberAndPrepareBanSyncConnection(pub) + this.rememberAndPrepareBanSyncConnection(pub); } - return ok + return ok; } if (this.server.conf.pair.pub === pub) { // We do not accept a connection to oneself - if (this.server.conf.ws2p && this.server.conf.ws2p.uuid === targetWS2PUID || targetWS2PUID === '11111111') { - return false + if ( + (this.server.conf.ws2p && + this.server.conf.ws2p.uuid === targetWS2PUID) || + targetWS2PUID === "11111111" + ) { + return false; } else { // We always accept self nodes, and they have a supreme priority (these are siblings) - if (targetWS2PUID === "" || this.isNewSiblingNode(pub, targetWS2PUID, connectedWS2PUID) ) { - return true + if ( + targetWS2PUID === "" || + this.isNewSiblingNode(pub, targetWS2PUID, connectedWS2PUID) + ) { + return true; } else { // We are already connected to this self node (same WS2PUID) - return false + return false; } } } // We do not accept banned keys - if (this.banned[pub]) { - 
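keyPriorityLevel() above is purely additive, so member status, prefered/privileged status and self status stack into a single score. A sketch with made-up level values (the real ones live in WS2PConstants.CONNECTIONS_PRIORITY and are not shown in this patch):

```ts
// Sketch: additive priority scoring, with assumed constants.
const MEMBER_KEY_LEVEL = 1;
const PREFERED_PRIVILEGED_KEY_LEVEL = 2;
const SELF_KEY_LEVEL = 4;

function priority(isMember: boolean, isPrefered: boolean, isSelf: boolean) {
  let level = isMember ? MEMBER_KEY_LEVEL : 0;
  level += isPrefered ? PREFERED_PRIVILEGED_KEY_LEVEL : 0;
  level += isSelf ? SELF_KEY_LEVEL : 0;
  return level;
}

// priority(true, true, true) === 7: a self, member, prefered key outranks all.
// priority(false, false, false) === 0: a plain mirror key is evicted first.
```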
this.server.logger.warn('Connection to %s refused, reason: %s', pub.slice(0, 8), this.banned[pub]) - return false + if (this.banned[pub]) { + this.server.logger.warn( + "Connection to %s refused, reason: %s", + pub.slice(0, 8), + this.banned[pub] + ); + return false; } // Is priority key ? - let isPriorityKey = priorityKeys.indexOf(pub) !== -1 + let isPriorityKey = priorityKeys.indexOf(pub) !== -1; // We do not accept forbidden keys - if (priorityKeysOnly && !isPriorityKey && this.server.conf.pair.pub !== pub) { - return false + if ( + priorityKeysOnly && + !isPriorityKey && + this.server.conf.pair.pub !== pub + ) { + return false; } // We do not accept keys already connected if (connectedPubkeys.indexOf(pub) !== -1) { - return false + return false; } // Do we have room? if (getConcurrentConnexionsCount() < maxConcurrentConnexionsSize) { // Yes: just connect to it - return true - } - else { - let minPriorityLevel = WS2PConstants.CONNECTIONS_PRIORITY.MAX_PRIORITY_LEVEL + return true; + } else { + let minPriorityLevel = + WS2PConstants.CONNECTIONS_PRIORITY.MAX_PRIORITY_LEVEL; for (const connectedPubkey of connectedPubkeys) { - const connectedPubkeyPriorityLevel = await this.keyPriorityLevel(connectedPubkey, priorityKeys) + const connectedPubkeyPriorityLevel = await this.keyPriorityLevel( + connectedPubkey, + priorityKeys + ); if (connectedPubkeyPriorityLevel < minPriorityLevel) { - minPriorityLevel = connectedPubkeyPriorityLevel + minPriorityLevel = connectedPubkeyPriorityLevel; } } - const pubkeyPriorityLevel = await this.keyPriorityLevel(pub, priorityKeys) + const pubkeyPriorityLevel = await this.keyPriorityLevel( + pub, + priorityKeys + ); if (pubkeyPriorityLevel > minPriorityLevel) { - return true + return true; } } - return false + return false; } - isNewSiblingNode(pub:string, targetWS2PUID:string, connectedWS2PUID:string[]) { + isNewSiblingNode( + pub: string, + targetWS2PUID: string, + connectedWS2PUID: string[] + ) { for (const uuid of connectedWS2PUID) { if (uuid === targetWS2PUID) { - return false + return false; } } - return true + return true; } async getLevel1Connections() { - const all:WS2PConnection[] = [] + const all: WS2PConnection[] = []; for (const uuid of Object.keys(this.ws2pClients)) { - all.push(this.ws2pClients[uuid].connection) + all.push(this.ws2pClients[uuid].connection); } - return all + return all; } async getLevel2Connections(): Promise<WS2PConnection[]> { - return this.ws2pServer ? this.ws2pServer.getConnexions() : [] + return this.ws2pServer ? this.ws2pServer.getConnexions() : []; } getAllConnections() { - const all:WS2PConnection[] = this.ws2pServer ? this.ws2pServer.getConnexions().concat(this.ws2pServer.getConnexionsForSync()) : [] + const all: WS2PConnection[] = this.ws2pServer + ? 
this.ws2pServer + .getConnexions() + .concat(this.ws2pServer.getConnexionsForSync()) + : []; for (const uuid of Object.keys(this.ws2pClients)) { - all.push(this.ws2pClients[uuid].connection) + all.push(this.ws2pClients[uuid].connection); } - return all + return all; } async startCrawling(waitConnection = false) { // For connectivity - this.reconnectionInteval = setInterval(() => this.connectToWS2Peers(), 1000 * WS2PConstants.RECONNEXION_INTERVAL_IN_SEC) + this.reconnectionInteval = setInterval( + () => this.connectToWS2Peers(), + 1000 * WS2PConstants.RECONNEXION_INTERVAL_IN_SEC + ); // For blocks - if (this.syncBlockInterval) - clearInterval(this.syncBlockInterval); - this.syncBlockInterval = setInterval(() => this.pullBlocks(), 1000 * WS2PConstants.BLOCK_PULLING_INTERVAL); + if (this.syncBlockInterval) clearInterval(this.syncBlockInterval); + this.syncBlockInterval = setInterval( + () => this.pullBlocks(), + 1000 * WS2PConstants.BLOCK_PULLING_INTERVAL + ); // Pull blocks right on start const init = async () => { try { - await this.listenServerFlow() - await this.connectToWS2Peers() - await this.pullBlocks() + await this.listenServerFlow(); + await this.connectToWS2Peers(); + await this.pullBlocks(); } catch (e) { - this.server.logger.error(e) + this.server.logger.error(e); } - } + }; if (waitConnection) { - await init() + await init(); } else { - init() + init(); } // For docpool - if (this.syncDocpoolInterval) - clearInterval(this.syncDocpoolInterval); - this.syncDocpoolInterval = setInterval(() => this.pullDocpool(), 1000 * WS2PConstants.DOCPOOL_PULLING_INTERVAL) + if (this.syncDocpoolInterval) clearInterval(this.syncDocpoolInterval); + this.syncDocpoolInterval = setInterval( + () => this.pullDocpool(), + 1000 * WS2PConstants.DOCPOOL_PULLING_INTERVAL + ); // The first pulling occurs 10 minutes after the start - setTimeout(() => this.pullDocpool(), WS2PConstants.SANDBOX_FIRST_PULL_DELAY) + setTimeout( + () => this.pullDocpool(), + WS2PConstants.SANDBOX_FIRST_PULL_DELAY + ); } async stopCrawling() { if (this.reconnectionInteval) { - clearInterval(this.reconnectionInteval) + clearInterval(this.reconnectionInteval); } if (this.syncBlockInterval) { - clearInterval(this.syncBlockInterval) + clearInterval(this.syncBlockInterval); } if (this.syncDocpoolInterval) { - clearInterval(this.syncDocpoolInterval) + clearInterval(this.syncDocpoolInterval); } } async pullBlocks() { - let current:{number:number} = { number: -1 } - let newCurrent:{number:number}|null = { number: 0 } + let current: { number: number } = { number: -1 }; + let newCurrent: { number: number } | null = { number: 0 }; while (current && newCurrent && newCurrent.number > current.number) { - current = newCurrent - await this.makeApullShot() - newCurrent = await this.server.dal.getCurrentBlockOrNull() + current = newCurrent; + await this.makeApullShot(); + newCurrent = await this.server.dal.getCurrentBlockOrNull(); } if (current) { - this.server.pullingEvent('end', current.number) + this.server.pullingEvent("end", current.number); } } private async makeApullShot() { - const connections = this.getAllConnections() - const chosen = randomPick(connections, CrawlerConstants.CRAWL_PEERS_COUNT) + const connections = this.getAllConnections(); + const chosen = randomPick(connections, CrawlerConstants.CRAWL_PEERS_COUNT); + + await Promise.all( + chosen.map(async (conn) => { + try { + const puller = new WS2PBlockPuller(this.server, conn); + await puller.pull(); + } catch (e) { + this.server.logger.warn(e); + } + }) + ); - await 
Promise.all(chosen.map(async (conn) => { - try { - const puller = new WS2PBlockPuller(this.server, conn) - await puller.pull() - } catch (e) { - this.server.logger.warn(e) + await this.server.BlockchainService.pushFIFO( + "WS2PCrawlerResolution", + async () => { + await this.server.BlockchainService.blockResolution(); + await this.server.BlockchainService.forkResolution(); } - })) - - await this.server.BlockchainService.pushFIFO("WS2PCrawlerResolution", async () => { - await this.server.BlockchainService.blockResolution() - await this.server.BlockchainService.forkResolution() - }) + ); } async pullDocpool() { - const connections = this.getAllConnections() - const chosen = randomPick(connections, CrawlerConstants.CRAWL_PEERS_COUNT) - await Promise.all(chosen.map(async (conn) => { - const puller = new WS2PDocpoolPuller(this.server, conn) - await puller.pull() - })) + const connections = this.getAllConnections(); + const chosen = randomPick(connections, CrawlerConstants.CRAWL_PEERS_COUNT); + await Promise.all( + chosen.map(async (conn) => { + const puller = new WS2PDocpoolPuller(this.server, conn); + await puller.pull(); + }) + ); } getConnectedPubkeys() { - const clients = Object.keys(this.ws2pClients).map(k => this.ws2pClients[k].connection.pubkey) - const served = this.ws2pServer ? this.ws2pServer.getConnexions().map(c => c.pubkey) : [] - return clients.concat(served) + const clients = Object.keys(this.ws2pClients).map( + (k) => this.ws2pClients[k].connection.pubkey + ); + const served = this.ws2pServer + ? this.ws2pServer.getConnexions().map((c) => c.pubkey) + : []; + return clients.concat(served); } getConnectedWS2PUID() { - const clients = Object.keys(this.ws2pClients).map(k => this.ws2pClients[k].connection.uuid) - const served = this.ws2pServer ? this.ws2pServer.getConnexions().map(c => c.uuid) : [] - return clients.concat(served) + const clients = Object.keys(this.ws2pClients).map( + (k) => this.ws2pClients[k].connection.uuid + ); + const served = this.ws2pServer + ? this.ws2pServer.getConnexions().map((c) => c.uuid) + : []; + return clients.concat(served); } - banConnection(c:WS2PConnection, reason:string) { - this.server.logger.warn('Banning connections of %s for %ss, reason: %s', c.pubkey.slice(0, 8), WS2PConstants.BAN_DURATION_IN_SECONDS, reason) + banConnection(c: WS2PConnection, reason: string) { + this.server.logger.warn( + "Banning connections of %s for %ss, reason: %s", + c.pubkey.slice(0, 8), + WS2PConstants.BAN_DURATION_IN_SECONDS, + reason + ); if (c.pubkey) { - this.banned[c.pubkey] = reason + this.banned[c.pubkey] = reason; setTimeout(() => { - delete this.banned[c.pubkey] - }, 1000 * WS2PConstants.BAN_DURATION_IN_SECONDS) - const connections = this.getAllConnections() + delete this.banned[c.pubkey]; + }, 1000 * WS2PConstants.BAN_DURATION_IN_SECONDS); + const connections = this.getAllConnections(); for (const connection of connections) { if (c.pubkey == connection.pubkey) { - connection.close() + connection.close(); } } } @@ -987,19 +1398,22 @@ export class WS2PCluster { rememberAndPrepareBanSyncConnection(pub: string) { if (!this.ok4reconnect[pub]) { - // 1. Remember that the key can reconnect within the next few minutes without issue - this.ok4reconnect[pub] = 'reconnect' + this.ok4reconnect[pub] = "reconnect"; setTimeout(() => { - delete this.ok4reconnect[pub] - }, 1000 * WS2PConstants.SYNC_CONNECTION_DURATION_IN_SECONDS) + delete this.ok4reconnect[pub]; + }, 1000 * WS2PConstants.SYNC_CONNECTION_DURATION_IN_SECONDS); // 2. 
Remember that the key will be banned between the reconnection period and the ban period - this.server.logger.warn('Prepare banning SYNC connection of %s for %ss (for room)', pub.slice(0, 8), WS2PConstants.SYNC_BAN_DURATION_IN_SECONDS) - this.banned4Sync[pub] = 'sync' + this.server.logger.warn( + "Prepare banning SYNC connection of %s for %ss (for room)", + pub.slice(0, 8), + WS2PConstants.SYNC_BAN_DURATION_IN_SECONDS + ); + this.banned4Sync[pub] = "sync"; setTimeout(() => { - delete this.banned4Sync[pub] - }, 1000 * WS2PConstants.SYNC_BAN_DURATION_IN_SECONDS) + delete this.banned4Sync[pub]; + }, 1000 * WS2PConstants.SYNC_BAN_DURATION_IN_SECONDS); // Anyway, the connection will be closed after the reconnection period (see WS2PServer), // through the usage of constant SYNC_CONNECTION_DURATION_IN_SECONDS diff --git a/app/modules/ws2p/lib/WS2PConnection.ts b/app/modules/ws2p/lib/WS2PConnection.ts index 2d124371bb03e71229eb26ff1f1fdb097ac39dfe..a049abbf55851c1b4ed0e9b9a1fdf44a809a5e15 100644 --- a/app/modules/ws2p/lib/WS2PConnection.ts +++ b/app/modules/ws2p/lib/WS2PConnection.ts @@ -11,25 +11,28 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {Key} from "../../../lib/common-libs/crypto/keyring" -import {verify} from "duniteroxyde" -import {WS2PMessageHandler} from "./impl/WS2PMessageHandler" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {IdentityDTO} from "../../../lib/dto/IdentityDTO" -import {CertificationDTO} from "../../../lib/dto/CertificationDTO" -import {MembershipDTO} from "../../../lib/dto/MembershipDTO" -import {TransactionDTO} from "../../../lib/dto/TransactionDTO" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {WS2PConstants} from './constants'; -import {WebSocket} from "../../../lib/common-libs/websocket" -import {ManualPromise, newManualPromise} from "../../../lib/common-libs/manual-promise" -import {DataErrors} from "../../../lib/common-libs/errors" - -const SocksProxyAgent = require('socks-proxy-agent'); -const nuuid = require('node-uuid'); -const logger = require('../../../lib/logger').NewLogger('ws2p') - -const MAXIMUM_ERRORS_COUNT = 5 +import { Key } from "../../../lib/common-libs/crypto/keyring"; +import { verify } from "duniteroxyde"; +import { WS2PMessageHandler } from "./impl/WS2PMessageHandler"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { IdentityDTO } from "../../../lib/dto/IdentityDTO"; +import { CertificationDTO } from "../../../lib/dto/CertificationDTO"; +import { MembershipDTO } from "../../../lib/dto/MembershipDTO"; +import { TransactionDTO } from "../../../lib/dto/TransactionDTO"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { WS2PConstants } from "./constants"; +import { WebSocket } from "../../../lib/common-libs/websocket"; +import { + ManualPromise, + newManualPromise, +} from "../../../lib/common-libs/manual-promise"; +import { DataErrors } from "../../../lib/common-libs/errors"; + +const SocksProxyAgent = require("socks-proxy-agent"); +const nuuid = require("node-uuid"); +const logger = require("../../../lib/logger").NewLogger("ws2p"); + +const MAXIMUM_ERRORS_COUNT = 5; enum WS2P_ERR { REJECTED_PUBKEY_OR_INCORRECT_ASK_SIGNATURE_FROM_REMOTE, @@ -46,7 +49,7 @@ enum WS2P_ERR { MUST_BE_AUTHENTICATED_FIRST, REQUEST_FAILED, MESSAGE_MUST_BE_AN_OBJECT, - ANSWER_TO_UNDEFINED_REQUEST + ANSWER_TO_UNDEFINED_REQUEST, } export enum WS2P_PUSH { @@ -56,132 +59,154 @@ export enum WS2P_PUSH { CERTIFICATION, IDENTITY, BLOCK, - HEAD + 
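Back in WS2PCluster, rememberAndPrepareBanSyncConnection() above drives two expiring maps: a short window during which the SYNC peer may freely reconnect, then a longer ban that only bites once that window has elapsed (acceptPubkey() checks ok4reconnect before banned4Sync). A distilled sketch, with placeholder durations for the WS2PConstants values:

```ts
// Sketch: expiring key -> reason maps via setTimeout.
const RECONNECT_WINDOW_S = 60; // stand-in for SYNC_CONNECTION_DURATION_IN_SECONDS
const BAN_DURATION_S = 300; // stand-in for SYNC_BAN_DURATION_IN_SECONDS

const ok4reconnect: { [pub: string]: string } = {};
const banned4Sync: { [pub: string]: string } = {};

function rememberSyncPeer(pub: string) {
  if (!ok4reconnect[pub]) {
    ok4reconnect[pub] = "reconnect"; // free reconnection for a short while
    setTimeout(() => delete ok4reconnect[pub], 1000 * RECONNECT_WINDOW_S);
    banned4Sync[pub] = "sync"; // effective once the window above expires
    setTimeout(() => delete banned4Sync[pub], 1000 * BAN_DURATION_S);
  }
}
```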
HEAD, } export interface WS2PAuth { - authenticationIsDone(): Promise<void> + authenticationIsDone(): Promise<void>; } export interface WS2PRemoteAuth extends WS2PAuth { - givenCurrency: Promise<string> - registerCONNECT(type: 'CONNECT'|'SYNC', ws2pVersion:number, challenge:string, sig: string, pub: string, currency: string, ws2pId:string): Promise<boolean> - sendACK(ws:any): Promise<void> - registerOK(sig: string): Promise<boolean> - isAuthenticatedByRemote(): boolean - getPubkey(): string - getVersion(): number - isSync(): boolean + givenCurrency: Promise<string>; + registerCONNECT( + type: "CONNECT" | "SYNC", + ws2pVersion: number, + challenge: string, + sig: string, + pub: string, + currency: string, + ws2pId: string + ): Promise<boolean>; + sendACK(ws: any): Promise<void>; + registerOK(sig: string): Promise<boolean>; + isAuthenticatedByRemote(): boolean; + getPubkey(): string; + getVersion(): number; + isSync(): boolean; } export interface WS2PLocalAuth extends WS2PAuth { - sendCONNECT(ws:any, ws2pVersion:number): Promise<void> - registerACK(sig: string, pub: string): Promise<boolean> - sendOK(ws:any): Promise<void> - isRemoteAuthenticated(): boolean - currency: string + sendCONNECT(ws: any, ws2pVersion: number): Promise<void>; + registerACK(sig: string, pub: string): Promise<boolean>; + sendOK(ws: any): Promise<void>; + isRemoteAuthenticated(): boolean; + currency: string; } /** * A passive authenticator based on our keyring. */ export class WS2PPubkeyRemoteAuth implements WS2PRemoteAuth { - - protected challenge:string - protected authenticatedByRemote = false - protected remotePub = "" - protected remoteWs2pId = "" - protected remoteVersion = 1 - protected serverAuth:Promise<void> - protected serverAuthResolve:()=>void - protected serverAuthReject:(err:any)=>void - protected isSyncConnection = false - public givenCurrency: ManualPromise<string> + protected challenge: string; + protected authenticatedByRemote = false; + protected remotePub = ""; + protected remoteWs2pId = ""; + protected remoteVersion = 1; + protected serverAuth: Promise<void>; + protected serverAuthResolve: () => void; + protected serverAuthReject: (err: any) => void; + protected isSyncConnection = false; + public givenCurrency: ManualPromise<string>; constructor( - protected currency:string, - protected pair:Key, - protected tellIsAuthorizedPubkey:(pub: string, isSync: boolean) => Promise<boolean> = () => Promise.resolve(true) + protected currency: string, + protected pair: Key, + protected tellIsAuthorizedPubkey: ( + pub: string, + isSync: boolean + ) => Promise<boolean> = () => Promise.resolve(true) ) { - this.challenge = nuuid.v4() + nuuid.v4() + this.challenge = nuuid.v4() + nuuid.v4(); this.serverAuth = new Promise((resolve, reject) => { - this.serverAuthResolve = resolve - this.serverAuthReject = reject - }) - this.givenCurrency = newManualPromise() + this.serverAuthResolve = resolve; + this.serverAuthReject = reject; + }); + this.givenCurrency = newManualPromise(); // If the currency is already provided, resolve the promise immediately if (currency) { - this.givenCurrency.resolve(currency) + this.givenCurrency.resolve(currency); } } getVersion() { - return this.remoteVersion + return this.remoteVersion; } getPubkey() { - return this.remotePub + return this.remotePub; } isSync() { - return this.isSyncConnection + return this.isSyncConnection; } async sendACK(ws: any): Promise<void> { - const challengeMessage = `WS2P:ACK:${this.currency}:${this.pair.pub}:${this.challenge}` - Logger.log('sendACK >>> ' + 
challengeMessage) - const sig = this.pair.signSync(challengeMessage) - await ws.send(JSON.stringify({ - auth: 'ACK', - pub: this.pair.pub, - sig - })) - } - - async registerCONNECT(type: 'CONNECT'|'SYNC', ws2pVersion:number, challenge:string, sig: string, pub: string, currency: string, ws2pId:string = ""): Promise<boolean> { - this.isSyncConnection = type === 'SYNC' - const allow = await this.tellIsAuthorizedPubkey(pub, this.isSyncConnection) + const challengeMessage = `WS2P:ACK:${this.currency}:${this.pair.pub}:${this.challenge}`; + Logger.log("sendACK >>> " + challengeMessage); + const sig = this.pair.signSync(challengeMessage); + await ws.send( + JSON.stringify({ + auth: "ACK", + pub: this.pair.pub, + sig, + }) + ); + } + + async registerCONNECT( + type: "CONNECT" | "SYNC", + ws2pVersion: number, + challenge: string, + sig: string, + pub: string, + currency: string, + ws2pId: string = "" + ): Promise<boolean> { + this.isSyncConnection = type === "SYNC"; + const allow = await this.tellIsAuthorizedPubkey(pub, this.isSyncConnection); if (!allow) { - return false + return false; } // If the connection was not aware of the currency beforehand, let's give the remote node's value if (!this.currency && currency) { - this.currency = currency - } - else if (this.currency && this.currency !== currency && currency) { - throw Error(DataErrors[DataErrors.WRONG_CURRENCY_DETECTED]) + this.currency = currency; + } else if (this.currency && this.currency !== currency && currency) { + throw Error(DataErrors[DataErrors.WRONG_CURRENCY_DETECTED]); } - this.givenCurrency.resolve(this.currency) - const challengeMessage = (ws2pVersion > 1) ? `WS2P:${type}:${this.currency}:${pub}:${ws2pId}:${challenge}`:`WS2P:${type}:${this.currency}:${pub}:${challenge}` - Logger.log('registerCONNECT >>> ' + challengeMessage) - const verified = verify(challengeMessage, sig, pub) + this.givenCurrency.resolve(this.currency); + const challengeMessage = + ws2pVersion > 1 + ? 
`WS2P:${type}:${this.currency}:${pub}:${ws2pId}:${challenge}` + : `WS2P:${type}:${this.currency}:${pub}:${challenge}`; + Logger.log("registerCONNECT >>> " + challengeMessage); + const verified = verify(challengeMessage, sig, pub); if (verified) { - this.remoteVersion = ws2pVersion - this.challenge = challenge - this.remotePub = pub - this.remoteWs2pId = ws2pId + this.remoteVersion = ws2pVersion; + this.challenge = challenge; + this.remotePub = pub; + this.remoteWs2pId = ws2pId; } - return verified + return verified; } async registerOK(sig: string): Promise<boolean> { - const challengeMessage = `WS2P:OK:${this.currency}:${this.remotePub}:${this.challenge}` - Logger.log('registerOK >>> ' + challengeMessage) - this.authenticatedByRemote = verify(challengeMessage, sig, this.remotePub) + const challengeMessage = `WS2P:OK:${this.currency}:${this.remotePub}:${this.challenge}`; + Logger.log("registerOK >>> " + challengeMessage); + this.authenticatedByRemote = verify(challengeMessage, sig, this.remotePub); if (!this.authenticatedByRemote) { - this.serverAuthReject("Wrong signature from remote OK") + this.serverAuthReject("Wrong signature from remote OK"); } else { - this.serverAuthResolve() + this.serverAuthResolve(); } - return this.authenticatedByRemote + return this.authenticatedByRemote; } isAuthenticatedByRemote(): boolean { - return this.authenticatedByRemote + return this.authenticatedByRemote; } authenticationIsDone(): Promise<void> { - return this.serverAuth + return this.serverAuth; } } @@ -189,124 +214,129 @@ export class WS2PPubkeyRemoteAuth implements WS2PRemoteAuth { * A connecting authenticator based on our keyring. */ export class WS2PPubkeyLocalAuth implements WS2PLocalAuth { - - protected challenge:string - protected authenticated = false - protected serverAuth:Promise<void> - protected serverAuthResolve:()=>void - protected serverAuthReject:(err:any)=>void - protected isSync: boolean + protected challenge: string; + protected authenticated = false; + protected serverAuth: Promise<void>; + protected serverAuthResolve: () => void; + protected serverAuthReject: (err: any) => void; + protected isSync: boolean; constructor( - public currency:string, - protected pair:Key, - protected ws2pId:string, - protected tellIsAuthorizedPubkey:(pub: string) => Promise<boolean> = () => Promise.resolve(true) + public currency: string, + protected pair: Key, + protected ws2pId: string, + protected tellIsAuthorizedPubkey: (pub: string) => Promise<boolean> = () => + Promise.resolve(true) ) { - this.challenge = nuuid.v4() + nuuid.v4() + this.challenge = nuuid.v4() + nuuid.v4(); this.serverAuth = new Promise((resolve, reject) => { - this.serverAuthResolve = resolve - this.serverAuthReject = reject - }) - this.isSync = false + this.serverAuthResolve = resolve; + this.serverAuthReject = reject; + }); + this.isSync = false; } - async sendCONNECT(ws:any, ws2pVersion:number): Promise<void> { - const connectWord = this.isSync ? 'SYNC' : 'CONNECT' + async sendCONNECT(ws: any, ws2pVersion: number): Promise<void> { + const connectWord = this.isSync ? 
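Every step of the handshake signs and verifies the same style of colon-separated challenge string. A condensed sketch of the v1 CONNECT check performed by registerCONNECT() above, reusing the verify() already imported from duniteroxyde (all arguments are caller-supplied):

```ts
// Sketch: v1 CONNECT challenge verification.
import { verify } from "duniteroxyde";

function checkConnectV1(
  currency: string,
  pub: string,
  challenge: string,
  sig: string
): boolean {
  const challengeMessage = `WS2P:CONNECT:${currency}:${pub}:${challenge}`;
  return verify(challengeMessage, sig, pub); // true iff sig is pub's signature of the message
}
```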
"SYNC" : "CONNECT"; if (ws2pVersion > 1) { - const challengeMessage = `WS2P:${ws2pVersion}:${connectWord}:${this.currency}:${this.pair.pub}:${this.ws2pId}:${this.challenge}` - Logger.log('sendCONNECT >>> ' + challengeMessage) - const sig = this.pair.signSync(challengeMessage) - await ws.send(JSON.stringify({ - auth: `${connectWord}`, - version: ws2pVersion, - pub: this.pair.pub, - ws2pid: this.ws2pId, - challenge: this.challenge, - sig, - currency: this.currency, // This is necessary for SYNC: because the currency is supposed not to be known by the remote - })) - return this.serverAuth + const challengeMessage = `WS2P:${ws2pVersion}:${connectWord}:${this.currency}:${this.pair.pub}:${this.ws2pId}:${this.challenge}`; + Logger.log("sendCONNECT >>> " + challengeMessage); + const sig = this.pair.signSync(challengeMessage); + await ws.send( + JSON.stringify({ + auth: `${connectWord}`, + version: ws2pVersion, + pub: this.pair.pub, + ws2pid: this.ws2pId, + challenge: this.challenge, + sig, + currency: this.currency, // This is necessary for SYNC: because the currency is supposed not to be known by the remote + }) + ); + return this.serverAuth; } else if (ws2pVersion == 1) { - const challengeMessage = `WS2P:${connectWord}:${this.currency}:${this.pair.pub}:${this.challenge}` - Logger.log('sendCONNECT >>> ' + challengeMessage) - const sig = this.pair.signSync(challengeMessage) - await ws.send(JSON.stringify({ - auth: `${connectWord}`, - pub: this.pair.pub, - challenge: this.challenge, - sig, - currency: this.currency, // This is necessary for SYNC: because the currency is supposed not to be known by the remote - })) - return this.serverAuth + const challengeMessage = `WS2P:${connectWord}:${this.currency}:${this.pair.pub}:${this.challenge}`; + Logger.log("sendCONNECT >>> " + challengeMessage); + const sig = this.pair.signSync(challengeMessage); + await ws.send( + JSON.stringify({ + auth: `${connectWord}`, + pub: this.pair.pub, + challenge: this.challenge, + sig, + currency: this.currency, // This is necessary for SYNC: because the currency is supposed not to be known by the remote + }) + ); + return this.serverAuth; } } async registerACK(sig: string, pub: string): Promise<boolean> { - const allow = await this.tellIsAuthorizedPubkey(pub) + const allow = await this.tellIsAuthorizedPubkey(pub); if (!allow) { - return false + return false; } - const challengeMessage = `WS2P:ACK:${this.currency}:${pub}:${this.challenge}` - Logger.log('registerACK >>> ' + challengeMessage) - this.authenticated = verify(challengeMessage, sig, pub) + const challengeMessage = `WS2P:ACK:${this.currency}:${pub}:${this.challenge}`; + Logger.log("registerACK >>> " + challengeMessage); + this.authenticated = verify(challengeMessage, sig, pub); if (!this.authenticated) { - this.serverAuthReject("Wrong signature from server ACK") + this.serverAuthReject("Wrong signature from server ACK"); } else { - this.serverAuthResolve() + this.serverAuthResolve(); } - return this.authenticated + return this.authenticated; } - async sendOK(ws:any): Promise<void> { - const challengeMessage = `WS2P:OK:${this.currency}:${this.pair.pub}:${this.challenge}` - Logger.log('sendOK >>> ' + challengeMessage) - const sig = this.pair.signSync(challengeMessage) - await ws.send(JSON.stringify({ - auth: 'OK', - sig - })) - return this.serverAuth + async sendOK(ws: any): Promise<void> { + const challengeMessage = `WS2P:OK:${this.currency}:${this.pair.pub}:${this.challenge}`; + Logger.log("sendOK >>> " + challengeMessage); + const sig = 
this.pair.signSync(challengeMessage); + await ws.send( + JSON.stringify({ + auth: "OK", + sig, + }) + ); + return this.serverAuth; } isRemoteAuthenticated(): boolean { - return this.authenticated + return this.authenticated; } authenticationIsDone(): Promise<void> { - return this.serverAuth + return this.serverAuth; } } export class WS2PPubkeySyncLocalAuth extends WS2PPubkeyLocalAuth { - constructor( - currency:string, // Only here for function signature purpose - protected pair:Key, - protected ws2pId:string, - protected tellIsAuthorizedPubkey:(pub: string) => Promise<boolean> = () => Promise.resolve(true) + currency: string, // Only here for function signature purpose + protected pair: Key, + protected ws2pId: string, + protected tellIsAuthorizedPubkey: (pub: string) => Promise<boolean> = () => + Promise.resolve(true) ) { - super("", pair, ws2pId, tellIsAuthorizedPubkey) - this.isSync = true + super("", pair, ws2pId, tellIsAuthorizedPubkey); + this.isSync = true; } } -export interface WS2PRequest { - name:string, - params?:any +export interface WS2PRequest { + name: string; + params?: any; } export class WS2PMessageExchange { - - promise: Promise<any> + promise: Promise<any>; extras: { - resolve: (data:any) => void - reject: (err:any) => void - } + resolve: (data: any) => void; + reject: (err: any) => void; + }; constructor(extras: { resolve: () => void; reject: () => void }, race: any) { - this.promise = race - this.extras = extras + this.promise = race; + this.extras = extras; } } @@ -317,440 +347,528 @@ export class WS2PMessageExchange { * Requires an established WebSocket connection in order to work. */ export class WS2PConnection { - - private connectp:Promise<any>|undefined - private connectedp:Promise<string> - private connectedResolve:(pub:string)=>void - private connectedReject:(e:any)=>void - private nbErrors = 0 - private nbRequestsCount = 0 - private nbResponsesCount = 0 - private nbPushsToRemoteCount = 0 - private nbPushsByRemoteCount = 0 + private connectp: Promise<any> | undefined; + private connectedp: Promise<string>; + private connectedResolve: (pub: string) => void; + private connectedReject: (e: any) => void; + private nbErrors = 0; + private nbRequestsCount = 0; + private nbResponsesCount = 0; + private nbPushsToRemoteCount = 0; + private nbPushsByRemoteCount = 0; private exchanges: { - [uuid: string]: WS2PMessageExchange - } = {} + [uuid: string]: WS2PMessageExchange; + } = {}; constructor( - private ws2pVersion:number, - private ws:any, - private onWsOpened:Promise<void>, - private onWsClosed:Promise<void>, - private messageHandler:WS2PMessageHandler, - private localAuth:WS2PLocalAuth, - private remoteAuth:WS2PRemoteAuth, - private options:{ - connectionTimeout:number - requestTimeout:number + private ws2pVersion: number, + private ws: any, + private onWsOpened: Promise<void>, + private onWsClosed: Promise<void>, + private messageHandler: WS2PMessageHandler, + private localAuth: WS2PLocalAuth, + private remoteAuth: WS2PRemoteAuth, + private options: { + connectionTimeout: number; + requestTimeout: number; } = { connectionTimeout: WS2PConstants.REQUEST_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TIMEOUT + requestTimeout: WS2PConstants.REQUEST_TIMEOUT, }, - private expectedPub:string = "", - private expectedWS2PUID:string = "" + private expectedPub: string = "", + private expectedWS2PUID: string = "" ) { this.connectedp = new Promise((resolve, reject) => { - this.connectedResolve = resolve - this.connectedReject = reject - }) + this.connectedResolve = resolve; + 
this.connectedReject = reject; + }); } static newConnectionToAddress( - ws2pVersion:number, - address:string, - messageHandler:WS2PMessageHandler, - localAuth:WS2PLocalAuth, - remoteAuth:WS2PRemoteAuth, - proxySocksAddress:string|undefined = undefined, - options:{ - connectionTimeout:number, - requestTimeout:number + ws2pVersion: number, + address: string, + messageHandler: WS2PMessageHandler, + localAuth: WS2PLocalAuth, + remoteAuth: WS2PRemoteAuth, + proxySocksAddress: string | undefined = undefined, + options: { + connectionTimeout: number; + requestTimeout: number; } = { connectionTimeout: WS2PConstants.REQUEST_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TIMEOUT + requestTimeout: WS2PConstants.REQUEST_TIMEOUT, }, - expectedPub:string = "", - expectedWS2PUID:string = "") { - if (address.match(WS2PConstants.FULL_ADDRESS_ONION_REGEX)) { - options = { - connectionTimeout: WS2PConstants.CONNEXION_TOR_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TOR_TIMEOUT - } - } - const websocket = (proxySocksAddress !== undefined) ? new WebSocket(address, { agent: SocksProxyAgent("socks://"+proxySocksAddress) }):new WebSocket(address) - const onWsOpened:Promise<void> = new Promise(res => { - websocket.on('open', () => res()) - }) - const onWsClosed:Promise<void> = new Promise(res => { - websocket.on('close', () => res()) - }) - websocket.on('error', () => websocket.close()) - return new WS2PConnection(ws2pVersion, websocket, onWsOpened, onWsClosed, messageHandler, localAuth, remoteAuth, options, expectedPub, expectedWS2PUID) + expectedPub: string = "", + expectedWS2PUID: string = "" + ) { + if (address.match(WS2PConstants.FULL_ADDRESS_ONION_REGEX)) { + options = { + connectionTimeout: WS2PConstants.CONNEXION_TOR_TIMEOUT, + requestTimeout: WS2PConstants.REQUEST_TOR_TIMEOUT, + }; + } + const websocket = + proxySocksAddress !== undefined + ? 
new WebSocket(address, { + agent: SocksProxyAgent("socks://" + proxySocksAddress), + }) + : new WebSocket(address); + const onWsOpened: Promise<void> = new Promise((res) => { + websocket.on("open", () => res()); + }); + const onWsClosed: Promise<void> = new Promise((res) => { + websocket.on("close", () => res()); + }); + websocket.on("error", () => websocket.close()); + return new WS2PConnection( + ws2pVersion, + websocket, + onWsOpened, + onWsClosed, + messageHandler, + localAuth, + remoteAuth, + options, + expectedPub, + expectedWS2PUID + ); } static newConnectionFromWebSocketServer( - websocket:any, - messageHandler:WS2PMessageHandler, - localAuth:WS2PLocalAuth, - remoteAuth:WS2PRemoteAuth, - options:{ - connectionTimeout:number - requestTimeout:number + websocket: any, + messageHandler: WS2PMessageHandler, + localAuth: WS2PLocalAuth, + remoteAuth: WS2PRemoteAuth, + options: { + connectionTimeout: number; + requestTimeout: number; } = { connectionTimeout: WS2PConstants.REQUEST_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TIMEOUT + requestTimeout: WS2PConstants.REQUEST_TIMEOUT, }, - expectedPub:string = "") { - const onWsOpened = Promise.resolve() - const onWsClosed:Promise<void> = new Promise(res => { - websocket.on('close', () => res()) - }) - return new WS2PConnection(WS2PConstants.WS2P_DEFAULT_API_VERSION, websocket, onWsOpened, onWsClosed, messageHandler, localAuth, remoteAuth, options, expectedPub) + expectedPub: string = "" + ) { + const onWsOpened = Promise.resolve(); + const onWsClosed: Promise<void> = new Promise((res) => { + websocket.on("close", () => res()); + }); + return new WS2PConnection( + WS2PConstants.WS2P_DEFAULT_API_VERSION, + websocket, + onWsOpened, + onWsClosed, + messageHandler, + localAuth, + remoteAuth, + options, + expectedPub + ); } get version() { - return Math.min(WS2PConstants.WS2P_HEAD_VERSION, this.remoteAuth.getVersion()) + return Math.min( + WS2PConstants.WS2P_HEAD_VERSION, + this.remoteAuth.getVersion() + ); } get pubkey() { - return this.remoteAuth.getPubkey() + return this.remoteAuth.getPubkey(); } get uuid() { - return this.expectedWS2PUID + return this.expectedWS2PUID; } get isSync() { - return this.remoteAuth.isSync() + return this.remoteAuth.isSync(); } get nbRequests() { - return this.nbRequestsCount + return this.nbRequestsCount; } get nbResponses() { - return this.nbResponsesCount + return this.nbResponsesCount; } get nbPushsToRemote() { - return this.nbPushsToRemoteCount + return this.nbPushsToRemoteCount; } get nbPushsByRemote() { - return this.nbPushsByRemoteCount + return this.nbPushsByRemoteCount; } get connected() { - return this.connectedp + return this.connectedp; } get closed() { - return this.onWsClosed + return this.onWsClosed; } close() { - return this.ws.close() + return this.ws.close(); } async connectAsInitiator() { - return this.connect(true) + return this.connect(true); } async connect(initiator = false) { - const whoIs = initiator ? 'INITIATOR' : 'SERVER' + const whoIs = initiator ? "INITIATOR" : "SERVER"; if (!this.connectp) { this.connectp = (async () => { const connectionTimeout = new Promise((res, rej) => { setTimeout(() => { - rej("WS2P connection timeout") - }, this.options.connectionTimeout) - }) + rej("WS2P connection timeout"); + }, this.options.connectionTimeout); + }); try { - await Promise.race([connectionTimeout, new Promise((resolve, reject) => { - - (async () => { - await this.onWsOpened - try { - - // First: wait to know about the currency name. 
It can be given spontaneously by the remote node, - // or be already given if we know it before any exchange. - const currency = await this.remoteAuth.givenCurrency - // Then make some checks about its value - if (this.localAuth.currency && this.localAuth.currency !== currency) { - throw Error(DataErrors[DataErrors.WRONG_CURRENCY_DETECTED]) + await Promise.race([ + connectionTimeout, + new Promise((resolve, reject) => { + (async () => { + await this.onWsOpened; + try { + // First: wait to know about the currency name. It can be given spontaneously by the remote node, + // or be already given if we know it before any exchange. + const currency = await this.remoteAuth.givenCurrency; + // Then make some checks about its value + if ( + this.localAuth.currency && + this.localAuth.currency !== currency + ) { + throw Error(DataErrors[DataErrors.WRONG_CURRENCY_DETECTED]); + } + // Eventually give the information to localAuth, which will now be able to start the connection (currency is required for WS2P messages) + if (!this.localAuth.currency) { + this.localAuth.currency = currency; + } + + await this.localAuth.sendCONNECT(this.ws, this.ws2pVersion); + await Promise.all([ + this.localAuth.authenticationIsDone(), + this.remoteAuth.authenticationIsDone(), + ]); + resolve(); + } catch (e) { + reject(e); } - // Eventually give the information to localAuth, which will now be able to start the connection (currency is required for WS2P messages) - if (!this.localAuth.currency) { - this.localAuth.currency = currency + })(); + + this.ws.on("message", async (msg: string) => { + const data = JSON.parse(msg); + + // Incorrect data + if (typeof data !== "object") { + // We only accept JSON objects + await this.errorDetected(WS2P_ERR.MESSAGE_MUST_BE_AN_OBJECT); } - await this.localAuth.sendCONNECT(this.ws, this.ws2pVersion) - await Promise.all([ - this.localAuth.authenticationIsDone(), - this.remoteAuth.authenticationIsDone() - ]) - resolve() - } catch (e) { - reject(e) - } - })() - - this.ws.on('message', async (msg:string) => { - const data = JSON.parse(msg) - - // Incorrect data - if (typeof data !== 'object') { - // We only accept JSON objects - await this.errorDetected(WS2P_ERR.MESSAGE_MUST_BE_AN_OBJECT) - } - - // OK: JSON object - else { - - /************************ - * CONNECTION STUFF - ************************/ - - if (data.auth && typeof data.auth === "string") { - - if (data.auth === "CONNECT" || data.auth === "SYNC") { - if (data.version) { - if (typeof data.version !== "number") { - await this.errorDetected(WS2P_ERR.AUTH_INVALID_ASK_FIELDS) - } else { - this.ws2pVersion = data.version + // OK: JSON object + else { + /************************ + * CONNECTION STUFF + ************************/ + + if (data.auth && typeof data.auth === "string") { + if (data.auth === "CONNECT" || data.auth === "SYNC") { + if (data.version) { + if (typeof data.version !== "number") { + await this.errorDetected( + WS2P_ERR.AUTH_INVALID_ASK_FIELDS + ); + } else { + this.ws2pVersion = data.version; + } } - } - if (this.remoteAuth.isAuthenticatedByRemote()) { - return this.errorDetected(WS2P_ERR.ALREADY_AUTHENTICATED_BY_REMOTE) - } - else if ( - typeof data.pub !== "string" || typeof data.sig !== "string" || typeof data.challenge !== "string" || (this.ws2pVersion > 1 && typeof data.ws2pId !== "string") ) { - await this.errorDetected(WS2P_ERR.AUTH_INVALID_ASK_FIELDS) - } else { - if (this.expectedPub && data.pub !== this.expectedPub) { - await this.errorDetected(WS2P_ERR.INCORRECT_PUBKEY_FOR_REMOTE) + if 
(this.remoteAuth.isAuthenticatedByRemote()) { + return this.errorDetected( + WS2P_ERR.ALREADY_AUTHENTICATED_BY_REMOTE + ); + } else if ( + typeof data.pub !== "string" || + typeof data.sig !== "string" || + typeof data.challenge !== "string" || + (this.ws2pVersion > 1 && + typeof data.ws2pId !== "string") + ) { + await this.errorDetected( + WS2P_ERR.AUTH_INVALID_ASK_FIELDS + ); } else { - const valid = await this.remoteAuth.registerCONNECT(data.auth, this.ws2pVersion, data.challenge, data.sig, data.pub, data.currency, (this.ws2pVersion > 1) ? data.ws2pID:"") - if (valid) { - await this.remoteAuth.sendACK(this.ws) + if (this.expectedPub && data.pub !== this.expectedPub) { + await this.errorDetected( + WS2P_ERR.INCORRECT_PUBKEY_FOR_REMOTE + ); } else { - await this.errorDetected(WS2P_ERR.REJECTED_PUBKEY_OR_INCORRECT_ASK_SIGNATURE_FROM_REMOTE) + const valid = await this.remoteAuth.registerCONNECT( + data.auth, + this.ws2pVersion, + data.challenge, + data.sig, + data.pub, + data.currency, + this.ws2pVersion > 1 ? data.ws2pID : "" + ); + if (valid) { + await this.remoteAuth.sendACK(this.ws); + } else { + await this.errorDetected( + WS2P_ERR.REJECTED_PUBKEY_OR_INCORRECT_ASK_SIGNATURE_FROM_REMOTE + ); + } } } - } - } - - else if (data.auth === "ACK") { - if (this.localAuth.isRemoteAuthenticated()) { - return this.errorDetected(WS2P_ERR.ALREADY_AUTHENTICATED_REMOTE) - } - if (typeof data.pub !== "string" || typeof data.sig !== "string") { - await this.errorDetected(WS2P_ERR.AUTH_INVALID_ACK_FIELDS) - } else { - if (this.expectedPub && data.pub !== this.expectedPub) { - await this.errorDetected(WS2P_ERR.INCORRECT_PUBKEY_FOR_REMOTE) + } else if (data.auth === "ACK") { + if (this.localAuth.isRemoteAuthenticated()) { + return this.errorDetected( + WS2P_ERR.ALREADY_AUTHENTICATED_REMOTE + ); + } + if ( + typeof data.pub !== "string" || + typeof data.sig !== "string" + ) { + await this.errorDetected( + WS2P_ERR.AUTH_INVALID_ACK_FIELDS + ); } else { - try { - const valid = await this.localAuth.registerACK(data.sig, data.pub) - if (valid) { - await this.localAuth.sendOK(this.ws) + if (this.expectedPub && data.pub !== this.expectedPub) { + await this.errorDetected( + WS2P_ERR.INCORRECT_PUBKEY_FOR_REMOTE + ); + } else { + try { + const valid = await this.localAuth.registerACK( + data.sig, + data.pub + ); + if (valid) { + await this.localAuth.sendOK(this.ws); + } + } catch (e) { + await this.errorDetected( + WS2P_ERR.INCORRECT_ACK_SIGNATURE_FROM_REMOTE + ); } - } catch (e) { - await this.errorDetected(WS2P_ERR.INCORRECT_ACK_SIGNATURE_FROM_REMOTE) } } - } - } - - else if (data.auth === "OK") { - if (this.remoteAuth.isAuthenticatedByRemote()) { - return this.errorDetected(WS2P_ERR.ALREADY_AUTHENTICATED_AND_CONFIRMED_BY_REMOTE) - } - if (typeof data.sig !== "string") { - await this.errorDetected(WS2P_ERR.AUTH_INVALID_OK_FIELDS) + } else if (data.auth === "OK") { + if (this.remoteAuth.isAuthenticatedByRemote()) { + return this.errorDetected( + WS2P_ERR.ALREADY_AUTHENTICATED_AND_CONFIRMED_BY_REMOTE + ); + } + if (typeof data.sig !== "string") { + await this.errorDetected( + WS2P_ERR.AUTH_INVALID_OK_FIELDS + ); + } else { + await this.remoteAuth.registerOK(data.sig); + } } else { - await this.remoteAuth.registerOK(data.sig) + await this.errorDetected(WS2P_ERR.UNKNOWN_AUTH_MESSAGE); + } + } else { + /************************ + * APPLICATION STUFF + ************************/ + if (!this.localAuth.isRemoteAuthenticated()) { + await this.errorDetected( + WS2P_ERR.MUST_BE_AUTHENTICATED_FIRST + ); } - } - - else { - 
await this.errorDetected(WS2P_ERR.UNKNOWN_AUTH_MESSAGE) - } - } - - /************************ - * APPLICATION STUFF - ************************/ - - else { - - if (!this.localAuth.isRemoteAuthenticated()) { - await this.errorDetected(WS2P_ERR.MUST_BE_AUTHENTICATED_FIRST) - } - // Request message - else if (data.reqId && typeof data.reqId === "string") { - try { - const answer = await this.messageHandler.answerToRequest(data.body, this) - this.ws.send(JSON.stringify({ resId: data.reqId, body: answer })) - } catch (e) { - this.ws.send(JSON.stringify({ resId: data.reqId, err: e })) + // Request message + else if (data.reqId && typeof data.reqId === "string") { + try { + const answer = await this.messageHandler.answerToRequest( + data.body, + this + ); + this.ws.send( + JSON.stringify({ resId: data.reqId, body: answer }) + ); + } catch (e) { + this.ws.send( + JSON.stringify({ resId: data.reqId, err: e }) + ); + } } - } - // Answer message - else if (data.resId && typeof data.resId === "string") { - // An answer - const request = this.exchanges[data.resId] - this.nbResponsesCount++ - if (request !== undefined) { - request.extras.resolve(data.body) - } else { - await this.errorDetected(WS2P_ERR.ANSWER_TO_UNDEFINED_REQUEST) + // Answer message + else if (data.resId && typeof data.resId === "string") { + // An answer + const request = this.exchanges[data.resId]; + this.nbResponsesCount++; + if (request !== undefined) { + request.extras.resolve(data.body); + } else { + await this.errorDetected( + WS2P_ERR.ANSWER_TO_UNDEFINED_REQUEST + ); + } } - } - // Push message - else { - this.nbPushsByRemoteCount++ - await this.messageHandler.handlePushMessage(data, this) + // Push message + else { + this.nbPushsByRemoteCount++; + await this.messageHandler.handlePushMessage(data, this); + } } } - } - }) - })]) + }); + }), + ]); - this.connectedResolve(this.remoteAuth.getPubkey()) + this.connectedResolve(this.remoteAuth.getPubkey()); } catch (e) { - this.connectedReject(e) - await this.connectedp + this.connectedReject(e); + await this.connectedp; } - })() + })(); } - return this.connectp + return this.connectp; } - async request(body:WS2PRequest) { - await this.connectAsInitiator() - const uuid = nuuid.v4() + async request(body: WS2PRequest) { + await this.connectAsInitiator(); + const uuid = nuuid.v4(); return new Promise((resolve, reject) => { - this.nbRequestsCount++ - this.ws.send(JSON.stringify({ - reqId: uuid, - body - }), async (err:any) => { - if (err) { - return reject(err) - } else { - // The request was successfully sent. Now wait for the answer. - const extras = { - resolve: () => { console.error('resolution not implemented') }, - reject: () => { console.error('rejection not implemented') } - } - this.exchanges[uuid] = new WS2PMessageExchange( - extras, - Promise.race([ - // The answer - new Promise((res, rej) => { - extras.resolve = res - extras.reject = () => { - this.errorDetected(WS2P_ERR.REQUEST_FAILED) - rej() - } - }), - // Timeout - new Promise((res, rej) => { - setTimeout(() => { - rej("WS2P request timeout") - }, this.options.requestTimeout) - }) - ]) - ) - try { - resolve(await this.exchanges[uuid].promise) - } catch(e) { - reject(e) - } finally { - delete this.exchanges[uuid] + this.nbRequestsCount++; + this.ws.send( + JSON.stringify({ + reqId: uuid, + body, + }), + async (err: any) => { + if (err) { + return reject(err); + } else { + // The request was successfully sent. Now wait for the answer. 
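+                  // The exchange is parked under the request's UUID: when the
+                  // remote replies with { resId: uuid, body }, the 'message'
+                  // handler installed in connect() resolves it, while a
+                  // parallel timer rejects it after options.requestTimeout.
+                  // Promise.race keeps whichever settles first, and the
+                  // finally clause below removes the entry from
+                  // this.exchanges either way.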
+ const extras = { + resolve: () => { + console.error("resolution not implemented"); + }, + reject: () => { + console.error("rejection not implemented"); + }, + }; + this.exchanges[uuid] = new WS2PMessageExchange( + extras, + Promise.race([ + // The answer + new Promise((res, rej) => { + extras.resolve = res; + extras.reject = () => { + this.errorDetected(WS2P_ERR.REQUEST_FAILED); + rej(); + }; + }), + // Timeout + new Promise((res, rej) => { + setTimeout(() => { + rej("WS2P request timeout"); + }, this.options.requestTimeout); + }), + ]) + ); + try { + resolve(await this.exchanges[uuid].promise); + } catch (e) { + reject(e); + } finally { + delete this.exchanges[uuid]; + } } } - }) - }) + ); + }); } - async pushBlock(block:BlockDTO) { - return this.pushData(WS2P_PUSH.BLOCK, 'block', block) + async pushBlock(block: BlockDTO) { + return this.pushData(WS2P_PUSH.BLOCK, "block", block); } - async pushIdentity(idty:IdentityDTO) { - return this.pushData(WS2P_PUSH.IDENTITY, 'identity', idty) + async pushIdentity(idty: IdentityDTO) { + return this.pushData(WS2P_PUSH.IDENTITY, "identity", idty); } - async pushCertification(cert:CertificationDTO) { - return this.pushData(WS2P_PUSH.CERTIFICATION, 'certification', cert) + async pushCertification(cert: CertificationDTO) { + return this.pushData(WS2P_PUSH.CERTIFICATION, "certification", cert); } - async pushMembership(ms:MembershipDTO) { - return this.pushData(WS2P_PUSH.MEMBERSHIP, 'membership', ms) + async pushMembership(ms: MembershipDTO) { + return this.pushData(WS2P_PUSH.MEMBERSHIP, "membership", ms); } - async pushTransaction(tx:TransactionDTO) { - return this.pushData(WS2P_PUSH.TRANSACTION, 'transaction', tx) + async pushTransaction(tx: TransactionDTO) { + return this.pushData(WS2P_PUSH.TRANSACTION, "transaction", tx); } - async pushPeer(peer:PeerDTO) { - return this.pushData(WS2P_PUSH.PEER, 'peer', peer) + async pushPeer(peer: PeerDTO) { + return this.pushData(WS2P_PUSH.PEER, "peer", peer); } - async pushHeads(heads:{ message:string, sig:string, messageV2?:string, sigV2?:string, step?:number }[]) { - return this.pushData(WS2P_PUSH.HEAD, 'heads', heads) + async pushHeads( + heads: { + message: string; + sig: string; + messageV2?: string; + sigV2?: string; + step?: number; + }[] + ) { + return this.pushData(WS2P_PUSH.HEAD, "heads", heads); } - async pushData(type:WS2P_PUSH, key:string, data:any) { - await this.connectAsInitiator() + async pushData(type: WS2P_PUSH, key: string, data: any) { + await this.connectAsInitiator(); return new Promise((resolve, reject) => { - this.nbPushsToRemoteCount++ + this.nbPushsToRemoteCount++; try { - this.ws.send(JSON.stringify({ - body: { - name: WS2P_PUSH[type], - [key]: data + this.ws.send( + JSON.stringify({ + body: { + name: WS2P_PUSH[type], + [key]: data, + }, + }), + async (err: any) => { + if (err) { + return reject(err); + } + resolve(); } - }), async (err:any) => { - if (err) { - return reject(err) - } - resolve() - }) + ); } catch (e) { - reject(e) + reject(e); } - }) + }); } - private errorDetected(cause:WS2P_ERR) { - this.nbErrors++ - Logger.error('>>> WS ERROR: %s', WS2P_ERR[cause]) + private errorDetected(cause: WS2P_ERR) { + this.nbErrors++; + Logger.error(">>> WS ERROR: %s", WS2P_ERR[cause]); if (this.nbErrors >= MAXIMUM_ERRORS_COUNT) { - this.ws.terminate() + this.ws.terminate(); } } get hostName(): string { - return (this.ws as any).url - .replace('ws://', '') - .replace('wss://', '') + return (this.ws as any).url.replace("ws://", "").replace("wss://", ""); } } class Logger { - - static 
log(message:string) { - logger.trace('WS2P >>> ' + message) + static log(message: string) { + logger.trace("WS2P >>> " + message); } - static error(message:string, obj:any) { - logger.error('WS2P >>> ' + message, obj) + static error(message: string, obj: any) { + logger.error("WS2P >>> " + message, obj); } -} \ No newline at end of file +} diff --git a/app/modules/ws2p/lib/WS2PDocpoolPuller.ts b/app/modules/ws2p/lib/WS2PDocpoolPuller.ts index 08b6cfe354489157f3e3edbe8b2fdb099a4f76c2..a5c867322027d7c79d496ad8c470f9913ffd1f2f 100644 --- a/app/modules/ws2p/lib/WS2PDocpoolPuller.ts +++ b/app/modules/ws2p/lib/WS2PDocpoolPuller.ts @@ -11,34 +11,45 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {Server} from "../../../../server" -import {WS2PConnection} from "./WS2PConnection" -import {WS2PRequester} from "./WS2PRequester" -import {pullSandboxToLocalServer} from "../../crawler/lib/sandbox" +import { Server } from "../../../../server"; +import { WS2PConnection } from "./WS2PConnection"; +import { WS2PRequester } from "./WS2PRequester"; +import { pullSandboxToLocalServer } from "../../crawler/lib/sandbox"; export class WS2PDocpoolPuller { - - constructor( - private server:Server, - private connection:WS2PConnection - ) {} + constructor(private server: Server, private connection: WS2PConnection) {} async pull() { - const requester = WS2PRequester.fromConnection(this.connection) + const requester = WS2PRequester.fromConnection(this.connection); // node.pubkey = p.pubkey; - return pullSandboxToLocalServer(this.server.conf.currency, { - type: 'WS2P', - getRequirementsPending: (minCert = 1) => { - return requester.getRequirementsPending(minCert) + return pullSandboxToLocalServer( + this.server.conf.currency, + { + type: "WS2P", + getRequirementsPending: (minCert = 1) => { + return requester.getRequirementsPending(minCert); + }, + getName: () => this.connection.pubkey, + getPeers: async () => [], + getCurrent: async () => null, + getBlock: async () => null, + getBlocks: async () => [], + getMilestonesPage: async () => ({ + chunkSize: 0, + totalPages: 0, + milestonesPerPage: 0, + }), + getMilestones: async () => ({ + chunkSize: 0, + totalPages: 0, + currentPage: 0, + milestonesPerPage: 0, + blocks: [], + }), + hostName: "", }, - getName: () => this.connection.pubkey, - getPeers: async () => [], - getCurrent: async () => null, - getBlock: async () => null, - getBlocks: async () => [], - getMilestonesPage: async () => ({ chunkSize: 0, totalPages: 0, milestonesPerPage: 0 }), - getMilestones: async () => ({ chunkSize: 0, totalPages: 0, currentPage: 0, milestonesPerPage: 0, blocks: [] }), - hostName: '' - }, this.server, this.server.logger) + this.server, + this.server.logger + ); } } diff --git a/app/modules/ws2p/lib/WS2PRequester.ts b/app/modules/ws2p/lib/WS2PRequester.ts index 051a07d8171ac789e167b361747c6b92d653c7d8..04139ee6b4d05e5a2d778c85034b9ec1b4c379d3 100644 --- a/app/modules/ws2p/lib/WS2PRequester.ts +++ b/app/modules/ws2p/lib/WS2PRequester.ts @@ -11,10 +11,10 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
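WS2PRequester, below, wraps an authenticated WS2PConnection and maps each WS2P_REQ variant to a typed helper. A minimal usage sketch, assuming c is an established WS2PConnection (the function name is illustrative):

    import { WS2PConnection } from "./WS2PConnection";
    import { WS2PRequester } from "./WS2PRequester";

    // Ask the remote for its current block, then for the 50 blocks ending at it.
    async function sampleQueries(c: WS2PConnection) {
      const requester = WS2PRequester.fromConnection(c);
      const current = await requester.getCurrent();        // WS2P_REQ.CURRENT
      return requester.getBlocks(50, current.number - 49); // WS2P_REQ.BLOCKS_CHUNK
    }
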
-import {WS2PConnection} from "./WS2PConnection" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {HttpMilestonePage} from "../../bma/lib/dtos" +import { WS2PConnection } from "./WS2PConnection"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { HttpMilestonePage } from "../../bma/lib/dtos"; export enum WS2P_REQ { KNOWN_PEERS, @@ -24,62 +24,60 @@ export enum WS2P_REQ { BLOCK_BY_NUMBER, CURRENT, MILESTONES_PAGE, - MILESTONES + MILESTONES, } export class WS2PRequester { + private constructor(protected ws2pc: WS2PConnection) {} - private constructor( - protected ws2pc:WS2PConnection) {} - - static fromConnection(ws2pc:WS2PConnection) { - return new WS2PRequester(ws2pc) + static fromConnection(ws2pc: WS2PConnection) { + return new WS2PRequester(ws2pc); } getPeer(): Promise<PeerDTO> { - return this.query(WS2P_REQ.PEER_DOCUMENT) + return this.query(WS2P_REQ.PEER_DOCUMENT); } getPeers(): Promise<PeerDTO[]> { - return this.query(WS2P_REQ.KNOWN_PEERS) + return this.query(WS2P_REQ.KNOWN_PEERS); } getCurrent(): Promise<BlockDTO> { - return this.query(WS2P_REQ.CURRENT) + return this.query(WS2P_REQ.CURRENT); } getMilestonesPage(): Promise<HttpMilestonePage> { - return this.query(WS2P_REQ.MILESTONES_PAGE) + return this.query(WS2P_REQ.MILESTONES_PAGE); } getMilestones(page: number): Promise<HttpMilestonePage> { - return this.query(WS2P_REQ.MILESTONES, { page }) + return this.query(WS2P_REQ.MILESTONES, { page }); } - getBlock(number:number): Promise<BlockDTO> { - return this.query(WS2P_REQ.BLOCK_BY_NUMBER, { number }) + getBlock(number: number): Promise<BlockDTO> { + return this.query(WS2P_REQ.BLOCK_BY_NUMBER, { number }); } - getBlocks(count:number, fromNumber:number): Promise<BlockDTO[]> { - return this.query(WS2P_REQ.BLOCKS_CHUNK, { count, fromNumber }) + getBlocks(count: number, fromNumber: number): Promise<BlockDTO[]> { + return this.query(WS2P_REQ.BLOCKS_CHUNK, { count, fromNumber }); } getPubkey() { - return this.ws2pc.pubkey || "########" + return this.ws2pc.pubkey || "########"; } async getRequirementsPending(minCert = 1): Promise<any> { - return this.query(WS2P_REQ.WOT_REQUIREMENTS_OF_PENDING, { minCert }) + return this.query(WS2P_REQ.WOT_REQUIREMENTS_OF_PENDING, { minCert }); } - private query(req:WS2P_REQ, params:any = {}): Promise<any> { + private query(req: WS2P_REQ, params: any = {}): Promise<any> { return this.ws2pc.request({ name: WS2P_REQ[req], - params: params - }) + params: params, + }); } get hostName() { - return this.ws2pc.hostName + return this.ws2pc.hostName; } -} \ No newline at end of file +} diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts index 1e11603ce4648cf754bcaa614a8833ffc3c63900..5ebf238f6fc2b04121987615b0891089b9dafea7 100644 --- a/app/modules/ws2p/lib/WS2PServer.ts +++ b/app/modules/ws2p/lib/WS2PServer.ts @@ -11,102 +11,139 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
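WS2PServer, below, accepts incoming WebSocket connections and delegates both admission and eviction policy to caller-supplied callbacks. A binding sketch, to be run in an async context; server, fifo and messageHandler are assumed to exist, and both policies are deliberately simplistic illustrations, not the ones duniter ships:

    import { WS2PServer } from "./WS2PServer";

    const ws2ps = await WS2PServer.bindOn(
      server,
      "0.0.0.0",
      20901, // conventional WS2P port, illustrative here
      fifo,
      // Admission: refuse a pubkey that is already connected.
      async (pub, isSync, syncConnectedPubkeys, connectedPubkeys) =>
        !connectedPubkeys.includes(pub),
      // Priority: privileged keys survive removeLowPriorityConnection() longest.
      async (pub, privilegedKeys) => (privilegedKeys.includes(pub) ? 1 : 0),
      messageHandler
    );
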
-import {Server} from "../../../../server" -import {WS2PConnection, WS2PPubkeyLocalAuth, WS2PPubkeyRemoteAuth} from "./WS2PConnection" -import {Key} from "../../../lib/common-libs/crypto/keyring" -import {GlobalFifoPromise} from "../../../service/GlobalFifoPromise" -import * as events from "events" -import {WS2PConstants} from "./constants" -import {WS2PMessageHandler} from "./impl/WS2PMessageHandler" -import {WS2PStreamer} from "./WS2PStreamer" -import {WS2PSingleWriteStream} from "./WS2PSingleWriteStream" -import {WebSocketServer} from "../../../lib/common-libs/websocket" +import { Server } from "../../../../server"; +import { + WS2PConnection, + WS2PPubkeyLocalAuth, + WS2PPubkeyRemoteAuth, +} from "./WS2PConnection"; +import { Key } from "../../../lib/common-libs/crypto/keyring"; +import { GlobalFifoPromise } from "../../../service/GlobalFifoPromise"; +import * as events from "events"; +import { WS2PConstants } from "./constants"; +import { WS2PMessageHandler } from "./impl/WS2PMessageHandler"; +import { WS2PStreamer } from "./WS2PStreamer"; +import { WS2PSingleWriteStream } from "./WS2PSingleWriteStream"; +import { WebSocketServer } from "../../../lib/common-libs/websocket"; export class WS2PServer extends events.EventEmitter { - - private wss:any - private connections:WS2PConnection[] = [] - private synConnections:WS2PConnection[] = [] + private wss: any; + private connections: WS2PConnection[] = []; + private synConnections: WS2PConnection[] = []; private constructor( - private server:Server, - private host:string, - private port:number, - private fifo:GlobalFifoPromise, - private shouldAcceptConnection:(pubkey:string, isSync: boolean, syncConnectedPubkeys:string[], connectedPubkeys:string[])=>Promise<boolean>, - public keyPriorityLevel:(pubkey:string, privilegedKeys:string[])=>Promise<number>) { - super() + private server: Server, + private host: string, + private port: number, + private fifo: GlobalFifoPromise, + private shouldAcceptConnection: ( + pubkey: string, + isSync: boolean, + syncConnectedPubkeys: string[], + connectedPubkeys: string[] + ) => Promise<boolean>, + public keyPriorityLevel: ( + pubkey: string, + privilegedKeys: string[] + ) => Promise<number> + ) { + super(); } get maxLevel2Peers() { - if (this.server.conf.ws2p && this.server.conf.ws2p.maxPublic !== undefined && this.server.conf.ws2p.maxPublic !== null) { - return this.server.conf.ws2p.maxPublic + if ( + this.server.conf.ws2p && + this.server.conf.ws2p.maxPublic !== undefined && + this.server.conf.ws2p.maxPublic !== null + ) { + return this.server.conf.ws2p.maxPublic; } - return WS2PConstants.MAX_LEVEL_2_PEERS + return WS2PConstants.MAX_LEVEL_2_PEERS; } getConnexions() { - return this.connections.slice() + return this.connections.slice(); } getConnexionsForSync() { - return this.synConnections.slice() + return this.synConnections.slice(); } countConnexions() { - const connections = this.getConnexions() - let count = 0 + const connections = this.getConnexions(); + let count = 0; for (const c of connections) { if (c.pubkey != this.server.conf.pair.pub) { - count++ + count++; } } - return count + return count; } - private listenToWebSocketConnections(messageHandler:WS2PMessageHandler) { - const key = new Key(this.server.conf.pair.pub, this.server.conf.pair.sec) - this.wss = new WebSocketServer({ host: this.host, port: this.port }) - this.wss.on('connection', async (ws:any) => { - - this.server.logger.info('WS2P %s: new incoming connection from %s:%s!', this.server.conf.pair.pub, 
ws._sender._socket._handle.owner.remoteAddress, ws._sender._socket._handle.owner.remotePort) + private listenToWebSocketConnections(messageHandler: WS2PMessageHandler) { + const key = new Key(this.server.conf.pair.pub, this.server.conf.pair.sec); + this.wss = new WebSocketServer({ host: this.host, port: this.port }); + this.wss.on("connection", async (ws: any) => { + this.server.logger.info( + "WS2P %s: new incoming connection from %s:%s!", + this.server.conf.pair.pub, + ws._sender._socket._handle.owner.remoteAddress, + ws._sender._socket._handle.owner.remotePort + ); /****************** * A NEW CONNECTION ******************/ - let saidPubkey:string = "" + let saidPubkey: string = ""; - const acceptPubkey = async (pub:string, isSync: boolean) => { + const acceptPubkey = async (pub: string, isSync: boolean) => { if (!saidPubkey) { - saidPubkey = pub + saidPubkey = pub; } if (saidPubkey !== pub) { // The key must be identical - return false + return false; } - return await this.shouldAcceptConnection(pub, isSync, this.getConnexionsForSync().map(c => c.pubkey), this.getConnexions().map(c => c.pubkey)) - } + return await this.shouldAcceptConnection( + pub, + isSync, + this.getConnexionsForSync().map((c) => c.pubkey), + this.getConnexions().map((c) => c.pubkey) + ); + }; let timeout = { connectionTimeout: WS2PConstants.CONNEXION_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TIMEOUT - } - if (this.server.conf.ws2p && this.server.conf.ws2p.remotehost && this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX)) { + requestTimeout: WS2PConstants.REQUEST_TIMEOUT, + }; + if ( + this.server.conf.ws2p && + this.server.conf.ws2p.remotehost && + this.server.conf.ws2p.remotehost.match(WS2PConstants.HOST_ONION_REGEX) + ) { timeout = { connectionTimeout: WS2PConstants.CONNEXION_TOR_TIMEOUT, - requestTimeout: WS2PConstants.REQUEST_TOR_TIMEOUT - } + requestTimeout: WS2PConstants.REQUEST_TOR_TIMEOUT, + }; } - const myWs2pId = (this.server.conf.ws2p && this.server.conf.ws2p.uuid) ? this.server.conf.ws2p.uuid:"" + const myWs2pId = + this.server.conf.ws2p && this.server.conf.ws2p.uuid + ? 
this.server.conf.ws2p.uuid + : ""; const c = WS2PConnection.newConnectionFromWebSocketServer( ws, messageHandler, - new WS2PPubkeyLocalAuth(this.server.conf.currency, key, myWs2pId, pub => acceptPubkey(pub, false)), + new WS2PPubkeyLocalAuth( + this.server.conf.currency, + key, + myWs2pId, + (pub) => acceptPubkey(pub, false) + ), new WS2PPubkeyRemoteAuth(this.server.conf.currency, key, acceptPubkey), timeout - ) + ); try { - await c.connect() + await c.connect(); /** * Sync is a particular case: @@ -117,142 +154,199 @@ export class WS2PServer extends events.EventEmitter { */ if (c.isSync) { // We remember it - this.synConnections.push(c) + this.synConnections.push(c); // When the connection closes: - ws.on('close', () => { + ws.on("close", () => { // Remove the connection - const index = this.synConnections.indexOf(c) + const index = this.synConnections.indexOf(c); if (index !== -1) { // Remove the connection - this.synConnections.splice(index, 1) - c.close() + this.synConnections.splice(index, 1); + c.close(); } - }) + }); // We close the connection after a given delay - setTimeout(() => c.close(), 1000 * WS2PConstants.SYNC_CONNECTION_DURATION_IN_SECONDS) + setTimeout( + () => c.close(), + 1000 * WS2PConstants.SYNC_CONNECTION_DURATION_IN_SECONDS + ); // We don't broadcast or pipe data - return + return; } - const host = ws._sender._socket._handle.owner.remoteAddress - const port = ws._sender._socket._handle.owner.remotePort + const host = ws._sender._socket._handle.owner.remoteAddress; + const port = ws._sender._socket._handle.owner.remotePort; this.server.push({ - ws2p: 'connected', - to: { host, port, pubkey: c.pubkey } - }) - this.connections.push(c) - this.emit('newConnection', c) - this.server.logger.info('WS2P: established incoming connection from %s %s:%s', c.pubkey.slice(0, 8), host, port) + ws2p: "connected", + to: { host, port, pubkey: c.pubkey }, + }); + this.connections.push(c); + this.emit("newConnection", c); + this.server.logger.info( + "WS2P: established incoming connection from %s %s:%s", + c.pubkey.slice(0, 8), + host, + port + ); // Broadcasting - const singleWriteProtection = new WS2PSingleWriteStream() - const ws2pStreamer = new WS2PStreamer(c) - this.server - .pipe(singleWriteProtection) - .pipe(ws2pStreamer) + const singleWriteProtection = new WS2PSingleWriteStream(); + const ws2pStreamer = new WS2PStreamer(c); + this.server.pipe(singleWriteProtection).pipe(ws2pStreamer); - ws.on('error', (e:any) => { - this.server.logger.error(e) - }) + ws.on("error", (e: any) => { + this.server.logger.error(e); + }); - ws.on('close', () => { - this.server.unpipe(singleWriteProtection) - singleWriteProtection.unpipe(ws2pStreamer) - this.server.logger.info('WS2P: close incoming connection from %s %s:%s', c.pubkey.slice(0, 8), host, port) - this.removeConnection(c) + ws.on("close", () => { + this.server.unpipe(singleWriteProtection); + singleWriteProtection.unpipe(ws2pStreamer); + this.server.logger.info( + "WS2P: close incoming connection from %s %s:%s", + c.pubkey.slice(0, 8), + host, + port + ); + this.removeConnection(c); this.server.push({ - ws2p: 'disconnected', + ws2p: "disconnected", peer: { - pub: c.pubkey - } - }) - }) + pub: c.pubkey, + }, + }); + }); // Remove excess incoming connections - this.removeExcessIncomingConnections() - - await this.server.dal.setPeerUP(c.pubkey) + this.removeExcessIncomingConnections(); + await this.server.dal.setPeerUP(c.pubkey); } catch (e) { - ws.close() - this.server.logger.warn('WS2P: cannot connect to incoming WebSocket connection: %s', 
e) + ws.close(); + this.server.logger.warn( + "WS2P: cannot connect to incoming WebSocket connection: %s", + e + ); } - }) + }); } async removeExcessIncomingConnections() { - await this.removeDuplicateConnections() - const ws2pPublicMax = (this.server.conf.ws2p && this.server.conf.ws2p.maxPublic) ? this.server.conf.ws2p.maxPublic:WS2PConstants.MAX_LEVEL_2_PEERS - let privilegedKeys = (this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes) ? this.server.conf.ws2p.privilegedNodes:[] + await this.removeDuplicateConnections(); + const ws2pPublicMax = + this.server.conf.ws2p && this.server.conf.ws2p.maxPublic + ? this.server.conf.ws2p.maxPublic + : WS2PConstants.MAX_LEVEL_2_PEERS; + let privilegedKeys = + this.server.conf.ws2p && this.server.conf.ws2p.privilegedNodes + ? this.server.conf.ws2p.privilegedNodes + : []; while (this.countConnexions() > this.maxLevel2Peers) { - await this.removeLowPriorityConnection(privilegedKeys) + await this.removeLowPriorityConnection(privilegedKeys); } } async removeDuplicateConnections() { - let connectedPubkeys:string[] = [] + let connectedPubkeys: string[] = []; for (const c of this.connections) { if (connectedPubkeys.indexOf(c.pubkey) !== -1) { - this.removeConnection(c) + this.removeConnection(c); } else if (c.pubkey !== this.server.conf.pair.pub) { - connectedPubkeys.push(c.pubkey) + connectedPubkeys.push(c.pubkey); } } } - async removeLowPriorityConnection(privilegedKeys:string[]) { - let lowPriorityConnection:WS2PConnection = this.connections[0] - let minPriorityLevel = await this.keyPriorityLevel(lowPriorityConnection.pubkey, privilegedKeys) + async removeLowPriorityConnection(privilegedKeys: string[]) { + let lowPriorityConnection: WS2PConnection = this.connections[0]; + let minPriorityLevel = await this.keyPriorityLevel( + lowPriorityConnection.pubkey, + privilegedKeys + ); for (const c of this.connections) { if (c !== lowPriorityConnection) { - let cPriorityLevel = await this.keyPriorityLevel(c.pubkey, privilegedKeys) + let cPriorityLevel = await this.keyPriorityLevel( + c.pubkey, + privilegedKeys + ); if (cPriorityLevel < minPriorityLevel) { - lowPriorityConnection = c - minPriorityLevel = cPriorityLevel + lowPriorityConnection = c; + minPriorityLevel = cPriorityLevel; } } } - this.removeConnection(lowPriorityConnection) + this.removeConnection(lowPriorityConnection); } - private removeConnection(c:WS2PConnection) { - const index = this.connections.indexOf(c) + private removeConnection(c: WS2PConnection) { + const index = this.connections.indexOf(c); if (index !== -1) { // Remove the connection - this.connections.splice(index, 1) - c.close() + this.connections.splice(index, 1); + c.close(); } } async close() { - await Promise.all(this.connections.map(c => c.close())) + await Promise.all(this.connections.map((c) => c.close())); return new Promise((res, rej) => { - this.wss.close((err:any) => { - if (err) return rej(err) - res() - }) - }) + this.wss.close((err: any) => { + if (err) return rej(err); + res(); + }); + }); } - async getConnection(pubkeyOfConnection:string) { + async getConnection(pubkeyOfConnection: string) { if (this.connections.length === 0) { - throw Error("No connections to look into.") + throw Error("No connections to look into."); } - return Promise.race(this.connections.map(async (c) => { - await c.connected - if (c.pubkey === pubkeyOfConnection) { - return c - } else { - await new Promise(resolve => setTimeout(resolve, WS2PConstants.CONNEXION_TIMEOUT)) - throw Error("Pubkey not matching or too long to be obtained") - } - 
})) + return Promise.race( + this.connections.map(async (c) => { + await c.connected; + if (c.pubkey === pubkeyOfConnection) { + return c; + } else { + await new Promise((resolve) => + setTimeout(resolve, WS2PConstants.CONNEXION_TIMEOUT) + ); + throw Error("Pubkey not matching or too long to be obtained"); + } + }) + ); } - static async bindOn(server:Server, host:string, port:number, fifo:GlobalFifoPromise, shouldAcceptConnection:(pubkey:string, isSync: boolean, syncConnectedPubkeys:string[], connectedPubkeys:string[])=>Promise<boolean>, keyPriorityLevel:(pubkey:string, privilegedKeys:string[])=>Promise<number>, messageHandler:WS2PMessageHandler) { - const ws2ps = new WS2PServer(server, host, port, fifo, shouldAcceptConnection, keyPriorityLevel) - await ws2ps.listenToWebSocketConnections(messageHandler) - server.logger.info('WS2P server %s listening on %s:%s', server.conf.pair.pub, host, port) - return ws2ps + static async bindOn( + server: Server, + host: string, + port: number, + fifo: GlobalFifoPromise, + shouldAcceptConnection: ( + pubkey: string, + isSync: boolean, + syncConnectedPubkeys: string[], + connectedPubkeys: string[] + ) => Promise<boolean>, + keyPriorityLevel: ( + pubkey: string, + privilegedKeys: string[] + ) => Promise<number>, + messageHandler: WS2PMessageHandler + ) { + const ws2ps = new WS2PServer( + server, + host, + port, + fifo, + shouldAcceptConnection, + keyPriorityLevel + ); + await ws2ps.listenToWebSocketConnections(messageHandler); + server.logger.info( + "WS2P server %s listening on %s:%s", + server.conf.pair.pub, + host, + port + ); + return ws2ps; } -} \ No newline at end of file +} diff --git a/app/modules/ws2p/lib/WS2PSingleWriteStream.ts b/app/modules/ws2p/lib/WS2PSingleWriteStream.ts index ffba3eb55cde00abb6e6647e099e11164f2c6ee0..cef16ea453d829929a19ae5928a3f77e3b16fafb 100644 --- a/app/modules/ws2p/lib/WS2PSingleWriteStream.ts +++ b/app/modules/ws2p/lib/WS2PSingleWriteStream.ts @@ -11,84 +11,83 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
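WS2PSingleWriteStream, below, is the dedupe stage that WS2PServer inserts between its own document stream and each connection's WS2PStreamer: every outgoing document is hashed, and a repeat seen within the protection window (SINGLE_RECORD_PROTECTION_IN_SECONDS, 60 s by default) is swallowed instead of re-broadcast. The per-connection wiring, as listenToWebSocketConnections does it above:

    const singleWriteProtection = new WS2PSingleWriteStream();
    const ws2pStreamer = new WS2PStreamer(c); // c: the accepted WS2PConnection
    server.pipe(singleWriteProtection).pipe(ws2pStreamer);

    // ...and on 'close' the chain is dismantled:
    server.unpipe(singleWriteProtection);
    singleWriteProtection.unpipe(ws2pStreamer);
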
-import * as stream from "stream" -import {NewLogger} from "../../../lib/logger" -import {CertificationDTO} from "../../../lib/dto/CertificationDTO" -import {IdentityDTO} from "../../../lib/dto/IdentityDTO" -import {BlockDTO} from "../../../lib/dto/BlockDTO" -import {MembershipDTO} from "../../../lib/dto/MembershipDTO" -import {TransactionDTO} from "../../../lib/dto/TransactionDTO" -import {PeerDTO} from "../../../lib/dto/PeerDTO" -import {WS2PConstants} from "./constants" +import * as stream from "stream"; +import { NewLogger } from "../../../lib/logger"; +import { CertificationDTO } from "../../../lib/dto/CertificationDTO"; +import { IdentityDTO } from "../../../lib/dto/IdentityDTO"; +import { BlockDTO } from "../../../lib/dto/BlockDTO"; +import { MembershipDTO } from "../../../lib/dto/MembershipDTO"; +import { TransactionDTO } from "../../../lib/dto/TransactionDTO"; +import { PeerDTO } from "../../../lib/dto/PeerDTO"; +import { WS2PConstants } from "./constants"; -const logger = NewLogger() +const logger = NewLogger(); export class WS2PSingleWriteStream extends stream.Transform { + private detections: { + [k: string]: number; + } = {}; - private detections:{ - [k:string]: number - } = {} - - constructor(private protectionDuration = 1000 * WS2PConstants.SINGLE_RECORD_PROTECTION_IN_SECONDS) { - super({ objectMode: true }) + constructor( + private protectionDuration = 1000 * + WS2PConstants.SINGLE_RECORD_PROTECTION_IN_SECONDS + ) { + super({ objectMode: true }); } getNbProtectionsCurrently() { - return Object.keys(this.detections).length + return Object.keys(this.detections).length; } - async _write(obj:any, enc:any, done:any) { - let documentHash = '' - let doStream = false + async _write(obj: any, enc: any, done: any) { + let documentHash = ""; + let doStream = false; try { - if (obj.joiners) { - const dto = BlockDTO.fromJSONObject(obj) - documentHash = dto.getHash() - } - else if (obj.pubkey && obj.uid) { - const dto = IdentityDTO.fromJSONObject(obj) - documentHash = dto.getHash() - } - else if (obj.idty_uid) { - const dto = CertificationDTO.fromJSONObject(obj) - documentHash = dto.getHash() - } - else if (obj.userid) { - const dto = MembershipDTO.fromJSONObject(obj) - documentHash = dto.getHash() - } - else if (obj.issuers) { - const dto = TransactionDTO.fromJSONObject(obj) - documentHash = dto.getHash() - } - else if (obj.endpoints) { - const dto = PeerDTO.fromJSONObject(obj) - documentHash = dto.getHash() + const dto = BlockDTO.fromJSONObject(obj); + documentHash = dto.getHash(); + } else if (obj.pubkey && obj.uid) { + const dto = IdentityDTO.fromJSONObject(obj); + documentHash = dto.getHash(); + } else if (obj.idty_uid) { + const dto = CertificationDTO.fromJSONObject(obj); + documentHash = dto.getHash(); + } else if (obj.userid) { + const dto = MembershipDTO.fromJSONObject(obj); + documentHash = dto.getHash(); + } else if (obj.issuers) { + const dto = TransactionDTO.fromJSONObject(obj); + documentHash = dto.getHash(); + } else if (obj.endpoints) { + const dto = PeerDTO.fromJSONObject(obj); + documentHash = dto.getHash(); } if (documentHash) { if (!this.detections[documentHash]) { - doStream = true - this.detections[documentHash] = 1 + doStream = true; + this.detections[documentHash] = 1; } else { - this.detections[documentHash]++ - logger.warn('WS2P OUT => Document detected %s times: %s', this.detections[documentHash], JSON.stringify(obj)) + this.detections[documentHash]++; + logger.warn( + "WS2P OUT => Document detected %s times: %s", + this.detections[documentHash], + 
JSON.stringify(obj) + ); } setTimeout(() => { - delete this.detections[documentHash] - }, this.protectionDuration) + delete this.detections[documentHash]; + }, this.protectionDuration); } if (doStream) { - this.push(obj) + this.push(obj); } - } catch (e) { - logger.warn('WS2P >> SingleWrite >>', e) + logger.warn("WS2P >> SingleWrite >>", e); } - done && done() + done && done(); } } diff --git a/app/modules/ws2p/lib/WS2PStreamer.ts b/app/modules/ws2p/lib/WS2PStreamer.ts index 045f06ca65bdbbe58e22a8646034ae72afd2bd4b..fdf4865d29001fdc9f48c8a9fa92b21b257bb517 100644 --- a/app/modules/ws2p/lib/WS2PStreamer.ts +++ b/app/modules/ws2p/lib/WS2PStreamer.ts @@ -11,54 +11,48 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import * as stream from "stream" -import {NewLogger} from "../../../lib/logger"; -import {WS2PConnection} from "./WS2PConnection"; +import * as stream from "stream"; +import { NewLogger } from "../../../lib/logger"; +import { WS2PConnection } from "./WS2PConnection"; -const logger = NewLogger() +const logger = NewLogger(); export class WS2PStreamer extends stream.Transform { + private enabled = true; - private enabled = true - - constructor(private ws2pc:WS2PConnection) { - super({ objectMode: true }) + constructor(private ws2pc: WS2PConnection) { + super({ objectMode: true }); } enable() { - this.enabled = true + this.enabled = true; } disable() { - this.enabled = false + this.enabled = false; } - async _write(obj:any, enc:any, done:any) { + async _write(obj: any, enc: any, done: any) { if (!this.enabled) { - return done && done() + return done && done(); } try { if (obj.joiners) { - await this.ws2pc.pushBlock(obj) - } - else if (obj.pubkey && obj.uid) { - await this.ws2pc.pushIdentity(obj) - } - else if (obj.idty_uid) { - await this.ws2pc.pushCertification(obj) - } - else if (obj.userid) { - await this.ws2pc.pushMembership(obj) - } - else if (obj.issuers) { - await this.ws2pc.pushTransaction(obj) - } - else if (obj.endpoints) { - await this.ws2pc.pushPeer(obj) + await this.ws2pc.pushBlock(obj); + } else if (obj.pubkey && obj.uid) { + await this.ws2pc.pushIdentity(obj); + } else if (obj.idty_uid) { + await this.ws2pc.pushCertification(obj); + } else if (obj.userid) { + await this.ws2pc.pushMembership(obj); + } else if (obj.issuers) { + await this.ws2pc.pushTransaction(obj); + } else if (obj.endpoints) { + await this.ws2pc.pushPeer(obj); } } catch (e) { - logger.warn('WS2P >> Streamer >>', e) - this.ws2pc.close() + logger.warn("WS2P >> Streamer >>", e); + this.ws2pc.close(); } done && done(); } diff --git a/app/modules/ws2p/lib/constants.ts b/app/modules/ws2p/lib/constants.ts index 0782d6b52d4bdb50676423f4a7d62e4babd781f4..0b29bf8f89002a067185ce9ce44d02e5ecb35d46 100644 --- a/app/modules/ws2p/lib/constants.ts +++ b/app/modules/ws2p/lib/constants.ts @@ -11,23 +11,22 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
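WS2PStreamer above and WS2PSingleWriteStream both classify a document by duck typing: the first discriminating field found decides its type. Restated as a standalone helper (the function name is made up; the tests and their order mirror WS2PStreamer._write):

    // One discriminating field per document kind, tested in the same
    // order as WS2PStreamer._write.
    function sniffDocumentType(obj: any): string {
      if (obj.joiners) return "block";
      if (obj.pubkey && obj.uid) return "identity";
      if (obj.idty_uid) return "certification";
      if (obj.userid) return "membership";
      if (obj.issuers) return "transaction";
      if (obj.endpoints) return "peer";
      return "unknown";
    }
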
-import {CommonConstants} from "../../../lib/common-libs/constants" +import { CommonConstants } from "../../../lib/common-libs/constants"; export const WS2PConstants = { - NETWORK: { INCOMING: { DEFAULT: 0, - TOR: 1 + TOR: 1, }, OUTCOMING: { DEFAULT: 0, - TOR: 1 + TOR: 1, }, }, - WS2P_DEFAULT_API_VERSION:1, - WS2P_DEFAULT_HEAD_VERSION:1, + WS2P_DEFAULT_API_VERSION: 1, + WS2P_DEFAULT_HEAD_VERSION: 1, WS2P_API_VERSION: 1, WS2P_HEAD_VERSION: 2, @@ -42,9 +41,9 @@ export const WS2PConstants = { REQUEST_TOR_TIMEOUT: 60000, RECONNEXION_INTERVAL_IN_SEC: 60 * 10, // 10 minutes - BLOCK_PULLING_INTERVAL: 300 * 2, // 10 minutes + BLOCK_PULLING_INTERVAL: 300 * 2, // 10 minutes DOCPOOL_PULLING_INTERVAL: 3600 * 4, // 4 hours - SANDBOX_FIRST_PULL_DELAY: 300 * 2, // 10 minutes after the start + SANDBOX_FIRST_PULL_DELAY: 300 * 2, // 10 minutes after the start MAX_LEVEL_1_PEERS: 5, MAX_LEVEL_2_PEERS: 20, @@ -62,32 +61,69 @@ export const WS2PConstants = { ERROR_RECALL_DURATION_IN_SECONDS: 60, SINGLE_RECORD_PROTECTION_IN_SECONDS: 60, - HEAD_V0_REGEXP: new RegExp('^WS2P:HEAD:' - + CommonConstants.FORMATS.PUBKEY + ':' - + CommonConstants.FORMATS.BLOCKSTAMP - + '$'), - - HEAD_V1_REGEXP: new RegExp('^WS2P(?:O[CT][SAM]?)?(?:I[CT])?:HEAD:1:' - + '(' + CommonConstants.FORMATS.PUBKEY + '):' - + '(' + CommonConstants.FORMATS.BLOCKSTAMP + '):' - + '(' + CommonConstants.FORMATS.WS2PID + '):' - + '(' + CommonConstants.FORMATS.SOFTWARE + '):' - + '(' + CommonConstants.FORMATS.SOFT_VERSION + '):' - + '(' + CommonConstants.FORMATS.POW_PREFIX + ')' - + '$'), - - HEAD_V2_REGEXP: new RegExp('^WS2P(?:O[CT][SAM]?)?(?:I[CT])?:HEAD:2:' - + '(' + CommonConstants.FORMATS.PUBKEY + '):' - + '(' + CommonConstants.FORMATS.BLOCKSTAMP + '):' - + '(' + CommonConstants.FORMATS.WS2PID + '):' - + '(' + CommonConstants.FORMATS.SOFTWARE + '):' - + '(' + CommonConstants.FORMATS.SOFT_VERSION + '):' - + '(' + CommonConstants.FORMATS.POW_PREFIX + '):' - + '(' + CommonConstants.FORMATS.ZERO_OR_POSITIVE_INT + '):' - + '(' + CommonConstants.FORMATS.ZERO_OR_POSITIVE_INT + ')' - + '(?::' + CommonConstants.FORMATS.TIMESTAMP + ')?' - + '$'), - + HEAD_V0_REGEXP: new RegExp( + "^WS2P:HEAD:" + + CommonConstants.FORMATS.PUBKEY + + ":" + + CommonConstants.FORMATS.BLOCKSTAMP + + "$" + ), + + HEAD_V1_REGEXP: new RegExp( + "^WS2P(?:O[CT][SAM]?)?(?:I[CT])?:HEAD:1:" + + "(" + + CommonConstants.FORMATS.PUBKEY + + "):" + + "(" + + CommonConstants.FORMATS.BLOCKSTAMP + + "):" + + "(" + + CommonConstants.FORMATS.WS2PID + + "):" + + "(" + + CommonConstants.FORMATS.SOFTWARE + + "):" + + "(" + + CommonConstants.FORMATS.SOFT_VERSION + + "):" + + "(" + + CommonConstants.FORMATS.POW_PREFIX + + ")" + + "$" + ), + + HEAD_V2_REGEXP: new RegExp( + "^WS2P(?:O[CT][SAM]?)?(?:I[CT])?:HEAD:2:" + + "(" + + CommonConstants.FORMATS.PUBKEY + + "):" + + "(" + + CommonConstants.FORMATS.BLOCKSTAMP + + "):" + + "(" + + CommonConstants.FORMATS.WS2PID + + "):" + + "(" + + CommonConstants.FORMATS.SOFTWARE + + "):" + + "(" + + CommonConstants.FORMATS.SOFT_VERSION + + "):" + + "(" + + CommonConstants.FORMATS.POW_PREFIX + + "):" + + "(" + + CommonConstants.FORMATS.ZERO_OR_POSITIVE_INT + + "):" + + "(" + + CommonConstants.FORMATS.ZERO_OR_POSITIVE_INT + + ")" + + "(?::" + + CommonConstants.FORMATS.TIMESTAMP + + ")?" 
+ + "$" + ), + HEAD_SIG_REGEXP: new RegExp(CommonConstants.FORMATS.SIGNATURE), HOST_ONION_REGEX: CommonConstants.HOST_ONION_REGEX, @@ -99,4 +135,4 @@ export const WS2PConstants = { WS2P_SYNC_LIMIT: 15, // Number of concurrent peers for sync SYNC_CONNECTION_DURATION_IN_SECONDS: 1200, // Duration of the SYNC connection -} \ No newline at end of file +}; diff --git a/app/modules/ws2p/lib/impl/WS2PMessageHandler.ts b/app/modules/ws2p/lib/impl/WS2PMessageHandler.ts index c07e07b5e0c27e14fea34662720afc408e9181ba..5a784b7844adca57a4ff2c820a00829494db1b52 100644 --- a/app/modules/ws2p/lib/impl/WS2PMessageHandler.ts +++ b/app/modules/ws2p/lib/impl/WS2PMessageHandler.ts @@ -11,11 +11,10 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {WS2PResponse} from "./WS2PResponse" -import {WS2PConnection} from "../WS2PConnection" +import { WS2PResponse } from "./WS2PResponse"; +import { WS2PConnection } from "../WS2PConnection"; export interface WS2PMessageHandler { - - handlePushMessage(json:any, c:WS2PConnection): Promise<void> - answerToRequest(json:any, c:WS2PConnection): Promise<WS2PResponse> -} \ No newline at end of file + handlePushMessage(json: any, c: WS2PConnection): Promise<void>; + answerToRequest(json: any, c: WS2PConnection): Promise<WS2PResponse>; +} diff --git a/app/modules/ws2p/lib/impl/WS2PReqMapperByServer.ts b/app/modules/ws2p/lib/impl/WS2PReqMapperByServer.ts index 2bc2c58f0d1130e83a8227868299b74c3639d9b9..b62efe9364fb71d64b4ae49260b0f52d99cf230f 100644 --- a/app/modules/ws2p/lib/impl/WS2PReqMapperByServer.ts +++ b/app/modules/ws2p/lib/impl/WS2PReqMapperByServer.ts @@ -11,55 +11,61 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
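The HEAD regexps above decode the head messages that peers push to each other. A parsing sketch for a V1 head; the sample values are illustrative, assuming the usual field formats (base58 pubkey, number-HASH blockstamp, 8-hex WS2P id):

    import { WS2PConstants } from "./constants";

    const head =
      "WS2POCA:HEAD:1:HnFcSms8jzwngtVomTTnzudZx7SHUQY8sVE1y8yBmULk:" +
      "105482-000023AB000023AB000023AB000023AB000023AB000023AB000023AB000023AB:" +
      "a1b2c3d4:duniter:1.6.21:1";
    const m = head.match(WS2PConstants.HEAD_V1_REGEXP);
    if (m) {
      // Six capture groups, in the order declared in HEAD_V1_REGEXP.
      const [, pubkey, blockstamp, ws2pId, software, softVersion, powPrefix] = m;
    }
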
-import {IdentityForRequirements} from '../../../../service/BlockchainService'; -import {Server} from "../../../../../server" -import {WS2PReqMapper} from "../interface/WS2PReqMapper" -import {BlockDTO} from "../../../../lib/dto/BlockDTO" -import {DBBlock} from "../../../../lib/db/DBBlock" -import {PeerDTO} from "../../../../lib/dto/PeerDTO" -import {HttpMilestonePage} from "../../../bma/lib/dtos" +import { IdentityForRequirements } from "../../../../service/BlockchainService"; +import { Server } from "../../../../../server"; +import { WS2PReqMapper } from "../interface/WS2PReqMapper"; +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { DBBlock } from "../../../../lib/db/DBBlock"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { HttpMilestonePage } from "../../../bma/lib/dtos"; export class WS2PReqMapperByServer implements WS2PReqMapper { - - constructor(protected server:Server) {} + constructor(protected server: Server) {} async getCurrent() { - return this.server.BlockchainService.current() + return this.server.BlockchainService.current(); } async getBlock(number: number): Promise<BlockDTO> { - return Promise.resolve(BlockDTO.fromJSONObject(await this.server.dal.getFullBlockOf(number))) + return Promise.resolve( + BlockDTO.fromJSONObject(await this.server.dal.getFullBlockOf(number)) + ); } async getBlocks(count: number, from: number): Promise<BlockDTO[]> { if (count > 5000) { - throw 'Count is too high' + throw "Count is too high"; } - const current = await this.server.dal.getCurrentBlockOrNull() + const current = await this.server.dal.getCurrentBlockOrNull(); if (!current) { - return [] + return []; } - count = Math.min(current.number - from + 1, count) + count = Math.min(current.number - from + 1, count); if (!current || current.number < from) { - return [] + return []; } - return (await this.server.dal.getBlocksBetween(from, from + count - 1)).map((b:DBBlock) => BlockDTO.fromJSONObject(b)) + return ( + await this.server.dal.getBlocksBetween(from, from + count - 1) + ).map((b: DBBlock) => BlockDTO.fromJSONObject(b)); } getMilestones(page: number): Promise<HttpMilestonePage> { - return this.server.milestones(page) + return this.server.milestones(page); } getMilestonesPage(): Promise<HttpMilestonePage> { - return this.server.milestones() + return this.server.milestones(); } async getRequirementsOfPending(minsig: number): Promise<any> { - let identities:IdentityForRequirements[] = (await this.server.dal.idtyDAL.query( - 'SELECT i.*, count(c.sig) as nbSig ' + - 'FROM idty i, cert c ' + - 'WHERE c.target = i.hash group by i.hash having nbSig >= ?', - [minsig])).map(i => ({ + let identities: IdentityForRequirements[] = ( + await this.server.dal.idtyDAL.query( + "SELECT i.*, count(c.sig) as nbSig " + + "FROM idty i, cert c " + + "WHERE c.target = i.hash group by i.hash having nbSig >= ?", + [minsig] + ) + ).map((i) => ({ hash: i.hash || "", member: i.member || false, wasMember: i.wasMember || false, @@ -69,21 +75,26 @@ export class WS2PReqMapperByServer implements WS2PReqMapper { sig: i.sig || "", revocation_sig: i.revocation_sig, revoked: i.revoked, - revoked_on: i.revoked_on ? 1 : 0 - })) - const members = await this.server.dal.findReceiversAbove(minsig) - identities = identities.concat(members) - const all = await this.server.BlockchainService.requirementsOfIdentities(identities, false) + revoked_on: i.revoked_on ? 
1 : 0, + })); + const members = await this.server.dal.findReceiversAbove(minsig); + identities = identities.concat(members); + const all = await this.server.BlockchainService.requirementsOfIdentities( + identities, + false + ); return { - identities: all - } + identities: all, + }; } async getPeer(): Promise<PeerDTO> { - return this.server.PeeringService.peer() + return this.server.PeeringService.peer(); } async getKnownPeers(): Promise<PeerDTO[]> { - return (await this.server.dal.findAllPeersBut([])).map(p => PeerDTO.fromDBPeer(p)) + return (await this.server.dal.findAllPeersBut([])).map((p) => + PeerDTO.fromDBPeer(p) + ); } -} \ No newline at end of file +} diff --git a/app/modules/ws2p/lib/impl/WS2PResponse.ts b/app/modules/ws2p/lib/impl/WS2PResponse.ts index e7e5b12482bde850e5327717cf057f563dc55f72..5a710ff2932e7c2b545c5e7bb6bc91ce28ebb68a 100644 --- a/app/modules/ws2p/lib/impl/WS2PResponse.ts +++ b/app/modules/ws2p/lib/impl/WS2PResponse.ts @@ -11,6 +11,4 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. - -export interface WS2PResponse { -} \ No newline at end of file +export interface WS2PResponse {} diff --git a/app/modules/ws2p/lib/interface/WS2PReqMapper.ts b/app/modules/ws2p/lib/interface/WS2PReqMapper.ts index 4a0ca36a75b3a6821389b574275de2635d9cb5f7..c4295b19cd57837618059babf238c282789127d8 100644 --- a/app/modules/ws2p/lib/interface/WS2PReqMapper.ts +++ b/app/modules/ws2p/lib/interface/WS2PReqMapper.ts @@ -11,19 +11,18 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {BlockDTO} from "../../../../lib/dto/BlockDTO" -import {DBBlock} from "../../../../lib/db/DBBlock" -import {PeerDTO} from "../../../../lib/dto/PeerDTO" -import {HttpMilestonePage} from "../../../bma/lib/dtos" +import { BlockDTO } from "../../../../lib/dto/BlockDTO"; +import { DBBlock } from "../../../../lib/db/DBBlock"; +import { PeerDTO } from "../../../../lib/dto/PeerDTO"; +import { HttpMilestonePage } from "../../../bma/lib/dtos"; export interface WS2PReqMapper { - - getCurrent(): Promise<DBBlock|null> - getBlock(number:number): Promise<BlockDTO> - getBlocks(count:number, fromNumber:number): Promise<BlockDTO[]> - getRequirementsOfPending(minCert:number): Promise<any> - getPeer(): Promise<PeerDTO> - getKnownPeers(): Promise<PeerDTO[]> - getMilestones(page: number): Promise<HttpMilestonePage> - getMilestonesPage(): Promise<HttpMilestonePage> -} \ No newline at end of file + getCurrent(): Promise<DBBlock | null>; + getBlock(number: number): Promise<BlockDTO>; + getBlocks(count: number, fromNumber: number): Promise<BlockDTO[]>; + getRequirementsOfPending(minCert: number): Promise<any>; + getPeer(): Promise<PeerDTO>; + getKnownPeers(): Promise<PeerDTO[]>; + getMilestones(page: number): Promise<HttpMilestonePage>; + getMilestonesPage(): Promise<HttpMilestonePage>; +} diff --git a/app/modules/ws2p/lib/interface/WS2PServerMessageHandler.ts b/app/modules/ws2p/lib/interface/WS2PServerMessageHandler.ts index 06c99b901a37548e3c7feb6fe8035368ff4e39b2..27c26f3ebb526fee6ca6a909f11f23ddf331ba40 100644 --- a/app/modules/ws2p/lib/interface/WS2PServerMessageHandler.ts +++ b/app/modules/ws2p/lib/interface/WS2PServerMessageHandler.ts @@ -11,222 +11,247 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. 
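WS2PServerMessageHandler, below, writes each pushed document to the local server and keeps a per-document-hash, per-sender record of failures: a peer that repeats the same wrong document WS2PConstants.BAN_ON_REPEAT_THRESHOLD times is banned through the cluster. The bookkeeping, reduced to a standalone sketch (helper name made up, createdOn housekeeping elided):

    import { WS2PConstants } from "../constants";

    type ErrorBook = {
      [docHash: string]: {
        createdOn: number;
        pubkeys: { [pub: string]: any[] };
      };
    };

    // Returns true once pub has sent the same wrong document often enough
    // to deserve a ban.
    function recordWrongDocument(
      book: ErrorBook,
      docHash: string,
      pub: string,
      body: any
    ): boolean {
      const entry = (book[docHash] = book[docHash] || {
        createdOn: Date.now(),
        pubkeys: {},
      });
      const sent = (entry.pubkeys[pub] = entry.pubkeys[pub] || []);
      sent.push(body);
      return sent.length >= WS2PConstants.BAN_ON_REPEAT_THRESHOLD;
    }
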
-import {WS2PMessageHandler} from "../impl/WS2PMessageHandler"
-import {WS2PResponse} from "../impl/WS2PResponse"
-import {Server} from "../../../../../server"
-import {WS2PReqMapperByServer} from "../impl/WS2PReqMapperByServer"
-import {WS2PReqMapper} from "./WS2PReqMapper"
-import {BlockDTO} from "../../../../lib/dto/BlockDTO"
-import {IdentityDTO} from "../../../../lib/dto/IdentityDTO"
-import {CertificationDTO} from "../../../../lib/dto/CertificationDTO"
-import {MembershipDTO} from "../../../../lib/dto/MembershipDTO"
-import {TransactionDTO} from "../../../../lib/dto/TransactionDTO"
-import {PeerDTO} from "../../../../lib/dto/PeerDTO"
-import {WS2P_REQ} from "../WS2PRequester"
-import {WS2PCluster} from "../WS2PCluster"
-import {WS2PConnection} from "../WS2PConnection"
-import {WS2PConstants} from "../constants"
-import {CommonConstants} from "../../../../lib/common-libs/constants"
-import {DataErrors} from "../../../../lib/common-libs/errors"
+import { WS2PMessageHandler } from "../impl/WS2PMessageHandler";
+import { WS2PResponse } from "../impl/WS2PResponse";
+import { Server } from "../../../../../server";
+import { WS2PReqMapperByServer } from "../impl/WS2PReqMapperByServer";
+import { WS2PReqMapper } from "./WS2PReqMapper";
+import { BlockDTO } from "../../../../lib/dto/BlockDTO";
+import { IdentityDTO } from "../../../../lib/dto/IdentityDTO";
+import { CertificationDTO } from "../../../../lib/dto/CertificationDTO";
+import { MembershipDTO } from "../../../../lib/dto/MembershipDTO";
+import { TransactionDTO } from "../../../../lib/dto/TransactionDTO";
+import { PeerDTO } from "../../../../lib/dto/PeerDTO";
+import { WS2P_REQ } from "../WS2PRequester";
+import { WS2PCluster } from "../WS2PCluster";
+import { WS2PConnection } from "../WS2PConnection";
+import { WS2PConstants } from "../constants";
+import { CommonConstants } from "../../../../lib/common-libs/constants";
+import { DataErrors } from "../../../../lib/common-libs/errors";

 export enum WS2P_REQERROR {
-  UNKNOWN_REQUEST
+  UNKNOWN_REQUEST,
 }

 export class WS2PServerMessageHandler implements WS2PMessageHandler {
-
-  protected mapper:WS2PReqMapper
-  private errors:{
-    [k:string]: {
-      createdOn: number,
+  protected mapper: WS2PReqMapper;
+  private errors: {
+    [k: string]: {
+      createdOn: number;
       pubkeys: {
-        [p:string]: any[]
-      }
-    }
-  } = {}
+        [p: string]: any[];
+      };
+    };
+  } = {};

-  constructor(protected server:Server, protected cluster:WS2PCluster) {
-    this.mapper = new WS2PReqMapperByServer(server)
+  constructor(protected server: Server, protected cluster: WS2PCluster) {
+    this.mapper = new WS2PReqMapperByServer(server);
   }

-  async handlePushMessage(json: any, c:WS2PConnection): Promise<void> {
-
+  async handlePushMessage(json: any, c: WS2PConnection): Promise<void> {
     if (c.isSync) {
       // Push messages are forbidden on sync connection
-      c.close()
-      return
+      c.close();
+      return;
     }

-    let documentHash = ''
+    let documentHash = "";
     try {
       if (json.body) {
         if (json.body.block) {
-          const dto = BlockDTO.fromJSONObject(json.body.block)
-          const raw = dto.getRawSigned()
-          documentHash = dto.getHash()
-          await this.server.writeRawBlock(raw)
-        }
-        else if (json.body.identity) {
-          const dto = IdentityDTO.fromJSONObject(json.body.identity)
-          const raw = dto.getRawSigned()
-          documentHash = dto.getHash()
-          await this.server.writeRawIdentity(raw)
-        }
-        else if (json.body.certification) {
-          const dto = CertificationDTO.fromJSONObject(json.body.certification)
-          const raw = dto.getRawSigned()
-          documentHash = dto.getHash()
-          await this.server.writeRawCertification(raw)
-        }
-        else if (json.body.membership) {
-          const dto = MembershipDTO.fromJSONObject(json.body.membership)
-          const raw = dto.getRawSigned()
-          documentHash = dto.getHash()
-          await this.server.writeRawMembership(raw)
-        }
-        else if (json.body.transaction) {
-          const dto = TransactionDTO.fromJSONObject(json.body.transaction)
-          const raw = dto.getRaw()
-          documentHash = dto.getHash()
-          await this.server.writeRawTransaction(raw)
-        }
-        else if (json.body.peer) {
-          const dto = PeerDTO.fromJSONObject(json.body.peer)
-          const raw = dto.getRawSigned()
-          documentHash = dto.getHash()
-          await this.server.writeRawPeer(raw)
-        }
-        else if (json.body.heads && typeof json.body.heads === "object" && json.body.heads.length !== undefined) {
+          const dto = BlockDTO.fromJSONObject(json.body.block);
+          const raw = dto.getRawSigned();
+          documentHash = dto.getHash();
+          await this.server.writeRawBlock(raw);
+        } else if (json.body.identity) {
+          const dto = IdentityDTO.fromJSONObject(json.body.identity);
+          const raw = dto.getRawSigned();
+          documentHash = dto.getHash();
+          await this.server.writeRawIdentity(raw);
+        } else if (json.body.certification) {
+          const dto = CertificationDTO.fromJSONObject(json.body.certification);
+          const raw = dto.getRawSigned();
+          documentHash = dto.getHash();
+          await this.server.writeRawCertification(raw);
+        } else if (json.body.membership) {
+          const dto = MembershipDTO.fromJSONObject(json.body.membership);
+          const raw = dto.getRawSigned();
+          documentHash = dto.getHash();
+          await this.server.writeRawMembership(raw);
+        } else if (json.body.transaction) {
+          const dto = TransactionDTO.fromJSONObject(json.body.transaction);
+          const raw = dto.getRaw();
+          documentHash = dto.getHash();
+          await this.server.writeRawTransaction(raw);
+        } else if (json.body.peer) {
+          const dto = PeerDTO.fromJSONObject(json.body.peer);
+          const raw = dto.getRawSigned();
+          documentHash = dto.getHash();
+          await this.server.writeRawPeer(raw);
+        } else if (
+          json.body.heads &&
+          typeof json.body.heads === "object" &&
+          json.body.heads.length !== undefined
+        ) {
           if (!json.body.heads.length) {
-            documentHash = 'HEADs'
-            throw "Heads empty HEADs received"
+            documentHash = "HEADs";
+            throw "Heads empty HEADs received";
           }
-          await this.cluster.headsReceived(json.body.heads || [])
+          await this.cluster.headsReceived(json.body.heads || []);
         }
       }
-    } catch(e) {
-      if (documentHash
-        && this.errors[documentHash]
-        && this.errors[documentHash].pubkeys[c.pubkey] !== undefined
-        && this.server.conf.pair.pub !== c.pubkey) { // We do not want to ban ourselves
-        this.errors[documentHash].pubkeys[c.pubkey].push(json.body)
-        if (this.errors[documentHash].pubkeys[c.pubkey].length >= WS2PConstants.BAN_ON_REPEAT_THRESHOLD) {
-          let message = "peer " + (c.pubkey || '--unknown--') + " sent " + WS2PConstants.BAN_ON_REPEAT_THRESHOLD + " times a same wrong document: " + (e && (e.message || (e.uerr && e.uerr.message)) || JSON.stringify(e))
-          this.cluster.banConnection(c, message)
+    } catch (e) {
+      if (
+        documentHash &&
+        this.errors[documentHash] &&
+        this.errors[documentHash].pubkeys[c.pubkey] !== undefined &&
+        this.server.conf.pair.pub !== c.pubkey
+      ) {
+        // We do not want to ban ourselves
+        this.errors[documentHash].pubkeys[c.pubkey].push(json.body);
+        if (
+          this.errors[documentHash].pubkeys[c.pubkey].length >=
+          WS2PConstants.BAN_ON_REPEAT_THRESHOLD
+        ) {
+          let message =
+            "peer " +
+            (c.pubkey || "--unknown--") +
+            " sent " +
+            WS2PConstants.BAN_ON_REPEAT_THRESHOLD +
+            " times a same wrong document: " +
+            ((e && (e.message || (e.uerr && e.uerr.message))) ||
+              JSON.stringify(e));
+          this.cluster.banConnection(c, message);
          for (const body of this.errors[documentHash].pubkeys[c.pubkey]) {
-            message += '\n => ' + JSON.stringify(body)
+            message += "\n => " + JSON.stringify(body);
          }
        } else {
-          let message = "WS2P IN => " + (c.pubkey || '--unknown--') + " sent " + this.errors[documentHash].pubkeys[c.pubkey].length + " times a same wrong document: " + (e && (e.message || (e.uerr && e.uerr.message)) || JSON.stringify(e))
+          let message =
+            "WS2P IN => " +
+            (c.pubkey || "--unknown--") +
+            " sent " +
+            this.errors[documentHash].pubkeys[c.pubkey].length +
+            " times a same wrong document: " +
+            ((e && (e.message || (e.uerr && e.uerr.message))) ||
+              JSON.stringify(e));
          for (const body of this.errors[documentHash].pubkeys[c.pubkey]) {
-            message += '\n => ' + JSON.stringify(body)
+            message += "\n => " + JSON.stringify(body);
          }
-          this.server.logger.warn(message)
+          this.server.logger.warn(message);
        }
        setTimeout(() => {
          if (this.errors[documentHash]) {
-            delete this.errors[documentHash]
+            delete this.errors[documentHash];
          }
-        }, 1000 * WS2PConstants.ERROR_RECALL_DURATION_IN_SECONDS)
+        }, 1000 * WS2PConstants.ERROR_RECALL_DURATION_IN_SECONDS);
      } else {
        // Remember the error for some time
        if (!this.errors[documentHash]) {
          this.errors[documentHash] = {
            createdOn: Date.now(),
-            pubkeys: {}
-          }
+            pubkeys: {},
+          };
        }
-        this.errors[documentHash].pubkeys[c.pubkey] = [json.body]
+        this.errors[documentHash].pubkeys[c.pubkey] = [json.body];
        setTimeout(() => {
          if (this.errors[documentHash]) {
-            delete this.errors[documentHash]
+            delete this.errors[documentHash];
          }
-        }, 1000 * WS2PConstants.ERROR_RECALL_DURATION_IN_SECONDS)
+        }, 1000 * WS2PConstants.ERROR_RECALL_DURATION_IN_SECONDS);
      }
-      if (e !== "Block already known"
-        && (!e.uerr
-          || !(e.uerr.ucode == CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode
-            || e.uerr.ucode == CommonConstants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode))) {
-        this.server.logger.warn(e)
+      if (
+        e !== "Block already known" &&
+        (!e.uerr ||
+          !(
+            e.uerr.ucode ==
+              CommonConstants.ERRORS.DOCUMENT_BEING_TREATED.uerr.ucode ||
+            e.uerr.ucode ==
+              CommonConstants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN.uerr.ucode
+          ))
+      ) {
+        this.server.logger.warn(e);
      }
    }
  }

-  async answerToRequest(data: any, c:WS2PConnection): Promise<WS2PResponse> {
-
+  async answerToRequest(data: any, c: WS2PConnection): Promise<WS2PResponse> {
    /**********
     * REQUEST
     *********/

-    let body:any = {}
+    let body: any = {};

-    const forbiddenRequestsForSync: string[] = [] // For now, no WS2P requests are forbidden
-    if (c.isSync && (!data || !data.name || forbiddenRequestsForSync.indexOf(data.name) !== -1)) {
+    const forbiddenRequestsForSync: string[] = []; // For now, no WS2P requests are forbidden
+    if (
+      c.isSync &&
+      (!data ||
+        !data.name ||
+        forbiddenRequestsForSync.indexOf(data.name) !== -1)
+    ) {
      // Some messages are forbidden on sync connection
-      c.close()
-      throw Error(DataErrors[DataErrors.WS2P_SYNC_PERIMETER_IS_LIMITED])
+      c.close();
+      throw Error(DataErrors[DataErrors.WS2P_SYNC_PERIMETER_IS_LIMITED]);
    }

    if (data && data.name) {
      switch (data.name) {
-
        case WS2P_REQ[WS2P_REQ.CURRENT]:
-          body = await this.mapper.getCurrent()
+          body = await this.mapper.getCurrent();
          break;
        case WS2P_REQ[WS2P_REQ.BLOCK_BY_NUMBER]:
          if (isNaN(data.params.number)) {
-            throw "Wrong param `number`"
+            throw "Wrong param `number`";
          }
-          const number:number = data.params.number
-          body = await this.mapper.getBlock(number)
+          const number: number = data.params.number;
+          body = await this.mapper.getBlock(number);
          break;
        case WS2P_REQ[WS2P_REQ.BLOCKS_CHUNK]:
          if (isNaN(data.params.count)) {
-            throw "Wrong param `count`"
+            throw "Wrong param `count`";
          }
          if (isNaN(data.params.fromNumber)) {
-            throw "Wrong param `fromNumber`"
+            throw "Wrong param `fromNumber`";
          }
-          const count:number = data.params.count
-          const fromNumber:number = data.params.fromNumber
-          body = await this.mapper.getBlocks(count, fromNumber)
+          const count: number = data.params.count;
+          const fromNumber: number = data.params.fromNumber;
+          body = await this.mapper.getBlocks(count, fromNumber);
          break;
        case WS2P_REQ[WS2P_REQ.WOT_REQUIREMENTS_OF_PENDING]:
          if (isNaN(data.params.minCert)) {
-            throw "Wrong param `minCert`"
+            throw "Wrong param `minCert`";
          }
-          const minCert:number = data.params.minCert
-          body = await this.mapper.getRequirementsOfPending(minCert)
+          const minCert: number = data.params.minCert;
+          body = await this.mapper.getRequirementsOfPending(minCert);
          break;
        case WS2P_REQ[WS2P_REQ.PEER_DOCUMENT]:
-          body = await this.mapper.getPeer()
+          body = await this.mapper.getPeer();
          break;
        case WS2P_REQ[WS2P_REQ.KNOWN_PEERS]:
-          body = await this.mapper.getKnownPeers()
+          body = await this.mapper.getKnownPeers();
          break;
        case WS2P_REQ[WS2P_REQ.MILESTONES_PAGE]:
-          body = await this.mapper.getMilestonesPage()
+          body = await this.mapper.getMilestonesPage();
          break;
        case WS2P_REQ[WS2P_REQ.MILESTONES]:
          if (isNaN(data.params.page)) {
-            throw "Wrong param `page`"
+            throw "Wrong param `page`";
          }
-          const page:number = data.params.page
-          body = await this.mapper.getMilestones(page)
+          const page: number = data.params.page;
+          body = await this.mapper.getMilestones(page);
          break;
        default:
-          throw Error(WS2P_REQERROR[WS2P_REQERROR.UNKNOWN_REQUEST])
+          throw Error(WS2P_REQERROR[WS2P_REQERROR.UNKNOWN_REQUEST]);
      }
    }

-    return body
+    return body;
  }
-}
\ No newline at end of file
+}
diff --git a/app/service/BlockchainService.ts b/app/service/BlockchainService.ts
index 7b164f69c58a44ae821e295b7ad9d6d45a2e79ef..75e3172cda714db0558281198325b2303dfacd1d 100644
--- a/app/service/BlockchainService.ts
+++ b/app/service/BlockchainService.ts
@@ -11,113 +11,130 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
-import {Server} from "../../server"
-import {GlobalFifoPromise} from "./GlobalFifoPromise"
-import {BlockchainContext} from "../lib/computation/BlockchainContext"
-import {ConfDTO} from "../lib/dto/ConfDTO"
-import {FileDAL} from "../lib/dal/fileDAL"
-import {BlockDTO} from "../lib/dto/BlockDTO"
-import {DBBlock} from "../lib/db/DBBlock"
-import {GLOBAL_RULES_HELPERS} from "../lib/rules/global_rules"
-import {parsers} from "../lib/common-libs/parsers/index"
-import {HttpIdentityRequirement} from "../modules/bma/lib/dtos"
-import {FIFOService} from "./FIFOService"
-import {CommonConstants} from "../lib/common-libs/constants"
-import {LOCAL_RULES_FUNCTIONS} from "../lib/rules/local_rules"
-import {Switcher, SwitcherDao} from "../lib/blockchain/Switcher"
-import {OtherConstants} from "../lib/other_constants"
-import {DataErrors} from "../lib/common-libs/errors"
-import {DuniterBlockchain} from "../lib/blockchain/DuniterBlockchain"
-
-const constants = require('../lib/constants');
+import { Server } from "../../server";
+import { GlobalFifoPromise } from "./GlobalFifoPromise";
+import { BlockchainContext } from "../lib/computation/BlockchainContext";
+import { ConfDTO } from "../lib/dto/ConfDTO";
+import { FileDAL } from "../lib/dal/fileDAL";
+import { BlockDTO } from "../lib/dto/BlockDTO";
+import { DBBlock } from "../lib/db/DBBlock";
+import { GLOBAL_RULES_HELPERS } from "../lib/rules/global_rules";
+import { parsers } from "../lib/common-libs/parsers/index";
+import { HttpIdentityRequirement } from "../modules/bma/lib/dtos";
+import { FIFOService } from "./FIFOService";
+import { CommonConstants } from "../lib/common-libs/constants";
+import { LOCAL_RULES_FUNCTIONS } from "../lib/rules/local_rules";
+import { Switcher, SwitcherDao } from "../lib/blockchain/Switcher";
+import { OtherConstants } from "../lib/other_constants";
+import { DataErrors } from "../lib/common-libs/errors";
+import { DuniterBlockchain } from "../lib/blockchain/DuniterBlockchain";
+
+const constants = require("../lib/constants");

 export interface IdentityForRequirements {
-  hash:string
-  member:boolean
-  wasMember:boolean
-  pubkey:string
-  uid:string
-  buid:string
-  sig:string
-  revocation_sig:string|null
-  revoked:boolean
-  revoked_on:number
+  hash: string;
+  member: boolean;
+  wasMember: boolean;
+  pubkey: string;
+  uid: string;
+  buid: string;
+  sig: string;
+  revocation_sig: string | null;
+  revoked: boolean;
+  revoked_on: number;
 }

 export interface ValidCert {
-  from:string
-  to:string
-  sig:string
-  timestamp:number
-  expiresIn:number
+  from: string;
+  to: string;
+  sig: string;
+  timestamp: number;
+  expiresIn: number;
 }

 export class BlockchainService extends FIFOService {
-
-  mainContext:BlockchainContext
-  conf:ConfDTO
-  dal:FileDAL
-  logger:any
-  selfPubkey:string
-  switcherDao:SwitcherDao<BlockDTO>
-  invalidForks:string[] = []
-
-  constructor(private server:Server, fifoPromiseHandler:GlobalFifoPromise) {
-    super(fifoPromiseHandler)
-    this.mainContext = new BlockchainContext()
+  mainContext: BlockchainContext;
+  conf: ConfDTO;
+  dal: FileDAL;
+  logger: any;
+  selfPubkey: string;
+  switcherDao: SwitcherDao<BlockDTO>;
+  invalidForks: string[] = [];
+
+  constructor(private server: Server, fifoPromiseHandler: GlobalFifoPromise) {
+    super(fifoPromiseHandler);
+    this.mainContext = new BlockchainContext();
     this.switcherDao = new (class ForkDao implements SwitcherDao<BlockDTO> {
+      constructor(private bcService: BlockchainService) {}

-      constructor(private bcService:BlockchainService) {}
-
-      async getCurrent(): Promise<BlockDTO|null> {
-        const current = await this.bcService.current()
+      async getCurrent(): Promise<BlockDTO | null> {
+        const current = await this.bcService.current();
         if (!current) {
-          return null
+          return null;
         }
-        return BlockDTO.fromJSONObject(current)
+        return BlockDTO.fromJSONObject(current);
       }

-      async getPotentials(numberStart: number, timeStart: number, maxNumber:number): Promise<BlockDTO[]> {
-        const blocks = await this.bcService.dal.getPotentialForkBlocks(numberStart, timeStart, maxNumber)
-        return blocks.map((b:any) => BlockDTO.fromJSONObject(b))
+      async getPotentials(
+        numberStart: number,
+        timeStart: number,
+        maxNumber: number
+      ): Promise<BlockDTO[]> {
+        const blocks = await this.bcService.dal.getPotentialForkBlocks(
+          numberStart,
+          timeStart,
+          maxNumber
+        );
+        return blocks.map((b: any) => BlockDTO.fromJSONObject(b));
       }

-      async getBlockchainBlock(number: number, hash: string): Promise<BlockDTO | null> {
-        const b = await this.bcService.dal.getAbsoluteValidBlockInForkWindow(number, hash)
-        if (!b) return null
-        return BlockDTO.fromJSONObject(b)
+      async getBlockchainBlock(
+        number: number,
+        hash: string
+      ): Promise<BlockDTO | null> {
+        const b = await this.bcService.dal.getAbsoluteValidBlockInForkWindow(
+          number,
+          hash
+        );
+        if (!b) return null;
+        return BlockDTO.fromJSONObject(b);
       }

-      async getAbsoluteBlockInForkWindow(number: number, hash: string): Promise<BlockDTO | null> {
-        const block = await this.bcService.dal.getAbsoluteBlockInForkWindow(number, hash)
+      async getAbsoluteBlockInForkWindow(
+        number: number,
+        hash: string
+      ): Promise<BlockDTO | null> {
+        const block = await this.bcService.dal.getAbsoluteBlockInForkWindow(
+          number,
+          hash
+        );
         if (block) {
-          return BlockDTO.fromJSONObject(block)
+          return BlockDTO.fromJSONObject(block);
         } else {
-          return null
+          return null;
         }
       }

       async revertTo(number: number): Promise<BlockDTO[]> {
-        const blocks:BlockDTO[] = []
+        const blocks: BlockDTO[] = [];
         const current = await this.bcService.current();
         if (!current) {
-          throw Error(DataErrors[DataErrors.CANNOT_REVERT_NO_CURRENT_BLOCK])
+          throw Error(DataErrors[DataErrors.CANNOT_REVERT_NO_CURRENT_BLOCK]);
         }
         for (let i = 0, count = current.number - number; i < count; i++) {
-          const reverted = await this.bcService.mainContext.revertCurrentBlock()
-          blocks.push(BlockDTO.fromJSONObject(reverted))
+          const reverted = await this.bcService.mainContext.revertCurrentBlock();
+          blocks.push(BlockDTO.fromJSONObject(reverted));
         }
         if (current.number < number) {
-          throw "Already below this number"
+          throw "Already below this number";
         }
-        return blocks
+        return blocks;
       }

       async addBlock(block: BlockDTO): Promise<BlockDTO> {
-        return await this.bcService.mainContext.checkAndAddBlock(block, false)
+        return await this.bcService.mainContext.checkAndAddBlock(block, false);
       }
-
-    })(this)
+    })(this);
   }

   /**
@@ -127,31 +144,30 @@ export class BlockchainService extends FIFOService {
   _read() {}

   getContext() {
-    return this.mainContext
+    return this.mainContext;
   }

-  setConfDAL(newConf:ConfDTO, newDAL:FileDAL, newKeyPair:any) {
+  setConfDAL(newConf: ConfDTO, newDAL: FileDAL, newKeyPair: any) {
     this.dal = newDAL;
     this.conf = newConf;
-    this.logger = require('../lib/logger').NewLogger(this.dal.profile)
-    this.mainContext.setConfDAL(this.conf, this.dal)
+    this.logger = require("../lib/logger").NewLogger(this.dal.profile);
+    this.mainContext.setConfDAL(this.conf, this.dal);
     this.selfPubkey = newKeyPair.publicKey;
   }

   current() {
-    return this.dal.getCurrentBlockOrNull()
+    return this.dal.getCurrentBlockOrNull();
   }

-
-  async promoted(number:number) {
+  async promoted(number: number) {
     const bb = await this.dal.getPromoted(number);
     if (!bb) throw constants.ERRORS.BLOCK_NOT_FOUND;
     return bb;
   }

-  checkBlock(block:any, withPoWAndSignature = true) {
-    const dto = BlockDTO.fromJSONObject(block)
-    return this.mainContext.checkBlock(dto, withPoWAndSignature)
+  checkBlock(block: any, withPoWAndSignature = true) {
+    const dto = BlockDTO.fromJSONObject(block);
+    return this.mainContext.checkBlock(dto, withPoWAndSignature);
   }

   /**
@@ -159,37 +175,53 @@ export class BlockchainService extends FIFOService {
    * @returns {Promise<any>}
    */
   async branches() {
-    const current = await this.current()
+    const current = await this.current();
     if (!current) {
-      return []
+      return [];
     }
-    const switcher = new Switcher(this.switcherDao, this.invalidForks, this.conf.avgGenTime, this.conf.forksize, this.conf.switchOnHeadAdvance, this.logger)
-    const heads = await switcher.findPotentialSuitesHeads(current)
-    return heads.concat([BlockDTO.fromJSONObject(current)])
+    const switcher = new Switcher(
+      this.switcherDao,
+      this.invalidForks,
+      this.conf.avgGenTime,
+      this.conf.forksize,
+      this.conf.switchOnHeadAdvance,
+      this.logger
+    );
+    const heads = await switcher.findPotentialSuitesHeads(current);
+    return heads.concat([BlockDTO.fromJSONObject(current)]);
   }

-  submitBlock(blockToAdd:any, noResolution = false): Promise<BlockDTO> {
-    const obj = parsers.parseBlock.syncWrite(BlockDTO.fromJSONObject(blockToAdd).getRawSigned())
-    const dto = BlockDTO.fromJSONObject(obj)
-    const hash = dto.getHash()
+  submitBlock(blockToAdd: any, noResolution = false): Promise<BlockDTO> {
+    const obj = parsers.parseBlock.syncWrite(
+      BlockDTO.fromJSONObject(blockToAdd).getRawSigned()
+    );
+    const dto = BlockDTO.fromJSONObject(obj);
+    const hash = dto.getHash();
     return this.pushFIFO(hash, async () => {
       // Check basic fields:
       // * currency relatively to conf
-      if (this.conf && this.conf.currency && this.conf.currency !== dto.currency) {
-        throw CommonConstants.ERRORS.WRONG_CURRENCY
+      if (
+        this.conf &&
+        this.conf.currency &&
+        this.conf.currency !== dto.currency
+      ) {
+        throw CommonConstants.ERRORS.WRONG_CURRENCY;
       }
       // * hash relatively to powMin
       if (!LOCAL_RULES_FUNCTIONS.isProofOfWorkCorrect(dto)) {
-        throw CommonConstants.ERRORS.WRONG_POW
+        throw CommonConstants.ERRORS.WRONG_POW;
       }
       // * number relatively to fork window and current block
       if (this.conf && this.conf.forksize !== undefined) {
-        const current = await this.current()
+        const current = await this.current();
         if (current && dto.number < current.number - this.conf.forksize) {
-          throw CommonConstants.ERRORS.OUT_OF_FORK_WINDOW
+          throw CommonConstants.ERRORS.OUT_OF_FORK_WINDOW;
        }
      }
-      const absolute = await this.dal.existsAbsoluteBlockInForkWindow(parseInt(obj.number), obj.hash)
+      const absolute = await this.dal.existsAbsoluteBlockInForkWindow(
+        parseInt(obj.number),
+        obj.hash
+      );
      if (!absolute) {
        // Save the block in the sandbox
        await this.mainContext.addSideBlock(dto);
@@ -198,104 +230,135 @@ export class BlockchainService extends FIFOService {
        // This will enhance the block propagation on the network, thus avoiding potential forks from emerging.
        if (!noResolution) {
          (() => {
-            return this.pushFIFO('resolution_' + dto.getHash(), async () => {
+            return this.pushFIFO("resolution_" + dto.getHash(), async () => {
              // Resolve the potential new HEAD
-              await this.blockResolution()
+              await this.blockResolution();
              // Resolve the potential forks
-              await this.forkResolution()
-              const current = await this.current()
+              await this.forkResolution();
+              const current = await this.current();
              this.push({
                bcEvent: OtherConstants.BC_EVENT.RESOLUTION_DONE,
-                block: current
-              })
-            })
-          })()
+                block: current,
+              });
+            });
+          })();
        }
      } else {
-        throw "Block already known"
+        throw "Block already known";
      }
-      return dto
-    })
+      return dto;
+    });
  }

-  async blockResolution(filterFunc: (b: DBBlock) => boolean = () => true): Promise<BlockDTO|null> {
-    let lastAdded:BlockDTO|null = null
-    let added:BlockDTO|null
-    let nbAdded = 0
+  async blockResolution(
+    filterFunc: (b: DBBlock) => boolean = () => true
+  ): Promise<BlockDTO | null> {
+    let lastAdded: BlockDTO | null = null;
+    let added: BlockDTO | null;
+    let nbAdded = 0;
    do {
-      const current = await this.current()
-      let potentials = []
+      const current = await this.current();
+      let potentials = [];
      if (current) {
-        potentials = (await this.dal.getForkBlocksFollowing(current)).filter(filterFunc)
-        this.logger.info('Block resolution: %s potential blocks after current#%s...', potentials.length, current.number)
+        potentials = (await this.dal.getForkBlocksFollowing(current)).filter(
+          filterFunc
+        );
+        this.logger.info(
+          "Block resolution: %s potential blocks after current#%s...",
+          potentials.length,
+          current.number
+        );
      } else {
-        potentials = (await this.dal.getPotentialRootBlocks()).filter(filterFunc)
-        this.logger.info('Block resolution: %s potential blocks for root block...', potentials.length)
+        potentials = (await this.dal.getPotentialRootBlocks()).filter(
+          filterFunc
+        );
+        this.logger.info(
+          "Block resolution: %s potential blocks for root block...",
+          potentials.length
+        );
      }
-      added = null
-      let i = 0
+      added = null;
+      let i = 0;
      while (!added && i < potentials.length) {
-        const dto = BlockDTO.fromJSONObject(potentials[i])
+        const dto = BlockDTO.fromJSONObject(potentials[i]);
        try {
          if (dto.issuer === this.conf.pair.pub) {
            for (const tx of dto.transactions) {
              await this.dal.removeTxByHash(tx.hash);
            }
          }
-          lastAdded = added = await this.mainContext.checkAndAddBlock(dto)
+          lastAdded = added = await this.mainContext.checkAndAddBlock(dto);
          this.push({
            bcEvent: OtherConstants.BC_EVENT.HEAD_CHANGED,
-            block: added
-          })
-          nbAdded++
+            block: added,
+          });
+          nbAdded++;
          // Clear invalid forks' cache
-          this.invalidForks.splice(0, this.invalidForks.length)
+          this.invalidForks.splice(0, this.invalidForks.length);
        } catch (e) {
-          this.logger.error(e)
-          added = null
-          const theError = e && (e.message || e)
+          this.logger.error(e);
+          added = null;
+          const theError = e && (e.message || e);
          this.push({
-            blockResolutionError: theError
-          })
+            blockResolutionError: theError,
+          });
        }
-        i++
+        i++;
      }
-    } while (added)
-    return lastAdded
+    } while (added);
+    return lastAdded;
  }

  async forkResolution() {
-    const switcher = new Switcher(this.switcherDao, this.invalidForks, this.conf.avgGenTime, this.conf.forksize, this.conf.switchOnHeadAdvance, this.logger)
-    const newCurrent = await switcher.tryToFork()
+    const switcher = new Switcher(
+      this.switcherDao,
+      this.invalidForks,
+      this.conf.avgGenTime,
+      this.conf.forksize,
+      this.conf.switchOnHeadAdvance,
+      this.logger
+    );
+    const newCurrent = await switcher.tryToFork();
    if (newCurrent) {
      this.push({
        bcEvent: OtherConstants.BC_EVENT.SWITCHED,
-        block: newCurrent
-      })
+        block: newCurrent,
+      });
    }
-    return newCurrent
+    return newCurrent;
  }

  revertCurrentBlock() {
-    return this.pushFIFO("revertCurrentBlock", () => this.mainContext.revertCurrentBlock())
+    return this.pushFIFO("revertCurrentBlock", () =>
+      this.mainContext.revertCurrentBlock()
+    );
  }

  revertCurrentHead() {
-    return this.pushFIFO("revertCurrentHead", () => this.mainContext.revertCurrentHead())
+    return this.pushFIFO("revertCurrentHead", () =>
+      this.mainContext.revertCurrentHead()
+    );
  }

-  applyNextAvailableFork() {
-    return this.pushFIFO("applyNextAvailableFork", () => this.mainContext.applyNextAvailableFork())
+  applyNextAvailableFork() {
+    return this.pushFIFO("applyNextAvailableFork", () =>
+      this.mainContext.applyNextAvailableFork()
+    );
  }
-
-  async requirementsOfIdentities(identities:IdentityForRequirements[], computeDistance = true) {
-    let all:HttpIdentityRequirement[] = [];
+  async requirementsOfIdentities(
+    identities: IdentityForRequirements[],
+    computeDistance = true
+  ) {
+    let all: HttpIdentityRequirement[] = [];
    let current = await this.dal.getCurrentBlockOrNull();
    for (const obj of identities) {
      try {
-        let reqs = await this.requirementsOfIdentity(obj, current, computeDistance);
+        let reqs = await this.requirementsOfIdentity(
+          obj,
+          current,
+          computeDistance
+        );
        all.push(reqs);
      } catch (e) {
        this.logger.warn(e);
@@ -304,7 +367,11 @@ export class BlockchainService extends FIFOService {
    return all;
  }

-  async requirementsOfIdentity(idty:IdentityForRequirements, current:DBBlock|null, computeDistance = true): Promise<HttpIdentityRequirement> {
+  async requirementsOfIdentity(
+    idty: IdentityForRequirements,
+    current: DBBlock | null,
+    computeDistance = true
+  ): Promise<HttpIdentityRequirement> {
    // TODO: this is not clear
    let expired = false;
    let outdistanced = false;
@@ -312,47 +379,75 @@ export class BlockchainService extends FIFOService {
    let wasMember = false;
    let expiresMS = 0;
    let expiresPending = 0;
-    let certs:ValidCert[] = [];
+    let certs: ValidCert[] = [];
    let certsPending = [];
    let mssPending = [];
    try {
-      const join = await this.server.generatorGetJoinData(current, idty.hash, 'a');
+      const join = await this.server.generatorGetJoinData(
+        current,
+        idty.hash,
+        "a"
+      );
      const pubkey = join.identity.pubkey;
      // Check WoT stability
-      const someNewcomers = join.identity.wasMember ? [] : [join.identity.pubkey];
+      const someNewcomers = join.identity.wasMember
+        ? []
+        : [join.identity.pubkey];
      const nextBlockNumber = current ? current.number + 1 : 0;
-      const joinData:any = {};
+      const joinData: any = {};
      joinData[join.identity.pubkey] = join;
      const updates = {};
      certsPending = await this.dal.certDAL.getToTarget(idty.hash);
-      certsPending = certsPending.map((c:any) => {
-        c.blockstamp = [c.block_number, c.block_hash].join('-')
-        return c
+      certsPending = certsPending.map((c: any) => {
+        c.blockstamp = [c.block_number, c.block_hash].join("-");
+        return c;
      });
-      mssPending = await this.dal.msDAL.getPendingINOfTarget(idty.hash)
-      mssPending = mssPending.map((ms:any) => {
-        ms.blockstamp = ms.block
-        ms.sig = ms.signature
-        ms.type = ms.membership
-        return ms
+      mssPending = await this.dal.msDAL.getPendingINOfTarget(idty.hash);
+      mssPending = mssPending.map((ms: any) => {
+        ms.blockstamp = ms.block;
+        ms.sig = ms.signature;
+        ms.type = ms.membership;
+        return ms;
      });
-      const newCerts = await this.server.generatorComputeNewCerts(nextBlockNumber, [join.identity.pubkey], joinData, updates);
-      const newLinks = await this.server.generatorNewCertsToLinks(newCerts, updates);
+      const newCerts = await this.server.generatorComputeNewCerts(
+        nextBlockNumber,
+        [join.identity.pubkey],
+        joinData,
+        updates
+      );
+      const newLinks = await this.server.generatorNewCertsToLinks(
+        newCerts,
+        updates
+      );
      const currentTime = current ? current.medianTime : 0;
      certs = await this.getValidCerts(pubkey, newCerts, currentTime);
      if (computeDistance) {
-        outdistanced = await GLOBAL_RULES_HELPERS.isOver3Hops(pubkey, newLinks, someNewcomers, current, this.conf, this.dal);
+        outdistanced = await GLOBAL_RULES_HELPERS.isOver3Hops(
+          pubkey,
+          newLinks,
+          someNewcomers,
+          current,
+          this.conf,
+          this.dal
+        );
      }
      // Expiration of current membership
-      const currentMembership = await this.dal.mindexDAL.getReducedMSForImplicitRevocation(pubkey);
-      const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1;
+      const currentMembership = await this.dal.mindexDAL.getReducedMSForImplicitRevocation(
+        pubkey
+      );
+      const currentMSN = currentMembership
+        ? parseInt(currentMembership.created_on)
+        : -1;
      if (currentMSN >= 0) {
        if (join.identity.member) {
-          const msBlock = await this.dal.getTristampOf(currentMSN)
-          if (msBlock) { // special case for block #0
-            expiresMS = Math.max(0, (msBlock.medianTime + this.conf.msValidity - currentTime));
-          }
-          else {
+          const msBlock = await this.dal.getTristampOf(currentMSN);
+          if (msBlock) {
+            // special case for block #0
+            expiresMS = Math.max(
+              0,
+              msBlock.medianTime + this.conf.msValidity - currentTime
+            );
+          } else {
            expiresMS = this.conf.msValidity;
          }
        } else {
@@ -362,19 +457,29 @@ export class BlockchainService extends FIFOService {
      // Expiration of pending membership
      const lastJoin = await this.dal.lastJoinOfIdentity(idty.hash);
      if (lastJoin) {
-        const msBlock = await this.dal.getTristampOf(lastJoin.blockNumber)
-        if (msBlock) { // Special case for block#0
-          expiresPending = Math.max(0, (msBlock.medianTime + this.conf.msValidity - currentTime));
-        }
-        else {
+        const msBlock = await this.dal.getTristampOf(lastJoin.blockNumber);
+        if (msBlock) {
+          // Special case for block#0
+          expiresPending = Math.max(
+            0,
+            msBlock.medianTime + this.conf.msValidity - currentTime
+          );
+        } else {
          expiresPending = this.conf.msValidity;
        }
      }
      wasMember = idty.wasMember;
-      isSentry = idty.member && (await this.dal.isSentry(idty.pubkey, this.conf));
+      isSentry =
+        idty.member && (await this.dal.isSentry(idty.pubkey, this.conf));
    } catch (e) {
      // We throw whatever isn't "Too old identity" error
-      if (!(e && e.uerr && e.uerr.ucode == constants.ERRORS.TOO_OLD_IDENTITY.uerr.ucode)) {
+      if (
+        !(
+          e &&
+          e.uerr &&
+          e.uerr.ucode == constants.ERRORS.TOO_OLD_IDENTITY.uerr.ucode
+        )
+      ) {
        throw e;
      } else {
        expired = true;
@@ -385,7 +490,7 @@ export class BlockchainService extends FIFOService {
      uid: idty.uid,
      sig: idty.sig,
      meta: {
-        timestamp: idty.buid
+        timestamp: idty.buid,
      },
      revocation_sig: idty.revocation_sig,
      revoked: idty.revoked,
@@ -398,58 +503,65 @@ export class BlockchainService extends FIFOService {
      pendingCerts: certsPending,
      pendingMemberships: mssPending,
      membershipPendingExpiresIn: expiresPending,
-      membershipExpiresIn: expiresMS
+      membershipExpiresIn: expiresMS,
    };
  }

-  async getValidCerts(newcomer:string, newCerts:any, currentTime:number): Promise<ValidCert[]> {
+  async getValidCerts(
+    newcomer: string,
+    newCerts: any,
+    currentTime: number
+  ): Promise<ValidCert[]> {
    const links = await this.dal.getValidLinksTo(newcomer);
-    const certsFromLinks = links.map((lnk:any) => { return {
+    const certsFromLinks = links.map((lnk: any) => {
+      return {
        from: lnk.issuer,
        to: lnk.receiver,
        sig: lnk.sig,
        timestamp: lnk.expires_on - this.conf.sigValidity,
-        expiresIn: 0
-      }
-    })
+        expiresIn: 0,
+      };
+    });
    const certsFromCerts = [];
    const certs = newCerts[newcomer] || [];
    for (const cert of certs) {
-      const block = await this.dal.getTristampOf(cert.block_number)
+      const block = await this.dal.getTristampOf(cert.block_number);
      if (block) {
        certsFromCerts.push({
          from: cert.from,
          to: cert.to,
          sig: cert.sig,
          timestamp: block.medianTime,
-          expiresIn: 0
-        })
+          expiresIn: 0,
+        });
      }
    }
-    return certsFromLinks.concat(certsFromCerts).map(c => {
-      c.expiresIn = Math.max(0, c.timestamp + this.conf.sigValidity - currentTime)
-      return c
-    })
+    return certsFromLinks.concat(certsFromCerts).map((c) => {
+      c.expiresIn = Math.max(
+        0,
+        c.timestamp + this.conf.sigValidity - currentTime
+      );
+      return c;
+    });
  }

  // TODO: look in archives too
  getCountOfSelfMadePoW() {
-    return this.dal.getCountOfPoW(this.selfPubkey)
+    return this.dal.getCountOfPoW(this.selfPubkey);
  }

-  // This method is called by duniter-crawler 1.3.x
-  saveParametersForRootBlock(block:BlockDTO) {
-    return DuniterBlockchain.saveParametersForRoot(block, this.conf, this.dal)
+  saveParametersForRootBlock(block: BlockDTO) {
+    return DuniterBlockchain.saveParametersForRoot(block, this.conf, this.dal);
  }

-  async blocksBetween(from:number, count:number): Promise<DBBlock[]> {
+  async blocksBetween(from: number, count: number): Promise<DBBlock[]> {
    if (count > 5000) {
-      throw 'Count is too high';
+      throw "Count is too high";
    }
-    const current = await this.current()
+    const current = await this.current();
    if (!current) {
-      return []
+      return [];
    }
    count = Math.min(current.number - from + 1, count);
    if (!current || current.number < from) {
@@ -459,9 +571,9 @@ export class BlockchainService extends FIFOService {
  }

  async trimIndexes() {
-    const HEAD = await this.dal.getCurrentBlockOrNull()
+    const HEAD = await this.dal.getCurrentBlockOrNull();
    if (HEAD) {
-      return DuniterBlockchain.trimIndexes(this.dal, HEAD, this.conf)
+      return DuniterBlockchain.trimIndexes(this.dal, HEAD, this.conf);
    }
  }
 }
diff --git a/app/service/FIFOService.ts b/app/service/FIFOService.ts
index e17509dc2e28af1fa4060ef5571d20a1a6fdf4d5..29f06d0704175df73da1acfccc3e3b228620a299 100644
--- a/app/service/FIFOService.ts
+++ b/app/service/FIFOService.ts
@@ -11,16 +11,15 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.

-import {GlobalFifoPromise} from "./GlobalFifoPromise";
-import * as stream from 'stream';
+import { GlobalFifoPromise } from "./GlobalFifoPromise";
+import * as stream from "stream";

 export abstract class FIFOService extends stream.Readable {
-
-  constructor(private fifoPromiseHandler:GlobalFifoPromise) {
-    super({ objectMode: true })
+  constructor(private fifoPromiseHandler: GlobalFifoPromise) {
+    super({ objectMode: true });
   }

   async pushFIFO<T>(operationId: string, p: () => Promise<T>): Promise<T> {
-    return this.fifoPromiseHandler.pushFIFOPromise(operationId, p)
+    return this.fifoPromiseHandler.pushFIFOPromise(operationId, p);
   }
-}
\ No newline at end of file
+}
diff --git a/app/service/GlobalFifoPromise.ts b/app/service/GlobalFifoPromise.ts
index 70b2199c2022bd696580bff8fa22d0bb4680130c..b186425784350c3bc086d1fbe32c676ca9877e04 100644
--- a/app/service/GlobalFifoPromise.ts
+++ b/app/service/GlobalFifoPromise.ts
@@ -11,24 +11,22 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.

-import {CommonConstants} from "../lib/common-libs/constants"
-import {NewLogger} from "../lib/logger"
+import { CommonConstants } from "../lib/common-libs/constants";
+import { NewLogger } from "../lib/logger";

-const querablep = require('querablep');
-const async = require('async');
-const logger = NewLogger()
+const querablep = require("querablep");
+const async = require("async");
+const logger = NewLogger();

 export class GlobalFifoPromise {
-
-  private fifo:any = async.queue(function (task:any, callback:any) {
+  private fifo: any = async.queue(function (task: any, callback: any) {
     task(callback);
-  }, 1)
+  }, 1);

-  private operations:{ [k:string]: boolean } = {}
-  private currentPromise:any
+  private operations: { [k: string]: boolean } = {};
+  private currentPromise: any;

-  constructor() {
-  }
+  constructor() {}

   /**
    * Adds a promise to a FIFO stack of promises, so the given promise will be executed against a shared FIFO stack.
@@ -37,43 +35,46 @@ export class GlobalFifoPromise {
    */
   pushFIFOPromise<T>(operationId: string, p: () => Promise<T>): Promise<T> {
     // Return a promise that will be done after the fifo has executed the given promise
-    return new Promise((resolve:any, reject:any) => {
+    return new Promise((resolve: any, reject: any) => {
       if (this.operations[operationId]) {
-        throw CommonConstants.ERRORS.DOCUMENT_BEING_TREATED
+        throw CommonConstants.ERRORS.DOCUMENT_BEING_TREATED;
       }
-      this.operations[operationId] = true
+      this.operations[operationId] = true;
       // Push the promise on the stack
-      this.fifo.push(async (cb:any) => {
-        // OK its the turn of given promise, execute it
-        try {
-          this.currentPromise = querablep(p())
-          const res = await this.currentPromise
-          delete this.operations[operationId]
-          // Finished, we end the function in the FIFO
-          cb(null, res);
-        } catch (e) {
-          delete this.operations[operationId]
-          // Errored, we end the function with an error
-          cb(e);
+      this.fifo.push(
+        async (cb: any) => {
+          // OK, it's the given promise's turn: execute it
+          try {
+            this.currentPromise = querablep(p());
+            const res = await this.currentPromise;
+            delete this.operations[operationId];
+            // Finished, we end the function in the FIFO
+            cb(null, res);
+          } catch (e) {
+            delete this.operations[operationId];
+            // Errored, we end the function with an error
+            cb(e);
+          }
+        },
+        (err: any, res: T) => {
+          // An error occurred => reject promise
+          if (err) return reject(err);
+          // Success => we resolve with the given promise result
+          resolve(res);
         }
-      }, (err:any, res:T) => {
-        // An error occured => reject promise
-        if (err) return reject(err);
-        // Success => we resolve with given promise result
-        resolve(res);
-      });
+      );
     });
   }

   async closeFIFO() {
-    this.fifo.pause()
+    this.fifo.pause();
     if (this.currentPromise && !this.currentPromise.isFulfilled()) {
-      logger.info('Waiting current task of documentFIFO to be finished...')
-      await this.currentPromise
+      logger.info("Waiting current task of documentFIFO to be finished...");
+      await this.currentPromise;
     }
   }

   remainingTasksCount() {
-    return this.fifo.length()
+    return this.fifo.length();
   }
 }
diff --git a/app/service/IdentityService.ts b/app/service/IdentityService.ts
index de99c94c3c1bdc4cb5f64b8e126c03e74ea37d84..9833f05d4c4df44c6b195352bdca0757a0d195d8 100644
--- a/app/service/IdentityService.ts
+++ b/app/service/IdentityService.ts
@@ -11,53 +11,54 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 // GNU Affero General Public License for more details.
-import {GlobalFifoPromise} from "./GlobalFifoPromise"
-import {FileDAL} from "../lib/dal/fileDAL"
-import {ConfDTO} from "../lib/dto/ConfDTO"
-import {DBIdentity} from "../lib/dal/sqliteDAL/IdentityDAL"
-import {GLOBAL_RULES_FUNCTIONS, GLOBAL_RULES_HELPERS} from "../lib/rules/global_rules"
-import {BlockDTO} from "../lib/dto/BlockDTO"
-import {RevocationDTO} from "../lib/dto/RevocationDTO"
-import {BasicIdentity, IdentityDTO} from "../lib/dto/IdentityDTO"
-import {CertificationDTO} from "../lib/dto/CertificationDTO"
-import {DBCert} from "../lib/dal/sqliteDAL/CertDAL"
-import {verify} from "duniteroxyde"
-import {FIFOService} from "./FIFOService"
-import {MindexEntry} from "../lib/indexer"
-import {DataErrors} from "../lib/common-libs/errors"
-import {Tristamp} from "../lib/common/Tristamp"
+import { GlobalFifoPromise } from "./GlobalFifoPromise";
+import { FileDAL } from "../lib/dal/fileDAL";
+import { ConfDTO } from "../lib/dto/ConfDTO";
+import { DBIdentity } from "../lib/dal/sqliteDAL/IdentityDAL";
+import {
+  GLOBAL_RULES_FUNCTIONS,
+  GLOBAL_RULES_HELPERS,
+} from "../lib/rules/global_rules";
+import { BlockDTO } from "../lib/dto/BlockDTO";
+import { RevocationDTO } from "../lib/dto/RevocationDTO";
+import { BasicIdentity, IdentityDTO } from "../lib/dto/IdentityDTO";
+import { CertificationDTO } from "../lib/dto/CertificationDTO";
+import { DBCert } from "../lib/dal/sqliteDAL/CertDAL";
+import { verify } from "duniteroxyde";
+import { FIFOService } from "./FIFOService";
+import { MindexEntry } from "../lib/indexer";
+import { DataErrors } from "../lib/common-libs/errors";
+import { Tristamp } from "../lib/common/Tristamp";

 "use strict";
-const constants = require('../lib/constants');
+const constants = require("../lib/constants");
 const BY_ABSORPTION = true;

 export class IdentityService extends FIFOService {
+  dal: FileDAL;
+  conf: ConfDTO;
+  logger: any;

-  dal:FileDAL
-  conf:ConfDTO
-  logger:any
-
-  constructor(fifoPromiseHandler:GlobalFifoPromise) {
-    super(fifoPromiseHandler)
+  constructor(fifoPromiseHandler: GlobalFifoPromise) {
+    super(fifoPromiseHandler);
   }

-  setConfDAL(newConf:ConfDTO, newDAL:FileDAL) {
+  setConfDAL(newConf: ConfDTO, newDAL: FileDAL) {
     this.dal = newDAL;
     this.conf = newConf;
-    this.logger = require('../lib/logger').NewLogger(this.dal.profile);
+    this.logger = require("../lib/logger").NewLogger(this.dal.profile);
   }

-  searchIdentities(search:string) {
-    return this.dal.searchJustIdentities(search)
+  searchIdentities(search: string) {
+    return this.dal.searchJustIdentities(search);
   }

-  async findMember(search:string) {
+  async findMember(search: string) {
     let idty = null;
     if (search.match(constants.PUBLIC_KEY)) {
       idty = await this.dal.getWrittenIdtyByPubkeyForHashing(search);
-    }
-    else {
+    } else {
       idty = await this.dal.getWrittenIdtyByUidForHashing(search);
     }
     if (!idty) {
@@ -65,158 +66,215 @@ export class IdentityService extends FIFOService {
     }

     let memberships: {
-      blockstamp:string
-      membership:string
-      number:number
-      fpr:string
-      written_number:number|null
-    }[] = []
+      blockstamp: string;
+      membership: string;
+      number: number;
+      fpr: string;
+      written_number: number | null;
+    }[] = [];

     if (idty) {
       const mss = await this.dal.msDAL.getMembershipsOfIssuer(idty.pub);
       const mssFromMindex = await this.dal.mindexDAL.reducable(idty.pub);
-      memberships = mss.map(m => {
+      memberships = mss.map((m) => {
         return {
-          blockstamp: [m.blockNumber, m.blockHash].join('-'),
+          blockstamp: [m.blockNumber, m.blockHash].join("-"),
           membership: m.membership,
           number: m.blockNumber,
           fpr: m.blockHash,
-          written_number: m.written_number
-        }
-      })
-      memberships = memberships.concat(mssFromMindex.map((ms:MindexEntry) => {
-        const sp = ms.created_on.split('-');
-        return {
-          blockstamp: ms.created_on,
-          membership: ms.leaving ? 'OUT' : 'IN',
-          number: parseInt(sp[0]),
-          fpr: sp[1],
-          written_number: parseInt(ms.written_on)
-        }
-      }))
+          written_number: m.written_number,
+        };
+      });
+      memberships = memberships.concat(
+        mssFromMindex.map((ms: MindexEntry) => {
+          const sp = ms.created_on.split("-");
+          return {
+            blockstamp: ms.created_on,
+            membership: ms.leaving ? "OUT" : "IN",
+            number: parseInt(sp[0]),
+            fpr: sp[1],
+            written_number: parseInt(ms.written_on),
+          };
+        })
+      );
     }
     return {
       idty: {
         pubkey: idty.pub,
         uid: idty.uid,
-        buid: idty.created_on
+        buid: idty.created_on,
       },
-      memberships
-    }
+      memberships,
+    };
   }

-  getPendingFromPubkey(pubkey:string) {
-    return this.dal.getNonWritten(pubkey)
+  getPendingFromPubkey(pubkey: string) {
+    return this.dal.getNonWritten(pubkey);
   }

-  submitIdentity(idty:BasicIdentity, byAbsorption = false): Promise<DBIdentity> {
-    const idtyObj = IdentityDTO.fromJSONObject(idty)
-    const toSave = IdentityDTO.fromBasicIdentity(idty)
+  submitIdentity(
+    idty: BasicIdentity,
+    byAbsorption = false
+  ): Promise<DBIdentity> {
+    const idtyObj = IdentityDTO.fromJSONObject(idty);
+    const toSave = IdentityDTO.fromBasicIdentity(idty);
     // Force usage of local currency name, do not accept other currencies documents
     idtyObj.currency = this.conf.currency;
     const createIdentity = idtyObj.rawWithoutSig();
-    const hash = idtyObj.getHash()
+    const hash = idtyObj.getHash();
     return this.pushFIFO<DBIdentity>(hash, async () => {
-      this.logger.info('⬇ IDTY %s %s', idty.pubkey, idty.uid);
+      this.logger.info("⬇ IDTY %s %s", idty.pubkey, idty.uid);
       try {
         // Check signature's validity
         let verified = verify(createIdentity, idty.sig, idty.pubkey);
         if (!verified) {
           throw constants.ERRORS.SIGNATURE_DOES_NOT_MATCH;
         }
-        let existing = await this.dal.getGlobalIdentityByHashForExistence(toSave.hash);
+        let existing = await this.dal.getGlobalIdentityByHashForExistence(
+          toSave.hash
+        );
         if (existing) {
           throw constants.ERRORS.ALREADY_UP_TO_DATE;
-        }
-        else {
+        } else {
           // Create if not already written uid/pubkey
-          let used = await GLOBAL_RULES_HELPERS.checkExistsPubkey(idty.pubkey, this.dal)
+          let used = await GLOBAL_RULES_HELPERS.checkExistsPubkey(
+            idty.pubkey,
+            this.dal
+          );
           if (used) {
             throw constants.ERRORS.PUBKEY_ALREADY_USED;
           }
-          used = await GLOBAL_RULES_HELPERS.checkExistsUserID(idty.uid, this.dal)
+          used = await GLOBAL_RULES_HELPERS.checkExistsUserID(
+            idty.uid,
+            this.dal
+          );
           if (used) {
             throw constants.ERRORS.UID_ALREADY_USED;
           }
           const current = await this.dal.getCurrentBlockOrNull();
-          if (idty.buid == '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855' && current) {
+          if (
+            idty.buid ==
+              "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855" &&
+            current
+          ) {
            throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK;
          } else if (current) {
-            let basedBlock = await this.dal.getAbsoluteValidBlockInForkWindowByBlockstamp(idty.buid);
+            let basedBlock = await this.dal.getAbsoluteValidBlockInForkWindowByBlockstamp(
+              idty.buid
+            );
            if (!basedBlock) {
              throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK;
            }
            toSave.expires_on = basedBlock.medianTime + this.conf.idtyWindow;
          }
-          await GLOBAL_RULES_FUNCTIONS.checkIdentitiesAreWritable({ identities: [idtyObj.inline()], version: (current && current.version) || constants.BLOCK_GENESIS_VERSION }, this.conf, this.dal);
+          await GLOBAL_RULES_FUNCTIONS.checkIdentitiesAreWritable(
+            {
+              identities: [idtyObj.inline()],
+              version:
+                (current && current.version) || constants.BLOCK_GENESIS_VERSION,
+            },
+            this.conf,
+            this.dal
+          );
          if (byAbsorption !== BY_ABSORPTION) {
-            if (!(await this.dal.idtyDAL.sandbox.acceptNewSandBoxEntry({
-              certsCount: 0,
-              issuers: [idty.pubkey],
-              ref_block: parseInt(idty.buid.split('-')[0])
-            }, this.conf.pair && this.conf.pair.pub))) {
+            if (
+              !(await this.dal.idtyDAL.sandbox.acceptNewSandBoxEntry(
+                {
+                  certsCount: 0,
+                  issuers: [idty.pubkey],
+                  ref_block: parseInt(idty.buid.split("-")[0]),
+                },
+                this.conf.pair && this.conf.pair.pub
+              ))
+            ) {
              throw constants.ERRORS.SANDBOX_FOR_IDENTITY_IS_FULL;
            }
          }
-          await this.dal.savePendingIdentity(toSave)
-          this.logger.info('✔ IDTY %s %s', idty.pubkey, idty.uid);
-          return toSave
+          await this.dal.savePendingIdentity(toSave);
+          this.logger.info("✔ IDTY %s %s", idty.pubkey, idty.uid);
+          return toSave;
        }
      } catch (e) {
-        this.logger.info('✘ IDTY %s %s', idty.pubkey, idty.uid);
-        throw e
+        this.logger.info("✘ IDTY %s %s", idty.pubkey, idty.uid);
+        throw e;
      }
-    })
+    });
  }

-  async submitCertification(obj:any): Promise<CertificationDTO> {
+  async submitCertification(obj: any): Promise<CertificationDTO> {
    const current = await this.dal.getCurrentBlockOrNull();
    // Prepare validator for certifications
-    const potentialNext = BlockDTO.fromJSONObject({ currency: this.conf.currency, identities: [], number: current ? current.number + 1 : 0 });
+    const potentialNext = BlockDTO.fromJSONObject({
+      currency: this.conf.currency,
+      identities: [],
+      number: current ? current.number + 1 : 0,
+    });
    // Force usage of local currency name, do not accept other currencies documents
    obj.currency = this.conf.currency || obj.currency;
-    const cert = CertificationDTO.fromJSONObject(obj)
+    const cert = CertificationDTO.fromJSONObject(obj);
    const targetHash = cert.getTargetHash();
-    let possiblyNullIdty = await this.dal.getGlobalIdentityByHashForHashingAndSig(targetHash);
-    let idtyAbsorbed = false
-    const idty:{
-      pubkey:string
-      uid:string
-      buid:string
-      sig:string
-    } = possiblyNullIdty !== null ? possiblyNullIdty : await this.submitIdentity({
-      pubkey: cert.idty_issuer,
-      uid: cert.idty_uid,
-      buid: cert.idty_buid,
-      sig: cert.idty_sig
-    }, BY_ABSORPTION);
+    let possiblyNullIdty = await this.dal.getGlobalIdentityByHashForHashingAndSig(
+      targetHash
+    );
+    let idtyAbsorbed = false;
+    const idty: {
+      pubkey: string;
+      uid: string;
+      buid: string;
+      sig: string;
+    } =
+      possiblyNullIdty !== null
+        ? possiblyNullIdty
+        : await this.submitIdentity(
+            {
+              pubkey: cert.idty_issuer,
+              uid: cert.idty_uid,
+              buid: cert.idty_buid,
+              sig: cert.idty_sig,
+            },
+            BY_ABSORPTION
+          );
    if (possiblyNullIdty === null) {
-      idtyAbsorbed = true
+      idtyAbsorbed = true;
    }
-    let anErr:any
-    const hash = cert.getHash()
+    let anErr: any;
+    const hash = cert.getHash();
    return this.pushFIFO<CertificationDTO>(hash, async () => {
-      this.logger.info('⬇ CERT %s block#%s -> %s', cert.from, cert.block_number, idty.uid);
+      this.logger.info(
+        "⬇ CERT %s block#%s -> %s",
+        cert.from,
+        cert.block_number,
+        idty.uid
+      );
      try {
-        await GLOBAL_RULES_HELPERS.checkCertificationIsValidInSandbox(cert, potentialNext, () => Promise.resolve(idty), this.conf, this.dal);
+        await GLOBAL_RULES_HELPERS.checkCertificationIsValidInSandbox(
+          cert,
+          potentialNext,
+          () => Promise.resolve(idty),
+          this.conf,
+          this.dal
+        );
      } catch (e) {
        anErr = e;
      }
      if (!anErr) {
        try {
-          let basedBlock: Tristamp|null = await this.dal.getTristampOf(cert.block_number);
+          let basedBlock: Tristamp | null = await this.dal.getTristampOf(
+            cert.block_number
+          );
          if (cert.block_number == 0 && !basedBlock) {
            basedBlock = {
              number: 0,
-              hash: 'E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855',
-              medianTime: 0
+              hash:
+                "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855",
+              medianTime: 0,
            };
          }
          if (!basedBlock) {
-            throw Error(DataErrors[DataErrors.CERT_BASED_ON_UNKNOWN_BLOCK])
+            throw Error(DataErrors[DataErrors.CERT_BASED_ON_UNKNOWN_BLOCK]);
          }
-          const mCert:DBCert = {
+          const mCert: DBCert = {
            issuers: [cert.from],
            from: cert.from,
            sig: cert.sig,
@@ -230,92 +288,116 @@ export class IdentityService extends FIFOService {
            expired: false,
            written_block: null,
            written_hash: null,
-            block: cert.block_number
-          }
+            block: cert.block_number,
+          };
          if (current && mCert.expires_on < current.medianTime) {
-            throw DataErrors[DataErrors.CERT_WINDOW_IS_PASSED]
+            throw DataErrors[DataErrors.CERT_WINDOW_IS_PASSED];
          }
-          let existingCert = await this.dal.existsCert(mCert, current)
+          let existingCert = await this.dal.existsCert(mCert, current);
          if (!existingCert) {
-            if (!(await this.dal.certDAL.getSandboxForKey(cert.from).acceptNewSandBoxEntry(mCert, this.conf.pair && this.conf.pair.pub))) {
+            if (
+              !(await this.dal.certDAL
+                .getSandboxForKey(cert.from)
+                .acceptNewSandBoxEntry(
+                  mCert,
+                  this.conf.pair && this.conf.pair.pub
+                ))
+            ) {
              throw constants.ERRORS.SANDBOX_FOR_CERT_IS_FULL;
            }
-            await this.dal.registerNewCertification(mCert)
-            this.logger.info('✔ CERT %s block#%s -> %s', cert.from, cert.block_number, idty.uid)
+            await this.dal.registerNewCertification(mCert);
+            this.logger.info(
+              "✔ CERT %s block#%s -> %s",
+              cert.from,
+              cert.block_number,
+              idty.uid
+            );
          } else {
            throw constants.ERRORS.ALREADY_UP_TO_DATE;
          }
        } catch (e) {
-          anErr = e
+          anErr = e;
        }
      }
      if (anErr) {
        if (idtyAbsorbed) {
-          await this.dal.idtyDAL.deleteByHash(targetHash)
+          await this.dal.idtyDAL.deleteByHash(targetHash);
        }
-        const err = anErr
-        const errMessage = (err.uerr && err.uerr.message) || err.message || err
-        this.logger.info('✘ CERT %s %s', cert.from, errMessage);
+        const err = anErr;
+        const errMessage = (err.uerr && err.uerr.message) || err.message || err;
+        this.logger.info("✘ CERT %s %s", cert.from, errMessage);
        throw anErr;
      }
      return cert;
-    })
+    });
  }

-  submitRevocation(obj:any) {
+  submitRevocation(obj: any) {
    // Force usage of local currency name, do not accept other currencies documents
    obj.currency = this.conf.currency || obj.currency;
-    const revoc = RevocationDTO.fromJSONObject(obj)
+    const revoc = RevocationDTO.fromJSONObject(obj);
    const raw = revoc.rawWithoutSig();
-    const hash = revoc.getHash()
+    const hash = revoc.getHash();
    return this.pushFIFO<RevocationDTO>(hash, async () => {
      try {
-        this.logger.info('⬇ REVOCATION %s %s', revoc.pubkey, revoc.idty_uid);
+        this.logger.info("⬇ REVOCATION %s %s", revoc.pubkey, revoc.idty_uid);
        let verified = verify(raw, revoc.revocation, revoc.pubkey);
        if (!verified) {
-          throw 'Wrong signature for revocation';
+          throw "Wrong signature for revocation";
        }
-        const existing = await this.dal.getGlobalIdentityByHashForRevocation(obj.hash)
+        const existing = await this.dal.getGlobalIdentityByHashForRevocation(
+          obj.hash
+        );
        if (existing) {
          // Modify
          if (existing.revoked) {
-            throw 'Already revoked';
-          }
-          else if (existing.revocation_sig) {
-            throw 'Revocation already registered';
+            throw "Already revoked";
+          } else if (existing.revocation_sig) {
+            throw "Revocation already registered";
          } else {
-            await this.dal.setRevocating({
-              pubkey: existing.pub,
-              buid: existing.created_on,
-              sig: existing.sig,
-              uid: existing.uid,
-              expires_on: existing.expires_on,
-              member: existing.member,
-              wasMember: existing.wasMember,
-            }, revoc.revocation);
-            this.logger.info('✔ REVOCATION %s %s', revoc.pubkey, revoc.idty_uid);
-            return revoc
+            await this.dal.setRevocating(
+              {
+                pubkey: existing.pub,
+                buid: existing.created_on,
+                sig: existing.sig,
+                uid: existing.uid,
+                expires_on: existing.expires_on,
+                member: existing.member,
+                wasMember: existing.wasMember,
+              },
+              revoc.revocation
+            );
+            this.logger.info(
+              "✔ REVOCATION %s %s",
+              revoc.pubkey,
+              revoc.idty_uid
+            );
+            return revoc;
          }
-        }
-        else {
+        } else {
          // Create identity given by the revocation
          const idty = IdentityDTO.fromRevocation(revoc);
          idty.revocation_sig = revoc.revocation;
-          if (!(await this.dal.idtyDAL.sandbox.acceptNewSandBoxEntry({
-            issuers: [idty.pubkey],
-            ref_block: parseInt(idty.buid.split('-')[0]),
-            certsCount: 0
-          }, this.conf.pair && this.conf.pair.pub))) {
+          if (
+            !(await this.dal.idtyDAL.sandbox.acceptNewSandBoxEntry(
+              {
+                issuers: [idty.pubkey],
+                ref_block: parseInt(idty.buid.split("-")[0]),
+                certsCount: 0,
+              },
+              this.conf.pair && this.conf.pair.pub
+            ))
+          ) {
            throw constants.ERRORS.SANDBOX_FOR_IDENTITY_IS_FULL;
          }
          await this.dal.savePendingIdentity(idty);
-          this.logger.info('✔ REVOCATION %s %s', revoc.pubkey, revoc.idty_uid);
-          return revoc
+          this.logger.info("✔ REVOCATION %s %s", revoc.pubkey, revoc.idty_uid);
+          return revoc;
        }
      } catch (e) {
-        this.logger.info('✘ REVOCATION %s %s', revoc.pubkey, revoc.idty_uid);
+        this.logger.info("✘ REVOCATION %s %s", revoc.pubkey, revoc.idty_uid);
        throw e;
      }
-    })
+    });
  }
 }
diff --git a/app/service/MembershipService.ts b/app/service/MembershipService.ts
index a1cdaa8c5297f38e860063be2fe10e209bc0dfe5..d09af9f1d80c30b1373c0f5f4c00266c2472e195 100644
--- a/app/service/MembershipService.ts
+++ b/app/service/MembershipService.ts
@@ -12,73 +12,86 @@
 // GNU Affero General Public License for more details.
"use strict"; -import {GlobalFifoPromise} from "./GlobalFifoPromise"; -import {ConfDTO} from "../lib/dto/ConfDTO"; -import {FileDAL} from "../lib/dal/fileDAL"; -import {LOCAL_RULES_HELPERS} from "../lib/rules/local_rules"; -import {GLOBAL_RULES_HELPERS} from "../lib/rules/global_rules"; -import {MembershipDTO} from "../lib/dto/MembershipDTO"; -import {FIFOService} from "./FIFOService"; -import {DBBlock} from "../lib/db/DBBlock" -import {DataErrors} from "../lib/common-libs/errors" +import { GlobalFifoPromise } from "./GlobalFifoPromise"; +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { FileDAL } from "../lib/dal/fileDAL"; +import { LOCAL_RULES_HELPERS } from "../lib/rules/local_rules"; +import { GLOBAL_RULES_HELPERS } from "../lib/rules/global_rules"; +import { MembershipDTO } from "../lib/dto/MembershipDTO"; +import { FIFOService } from "./FIFOService"; +import { DBBlock } from "../lib/db/DBBlock"; +import { DataErrors } from "../lib/common-libs/errors"; -const constants = require('../lib/constants'); +const constants = require("../lib/constants"); export class MembershipService extends FIFOService { - - constructor(fifoPromiseHandler:GlobalFifoPromise) { - super(fifoPromiseHandler) + constructor(fifoPromiseHandler: GlobalFifoPromise) { + super(fifoPromiseHandler); } - conf:ConfDTO - dal:FileDAL - logger:any + conf: ConfDTO; + dal: FileDAL; + logger: any; - setConfDAL(newConf:ConfDTO, newDAL:FileDAL) { + setConfDAL(newConf: ConfDTO, newDAL: FileDAL) { this.dal = newDAL; this.conf = newConf; - this.logger = require('../lib/logger').NewLogger(this.dal.profile); + this.logger = require("../lib/logger").NewLogger(this.dal.profile); } current(): Promise<DBBlock | null> { - return this.dal.getCurrentBlockOrNull() + return this.dal.getCurrentBlockOrNull(); } - submitMembership(ms:any) { - const entry = MembershipDTO.fromJSONObject(ms) - const hash = entry.getHash() + submitMembership(ms: any) { + const entry = MembershipDTO.fromJSONObject(ms); + const hash = entry.getHash(); return this.pushFIFO<MembershipDTO>(hash, async () => { // Force usage of local currency name, do not accept other currencies documents entry.currency = this.conf.currency || entry.currency; - this.logger.info('⬇ %s %s', entry.issuer, entry.membership); + this.logger.info("⬇ %s %s", entry.issuer, entry.membership); if (!LOCAL_RULES_HELPERS.checkSingleMembershipSignature(entry)) { throw constants.ERRORS.WRONG_SIGNATURE_MEMBERSHIP; } // Get already existing Membership with same parameters - const mostRecentNumber = await this.dal.getMostRecentMembershipNumberForIssuer(entry.issuer); - const thisNumber = entry.number + const mostRecentNumber = await this.dal.getMostRecentMembershipNumberForIssuer( + entry.issuer + ); + const thisNumber = entry.number; if (mostRecentNumber == thisNumber) { throw constants.ERRORS.ALREADY_RECEIVED_MEMBERSHIP; } else if (mostRecentNumber > thisNumber) { throw constants.ERRORS.A_MORE_RECENT_MEMBERSHIP_EXISTS; } const isMember = await this.dal.isMember(entry.issuer); - const isJoin = entry.membership == 'IN'; + const isJoin = entry.membership == "IN"; if (!isMember && !isJoin) { // LEAVE throw constants.ERRORS.MEMBERSHIP_A_NON_MEMBER_CANNOT_LEAVE; } const current = await this.dal.getCurrentBlockOrNull(); - const basedBlock = await GLOBAL_RULES_HELPERS.checkMembershipBlock(entry, current, this.conf, this.dal); - if (!(await this.dal.msDAL.sandbox.acceptNewSandBoxEntry({ - issuers: [entry.pubkey], - block_number: entry.block_number - }, this.conf.pair && this.conf.pair.pub))) { + const 
basedBlock = await GLOBAL_RULES_HELPERS.checkMembershipBlock( + entry, + current, + this.conf, + this.dal + ); + if ( + !(await this.dal.msDAL.sandbox.acceptNewSandBoxEntry( + { + issuers: [entry.pubkey], + block_number: entry.block_number, + }, + this.conf.pair && this.conf.pair.pub + )) + ) { throw constants.ERRORS.SANDBOX_FOR_MEMERSHIP_IS_FULL; } - const expires_on = basedBlock ? basedBlock.medianTime + this.conf.msWindow : 0 + const expires_on = basedBlock + ? basedBlock.medianTime + this.conf.msWindow + : 0; if (current && expires_on < current.medianTime) { - throw DataErrors[DataErrors.MEMBERSHIP_WINDOW_IS_PASSED] + throw DataErrors[DataErrors.MEMBERSHIP_WINDOW_IS_PASSED]; } // Saves entry await this.dal.savePendingMembership({ @@ -98,10 +111,10 @@ export class MembershipService extends FIFOService { expires_on, signature: entry.signature, expired: false, - block_number: entry.number + block_number: entry.number, }); - this.logger.info('✔ %s %s', entry.issuer, entry.membership); + this.logger.info("✔ %s %s", entry.issuer, entry.membership); return entry; - }) + }); } } diff --git a/app/service/PeeringService.ts b/app/service/PeeringService.ts index 67ed60c6ffc7088d74cc8453bd25f387f5627644..ee2d5623316b02b5da042512019dd0f0ac2643eb 100755 --- a/app/service/PeeringService.ts +++ b/app/service/PeeringService.ts @@ -11,127 +11,144 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details. -import {ConfDTO} from "../lib/dto/ConfDTO" -import {FileDAL} from "../lib/dal/fileDAL" -import {DBBlock} from "../lib/db/DBBlock" -import {Multicaster} from "../lib/streams/multicaster" -import {PeerDTO} from "../lib/dto/PeerDTO" -import {verify} from "duniteroxyde" -import {dos2unix} from "../lib/common-libs/dos2unix" -import {rawer} from "../lib/common-libs/index" -import {Server} from "../../server" -import {GlobalFifoPromise} from "./GlobalFifoPromise" -import {DBPeer} from "../lib/db/DBPeer" -import {Underscore} from "../lib/common-libs/underscore" -import {CommonConstants} from "../lib/common-libs/constants" -import {DataErrors} from "../lib/common-libs/errors" -import {cleanLongDownPeers} from "../modules/crawler/lib/garbager" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { FileDAL } from "../lib/dal/fileDAL"; +import { DBBlock } from "../lib/db/DBBlock"; +import { Multicaster } from "../lib/streams/multicaster"; +import { PeerDTO } from "../lib/dto/PeerDTO"; +import { verify } from "duniteroxyde"; +import { dos2unix } from "../lib/common-libs/dos2unix"; +import { rawer } from "../lib/common-libs/index"; +import { Server } from "../../server"; +import { GlobalFifoPromise } from "./GlobalFifoPromise"; +import { DBPeer } from "../lib/db/DBPeer"; +import { Underscore } from "../lib/common-libs/underscore"; +import { CommonConstants } from "../lib/common-libs/constants"; +import { DataErrors } from "../lib/common-libs/errors"; +import { cleanLongDownPeers } from "../modules/crawler/lib/garbager"; -const util = require('util'); -const events = require('events'); -const logger = require('../lib/logger').NewLogger('peering'); -const constants = require('../lib/constants'); +const util = require("util"); +const events = require("events"); +const logger = require("../lib/logger").NewLogger("peering"); +const constants = require("../lib/constants"); export interface Keyring { - publicKey:string - secretKey:string + publicKey: string; + secretKey: string; } // Note: for an unknown reason, PeeringService cannot extend FIFOService 
correctly. When this.pushFIFO() is called // from within submitP(), "this.pushFIFO === undefined" is true. export class PeeringService { + conf: ConfDTO; + dal: FileDAL; + selfPubkey: string; + pair: Keyring; + pubkey: string; + peerInstance: DBPeer | null; + logger: any; - conf:ConfDTO - dal:FileDAL - selfPubkey:string - pair:Keyring - pubkey:string - peerInstance:DBPeer | null - logger:any + constructor( + private server: Server, + private fifoPromiseHandler: GlobalFifoPromise + ) {} - constructor(private server:Server, private fifoPromiseHandler:GlobalFifoPromise) { - } - - setConfDAL(newConf:ConfDTO, newDAL:FileDAL, newPair:Keyring) { + setConfDAL(newConf: ConfDTO, newDAL: FileDAL, newPair: Keyring) { this.dal = newDAL; this.conf = newConf; this.pair = newPair; this.pubkey = this.pair.publicKey; this.selfPubkey = this.pubkey; - this.logger = require('../lib/logger').NewLogger(this.dal.profile) + this.logger = require("../lib/logger").NewLogger(this.dal.profile); } - async peer(newPeer:DBPeer | null = null) { + async peer(newPeer: DBPeer | null = null) { if (newPeer) { this.peerInstance = newPeer; } let thePeer = this.peerInstance; if (!thePeer) { - thePeer = await this.generateSelfPeer(this.conf) + thePeer = await this.generateSelfPeer(this.conf); } - return PeerDTO.fromJSONObject(thePeer) + return PeerDTO.fromJSONObject(thePeer); } async mirrorBMAEndpoints() { const localPeer = await this.peer(); - const localEndpoints = await this.server.getEndpoints() - return this.getOtherEndpoints(localPeer.endpoints, localEndpoints).filter((ep) => ep.match(/^BASIC_MERKLED_API/)) + const localEndpoints = await this.server.getEndpoints(); + return this.getOtherEndpoints( + localPeer.endpoints, + localEndpoints + ).filter((ep) => ep.match(/^BASIC_MERKLED_API/)); } - checkPeerSignature(p:PeerDTO) { + checkPeerSignature(p: PeerDTO) { const raw = rawer.getPeerWithoutSignature(p); const sig = p.signature; const pub = p.pubkey; const signaturesMatching = verify(raw, sig, pub); return !!signaturesMatching; - }; + } - submitP(peering:DBPeer, eraseIfAlreadyRecorded = false, cautious = true, acceptNonWoT = false): Promise<PeerDTO> { + submitP( + peering: DBPeer, + eraseIfAlreadyRecorded = false, + cautious = true, + acceptNonWoT = false + ): Promise<PeerDTO> { // Force usage of local currency name, do not accept other currencies documents peering.currency = this.conf.currency || peering.currency; - this.logger.info('[' + this.server.conf.pair.pub.substr(0,8) + '] ⬇ PEER %s', peering.pubkey.substr(0, 8), peering.block.substr(0, 8)) - let thePeerDTO = PeerDTO.fromJSONObject(peering) - let thePeer = thePeerDTO.toDBPeer() - let sp = thePeer.block.split('-'); + this.logger.info( + "[" + this.server.conf.pair.pub.substr(0, 8) + "] ⬇ PEER %s", + peering.pubkey.substr(0, 8), + peering.block.substr(0, 8) + ); + let thePeerDTO = PeerDTO.fromJSONObject(peering); + let thePeer = thePeerDTO.toDBPeer(); + let sp = thePeer.block.split("-"); const blockNumber = parseInt(sp[0]); let blockHash = sp[1]; let sigTime = 0; - let block:DBBlock | null; + let block: DBBlock | null; let makeCheckings = cautious || cautious === undefined; - const hash = thePeerDTO.getHash() + const hash = thePeerDTO.getHash(); return this.fifoPromiseHandler.pushFIFOPromise<PeerDTO>(hash, async () => { try { // First: let's make a cleanup of old peers - await cleanLongDownPeers(this.server, Date.now()) + await cleanLongDownPeers(this.server, Date.now());
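Editor's note: the admission checks below run in a fixed order: a valid detached signature first, then the node's own key, WS2P privileged or preferred nodes, the membership check, and finally non-WoT peers only while the configured quota (conf.nonWoTPeersLimit) still has room. A condensed, hedged sketch of that decision (the interface and function names are hypothetical, for illustration only):

interface PeerAdmissionView {
  isOwnKey: boolean;                // document issued with our own pubkey
  isPrivilegedOrPreferred: boolean; // listed in the ws2p privileged/preferred nodes
  isMember: boolean;                // result of the dal.isMember() check below
  nonWoTCount: number;              // non-WoT peers currently stored
  nonWoTLimit: number;              // conf.nonWoTPeersLimit
}

function admitsPeer(v: PeerAdmissionView): boolean {
  if (v.isOwnKey || v.isPrivilegedOrPreferred || v.isMember) return true;
  // Non-WoT peers are only accepted while there is room left in the quota.
  return v.nonWoTCount < v.nonWoTLimit;
}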
if (makeCheckings) { - let goodSignature = this.checkPeerSignature(thePeerDTO) + let goodSignature = this.checkPeerSignature(thePeerDTO); if (!goodSignature) { - throw 'Signature from a peer must match'; + throw "Signature from a peer must match"; } } // We accept peer documents up to 100 entries, then only member or specific peers are accepted - let isNonWoT = false + let isNonWoT = false; if (!acceptNonWoT) { // Of course we accept our own key if (peering.pubkey !== this.conf.pair.pub) { // As well as preferred/privileged nodes - const isInPrivileged = this.conf.ws2p - && this.conf.ws2p.privilegedNodes - && this.conf.ws2p.privilegedNodes.length - && this.conf.ws2p.privilegedNodes.indexOf(peering.pubkey) !== -1 - const isInPrefered = this.conf.ws2p - && this.conf.ws2p.preferedNodes - && this.conf.ws2p.preferedNodes.length - && this.conf.ws2p.preferedNodes.indexOf(peering.pubkey) !== -1 + const isInPrivileged = + this.conf.ws2p && + this.conf.ws2p.privilegedNodes && + this.conf.ws2p.privilegedNodes.length && + this.conf.ws2p.privilegedNodes.indexOf(peering.pubkey) !== -1; + const isInPrefered = + this.conf.ws2p && + this.conf.ws2p.preferedNodes && + this.conf.ws2p.preferedNodes.length && + this.conf.ws2p.preferedNodes.indexOf(peering.pubkey) !== -1; if (!isInPrefered && !isInPrivileged) { // We also accept all members - const isMember = await this.dal.isMember(this.conf.pair.pub) + const isMember = await this.dal.isMember(this.conf.pair.pub); if (!isMember) { - isNonWoT = true + isNonWoT = true; // Then as long as we have some room, we accept peers - const hasEnoughRoom = (await this.dal.peerDAL.countNonWoTPeers()) < this.conf.nonWoTPeersLimit + const hasEnoughRoom = + (await this.dal.peerDAL.countNonWoTPeers()) < + this.conf.nonWoTPeersLimit; if (!hasEnoughRoom) { - throw Error(DataErrors[DataErrors.PEER_REJECTED]) + throw Error(DataErrors[DataErrors.PEER_REJECTED]); } } } @@ -140,130 +157,179 @@ export class PeeringService { if (thePeer.block == constants.PEER.SPECIAL_BLOCK) { thePeer.block = constants.PEER.SPECIAL_BLOCK; thePeer.statusTS = 0; - thePeer.status = 'UP'; + thePeer.status = "UP"; } else { - block = await this.dal.getAbsoluteValidBlockInForkWindow(blockNumber, blockHash) + block = await this.dal.getAbsoluteValidBlockInForkWindow( + blockNumber, + blockHash + ); if (!block && makeCheckings) { throw constants.ERROR.PEER.UNKNOWN_REFERENCE_BLOCK; } else if (!block) { thePeer.block = constants.PEER.SPECIAL_BLOCK; thePeer.statusTS = 0; - thePeer.status = 'UP'; + thePeer.status = "UP"; } - const current = await this.dal.getCurrentBlockOrNull() - if (current && ((!block && current.number > CommonConstants.MAX_AGE_OF_PEER_IN_BLOCKS) || (block && current.number - block.number > CommonConstants.MAX_AGE_OF_PEER_IN_BLOCKS))) { - throw Error(DataErrors[DataErrors.TOO_OLD_PEER]) + const current = await this.dal.getCurrentBlockOrNull(); + if ( + current && + ((!block && + current.number > CommonConstants.MAX_AGE_OF_PEER_IN_BLOCKS) || + (block && + current.number - block.number > + CommonConstants.MAX_AGE_OF_PEER_IN_BLOCKS)) + ) { + throw Error(DataErrors[DataErrors.TOO_OLD_PEER]); } } sigTime = block ?
block.medianTime : 0; thePeer.statusTS = sigTime; let found = await this.dal.getPeerOrNull(thePeer.pubkey); - let peerEntityOld = PeerDTO.fromJSONObject(found || thePeer) - if(!found && thePeerDTO.endpoints.length === 0){ - throw 'Peer with zero endpoints that is not already known' - } - else if(found){ + let peerEntityOld = PeerDTO.fromJSONObject(found || thePeer); + if (!found && thePeerDTO.endpoints.length === 0) { + throw "Peer with zero endpoints that is not already known"; + } else if (found) { // Already existing peer - const sp2 = found.block.split('-'); + const sp2 = found.block.split("-"); const previousBlockNumber = parseInt(sp2[0]); - const interfacesChanged = thePeerDTO.endpointSum() != peerEntityOld.endpointSum() - const isOutdatedDocument = blockNumber < previousBlockNumber && !eraseIfAlreadyRecorded; - const isAlreadyKnown = blockNumber == previousBlockNumber && !eraseIfAlreadyRecorded; - if (isOutdatedDocument){ - const error = Underscore.extend({}, constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE); + const interfacesChanged = + thePeerDTO.endpointSum() != peerEntityOld.endpointSum(); + const isOutdatedDocument = + blockNumber < previousBlockNumber && !eraseIfAlreadyRecorded; + const isAlreadyKnown = + blockNumber == previousBlockNumber && !eraseIfAlreadyRecorded; + if (isOutdatedDocument) { + const error = Underscore.extend( + {}, + constants.ERRORS.NEWER_PEER_DOCUMENT_AVAILABLE + ); Underscore.extend(error.uerr, { peer: found }); throw error; } else if (isAlreadyKnown) { throw constants.ERRORS.PEER_DOCUMENT_ALREADY_KNOWN; } - peerEntityOld = PeerDTO.fromJSONObject(found) + peerEntityOld = PeerDTO.fromJSONObject(found); if (interfacesChanged) { // Warns the old peer of the change const caster = new Multicaster(); - caster.sendPeering(PeerDTO.fromJSONObject(peerEntityOld), PeerDTO.fromJSONObject(thePeer)) + caster.sendPeering( + PeerDTO.fromJSONObject(peerEntityOld), + PeerDTO.fromJSONObject(thePeer) + ); } - peerEntityOld.version = thePeer.version - peerEntityOld.currency = thePeer.currency - peerEntityOld.pubkey = thePeer.pubkey - peerEntityOld.endpoints = thePeer.endpoints - peerEntityOld.status = thePeer.status - peerEntityOld.signature = thePeer.signature - peerEntityOld.blockstamp = thePeer.block + peerEntityOld.version = thePeer.version; + peerEntityOld.currency = thePeer.currency; + peerEntityOld.pubkey = thePeer.pubkey; + peerEntityOld.endpoints = thePeer.endpoints; + peerEntityOld.status = thePeer.status; + peerEntityOld.signature = thePeer.signature; + peerEntityOld.blockstamp = thePeer.block; } // Set the peer as UP again - const peerEntity = peerEntityOld.toDBPeer() - peerEntity.statusTS = thePeer.statusTS - peerEntity.block = thePeer.block - peerEntity.status = 'UP'; + const peerEntity = peerEntityOld.toDBPeer(); + peerEntity.statusTS = thePeer.statusTS; + peerEntity.block = thePeer.block; + peerEntity.status = "UP"; peerEntity.first_down = null; peerEntity.last_try = null; - peerEntity.hash = peerEntityOld.getHash() + peerEntity.hash = peerEntityOld.getHash(); peerEntity.raw = peerEntityOld.getRaw(); - peerEntity.nonWoT = isNonWoT - peerEntity.lastContact = Math.floor(Date.now() / 1000) + peerEntity.nonWoT = isNonWoT; + peerEntity.lastContact = Math.floor(Date.now() / 1000); await this.dal.savePeer(peerEntity); - this.logger.info('[' + this.server.conf.pair.pub.substr(0,8) + '] ✔ PEER %s', peering.pubkey.substr(0, 8), peerEntity.block.substr(0, 8)) - let savedPeer = PeerDTO.fromJSONObject(peerEntity).toDBPeer() + this.logger.info( + "[" + 
this.server.conf.pair.pub.substr(0, 8) + "] ✔ PEER %s", + peering.pubkey.substr(0, 8), + peerEntity.block.substr(0, 8) + ); + let savedPeer = PeerDTO.fromJSONObject(peerEntity).toDBPeer(); if (peerEntity.pubkey == this.selfPubkey) { - const localEndpoints = await this.server.getEndpoints() - const localNodeNotListed = !peerEntityOld.containsAllEndpoints(localEndpoints) - const current = localNodeNotListed && (await this.dal.getCurrentBlockOrNull()); - if (localNodeNotListed && (!current || current.number > blockNumber)) { + const localEndpoints = await this.server.getEndpoints(); + const localNodeNotListed = !peerEntityOld.containsAllEndpoints( + localEndpoints + ); + const current = + localNodeNotListed && (await this.dal.getCurrentBlockOrNull()); + if ( + localNodeNotListed && + (!current || current.number > blockNumber) + ) { // Document with pubkey of local peer, but doesn't contain local interface: we must add it this.generateSelfPeer(this.conf); } else { this.peerInstance = peerEntity; } } - return PeerDTO.fromDBPeer(savedPeer) + return PeerDTO.fromDBPeer(savedPeer); } catch (e) { - throw e + throw e; } - }) + }); } - handleNewerPeer(pretendedNewer:DBPeer) { - logger.debug('Applying pretended newer peer document %s/%s', pretendedNewer.block); - return this.server.writePeer(pretendedNewer) + handleNewerPeer(pretendedNewer: DBPeer) { + logger.debug( + "Applying pretended newer peer document %s", + pretendedNewer.block + ); + return this.server.writePeer(pretendedNewer); } - async generateSelfPeer(theConf:{ currency: string }, signalTimeInterval = 0): Promise<DBPeer|null> { + async generateSelfPeer( + theConf: { currency: string }, + signalTimeInterval = 0 + ): Promise<DBPeer | null> { const current = await this.server.dal.getCurrentBlockOrNull(); const currency = theConf.currency || constants.DEFAULT_CURRENCY_NAME; const peers = await this.dal.findPeers(this.selfPubkey); let p1 = { version: constants.DOCUMENTS_VERSION, currency: currency, - block: '0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855', - endpoints: <string[]>[] + block: + "0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855", + endpoints: <string[]>[], }; - const currentSelfPeer = peers[0] + const currentSelfPeer = peers[0]; if (peers.length != 0 && currentSelfPeer) { - p1 = currentSelfPeer - p1.version = constants.DOCUMENTS_VERSION - p1.currency = currency + p1 = currentSelfPeer; + p1.version = constants.DOCUMENTS_VERSION; + p1.currency = currency; } - const localEndpoints = await this.server.getEndpoints() - const otherPotentialEndpoints = this.getOtherEndpoints(p1.endpoints, localEndpoints) - logger.info('Sibling endpoints:', otherPotentialEndpoints); - const wrongEndpoints = await this.server.getWrongEndpoints(otherPotentialEndpoints) + const localEndpoints = await this.server.getEndpoints(); + const otherPotentialEndpoints = this.getOtherEndpoints( + p1.endpoints, + localEndpoints + ); + logger.info("Sibling endpoints:", otherPotentialEndpoints); + const wrongEndpoints = await this.server.getWrongEndpoints( + otherPotentialEndpoints + ); for (const wrong of wrongEndpoints) { - logger.warn('Wrong endpoint \'%s\'', wrong) + logger.warn("Wrong endpoint '%s'", wrong); } - const toRemoveByConf = (this.conf.rmEndpoints || []) - let toConserve = otherPotentialEndpoints.filter(ep => wrongEndpoints.indexOf(ep) === -1 && toRemoveByConf.indexOf(ep) === -1) + const toRemoveByConf = this.conf.rmEndpoints || [];
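Editor's note: generateSelfPeer reconciles several endpoint sources: endpoints declared in the previous self peer record, endpoints the node currently serves, endpoints reported as wrong, and endpoints force-added or force-removed through configuration. A hedged sketch of the resulting set arithmetic (hypothetical helper name; the filter below does the same with indexOf checks and Underscore.uniq):

function reconcileEndpoints(
  declared: string[], // endpoints from the previous self peer record
  local: string[],    // endpoints this node currently serves
  wrong: string[],    // endpoints the server reports as wrong
  removed: string[],  // conf.rmEndpoints
  forced: string[]    // conf.endpoints
): string[] {
  const conserved = declared
    .filter((ep) => local.indexOf(ep) === -1) // keep only non-local leftovers
    .filter((ep) => wrong.indexOf(ep) === -1 && removed.indexOf(ep) === -1);
  return Array.from(new Set(local.concat(conserved, forced)));
}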
+ let toConserve = otherPotentialEndpoints.filter( + (ep) => + wrongEndpoints.indexOf(ep) === -1 && toRemoveByConf.indexOf(ep) === -1 + ); if (!currency) { - logger.error('It seems there is an issue with your configuration.'); - logger.error('Please restart your node with:'); - logger.error('$ duniter restart'); - return null + logger.error("It seems there is an issue with your configuration."); + logger.error("Please restart your node with:"); + logger.error("$ duniter restart"); + return null; } - const endpointsToDeclare = localEndpoints.concat(toConserve).concat(this.conf.endpoints || []) - if (currentSelfPeer && endpointsToDeclare.length === 0 && currentSelfPeer.endpoints.length === 0) { + const endpointsToDeclare = localEndpoints + .concat(toConserve) + .concat(this.conf.endpoints || []); + if ( + currentSelfPeer && + endpointsToDeclare.length === 0 && + currentSelfPeer.endpoints.length === 0 + ) { /********************* * Keep peer document *********************/ - return currentSelfPeer + return currentSelfPeer; } else { /********************* * Renew peer document @@ -272,51 +338,65 @@ export class PeeringService { let minBlock = current ? current.number - 30 : 0; if (p1) { // But if we already have a peer record within this distance, we need to take the next block after it - minBlock = Math.max(minBlock, parseInt(p1.block.split('-')[0], 10) + 1); + minBlock = Math.max(minBlock, parseInt(p1.block.split("-")[0], 10) + 1); } // The number cannot be greater than the current block minBlock = Math.min(minBlock, current ? current.number : minBlock);
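Editor's note: the arithmetic just above clamps the renewed document's reference block from both sides. A worked, hedged sketch of the same logic (hypothetical helper name):

function pickSelfPeerBlockNumber(
  currentNumber: number | null,  // head of the local chain, if any
  previousNumber: number | null  // block number of the previous self peer record
): number {
  let minBlock = currentNumber !== null ? currentNumber - 30 : 0;
  if (previousNumber !== null) {
    // A renewed document must reference a strictly newer block than before.
    minBlock = Math.max(minBlock, previousNumber + 1);
  }
  // ...but it can never reference a block beyond the current head.
  return currentNumber !== null ? Math.min(minBlock, currentNumber) : minBlock;
}

For example, with the head at block 1000 and a previous record based on block 998, this yields max(970, 999) = 999, which the final clamp leaves untouched since 999 is not above 1000.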
- const targetBlockstamp: string|null = await this.server.dal.getBlockstampOf(minBlock) - const p2:any = { + const targetBlockstamp: + | string + | null = await this.server.dal.getBlockstampOf(minBlock); + const p2: any = { version: constants.DOCUMENTS_VERSION, currency: currency, pubkey: this.selfPubkey, - block: targetBlockstamp ? targetBlockstamp : constants.PEER.SPECIAL_BLOCK, - endpoints: Underscore.uniq(endpointsToDeclare) + block: targetBlockstamp + ? targetBlockstamp + : constants.PEER.SPECIAL_BLOCK, + endpoints: Underscore.uniq(endpointsToDeclare), }; const raw2 = dos2unix(PeerDTO.fromJSONObject(p2).getRaw()); - const bmaAccess = PeerDTO.fromJSONObject(p2).getURL() + const bmaAccess = PeerDTO.fromJSONObject(p2).getURL(); if (bmaAccess) { - logger.info('BMA access:', bmaAccess) + logger.info("BMA access:", bmaAccess); } - const ws2pAccess = PeerDTO.fromJSONObject(p2).getFirstNonTorWS2P() + const ws2pAccess = PeerDTO.fromJSONObject(p2).getFirstNonTorWS2P(); if (ws2pAccess) { - logger.info(`WS2P access: ${ws2pAccess.host} :${ws2pAccess.port}${ws2pAccess.path && ' ' + ws2pAccess.path || ''}`) + logger.info( + `WS2P access: ${ws2pAccess.host} :${ws2pAccess.port}${ + (ws2pAccess.path && " " + ws2pAccess.path) || "" + }` + ); } - logger.debug('Generating server\'s peering entry based on block#%s...', p2.block.split('-')[0]); + logger.debug( + "Generating server's peering entry based on block#%s...", + p2.block.split("-")[0] + ); p2.signature = await this.server.sign(raw2); p2.pubkey = this.selfPubkey; // Remember this is now local peer value this.peerInstance = p2; try { // Submit & share with the network - await this.server.writePeer(p2) + await this.server.writePeer(p2); } catch (e) { - logger.error(e) + logger.error(e); } const selfPeer = await this.dal.getPeer(this.selfPubkey); // Set peer's status to UP await this.peer(selfPeer); this.server.streamPush(selfPeer); if (signalTimeInterval) { - logger.info("Next peering signal in %s min", signalTimeInterval / 1000 / 60) + logger.info( + "Next peering signal in %s min", + signalTimeInterval / 1000 / 60 + ); } - return selfPeer + return selfPeer; } } - private getOtherEndpoints(endpoints:string[], localEndpoints:string[]) { - return endpoints.filter((ep) => localEndpoints.indexOf(ep) === -1) + private getOtherEndpoints(endpoints: string[], localEndpoints: string[]) { + return endpoints.filter((ep) => localEndpoints.indexOf(ep) === -1); } } diff --git a/app/service/TransactionsService.ts b/app/service/TransactionsService.ts index a343b625b13e6756e365b1ed7e422ea0bad7cdaf..ec67cb29b7755b620a33ccad13424f94177aa3dc 100644 --- a/app/service/TransactionsService.ts +++ b/app/service/TransactionsService.ts @@ -11,69 +11,97 @@ // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU Affero General Public License for more details.
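Editor's note on the PeeringService change that ends above: once the renewed document p2 is assembled, publication is strictly sequential. The raw document is normalized and signed, written locally (which also shares it with the network), and the stored copy is then re-read and streamed. A hedged sketch of that order (hypothetical interface; the real code re-reads the record through dal.getPeer before streaming, which this sketch condenses):

interface SelfPeerPublisher {
  sign(raw: string): Promise<string>;
  writePeer(peer: unknown): Promise<unknown>;
  streamPush(peer: unknown): void;
}

async function publishSelfPeer(
  server: SelfPeerPublisher,
  p2: { signature?: string },
  raw: string
): Promise<void> {
  p2.signature = await server.sign(raw); // sign the raw (unsigned) document
  try {
    await server.writePeer(p2); // submit & share with the network
  } catch (e) {
    console.error(e); // a failed local write is logged, not fatal
  }
  server.streamPush(p2); // finally, push the record to listeners
}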
-import {ConfDTO} from "../lib/dto/ConfDTO"; -import {FileDAL} from "../lib/dal/fileDAL"; -import {TransactionDTO} from "../lib/dto/TransactionDTO"; -import {LOCAL_RULES_HELPERS} from "../lib/rules/local_rules"; -import {GLOBAL_RULES_HELPERS} from "../lib/rules/global_rules"; -import {FIFOService} from "./FIFOService"; -import {GlobalFifoPromise} from "./GlobalFifoPromise"; -import {DataErrors} from "../lib/common-libs/errors" -import {DBTx} from "../lib/db/DBTx" +import { ConfDTO } from "../lib/dto/ConfDTO"; +import { FileDAL } from "../lib/dal/fileDAL"; +import { TransactionDTO } from "../lib/dto/TransactionDTO"; +import { LOCAL_RULES_HELPERS } from "../lib/rules/local_rules"; +import { GLOBAL_RULES_HELPERS } from "../lib/rules/global_rules"; +import { FIFOService } from "./FIFOService"; +import { GlobalFifoPromise } from "./GlobalFifoPromise"; +import { DataErrors } from "../lib/common-libs/errors"; +import { DBTx } from "../lib/db/DBTx"; -const constants = require('../lib/constants'); +const constants = require("../lib/constants"); export class TransactionService extends FIFOService { - - constructor(fifoPromiseHandler:GlobalFifoPromise) { - super(fifoPromiseHandler) + constructor(fifoPromiseHandler: GlobalFifoPromise) { + super(fifoPromiseHandler); } - conf:ConfDTO - dal:FileDAL - logger:any + conf: ConfDTO; + dal: FileDAL; + logger: any; - setConfDAL(newConf:ConfDTO, newDAL:FileDAL) { + setConfDAL(newConf: ConfDTO, newDAL: FileDAL) { this.dal = newDAL; this.conf = newConf; - this.logger = require('../lib/logger').NewLogger(this.dal.profile); + this.logger = require("../lib/logger").NewLogger(this.dal.profile); } - processTx(txObj:any) { - const tx = TransactionDTO.fromJSONObject(txObj, this.conf.currency) - const hash = tx.getHash() + processTx(txObj: any) { + const tx = TransactionDTO.fromJSONObject(txObj, this.conf.currency); + const hash = tx.getHash(); return this.pushFIFO<TransactionDTO>(hash, async () => { try { - this.logger.info('⬇ TX %s:%s from %s', tx.output_amount, tx.output_base, tx.issuers); + this.logger.info( + "⬇ TX %s:%s from %s", + tx.output_amount, + tx.output_base, + tx.issuers + ); const existing = await this.dal.getTxByHash(tx.hash); const current = await this.dal.getCurrentBlockOrNull(); if (!current) { - throw Error(DataErrors[DataErrors.NO_TRANSACTION_POSSIBLE_IF_NOT_CURRENT_BLOCK]) + throw Error( + DataErrors[DataErrors.NO_TRANSACTION_POSSIBLE_IF_NOT_CURRENT_BLOCK] + ); } if (existing) { throw constants.ERRORS.TX_ALREADY_PROCESSED; } // Start checks... 
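// Editor's note: the checks below run in this order (descriptive summary, not part of the patch):
//   1. local rules:   LOCAL_RULES_HELPERS.checkSingleTransactionLocally (intrinsic validity)
//   2. blockstamp:    GLOBAL_RULES_HELPERS.checkTxBlockStamp (the referenced block must exist)
//   3. global rules:  checkSingleTransaction, evaluated at medianTime + 1 (the fakeTimeVariation below)
//   4. sandbox quota: txsDAL.sandbox.acceptNewSandBoxEntry, keyed by issuers and output amount
// Only when all four pass is the transaction persisted through saveTransaction().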
const fakeTimeVariation = current.medianTime + 1; - const dto = TransactionDTO.fromJSONObject(tx) - await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto, this.conf) + const dto = TransactionDTO.fromJSONObject(tx); + await LOCAL_RULES_HELPERS.checkSingleTransactionLocally(dto, this.conf); await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal); - await GLOBAL_RULES_HELPERS.checkSingleTransaction(dto, current.version, fakeTimeVariation, this.conf, this.dal, this.dal.getTxByHash.bind(this.dal)); + await GLOBAL_RULES_HELPERS.checkSingleTransaction( + dto, + current.version, + fakeTimeVariation, + this.conf, + this.dal, + this.dal.getTxByHash.bind(this.dal) + ); const server_pubkey = this.conf.pair && this.conf.pair.pub; - if (!(await this.dal.txsDAL.sandbox.acceptNewSandBoxEntry({ - issuers: tx.issuers, - output_base: tx.output_base, - output_amount: tx.output_amount - }, server_pubkey))) { + if ( + !(await this.dal.txsDAL.sandbox.acceptNewSandBoxEntry( + { + issuers: tx.issuers, + output_base: tx.output_base, + output_amount: tx.output_amount, + }, + server_pubkey + )) + ) { throw constants.ERRORS.SANDBOX_FOR_TRANSACTION_IS_FULL; } await this.dal.saveTransaction(DBTx.fromTransactionDTO(tx)); - this.logger.info('✔ TX %s:%s from %s', tx.output_amount, tx.output_base, tx.issuers); + this.logger.info( + "✔ TX %s:%s from %s", + tx.output_amount, + tx.output_base, + tx.issuers + ); return tx; } catch (e) { - this.logger.info('✘ TX %s:%s from %s', tx.output_amount, tx.output_base, tx.issuers); + this.logger.info( + "✘ TX %s:%s from %s", + tx.output_amount, + tx.output_base, + tx.issuers + ); throw e; } - }) + }); } }
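Editor's closing note: each service in this patch exposes the same submission contract: parse the raw object into a DTO, queue the work on the hash-keyed FIFO, and either resolve with the accepted DTO or reject with a typed error. A hedged usage sketch for the transaction path (the wiring and import paths are hypothetical; conf and dal stand for already-initialized instances):

import { GlobalFifoPromise } from "./app/service/GlobalFifoPromise";
import { TransactionService } from "./app/service/TransactionsService";

async function submitRawTx(conf: any, dal: any, txObj: any): Promise<void> {
  const service = new TransactionService(new GlobalFifoPromise());
  service.setConfDAL(conf, dal); // wires the DAL, the conf and the logger

  try {
    const accepted = await service.processTx(txObj);
    console.log("transaction accepted:", accepted.getHash());
  } catch (e: any) {
    // e.g. TX_ALREADY_PROCESSED or SANDBOX_FOR_TRANSACTION_IS_FULL
    console.error("transaction rejected:", (e && e.uerr && e.uerr.message) || e);
  }
}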