diff --git a/.eslintignore b/.eslintignore index 3d2384b6a4b301d0aa2778b44925c3396a1a11e1..7cb2847b69c168e871626a8b20b2d23c9f091321 100644 --- a/.eslintignore +++ b/.eslintignore @@ -23,5 +23,7 @@ app/modules/daemon.js app/modules/export-bc.js app/modules/check-config.js app/modules/config.js +app/modules/prover/*.js +app/modules/prover/lib/*.js test/*.js test/**/*.js \ No newline at end of file diff --git a/.gitignore b/.gitignore index 9878ba45de545a644271f8ff2beb8b5e5fc34022..eee24e686cab5d5729b4ba12158f71702abc9ff2 100644 --- a/.gitignore +++ b/.gitignore @@ -51,6 +51,8 @@ app/lib/rules/*.js* app/lib/logger*js* app/service/*.js* app/lib/wot.js* +app/modules/prover/*.js* +app/modules/prover/lib/*.js* app/modules/router*.js* app/modules/wizard.js* app/modules/revert.js* diff --git a/app/lib/computation/BlockchainContext.ts b/app/lib/computation/BlockchainContext.ts index c192e8cebcba225911bd17af55fd06043e251516..82a6c6ecd7f552a75025c556a05b8999ca7e4855 100644 --- a/app/lib/computation/BlockchainContext.ts +++ b/app/lib/computation/BlockchainContext.ts @@ -60,7 +60,7 @@ export class BlockchainContext { * Gets a copy of vHEAD, extended with some extra properties. * @param props The extra properties to add. */ - async getvHeadCopy(props: any): Promise<any> { + async getvHeadCopy(props: any = {}): Promise<any> { if (!this.vHEAD) { await this.refreshHead(); } diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts index 074cce036df04e95790d33d31f0f96b7e54ae2e6..7965bb326be8555fa8b720016cd94954ad60a3bb 100644 --- a/app/lib/dto/ConfDTO.ts +++ b/app/lib/dto/ConfDTO.ts @@ -10,7 +10,13 @@ export class ConfDTO { public currency: string, public endpoints: string[], public rmEndpoints: string[], + public rootoffset: number, public upInterval: number, + public cpu: number, + public nbCores: number, + public prefix: number, + public powSecurityRetryDelay: number, + public powMaxHandicap: number, public c: number, public dt: number, public dtReeval: number, @@ -46,6 +52,6 @@ export class ConfDTO { ) {} static mock() { - return new ConfDTO("", "", [], [], 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, 0, false, 0, 0, 0, 0, 0, null, 0, "", "", "") + return new ConfDTO("", "", [], [], 0, 0, 0.6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, 0, false, 0, 0, 0, 0, 0, null, 0, "", "", "") } } \ No newline at end of file diff --git a/app/lib/rules/global_rules.ts b/app/lib/rules/global_rules.ts index 0a6fdc2491bb5deb0eb6a9ba4b4c6a1da2fa8298..2937013fb76a571271d37428c3c5bab9f637adae 100644 --- a/app/lib/rules/global_rules.ts +++ b/app/lib/rules/global_rules.ts @@ -1,13 +1,10 @@ "use strict"; -import {BlockDTO} from "../dto/BlockDTO" import {ConfDTO} from "../dto/ConfDTO" import {FileDAL} from "../dal/fileDAL" import {DBBlock} from "../db/DBBlock" -import {DBIdentity} from "../dal/sqliteDAL/IdentityDAL" import {TransactionDTO} from "../dto/TransactionDTO" import * as local_rules from "./local_rules" -const co = require('co'); const _ = require('underscore'); const common = require('duniter-common'); const indexer = require('../indexer').Indexer @@ -158,9 +155,13 @@ export const GLOBAL_RULES_HELPERS = { // Functions used in an external context too checkMembershipBlock: (ms:any, current:DBBlock, conf:ConfDTO, dal:FileDAL) => checkMSTarget(ms, current ? { number: current.number + 1} : { number: 0 }, conf, dal), - checkCertificationIsValid: (cert:any, current:DBBlock, findIdtyFunc:any, conf:ConfDTO, dal:FileDAL) => checkCertificationIsValid(current ? 
current : { number: 0 }, cert, findIdtyFunc, conf, dal), + checkCertificationIsValid: (cert:any, current:DBBlock, findIdtyFunc:any, conf:ConfDTO, dal:FileDAL) => { + return checkCertificationIsValid(current ? current : { number: 0, currency: '' }, cert, findIdtyFunc, conf, dal) + }, - checkCertificationIsValidForBlock: (cert:any, block:BlockDTO, idty:DBIdentity, conf:ConfDTO, dal:FileDAL) => checkCertificationIsValid(block, cert, () => idty, conf, dal), + checkCertificationIsValidForBlock: (cert:any, block:{ number:number, currency:string }, findIdtyFunc:(b:{ number:number, currency:string }, pubkey:string, dal:FileDAL) => Promise<any>, conf:ConfDTO, dal:FileDAL) => { + return checkCertificationIsValid(block, cert, findIdtyFunc, conf, dal) + }, isOver3Hops: async (member:any, newLinks:any, newcomers:string[], current:DBBlock, conf:ConfDTO, dal:FileDAL) => { if (!current) { @@ -177,7 +178,7 @@ export const GLOBAL_RULES_HELPERS = { checkExistsPubkey: (pub:string, dal:FileDAL) => dal.getWrittenIdtyByPubkey(pub), - checkSingleTransaction: (tx:TransactionDTO, block:{ medianTime: number }, conf:ConfDTO, dal:FileDAL, alsoCheckPendingTransactions:boolean) => GLOBAL_RULES_FUNCTIONS.checkSourcesAvailability({ + checkSingleTransaction: (tx:TransactionDTO, block:{ medianTime: number }, conf:ConfDTO, dal:FileDAL, alsoCheckPendingTransactions:boolean = false) => GLOBAL_RULES_FUNCTIONS.checkSourcesAvailability({ transactions: [tx], medianTime: block.medianTime }, conf, dal, alsoCheckPendingTransactions), @@ -228,7 +229,7 @@ async function checkMSTarget (ms:any, block:any, conf:ConfDTO, dal:FileDAL) { } } -async function checkCertificationIsValid (block:any, cert:any, findIdtyFunc:any, conf:ConfDTO, dal:FileDAL) { +async function checkCertificationIsValid (block:{ number:number, currency:string }, cert:any, findIdtyFunc:(b:{ number:number, currency:string }, pubkey:string, dal:FileDAL) => Promise<any>, conf:ConfDTO, dal:FileDAL) { if (block.number == 0 && cert.block_number != 0) { throw Error('Number must be 0 for root block\'s certifications'); } else { @@ -250,10 +251,7 @@ async function checkCertificationIsValid (block:any, cert:any, findIdtyFunc:any, throw Error('Certifier must be a member') } } - // TODO: weird call, we cannot just do "await findIdtyFunc(...)". There is a bug somewhere. - let idty = await co(function*() { - return yield findIdtyFunc(block, cert.to, dal) - }) + let idty = await findIdtyFunc(block, cert.to, dal) let current = block.number == 0 ? 
null : await dal.getCurrentBlockOrNull(); if (!idty) { throw Error('Identity does not exist for certified'); diff --git a/app/modules/check-config.ts b/app/modules/check-config.ts index d122c6d63b2e6997ce11627d6a210a5c1e739376..befcdd1e101d32d857384c30eac02cb5ad13c37e 100644 --- a/app/modules/check-config.ts +++ b/app/modules/check-config.ts @@ -1,6 +1,5 @@ const constants = require('../lib/constants'); const wizard = require('../lib/wizard'); -const logger = require('../lib/logger').NewLogger('wizard'); module.exports = { duniter: { @@ -11,6 +10,7 @@ module.exports = { onConfiguredExecute: async (server:any) => { await server.checkConfig() + const logger = require('../lib/logger').NewLogger('wizard') logger.warn('Configuration seems correct.'); } }] diff --git a/app/modules/prover/index.js b/app/modules/prover/index.js index f1079ab869e675ee8548d9ae64647bb82f3ce854..4163dd211d332b3aa0727587915f94f198a08bb8 100644 --- a/app/modules/prover/index.js +++ b/app/modules/prover/index.js @@ -1,204 +1,203 @@ "use strict"; - -const co = require('co'); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const blockGenerator_1 = require("./lib/blockGenerator"); +const constants_1 = require("./lib/constants"); +const blockProver_1 = require("./lib/blockProver"); +const prover_1 = require("./lib/prover"); const async = require('async'); const contacter = require('duniter-crawler').duniter.methods.contacter; const common = require('duniter-common'); -const constants = require('./lib/constants'); -const Prover = require('./lib/prover'); -const blockGenerator = require('./lib/blockGenerator'); -const blockProver = require('./lib/blockProver'); - -const Peer = common.document.Peer - -module.exports = { - - duniter: { - - /*********** Permanent prover **************/ - config: { - onLoading: (conf) => co(function*() { - if (conf.cpu === null || conf.cpu === undefined) { - conf.cpu = constants.DEFAULT_CPU; - } - conf.powSecurityRetryDelay = constants.POW_SECURITY_RETRY_DELAY; - conf.powMaxHandicap = constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP; - }), - beforeSave: (conf) => co(function*() { - delete conf.powSecurityRetryDelay; - delete conf.powMaxHandicap; - }) - }, - - service: { - output: (server, conf, logger) => { - const generator = blockGenerator(server); - server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator) - server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator) - server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator) - return new Prover(server, conf, logger) - } - }, - - methods: { - hookServer: (server) => { - const generator = blockGenerator(server); - server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator) - server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator) - server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator) - }, - blockProver: blockProver, - prover: (server, conf, logger) => new 
Prover(server, conf, logger), - blockGenerator: (server, prover) => blockGenerator(server, prover), - generateTheNextBlock: (server, manualValues) => co(function*() { - const prover = blockProver(server); - const generator = blockGenerator(server, prover); - return generator.nextBlock(manualValues); - }), - generateAndProveTheNext: (server, block, trial, manualValues) => co(function*() { - const prover = blockProver(server); - const generator = blockGenerator(server, prover); - let res = yield generator.makeNextBlock(block, trial, manualValues); - return res - }) - }, - - /*********** CLI gen-next + gen-root **************/ - - cliOptions: [ - {value: '--show', desc: 'With gen-next or gen-root commands, displays the generated block.'}, - {value: '--check', desc: 'With gen-next: just check validity of generated block.'}, - {value: '--at <medianTime>', desc: 'With gen-next --show --check: allows to try in a future time.', parser: parseInt } - ], - - cli: [{ - name: 'gen-next [host] [port] [difficulty]', - desc: 'Tries to generate the next block of the blockchain.', - onDatabaseExecute: (server, conf, program, params) => co(function*() { - const host = params[0]; - const port = params[1]; - const difficulty = params[2]; - const generator = blockGenerator(server, null); - return generateAndSend(program, host, port, difficulty, server, () => generator.nextBlock); - }) - }, { - name: 'gen-root [host] [port] [difficulty]', - desc: 'Tries to generate the next block of the blockchain.', - preventIfRunning: true, - onDatabaseExecute: (server, conf, program, params) => co(function*() { - const host = params[0]; - const port = params[1]; - const difficulty = params[2]; - const generator = blockGenerator(server, null); - let toDelete, catched = true; - do { - try { - yield generateAndSend(program, host, port, difficulty, server, () => generator.nextBlock); - catched = false; - } catch (e) { - toDelete = yield server.dal.idtyDAL.query('SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)'); - console.log('Deleting', toDelete.map(i => i.pubkey)); - yield server.dal.idtyDAL.exec('DELETE FROM idty WHERE pubkey IN (' + toDelete.map(i => "'" + i.pubkey + "'").join(',') + ')'); - yield server.dal.idtyDAL.exec('DELETE FROM cert WHERE `to` IN (' + toDelete.map(i => "'" + i.pubkey + "'").join(',') + ')'); - yield server.dal.idtyDAL.exec('DELETE FROM cert WHERE `from` IN (' + toDelete.map(i => "'" + i.pubkey + "'").join(',') + ')'); - } - } while (catched && toDelete.length); - console.log('Done'); - }) - }, { - name: 'gen-root-choose [host] [port] [difficulty]', - desc: 'Tries to generate root block, with choice of root members.', - preventIfRunning: true, - onDatabaseExecute: (server, conf, program, params, startServices, stopServices) => co(function*() { - const host = params[0]; - const port = params[1]; - const difficulty = params[2]; - if (!host) { - throw 'Host is required.'; - } - if (!port) { - throw 'Port is required.'; - } - if (!difficulty) { - throw 'Difficulty is required.'; - } - const generator = blockGenerator(server, null); - return generateAndSend(program, host, port, difficulty, server, () => generator.manualRoot); - }) - }] - } -} - +const Peer = common.document.Peer; +exports.ProverDependency = { + duniter: { + /*********** Permanent prover **************/ + config: { + onLoading: (conf) => __awaiter(this, void 0, void 0, function* () { + if (conf.cpu === null || conf.cpu === undefined) { + conf.cpu = constants_1.Constants.DEFAULT_CPU; + } + 
conf.powSecurityRetryDelay = constants_1.Constants.POW_SECURITY_RETRY_DELAY; + conf.powMaxHandicap = constants_1.Constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP; + }), + beforeSave: (conf) => __awaiter(this, void 0, void 0, function* () { + delete conf.powSecurityRetryDelay; + delete conf.powMaxHandicap; + }) + }, + service: { + output: (server) => { + const generator = new blockGenerator_1.BlockGenerator(server); + server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator); + server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator); + server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator); + return new prover_1.Prover(server); + } + }, + methods: { + hookServer: (server) => { + const generator = new blockGenerator_1.BlockGenerator(server); + server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator); + server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator); + server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator); + }, + prover: (server, conf, logger) => new prover_1.Prover(server), + blockGenerator: (server, prover) => new blockGenerator_1.BlockGeneratorWhichProves(server, prover), + generateTheNextBlock: (server, manualValues) => __awaiter(this, void 0, void 0, function* () { + const prover = new blockProver_1.BlockProver(server); + const generator = new blockGenerator_1.BlockGeneratorWhichProves(server, prover); + return generator.nextBlock(manualValues); + }), + generateAndProveTheNext: (server, block, trial, manualValues) => __awaiter(this, void 0, void 0, function* () { + const prover = new blockProver_1.BlockProver(server); + const generator = new blockGenerator_1.BlockGeneratorWhichProves(server, prover); + let res = yield generator.makeNextBlock(block, trial, manualValues); + return res; + }) + }, + /*********** CLI gen-next + gen-root **************/ + cliOptions: [ + { value: '--show', desc: 'With gen-next or gen-root commands, displays the generated block.' }, + { value: '--check', desc: 'With gen-next: just check validity of generated block.' 
}, + { value: '--at <medianTime>', desc: 'With gen-next --show --check: allows to try in a future time.', parser: parseInt } + ], + cli: [{ + name: 'gen-next [host] [port] [difficulty]', + desc: 'Tries to generate the next block of the blockchain.', + onDatabaseExecute: (server, conf, program, params) => __awaiter(this, void 0, void 0, function* () { + const host = params[0]; + const port = params[1]; + const difficulty = params[2]; + const generator = new blockGenerator_1.BlockGeneratorWhichProves(server, null); + return generateAndSend(program, host, port, difficulty, server, () => generator.nextBlock); + }) + }, { + name: 'gen-root [host] [port] [difficulty]', + desc: 'Tries to generate the next block of the blockchain.', + preventIfRunning: true, + onDatabaseExecute: (server, conf, program, params) => __awaiter(this, void 0, void 0, function* () { + const host = params[0]; + const port = params[1]; + const difficulty = params[2]; + const generator = new blockGenerator_1.BlockGeneratorWhichProves(server, null); + let toDelete, catched = true; + do { + try { + yield generateAndSend(program, host, port, difficulty, server, () => generator.nextBlock); + catched = false; + } + catch (e) { + toDelete = yield server.dal.idtyDAL.query('SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)'); + console.log('Deleting', toDelete.map((i) => i.pubkey)); + yield server.dal.idtyDAL.exec('DELETE FROM idty WHERE pubkey IN (' + toDelete.map((i) => "'" + i.pubkey + "'").join(',') + ')'); + yield server.dal.idtyDAL.exec('DELETE FROM cert WHERE `to` IN (' + toDelete.map((i) => "'" + i.pubkey + "'").join(',') + ')'); + yield server.dal.idtyDAL.exec('DELETE FROM cert WHERE `from` IN (' + toDelete.map((i) => "'" + i.pubkey + "'").join(',') + ')'); + } + } while (catched && toDelete.length); + console.log('Done'); + }) + }, { + name: 'gen-root-choose [host] [port] [difficulty]', + desc: 'Tries to generate root block, with choice of root members.', + preventIfRunning: true, + onDatabaseExecute: (server, conf, program, params) => __awaiter(this, void 0, void 0, function* () { + const host = params[0]; + const port = params[1]; + const difficulty = params[2]; + if (!host) { + throw 'Host is required.'; + } + if (!port) { + throw 'Port is required.'; + } + if (!difficulty) { + throw 'Difficulty is required.'; + } + const generator = new blockGenerator_1.BlockGenerator(server); + return generateAndSend(program, host, port, difficulty, server, () => generator.manualRoot); + }) + }] + } +}; function generateAndSend(program, host, port, difficulty, server, getGenerationMethod) { - const logger = server.logger; - return new Promise((resolve, reject) => { - async.waterfall([ - function (next) { - const method = getGenerationMethod(server); - co(function*(){ - const simulationValues = {} - if (program.show && program.check) { - if (program.at && !isNaN(program.at)) { - simulationValues.medianTime = program.at + const logger = server.logger; + return new Promise((resolve, reject) => { + async.waterfall([ + function (next) { + const method = getGenerationMethod(server); + (() => __awaiter(this, void 0, void 0, function* () { + const simulationValues = {}; + if (program.show && program.check) { + if (program.at && !isNaN(program.at)) { + simulationValues.medianTime = program.at; + } + } + const block = yield method(null, simulationValues); + next(null, block); + }))(); + }, + function (block, next) { + if (program.check) { + block.time = block.medianTime; + program.show && 
console.log(block.getRawSigned()); + (() => __awaiter(this, void 0, void 0, function* () { + try { + const parsed = common.parsers.parseBlock.syncWrite(block.getRawSigned()); + yield server.BlockchainService.checkBlock(parsed, false); + logger.info('Acceptable block'); + next(); + } + catch (e) { + next(e); + } + }))(); + } + else { + logger.debug('Block to be sent: %s', block.getRawInnerPart()); + async.waterfall([ + function (subNext) { + proveAndSend(program, server, block, server.conf.pair.pub, parseInt(difficulty), host, parseInt(port), subNext); + } + ], next); + } } - } - const block = yield method(null, simulationValues); - next(null, block); + ], (err, data) => { + err && reject(err); + !err && resolve(data); }); - }, - function (block, next) { - if (program.check) { - block.time = block.medianTime; - program.show && console.log(block.getRawSigned()); - co(function*(){ - try { - const parsed = common.parsers.parseBlock.syncWrite(block.getRawSigned()); - yield server.BlockchainService.checkBlock(parsed, false); - logger.info('Acceptable block'); - next(); - } catch (e) { - next(e); - } - }); - } - else { - logger.debug('Block to be sent: %s', block.getRawInnerPart()); - async.waterfall([ - function (subNext) { - proveAndSend(program, server, block, server.conf.pair.pub, parseInt(difficulty), host, parseInt(port), subNext); - } - ], next); - } - } - ], (err, data) => { - err && reject(err); - !err && resolve(data); }); - }); } - function proveAndSend(program, server, block, issuer, difficulty, host, port, done) { - const logger = server.logger; - async.waterfall([ - function (next) { - block.issuer = issuer; - program.show && console.log(block.getRawSigned()); - co(function*(){ - try { - const prover = blockProver(server); - const proven = yield prover.prove(block, difficulty); - const peer = Peer.fromJSON({ - endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] - }); - program.show && console.log(proven.getRawSigned()); - logger.info('Posted block ' + proven.getRawSigned()); - const p = Peer.fromJSON(peer); - const contact = contacter(p.getHostPreferDNS(), p.getPort()); - yield contact.postBlock(proven.getRawSigned()); - } catch(e) { - next(e); + const logger = server.logger; + async.waterfall([ + function (next) { + block.issuer = issuer; + program.show && console.log(block.getRawSigned()); + (() => __awaiter(this, void 0, void 0, function* () { + try { + const prover = new blockProver_1.BlockProver(server); + const proven = yield prover.prove(block, difficulty); + const peer = Peer.fromJSON({ + endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] + }); + program.show && console.log(proven.getRawSigned()); + logger.info('Posted block ' + proven.getRawSigned()); + const p = Peer.fromJSON(peer); + const contact = contacter(p.getHostPreferDNS(), p.getPort()); + yield contact.postBlock(proven.getRawSigned()); + } + catch (e) { + next(e); + } + }))(); } - }); - } - ], done); + ], done); } +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/app/modules/prover/index.ts b/app/modules/prover/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..84937e761b934b51b567b7bc00e4ae3627e8bde1 --- /dev/null +++ b/app/modules/prover/index.ts @@ -0,0 +1,202 @@ +import {ConfDTO} from "../../lib/dto/ConfDTO" +import {BlockGenerator, BlockGeneratorWhichProves} from "./lib/blockGenerator" +import {Constants} from "./lib/constants" +import {BlockProver} from "./lib/blockProver" +import {Prover} from "./lib/prover" + +const async = require('async'); 
+const contacter = require('duniter-crawler').duniter.methods.contacter; +const common = require('duniter-common'); + +const Peer = common.document.Peer + +export const ProverDependency = { + + duniter: { + + /*********** Permanent prover **************/ + config: { + onLoading: async (conf:ConfDTO) => { + if (conf.cpu === null || conf.cpu === undefined) { + conf.cpu = Constants.DEFAULT_CPU; + } + conf.powSecurityRetryDelay = Constants.POW_SECURITY_RETRY_DELAY; + conf.powMaxHandicap = Constants.POW_MAXIMUM_ACCEPTABLE_HANDICAP; + }, + beforeSave: async (conf:ConfDTO) => { + delete conf.powSecurityRetryDelay; + delete conf.powMaxHandicap; + } + }, + + service: { + output: (server:any) => { + const generator = new BlockGenerator(server); + server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator) + server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator) + server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator) + return new Prover(server) + } + }, + + methods: { + hookServer: (server:any) => { + const generator = new BlockGenerator(server); + server.generatorGetJoinData = generator.getSinglePreJoinData.bind(generator) + server.generatorComputeNewCerts = generator.computeNewCerts.bind(generator) + server.generatorNewCertsToLinks = generator.newCertsToLinks.bind(generator) + }, + prover: (server:any, conf:ConfDTO, logger:any) => new Prover(server), + blockGenerator: (server:any, prover:any) => new BlockGeneratorWhichProves(server, prover), + generateTheNextBlock: async (server:any, manualValues:any) => { + const prover = new BlockProver(server); + const generator = new BlockGeneratorWhichProves(server, prover); + return generator.nextBlock(manualValues); + }, + generateAndProveTheNext: async (server:any, block:any, trial:any, manualValues:any) => { + const prover = new BlockProver(server); + const generator = new BlockGeneratorWhichProves(server, prover); + let res = await generator.makeNextBlock(block, trial, manualValues); + return res + } + }, + + /*********** CLI gen-next + gen-root **************/ + + cliOptions: [ + {value: '--show', desc: 'With gen-next or gen-root commands, displays the generated block.'}, + {value: '--check', desc: 'With gen-next: just check validity of generated block.'}, + {value: '--at <medianTime>', desc: 'With gen-next --show --check: allows to try in a future time.', parser: parseInt } + ], + + cli: [{ + name: 'gen-next [host] [port] [difficulty]', + desc: 'Tries to generate the next block of the blockchain.', + onDatabaseExecute: async (server:any, conf:ConfDTO, program:any, params:any) => { + const host = params[0]; + const port = params[1]; + const difficulty = params[2]; + const generator = new BlockGeneratorWhichProves(server, null); + return generateAndSend(program, host, port, difficulty, server, () => generator.nextBlock); + } + }, { + name: 'gen-root [host] [port] [difficulty]', + desc: 'Tries to generate the next block of the blockchain.', + preventIfRunning: true, + onDatabaseExecute: async (server:any, conf:ConfDTO, program:any, params:any) => { + const host = params[0]; + const port = params[1]; + const difficulty = params[2]; + const generator = new BlockGeneratorWhichProves(server, null); + let toDelete, catched = true; + do { + try { + await generateAndSend(program, host, port, difficulty, server, () => generator.nextBlock); + catched = false; + } catch (e) { + toDelete = await server.dal.idtyDAL.query('SELECT * FROM idty i WHERE 5 > (SELECT count(*) from cert c where c.`to` = i.pubkey)'); 
+ console.log('Deleting', toDelete.map((i:any) => i.pubkey)); + await server.dal.idtyDAL.exec('DELETE FROM idty WHERE pubkey IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')'); + await server.dal.idtyDAL.exec('DELETE FROM cert WHERE `to` IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')'); + await server.dal.idtyDAL.exec('DELETE FROM cert WHERE `from` IN (' + toDelete.map((i:any) => "'" + i.pubkey + "'").join(',') + ')'); + } + } while (catched && toDelete.length); + console.log('Done'); + } + }, { + name: 'gen-root-choose [host] [port] [difficulty]', + desc: 'Tries to generate root block, with choice of root members.', + preventIfRunning: true, + onDatabaseExecute: async (server:any, conf:ConfDTO, program:any, params:any) => { + const host = params[0]; + const port = params[1]; + const difficulty = params[2]; + if (!host) { + throw 'Host is required.'; + } + if (!port) { + throw 'Port is required.'; + } + if (!difficulty) { + throw 'Difficulty is required.'; + } + const generator = new BlockGenerator(server); + return generateAndSend(program, host, port, difficulty, server, () => generator.manualRoot); + } + }] + } +} + +function generateAndSend(program:any, host:string, port:string, difficulty:string, server:any, getGenerationMethod:any) { + const logger = server.logger; + return new Promise((resolve, reject) => { + async.waterfall([ + function (next:any) { + const method = getGenerationMethod(server); + (async() => { + const simulationValues:any = {} + if (program.show && program.check) { + if (program.at && !isNaN(program.at)) { + simulationValues.medianTime = program.at + } + } + const block = await method(null, simulationValues); + next(null, block); + })() + }, + function (block:any, next:any) { + if (program.check) { + block.time = block.medianTime; + program.show && console.log(block.getRawSigned()); + (async() => { + try { + const parsed = common.parsers.parseBlock.syncWrite(block.getRawSigned()); + await server.BlockchainService.checkBlock(parsed, false); + logger.info('Acceptable block'); + next(); + } catch (e) { + next(e); + } + })() + } + else { + logger.debug('Block to be sent: %s', block.getRawInnerPart()); + async.waterfall([ + function (subNext:any) { + proveAndSend(program, server, block, server.conf.pair.pub, parseInt(difficulty), host, parseInt(port), subNext); + } + ], next); + } + } + ], (err:any, data:any) => { + err && reject(err); + !err && resolve(data); + }); + }); +} + +function proveAndSend(program:any, server:any, block:any, issuer:any, difficulty:any, host:any, port:any, done:any) { + const logger = server.logger; + async.waterfall([ + function (next:any) { + block.issuer = issuer; + program.show && console.log(block.getRawSigned()); + (async () => { + try { + const prover = new BlockProver(server); + const proven = await prover.prove(block, difficulty); + const peer = Peer.fromJSON({ + endpoints: [['BASIC_MERKLED_API', host, port].join(' ')] + }); + program.show && console.log(proven.getRawSigned()); + logger.info('Posted block ' + proven.getRawSigned()); + const p = Peer.fromJSON(peer); + const contact = contacter(p.getHostPreferDNS(), p.getPort()); + await contact.postBlock(proven.getRawSigned()); + } catch(e) { + next(e); + } + })() + } + ], done); +} diff --git a/app/modules/prover/lib/blockGenerator.js b/app/modules/prover/lib/blockGenerator.js index 1f11404685b9e307e9fd3615e5ef12a46de970f0..171e445642739c1642e9adeb13f9ee1ed7348193 100644 --- a/app/modules/prover/lib/blockGenerator.js +++ 
b/app/modules/prover/lib/blockGenerator.js @@ -1,759 +1,788 @@ "use strict"; -const _ = require('underscore'); -const co = require('co'); -const moment = require('moment'); -const inquirer = require('inquirer'); -const common = require('duniter-common'); -const indexer = require('../../../lib/indexer').Indexer -const LOCAL_HELPERS = require('../../../lib/rules/local_rules').LOCAL_RULES_HELPERS -const GLOBAL_HELPERS = require('../../../lib/rules/global_rules').GLOBAL_RULES_HELPERS -const TransactionDTO = require('../../../lib/dto/TransactionDTO').TransactionDTO - -const keyring = common.keyring; -const hashf = common.hashf; -const rawer = common.rawer; -const Block = common.document.Block; -const Membership = common.document.Membership; -const Transaction = common.document.Transaction; -const Identity = common.document.Identity; -const Certification = common.document.Certification; -const constants = common.constants - -module.exports = (server, prover) => { - return new BlockGenerator(server, prover); -}; - -function BlockGenerator(server, prover) { - - const that = this; - const conf = server.conf; - const dal = server.dal; - const mainContext = server.BlockchainService.getContext(); - const selfPubkey = conf.pair.pub; - const logger = server.logger; - - this.nextBlock = (manualValues, simulationValues) => generateNextBlock(new NextBlockGenerator(mainContext, conf, dal, logger), manualValues, simulationValues); - - this.manualRoot = () => co(function *() { - let current = yield dal.getCurrentBlockOrNull(); - if (current) { - throw 'Cannot generate root block: it already exists.'; - } - return generateNextBlock(new ManualRootGenerator()); - }); - - this.makeNextBlock = (block, trial, manualValues) => co(function *() { - const unsignedBlock = block || (yield that.nextBlock(manualValues)); - const trialLevel = trial || (yield mainContext.getIssuerPersonalizedDifficulty(selfPubkey)); - return prover.prove(unsignedBlock, trialLevel, (manualValues && manualValues.time) || null); - }); - - /** - * Generate next block, gathering both updates & newcomers - */ - const generateNextBlock = (generator, manualValues, simulationValues) => co(function *() { - const vHEAD_1 = yield mainContext.getvHEAD_1() - if (simulationValues && simulationValues.medianTime) { - vHEAD_1.medianTime = simulationValues.medianTime - } - const current = yield dal.getCurrentBlockOrNull(); - const revocations = yield dal.getRevocatingMembers(); - const exclusions = yield dal.getToBeKickedPubkeys(); - const newCertsFromWoT = yield generator.findNewCertsFromWoT(current); - const newcomersLeavers = yield findNewcomersAndLeavers(current, generator.filterJoiners); - const transactions = yield findTransactions(current); - const joinData = newcomersLeavers[2]; - const leaveData = newcomersLeavers[3]; - const newCertsFromNewcomers = newcomersLeavers[4]; - const certifiersOfNewcomers = _.uniq(_.keys(joinData).reduce((theCertifiers, newcomer) => { - return theCertifiers.concat(_.pluck(joinData[newcomer].certs, 'from')); - }, [])); - const certifiers = [].concat(certifiersOfNewcomers); - // Merges updates - _(newCertsFromWoT).keys().forEach(function(certified){ - newCertsFromWoT[certified] = newCertsFromWoT[certified].filter((cert) => { - // Must not certify a newcomer, since it would mean multiple certifications at same time from one member - const isCertifier = certifiers.indexOf(cert.from) != -1; - if (!isCertifier) { - certifiers.push(cert.from); - } - return !isCertifier; - }); - }); - 
_(newCertsFromNewcomers).keys().forEach((certified) => { - newCertsFromWoT[certified] = (newCertsFromWoT[certified] || []).concat(newCertsFromNewcomers[certified]); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); }); - // Revocations - // Create the block - return createBlock(current, joinData, leaveData, newCertsFromWoT, revocations, exclusions, transactions, manualValues); - }); - - const findNewcomersAndLeavers = (current, filteringFunc) => co(function*() { - const newcomers = yield findNewcomers(current, filteringFunc); - const leavers = yield findLeavers(current); - - const cur = newcomers.current; - const newWoTMembers = newcomers.newWotMembers; - const finalJoinData = newcomers.finalJoinData; - const updates = newcomers.updates; - - return [cur, newWoTMembers, finalJoinData, leavers, updates]; - }); - - const findTransactions = (current) => co(function*() { - const versionMin = current ? Math.min(common.constants.LAST_VERSION_FOR_TX, current.version) : common.constants.DOCUMENTS_VERSION; - const txs = yield dal.getTransactionsPending(versionMin); - const transactions = []; - const passingTxs = []; - for (const obj of txs) { - obj.currency = conf.currency - const tx = TransactionDTO.fromJSONObject(obj); - try { - yield new Promise((resolve, reject) => { - LOCAL_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), (err, res) => { - if (err) return reject(err) - return resolve(res) - }) - }) - const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 }; - yield GLOBAL_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, conf, dal); - yield GLOBAL_HELPERS.checkTxBlockStamp(tx, dal); - transactions.push(tx); - passingTxs.push(tx); - logger.info('Transaction %s added to block', tx.hash); - } catch (err) { - logger.error(err); - const currentNumber = (current && current.number) || 0; - const blockstamp = tx.blockstamp || (currentNumber + '-'); - const txBlockNumber = parseInt(blockstamp.split('-')[0]); - // 10 blocks before removing the transaction - if (currentNumber - txBlockNumber + 1 >= common.constants.TRANSACTION_MAX_TRIES) { - yield dal.removeTxByHash(tx.hash); - } - } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const TransactionDTO_1 = require("../../../lib/dto/TransactionDTO"); +const global_rules_1 = require("../../../lib/rules/global_rules"); +const local_rules_1 = require("../../../lib/rules/local_rules"); +const indexer_1 = require("../../../lib/indexer"); +const _ = require('underscore'); +const moment = require('moment'); +const inquirer = require('inquirer'); +const common = require('duniter-common'); +const keyring = common.keyring; +const hashf = common.hashf; +const rawer = common.rawer; +const Block = common.document.Block; +const Membership = common.document.Membership; +const Transaction = common.document.Transaction; +const Identity = common.document.Identity; +const Certification = common.document.Certification; +const constants = common.constants; +class BlockGenerator { + constructor(server) { + 
this.server = server; + this.conf = server.conf; + this.dal = server.dal; + this.mainContext = server.BlockchainService.getContext(); + this.selfPubkey = (this.conf.pair && this.conf.pair.pub) || ''; + this.logger = server.logger; } - return transactions; - }); - - const findLeavers = (current) => co(function*() { - const leaveData = {}; - const memberships = yield dal.findLeavers(); - const leavers = []; - memberships.forEach((ms) => leavers.push(ms.issuer)); - for (const ms of memberships) { - const leave = { identity: null, ms: ms, key: null, idHash: '' }; - leave.idHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase(); - let block; - if (current) { - block = yield dal.getBlock(ms.number); - } - else { - block = {}; - } - const identity = yield dal.getIdentityByHashOrNull(leave.idHash); - const currentMembership = yield dal.mindexDAL.getReducedMS(ms.issuer); - const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1; - if (identity && block && currentMSN < leave.ms.number && identity.member) { - // MS + matching cert are found - leave.identity = identity; - leaveData[identity.pubkey] = leave; - } + nextBlock(manualValues, simulationValues = {}) { + return this.generateNextBlock(new NextBlockGenerator(this.mainContext, this.conf, this.dal, this.logger), manualValues, simulationValues); } - return leaveData; - }); - - const findNewcomers = (current, filteringFunc) => co(function*() { - const updates = {}; - const preJoinData = yield getPreJoinData(current); - const joinData = yield filteringFunc(preJoinData); - const members = yield dal.getMembers(); - const wotMembers = _.pluck(members, 'pubkey'); - // Checking step - const newcomers = _(joinData).keys(); - const nextBlockNumber = current ? current.number + 1 : 0; - try { - const realNewcomers = yield iteratedChecking(newcomers, (someNewcomers) => co(function*() { - const nextBlock = { - number: nextBlockNumber, - joiners: someNewcomers, - identities: _.filter(newcomers.map((pub) => joinData[pub].identity), { wasMember: false }).map((idty) => idty.pubkey) - }; - const theNewLinks = yield computeNewLinks(nextBlockNumber, someNewcomers, joinData, updates); - yield checkWoTConstraints(nextBlock, theNewLinks, current); - })); - const newLinks = yield computeNewLinks(nextBlockNumber, realNewcomers, joinData, updates); - const newWoT = wotMembers.concat(realNewcomers); - const finalJoinData = {}; - realNewcomers.forEach((newcomer) => { - // Only keep membership of selected newcomers - finalJoinData[newcomer] = joinData[newcomer]; - // Only keep certifications from final members - const keptCerts = []; - joinData[newcomer].certs.forEach((cert) => { - const issuer = cert.from; - if (~newWoT.indexOf(issuer) && ~newLinks[cert.to].indexOf(issuer)) { - keptCerts.push(cert); - } + manualRoot() { + return __awaiter(this, void 0, void 0, function* () { + let current = yield this.dal.getCurrentBlockOrNull(); + if (current) { + throw 'Cannot generate root block: it already exists.'; + } + return this.generateNextBlock(new ManualRootGenerator()); }); - joinData[newcomer].certs = keptCerts; - }); - return { - current: current, - newWotMembers: wotMembers.concat(realNewcomers), - finalJoinData: finalJoinData, - updates: updates - } - } catch(err) { - logger.error(err); - throw err; } - }); - - const checkWoTConstraints = (block, newLinks, current) => co(function*() { - if (block.number < 0) { - throw 'Cannot compute WoT constraint for negative block number'; - } - const newcomers = block.joiners.map((inlineMS) => 
inlineMS.split(':')[0]); - const realNewcomers = block.identities; - for (const newcomer of newcomers) { - if (block.number > 0) { - try { - // Will throw an error if not enough links - yield mainContext.checkHaveEnoughLinks(newcomer, newLinks); - // This one does not throw but returns a boolean - const isOut = yield GLOBAL_HELPERS.isOver3Hops(newcomer, newLinks, realNewcomers, current, conf, dal); - if (isOut) { - throw 'Key ' + newcomer + ' is not recognized by the WoT for this block'; - } - } catch (e) { - logger.debug(e); - throw e; - } - } + /** + * Generate next block, gathering both updates & newcomers + */ + generateNextBlock(generator, manualValues = null, simulationValues = null) { + return __awaiter(this, void 0, void 0, function* () { + const vHEAD_1 = yield this.mainContext.getvHEAD_1(); + if (simulationValues && simulationValues.medianTime) { + vHEAD_1.medianTime = simulationValues.medianTime; + } + const current = yield this.dal.getCurrentBlockOrNull(); + const revocations = yield this.dal.getRevocatingMembers(); + const exclusions = yield this.dal.getToBeKickedPubkeys(); + const newCertsFromWoT = yield generator.findNewCertsFromWoT(current); + const newcomersLeavers = yield this.findNewcomersAndLeavers(current, (joinersData) => generator.filterJoiners(joinersData)); + const transactions = yield this.findTransactions(current); + const joinData = newcomersLeavers[2]; + const leaveData = newcomersLeavers[3]; + const newCertsFromNewcomers = newcomersLeavers[4]; + const certifiersOfNewcomers = _.uniq(_.keys(joinData).reduce((theCertifiers, newcomer) => { + return theCertifiers.concat(_.pluck(joinData[newcomer].certs, 'from')); + }, [])); + const certifiers = [].concat(certifiersOfNewcomers); + // Merges updates + _(newCertsFromWoT).keys().forEach(function (certified) { + newCertsFromWoT[certified] = newCertsFromWoT[certified].filter((cert) => { + // Must not certify a newcomer, since it would mean multiple certifications at same time from one member + const isCertifier = certifiers.indexOf(cert.from) != -1; + if (!isCertifier) { + certifiers.push(cert.from); + } + return !isCertifier; + }); + }); + _(newCertsFromNewcomers).keys().forEach((certified) => { + newCertsFromWoT[certified] = (newCertsFromWoT[certified] || []).concat(newCertsFromNewcomers[certified]); + }); + // Revocations + // Create the block + return this.createBlock(current, joinData, leaveData, newCertsFromWoT, revocations, exclusions, transactions, manualValues); + }); } - }); - - const iteratedChecking = (newcomers, checkWoTForNewcomers) => co(function*() { - const passingNewcomers = []; - let hadError = false; - for (const newcomer of newcomers) { - try { - yield checkWoTForNewcomers(passingNewcomers.concat(newcomer)); - passingNewcomers.push(newcomer); - } catch (err) { - hadError = hadError || err; - } + findNewcomersAndLeavers(current, filteringFunc) { + return __awaiter(this, void 0, void 0, function* () { + const newcomers = yield this.findNewcomers(current, filteringFunc); + const leavers = yield this.findLeavers(current); + const cur = newcomers.current; + const newWoTMembers = newcomers.newWotMembers; + const finalJoinData = newcomers.finalJoinData; + const updates = newcomers.updates; + return [cur, newWoTMembers, finalJoinData, leavers, updates]; + }); } - if (hadError) { - return yield iteratedChecking(passingNewcomers, checkWoTForNewcomers); - } else { - return passingNewcomers; + findTransactions(current) { + return __awaiter(this, void 0, void 0, function* () { + const versionMin = current ? 
Math.min(common.constants.LAST_VERSION_FOR_TX, current.version) : common.constants.DOCUMENTS_VERSION; + const txs = yield this.dal.getTransactionsPending(versionMin); + const transactions = []; + const passingTxs = []; + for (const obj of txs) { + obj.currency = this.conf.currency; + const tx = TransactionDTO_1.TransactionDTO.fromJSONObject(obj); + try { + yield new Promise((resolve, reject) => { + local_rules_1.LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), (err, res) => { + if (err) + return reject(err); + return resolve(res); + }); + }); + const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 }; + yield global_rules_1.GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal); + yield global_rules_1.GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal); + transactions.push(tx); + passingTxs.push(tx); + this.logger.info('Transaction %s added to block', tx.hash); + } + catch (err) { + this.logger.error(err); + const currentNumber = (current && current.number) || 0; + const blockstamp = tx.blockstamp || (currentNumber + '-'); + const txBlockNumber = parseInt(blockstamp.split('-')[0]); + // 10 blocks before removing the transaction + if (currentNumber - txBlockNumber + 1 >= common.constants.TRANSACTION_MAX_TRIES) { + yield this.dal.removeTxByHash(tx.hash); + } + } + } + return transactions; + }); } - }); - - const getPreJoinData = (current) => co(function*() { - const preJoinData = {}; - const memberships = yield dal.findNewcomers(current && current.medianTime) - const joiners = []; - memberships.forEach((ms) =>joiners.push(ms.issuer)); - for (const ms of memberships) { - try { - if (ms.block !== common.constants.SPECIAL_BLOCK) { - let msBasedBlock = yield dal.getBlockByBlockstampOrNull(ms.block); - if (!msBasedBlock) { - throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK; - } - let age = current.medianTime - msBasedBlock.medianTime; - if (age > conf.msWindow) { - throw constants.ERRORS.TOO_OLD_MEMBERSHIP; - } - } - const idtyHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase(); - const join = yield that.getSinglePreJoinData(current, idtyHash, joiners); - join.ms = ms; - const currentMembership = yield dal.mindexDAL.getReducedMS(ms.issuer); - const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1; - if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) { - preJoinData[join.identity.pubkey] = join; - } - } catch (err) { - if (err && !err.uerr) { - logger.warn(err); - } - } + findLeavers(current) { + return __awaiter(this, void 0, void 0, function* () { + const leaveData = {}; + const memberships = yield this.dal.findLeavers(); + const leavers = []; + memberships.forEach((ms) => leavers.push(ms.issuer)); + for (const ms of memberships) { + const leave = { identity: null, ms: ms, key: null, idHash: '' }; + leave.idHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase(); + let block; + if (current) { + block = yield this.dal.getBlock(ms.number); + } + else { + block = {}; + } + const identity = yield this.dal.getIdentityByHashOrNull(leave.idHash); + const currentMembership = yield this.dal.mindexDAL.getReducedMS(ms.issuer); + const currentMSN = currentMembership ? 
parseInt(currentMembership.created_on) : -1; + if (identity && block && currentMSN < leave.ms.number && identity.member) { + // MS + matching cert are found + leave.identity = identity; + leaveData[identity.pubkey] = leave; + } + } + return leaveData; + }); } - return preJoinData; - }); - - const computeNewLinks = (forBlock, theNewcomers, joinData, updates) => co(function *() { - let newCerts = yield that.computeNewCerts(forBlock, theNewcomers, joinData); - return that.newCertsToLinks(newCerts, updates); - }); - - this.newCertsToLinks = (newCerts, updates) => { - let newLinks = {}; - _.mapObject(newCerts, function(certs, pubkey) { - newLinks[pubkey] = _.pluck(certs, 'from'); - }); - _.mapObject(updates, function(certs, pubkey) { - newLinks[pubkey] = (newLinks[pubkey] || []).concat(_.pluck(certs, 'pubkey')); - }); - return newLinks; - }; - - this.computeNewCerts = (forBlock, theNewcomers, joinData) => co(function *() { - const newCerts = {}, certifiers = []; - const certsByKey = _.mapObject(joinData, function(val){ return val.certs; }); - for (const newcomer of theNewcomers) { - // New array of certifiers - newCerts[newcomer] = newCerts[newcomer] || []; - // Check wether each certification of the block is from valid newcomer/member - for (const cert of certsByKey[newcomer]) { - const isAlreadyCertifying = certifiers.indexOf(cert.from) !== -1; - if (!(isAlreadyCertifying && forBlock > 0)) { - if (~theNewcomers.indexOf(cert.from)) { - // Newcomer to newcomer => valid link - newCerts[newcomer].push(cert); - certifiers.push(cert.from); - } else { - let isMember = yield dal.isMember(cert.from); - // Member to newcomer => valid link - if (isMember) { - newCerts[newcomer].push(cert); - certifiers.push(cert.from); - } - } - } - } + findNewcomers(current, filteringFunc) { + return __awaiter(this, void 0, void 0, function* () { + const updates = {}; + const preJoinData = yield this.getPreJoinData(current); + const joinData = yield filteringFunc(preJoinData); + const members = yield this.dal.getMembers(); + const wotMembers = _.pluck(members, 'pubkey'); + // Checking step + const newcomers = _(joinData).keys(); + const nextBlockNumber = current ? 
current.number + 1 : 0; + try { + const realNewcomers = yield this.iteratedChecking(newcomers, (someNewcomers) => __awaiter(this, void 0, void 0, function* () { + const nextBlock = { + number: nextBlockNumber, + joiners: someNewcomers, + identities: _.filter(newcomers.map((pub) => joinData[pub].identity), { wasMember: false }).map((idty) => idty.pubkey) + }; + const theNewLinks = yield this.computeNewLinks(nextBlockNumber, someNewcomers, joinData, updates); + yield this.checkWoTConstraints(nextBlock, theNewLinks, current); + })); + const newLinks = yield this.computeNewLinks(nextBlockNumber, realNewcomers, joinData, updates); + const newWoT = wotMembers.concat(realNewcomers); + const finalJoinData = {}; + realNewcomers.forEach((newcomer) => { + // Only keep membership of selected newcomers + finalJoinData[newcomer] = joinData[newcomer]; + // Only keep certifications from final members + const keptCerts = []; + joinData[newcomer].certs.forEach((cert) => { + const issuer = cert.from; + if (~newWoT.indexOf(issuer) && ~newLinks[cert.to].indexOf(issuer)) { + keptCerts.push(cert); + } + }); + joinData[newcomer].certs = keptCerts; + }); + return { + current: current, + newWotMembers: wotMembers.concat(realNewcomers), + finalJoinData: finalJoinData, + updates: updates + }; + } + catch (err) { + this.logger.error(err); + throw err; + } + }); } - return newCerts; - }); - - this.getSinglePreJoinData = (current, idHash, joiners) => co(function *() { - const identity = yield dal.getIdentityByHashOrNull(idHash); - let foundCerts = []; - const vHEAD_1 = yield mainContext.getvHEAD_1(); - if (!identity) { - throw 'Identity with hash \'' + idHash + '\' not found'; + checkWoTConstraints(block, newLinks, current) { + return __awaiter(this, void 0, void 0, function* () { + if (block.number < 0) { + throw 'Cannot compute WoT constraint for negative block number'; + } + const newcomers = block.joiners.map((inlineMS) => inlineMS.split(':')[0]); + const realNewcomers = block.identities; + for (const newcomer of newcomers) { + if (block.number > 0) { + try { + // Will throw an error if not enough links + yield this.mainContext.checkHaveEnoughLinks(newcomer, newLinks); + // This one does not throw but returns a boolean + const isOut = yield global_rules_1.GLOBAL_RULES_HELPERS.isOver3Hops(newcomer, newLinks, realNewcomers, current, this.conf, this.dal); + if (isOut) { + throw 'Key ' + newcomer + ' is not recognized by the WoT for this block'; + } + } + catch (e) { + this.logger.debug(e); + throw e; + } + } + } + }); } - if (current && identity.buid == common.constants.SPECIAL_BLOCK && !identity.wasMember) { - throw constants.ERRORS.TOO_OLD_IDENTITY; + iteratedChecking(newcomers, checkWoTForNewcomers) { + return __awaiter(this, void 0, void 0, function* () { + const passingNewcomers = []; + let hadError = false; + for (const newcomer of newcomers) { + try { + yield checkWoTForNewcomers(passingNewcomers.concat(newcomer)); + passingNewcomers.push(newcomer); + } + catch (err) { + hadError = hadError || err; + } + } + if (hadError) { + return yield this.iteratedChecking(passingNewcomers, checkWoTForNewcomers); + } + else { + return passingNewcomers; + } + }); } - else if (!identity.wasMember && identity.buid != common.constants.SPECIAL_BLOCK) { - const idtyBasedBlock = yield dal.getBlock(identity.buid); - const age = current.medianTime - idtyBasedBlock.medianTime; - if (age > conf.idtyWindow) { - throw constants.ERRORS.TOO_OLD_IDENTITY; - } + getPreJoinData(current) { + return __awaiter(this, void 0, void 0, function* () 
{ + const preJoinData = {}; + const memberships = yield this.dal.findNewcomers(current && current.medianTime); + const joiners = []; + memberships.forEach((ms) => joiners.push(ms.issuer)); + for (const ms of memberships) { + try { + if (ms.block !== common.constants.SPECIAL_BLOCK) { + let msBasedBlock = yield this.dal.getBlockByBlockstampOrNull(ms.block); + if (!msBasedBlock) { + throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK; + } + let age = current.medianTime - msBasedBlock.medianTime; + if (age > this.conf.msWindow) { + throw constants.ERRORS.TOO_OLD_MEMBERSHIP; + } + } + const idtyHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase(); + const join = yield this.getSinglePreJoinData(current, idtyHash, joiners); + join.ms = ms; + const currentMembership = yield this.dal.mindexDAL.getReducedMS(ms.issuer); + const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1; + if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) { + preJoinData[join.identity.pubkey] = join; + } + } + catch (err) { + if (err && !err.uerr) { + this.logger.warn(err); + } + } + } + return preJoinData; + }); } - const idty = Identity.fromJSON(identity); - idty.currency = conf.currency; - const createIdentity = idty.rawWithoutSig(); - const verified = keyring.verify(createIdentity, idty.sig, idty.pubkey); - if (!verified) { - throw constants.ERRORS.IDENTITY_WRONGLY_SIGNED; + computeNewLinks(forBlock, theNewcomers, joinData, updates) { + return __awaiter(this, void 0, void 0, function* () { + let newCerts = yield this.computeNewCerts(forBlock, theNewcomers, joinData); + return this.newCertsToLinks(newCerts, updates); + }); } - const isIdentityLeaving = yield dal.isLeaving(idty.pubkey); - if (!isIdentityLeaving) { - if (!current) { - // Look for certifications from initial joiners - const certs = yield dal.certsNotLinkedToTarget(idHash); - foundCerts = _.filter(certs, function(cert){ - // Add 'joiners && ': special case when block#0 not written ANd not joiner yet (avoid undefined error) - return joiners && ~joiners.indexOf(cert.from); + newCertsToLinks(newCerts, updates) { + let newLinks = {}; + _.mapObject(newCerts, function (certs, pubkey) { + newLinks[pubkey] = _.pluck(certs, 'from'); }); - } else { - // Look for certifications from WoT members - let certs = yield dal.certsNotLinkedToTarget(idHash); - const certifiers = []; - for (const cert of certs) { - try { - const basedBlock = yield dal.getBlock(cert.block_number); - if (!basedBlock) { - throw 'Unknown timestamp block for identity'; + _.mapObject(updates, function (certs, pubkey) { + newLinks[pubkey] = (newLinks[pubkey] || []).concat(_.pluck(certs, 'pubkey')); + }); + return newLinks; + } + computeNewCerts(forBlock, theNewcomers, joinData) { + return __awaiter(this, void 0, void 0, function* () { + const newCerts = {}, certifiers = []; + const certsByKey = _.mapObject(joinData, function (val) { return val.certs; }); + for (const newcomer of theNewcomers) { + // New array of certifiers + newCerts[newcomer] = newCerts[newcomer] || []; + // Check wether each certification of the block is from valid newcomer/member + for (const cert of certsByKey[newcomer]) { + const isAlreadyCertifying = certifiers.indexOf(cert.from) !== -1; + if (!(isAlreadyCertifying && forBlock > 0)) { + if (~theNewcomers.indexOf(cert.from)) { + // Newcomer to newcomer => valid link + newCerts[newcomer].push(cert); + certifiers.push(cert.from); + } + else { + let isMember = yield this.dal.isMember(cert.from); + // Member to newcomer 
=> valid link + if (isMember) { + newCerts[newcomer].push(cert); + certifiers.push(cert.from); + } + } + } + } } - if (current) { - const age = current.medianTime - basedBlock.medianTime; - if (age > conf.sigWindow || age > conf.sigValidity) { - throw 'Too old certification'; - } - } - // Already exists a link not replayable yet? - let exists = yield dal.existsNonReplayableLink(cert.from, cert.to); - if (exists) { - throw 'It already exists a similar certification written, which is not replayable yet'; - } - // Already exists a link not chainable yet? - exists = yield dal.existsNonChainableLink(cert.from, vHEAD_1, conf.sigStock); - if (exists) { - throw 'It already exists a written certification from ' + cert.from + ' which is not chainable yet'; - } - const isMember = yield dal.isMember(cert.from); - const doubleSignature = ~certifiers.indexOf(cert.from) ? true : false; - if (isMember && !doubleSignature) { - const isValid = yield GLOBAL_HELPERS.checkCertificationIsValidForBlock(cert, { number: current.number + 1, currency: current.currency }, identity, conf, dal); - if (isValid) { - certifiers.push(cert.from); - foundCerts.push(cert); - } - } - } catch (e) { - logger.debug(e.stack || e.message || e); - // Go on - } - } - } + return newCerts; + }); } - return { - identity: identity, - key: null, - idHash: idHash, - certs: foundCerts - }; - }); - - const createBlock = (current, joinData, leaveData, updates, revocations, exclusions, transactions, manualValues) => { - return co(function *() { - - if (manualValues && manualValues.excluded) { - exclusions = manualValues.excluded; - } - if (manualValues && manualValues.revoked) { - revocations = []; - } - - const vHEAD = yield mainContext.getvHeadCopy(); - const vHEAD_1 = yield mainContext.getvHEAD_1(); - const maxLenOfBlock = indexer.DUP_HELPERS.getMaxBlockSize(vHEAD); - let blockLen = 0; - // Revocations have an impact on exclusions - revocations.forEach((idty) => exclusions.push(idty.pubkey)); - // Prevent writing joins/updates for excluded members - exclusions = _.uniq(exclusions); - exclusions.forEach((excluded) => { - delete updates[excluded]; - delete joinData[excluded]; - delete leaveData[excluded]; - }); - _(leaveData).keys().forEach((leaver) => { - delete updates[leaver]; - delete joinData[leaver]; - }); - const block = new Block(); - block.number = current ? current.number + 1 : 0; - // Compute the new MedianTime - if (block.number == 0) { - block.medianTime = moment.utc().unix() - conf.rootoffset; - } - else { - block.medianTime = vHEAD.medianTime; - } - // Choose the version - block.version = (manualValues && manualValues.version) || (yield LOCAL_HELPERS.getMaxPossibleVersionNumber(current)); - block.currency = current ? current.currency : conf.currency; - block.nonce = 0; - if (!conf.dtReeval) { - conf.dtReeval = conf.dt; - } - if (!conf.udTime0) { - conf.udTime0 = block.medianTime + conf.dt; - } - if (!conf.udReevalTime0) { - conf.udReevalTime0 = block.medianTime + conf.dtReeval; - } - block.parameters = block.number > 0 ? '' : [ - conf.c, conf.dt, conf.ud0, - conf.sigPeriod, conf.sigStock, conf.sigWindow, conf.sigValidity, - conf.sigQty, conf.idtyWindow, conf.msWindow, conf.xpercent, conf.msValidity, - conf.stepMax, conf.medianTimeBlocks, conf.avgGenTime, conf.dtDiffEval, - (conf.percentRot == 1 ? "1.0" : conf.percentRot), - conf.udTime0, - conf.udReevalTime0, - conf.dtReeval - ].join(':'); - block.previousHash = current ? current.hash : ""; - block.previousIssuer = current ? 
current.issuer : ""; - if (selfPubkey) - block.issuer = selfPubkey; - // Members merkle - const joiners = _(joinData).keys(); - joiners.sort() - const previousCount = current ? current.membersCount : 0; - if (joiners.length == 0 && !current) { - throw constants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS; - } - - // Kicked people - block.excluded = exclusions; - - /***** - * Priority 1: keep the WoT sane - */ - // Certifications from the WoT, to the WoT - _(updates).keys().forEach((certifiedMember) => { - const certs = updates[certifiedMember] || []; - certs.forEach((cert) => { - if (blockLen < maxLenOfBlock) { - block.certifications.push(Certification.fromJSON(cert).inline()); - blockLen++; - } + getSinglePreJoinData(current, idHash, joiners) { + return __awaiter(this, void 0, void 0, function* () { + const identity = yield this.dal.getIdentityByHashOrNull(idHash); + let foundCerts = []; + const vHEAD_1 = yield this.mainContext.getvHEAD_1(); + if (!identity) { + throw 'Identity with hash \'' + idHash + '\' not found'; + } + if (current && identity.buid == common.constants.SPECIAL_BLOCK && !identity.wasMember) { + throw constants.ERRORS.TOO_OLD_IDENTITY; + } + else if (!identity.wasMember && identity.buid != common.constants.SPECIAL_BLOCK) { + const idtyBasedBlock = yield this.dal.getBlock(identity.buid); + const age = current.medianTime - idtyBasedBlock.medianTime; + if (age > this.conf.idtyWindow) { + throw constants.ERRORS.TOO_OLD_IDENTITY; + } + } + const idty = Identity.fromJSON(identity); + idty.currency = this.conf.currency; + const createIdentity = idty.rawWithoutSig(); + const verified = keyring.verify(createIdentity, idty.sig, idty.pubkey); + if (!verified) { + throw constants.ERRORS.IDENTITY_WRONGLY_SIGNED; + } + const isIdentityLeaving = yield this.dal.isLeaving(idty.pubkey); + if (!isIdentityLeaving) { + if (!current) { + // Look for certifications from initial joiners + const certs = yield this.dal.certsNotLinkedToTarget(idHash); + foundCerts = _.filter(certs, function (cert) { + // Add 'joiners && ': special case when block#0 not written ANd not joiner yet (avoid undefined error) + return joiners && ~joiners.indexOf(cert.from); + }); + } + else { + // Look for certifications from WoT members + let certs = yield this.dal.certsNotLinkedToTarget(idHash); + const certifiers = []; + for (const cert of certs) { + try { + const basedBlock = yield this.dal.getBlock(cert.block_number); + if (!basedBlock) { + throw 'Unknown timestamp block for identity'; + } + if (current) { + const age = current.medianTime - basedBlock.medianTime; + if (age > this.conf.sigWindow || age > this.conf.sigValidity) { + throw 'Too old certification'; + } + } + // Already exists a link not replayable yet? + let exists = yield this.dal.existsNonReplayableLink(cert.from, cert.to); + if (exists) { + throw 'It already exists a similar certification written, which is not replayable yet'; + } + // Already exists a link not chainable yet? 
+ exists = yield this.dal.existsNonChainableLink(cert.from, vHEAD_1, this.conf.sigStock); + if (exists) { + throw 'It already exists a written certification from ' + cert.from + ' which is not chainable yet'; + } + const isMember = yield this.dal.isMember(cert.from); + const doubleSignature = !!(~certifiers.indexOf(cert.from)); + if (isMember && !doubleSignature) { + const isValid = yield global_rules_1.GLOBAL_RULES_HELPERS.checkCertificationIsValidForBlock(cert, { number: current.number + 1, currency: current.currency }, () => __awaiter(this, void 0, void 0, function* () { + const idty = yield this.dal.getIdentityByHashOrNull(idHash); + return idty; + }), this.conf, this.dal); + if (isValid) { + certifiers.push(cert.from); + foundCerts.push(cert); + } + } + } + catch (e) { + this.logger.debug(e.stack || e.message || e); + // Go on + } + } + } + } + return { + identity: identity, + key: null, + idHash: idHash, + certs: foundCerts + }; }); - }); - // Renewed - joiners.forEach((joiner) => { - const data = joinData[joiner]; - // Join only for non-members - if (data.identity.member) { - if (blockLen < maxLenOfBlock) { - block.actives.push(Membership.fromJSON(data.ms).inline()); - blockLen++; - } - } - }); - // Leavers - const leavers = _(leaveData).keys(); - leavers.forEach((leaver) => { - const data = leaveData[leaver]; - // Join only for non-members - if (data.identity.member) { - if (blockLen < maxLenOfBlock) { - block.leavers.push(Membership.fromJSON(data.ms).inline()); - blockLen++; - } - } - }); - - /***** - * Priority 2: revoked identities - */ - revocations.forEach((idty) => { - if (blockLen < maxLenOfBlock) { - block.revoked.push([idty.pubkey, idty.revocation_sig].join(':')); - blockLen++; - } - }); - - /***** - * Priority 3: newcomers/renewcomers - */ - let countOfCertsToNewcomers = 0; - // Newcomers - // Newcomers + back people - joiners.forEach((joiner) => { - const data = joinData[joiner]; - // Identities only for never-have-been members - if (!data.identity.member && !data.identity.wasMember) { - block.identities.push(Identity.fromJSON(data.identity).inline()); - } - // Join only for non-members - if (!data.identity.member) { - block.joiners.push(Membership.fromJSON(data.ms).inline()); - } - }); - block.identities = _.sortBy(block.identities, (line) => { - const sp = line.split(':'); - return sp[2] + sp[3]; - }); - - // Certifications from the WoT, to newcomers - joiners.forEach((joiner) => { - const data = joinData[joiner] || []; - data.certs.forEach((cert) => { - countOfCertsToNewcomers++; - block.certifications.push(Certification.fromJSON(cert).inline()); + } + createBlock(current, joinData, leaveData, updates, revocations, exclusions, transactions, manualValues) { + return __awaiter(this, void 0, void 0, function* () { + if (manualValues && manualValues.excluded) { + exclusions = manualValues.excluded; + } + if (manualValues && manualValues.revoked) { + revocations = []; + } + const vHEAD = yield this.mainContext.getvHeadCopy(); + const vHEAD_1 = yield this.mainContext.getvHEAD_1(); + const maxLenOfBlock = indexer_1.Indexer.DUP_HELPERS.getMaxBlockSize(vHEAD); + let blockLen = 0; + // Revocations have an impact on exclusions + revocations.forEach((idty) => exclusions.push(idty.pubkey)); + // Prevent writing joins/updates for excluded members + exclusions = _.uniq(exclusions); + exclusions.forEach((excluded) => { + delete updates[excluded]; + delete joinData[excluded]; + delete leaveData[excluded]; + }); + _(leaveData).keys().forEach((leaver) => { + delete updates[leaver]; + 
delete joinData[leaver]; + }); + const block = new Block(); + block.number = current ? current.number + 1 : 0; + // Compute the new MedianTime + if (block.number == 0) { + block.medianTime = moment.utc().unix() - this.conf.rootoffset; + } + else { + block.medianTime = vHEAD.medianTime; + } + // Choose the version + block.version = (manualValues && manualValues.version) || (yield local_rules_1.LOCAL_RULES_HELPERS.getMaxPossibleVersionNumber(current)); + block.currency = current ? current.currency : this.conf.currency; + block.nonce = 0; + if (!this.conf.dtReeval) { + this.conf.dtReeval = this.conf.dt; + } + if (!this.conf.udTime0) { + this.conf.udTime0 = block.medianTime + this.conf.dt; + } + if (!this.conf.udReevalTime0) { + this.conf.udReevalTime0 = block.medianTime + this.conf.dtReeval; + } + block.parameters = block.number > 0 ? '' : [ + this.conf.c, this.conf.dt, this.conf.ud0, + this.conf.sigPeriod, this.conf.sigStock, this.conf.sigWindow, this.conf.sigValidity, + this.conf.sigQty, this.conf.idtyWindow, this.conf.msWindow, this.conf.xpercent, this.conf.msValidity, + this.conf.stepMax, this.conf.medianTimeBlocks, this.conf.avgGenTime, this.conf.dtDiffEval, + (this.conf.percentRot == 1 ? "1.0" : this.conf.percentRot), + this.conf.udTime0, + this.conf.udReevalTime0, + this.conf.dtReeval + ].join(':'); + block.previousHash = current ? current.hash : ""; + block.previousIssuer = current ? current.issuer : ""; + if (this.selfPubkey) { + block.issuer = this.selfPubkey; + } + // Members merkle + const joiners = _(joinData).keys(); + joiners.sort(); + const previousCount = current ? current.membersCount : 0; + if (joiners.length == 0 && !current) { + throw constants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS; + } + // Kicked people + block.excluded = exclusions; + /***** + * Priority 1: keep the WoT sane + */ + // Certifications from the WoT, to the WoT + _(updates).keys().forEach((certifiedMember) => { + const certs = updates[certifiedMember] || []; + certs.forEach((cert) => { + if (blockLen < maxLenOfBlock) { + block.certifications.push(Certification.fromJSON(cert).inline()); + blockLen++; + } + }); + }); + // Renewed + joiners.forEach((joiner) => { + const data = joinData[joiner]; + // Join only for non-members + if (data.identity.member) { + if (blockLen < maxLenOfBlock) { + block.actives.push(Membership.fromJSON(data.ms).inline()); + blockLen++; + } + } + }); + // Leavers + const leavers = _(leaveData).keys(); + leavers.forEach((leaver) => { + const data = leaveData[leaver]; + // Join only for non-members + if (data.identity.member) { + if (blockLen < maxLenOfBlock) { + block.leavers.push(Membership.fromJSON(data.ms).inline()); + blockLen++; + } + } + }); + /***** + * Priority 2: revoked identities + */ + revocations.forEach((idty) => { + if (blockLen < maxLenOfBlock) { + block.revoked.push([idty.pubkey, idty.revocation_sig].join(':')); + blockLen++; + } + }); + /***** + * Priority 3: newcomers/renewcomers + */ + let countOfCertsToNewcomers = 0; + // Newcomers + // Newcomers + back people + joiners.forEach((joiner) => { + const data = joinData[joiner]; + // Identities only for never-have-been members + if (!data.identity.member && !data.identity.wasMember) { + block.identities.push(Identity.fromJSON(data.identity).inline()); + } + // Join only for non-members + if (!data.identity.member) { + block.joiners.push(Membership.fromJSON(data.ms).inline()); + } + }); + block.identities = _.sortBy(block.identities, (line) => { + const sp = line.split(':'); + return sp[2] + sp[3]; + }); + // 
Certifications from the WoT, to newcomers + joiners.forEach((joiner) => { + const data = joinData[joiner] || []; + data.certs.forEach((cert) => { + countOfCertsToNewcomers++; + block.certifications.push(Certification.fromJSON(cert).inline()); + }); + }); + // Eventually revert newcomers/renewcomer + if (block.number > 0 && Block.getLen(block) > maxLenOfBlock) { + for (let i = 0; i < block.identities.length; i++) { + block.identities.pop(); + block.joiners.pop(); + } + for (let i = 0; i < countOfCertsToNewcomers; i++) { + block.certifications.pop(); + } + } + // Final number of members + block.membersCount = previousCount + block.joiners.length - block.excluded.length; + vHEAD.membersCount = block.membersCount; + /***** + * Priority 4: transactions + */ + block.transactions = []; + blockLen = Block.getLen(block); + if (blockLen < maxLenOfBlock) { + transactions.forEach((tx) => { + const txLen = Transaction.getLen(tx); + if (txLen <= common.constants.MAXIMUM_LEN_OF_COMPACT_TX && blockLen + txLen <= maxLenOfBlock && tx.version == common.constants.TRANSACTION_VERSION) { + block.transactions.push({ raw: tx.getCompactVersion() }); + } + blockLen += txLen; + }); + } + /** + * Finally handle the Universal Dividend + */ + block.powMin = vHEAD.powMin; + // Universal Dividend + if (vHEAD.new_dividend) { + // BR_G13 + // Recompute according to block.membersCount + indexer_1.Indexer.prepareDividend(vHEAD, vHEAD_1, this.conf); + // BR_G14 + indexer_1.Indexer.prepareUnitBase(vHEAD); + // Fix BR_G14 double call + vHEAD.unitBase = Math.min(vHEAD_1.unitBase + 1, vHEAD.unitBase); + block.dividend = vHEAD.dividend; + block.unitbase = vHEAD.unitBase; + } + else { + block.unitbase = block.number == 0 ? 0 : current.unitbase; + } + // Rotation + block.issuersCount = vHEAD.issuersCount; + block.issuersFrame = vHEAD.issuersFrame; + block.issuersFrameVar = vHEAD.issuersFrameVar; + // Manual values before hashing + if (manualValues) { + _.extend(block, _.omit(manualValues, 'time')); + } + // InnerHash + block.time = block.medianTime; + block.inner_hash = hashf(rawer.getBlockInnerPart(block)).toUpperCase(); + return block; }); - }); - - // Eventually revert newcomers/renewcomer - if (block.number > 0 && Block.getLen(block) > maxLenOfBlock) { - for (let i = 0; i < block.identities.length; i++) { - block.identities.pop(); - block.joiners.pop(); - } - for (let i = 0; i < countOfCertsToNewcomers; i++) { - block.certifications.pop(); - } - } - - // Final number of members - block.membersCount = previousCount + block.joiners.length - block.excluded.length; - - vHEAD.membersCount = block.membersCount; - - /***** - * Priority 4: transactions - */ - block.transactions = []; - blockLen = Block.getLen(block); - if (blockLen < maxLenOfBlock) { - transactions.forEach((tx) => { - const txLen = Transaction.getLen(tx); - if (txLen <= common.constants.MAXIMUM_LEN_OF_COMPACT_TX && blockLen + txLen <= maxLenOfBlock && tx.version == common.constants.TRANSACTION_VERSION) { - block.transactions.push({ raw: tx.getCompactVersion() }); - } - blockLen += txLen; + } +} +exports.BlockGenerator = BlockGenerator; +class BlockGeneratorWhichProves extends BlockGenerator { + constructor(server, prover) { + super(server); + this.prover = prover; + } + makeNextBlock(block, trial, manualValues = null) { + return __awaiter(this, void 0, void 0, function* () { + const unsignedBlock = block || (yield this.nextBlock(manualValues)); + const trialLevel = trial || (yield this.mainContext.getIssuerPersonalizedDifficulty(this.selfPubkey)); + return 
this.prover.prove(unsignedBlock, trialLevel, (manualValues && manualValues.time) || null); }); - } - - /** - * Finally handle the Universal Dividend - */ - block.powMin = vHEAD.powMin; - - // Universal Dividend - if (vHEAD.new_dividend) { - - // BR_G13 - // Recompute according to block.membersCount - indexer.prepareDividend(vHEAD, vHEAD_1, conf); - // BR_G14 - indexer.prepareUnitBase(vHEAD, vHEAD_1, conf); - - // Fix BR_G14 double call - vHEAD.unitBase = Math.min(vHEAD_1.unitBase + 1, vHEAD.unitBase); - - block.dividend = vHEAD.dividend; - block.unitbase = vHEAD.unitBase; - } else { - block.unitbase = block.number == 0 ? 0 : current.unitbase; - } - // Rotation - block.issuersCount = vHEAD.issuersCount; - block.issuersFrame = vHEAD.issuersFrame; - block.issuersFrameVar = vHEAD.issuersFrameVar; - // Manual values before hashing - if (manualValues) { - _.extend(block, _.omit(manualValues, 'time')); - } - // InnerHash - block.time = block.medianTime; - block.inner_hash = hashf(rawer.getBlockInnerPart(block)).toUpperCase(); - return block; - }); - } + } } - +exports.BlockGeneratorWhichProves = BlockGeneratorWhichProves; /** * Class to implement strategy of automatic selection of incoming data for next block. * @constructor */ -function NextBlockGenerator(mainContext, conf, dal, logger) { - - this.findNewCertsFromWoT = (current) => co(function *() { - const updates = {}; - const updatesToFrom = {}; - const certs = yield dal.certsFindNew(); - const vHEAD_1 = yield mainContext.getvHEAD_1(); - for (const cert of certs) { - const targetIdty = yield dal.getIdentityByHashOrNull(cert.target); - // The identity must be known - if (targetIdty) { - const certSig = cert.sig; - // Do not rely on certification block UID, prefer using the known hash of the block by its given number - const targetBlock = yield dal.getBlock(cert.block_number); - // Check if writable - let duration = current && targetBlock ? current.medianTime - parseInt(targetBlock.medianTime) : 0; - if (targetBlock && duration <= conf.sigWindow) { - cert.sig = ''; - cert.currency = conf.currency; - cert.issuer = cert.from; - cert.idty_issuer = targetIdty.pubkey; - cert.idty_uid = targetIdty.uid; - cert.idty_buid = targetIdty.buid; - cert.idty_sig = targetIdty.sig; - cert.buid = current ? [cert.block_number, targetBlock.hash].join('-') : common.constants.SPECIAL_BLOCK; - const rawCert = Certification.fromJSON(cert).getRaw(); - if (keyring.verify(rawCert, certSig, cert.from)) { - cert.sig = certSig; - let exists = false; - if (current) { - // Already exists a link not replayable yet? - exists = yield dal.existsNonReplayableLink(cert.from, cert.to); - } - if (!exists) { - // Already exists a link not chainable yet? 
- // No chainability block means absolutely nobody can issue certifications yet - exists = yield dal.existsNonChainableLink(cert.from, vHEAD_1, conf.sigStock); - if (!exists) { - // It does NOT already exists a similar certification written, which is not replayable yet - // Signatory must be a member - const isSignatoryAMember = yield dal.isMember(cert.from); - const isCertifiedANonLeavingMember = isSignatoryAMember && (yield dal.isMemberAndNonLeaver(cert.to)); - // Certified must be a member and non-leaver - if (isSignatoryAMember && isCertifiedANonLeavingMember) { - updatesToFrom[cert.to] = updatesToFrom[cert.to] || []; - updates[cert.to] = updates[cert.to] || []; - if (updatesToFrom[cert.to].indexOf(cert.from) == -1) { - updates[cert.to].push(cert); - updatesToFrom[cert.to].push(cert.from); - } +class NextBlockGenerator { + constructor(mainContext, conf, dal, logger) { + this.mainContext = mainContext; + this.conf = conf; + this.dal = dal; + this.logger = logger; + } + findNewCertsFromWoT(current) { + return __awaiter(this, void 0, void 0, function* () { + const updates = {}; + const updatesToFrom = {}; + const certs = yield this.dal.certsFindNew(); + const vHEAD_1 = yield this.mainContext.getvHEAD_1(); + for (const cert of certs) { + const targetIdty = yield this.dal.getIdentityByHashOrNull(cert.target); + // The identity must be known + if (targetIdty) { + const certSig = cert.sig; + // Do not rely on certification block UID, prefer using the known hash of the block by its given number + const targetBlock = yield this.dal.getBlock(cert.block_number); + // Check if writable + let duration = current && targetBlock ? current.medianTime - parseInt(targetBlock.medianTime) : 0; + if (targetBlock && duration <= this.conf.sigWindow) { + cert.sig = ''; + cert.currency = this.conf.currency; + cert.issuer = cert.from; + cert.idty_issuer = targetIdty.pubkey; + cert.idty_uid = targetIdty.uid; + cert.idty_buid = targetIdty.buid; + cert.idty_sig = targetIdty.sig; + cert.buid = current ? [cert.block_number, targetBlock.hash].join('-') : common.constants.SPECIAL_BLOCK; + const rawCert = Certification.fromJSON(cert).getRaw(); + if (keyring.verify(rawCert, certSig, cert.from)) { + cert.sig = certSig; + let exists = false; + if (current) { + // Already exists a link not replayable yet? + exists = yield this.dal.existsNonReplayableLink(cert.from, cert.to); + } + if (!exists) { + // Already exists a link not chainable yet? 
+ // No chainability block means absolutely nobody can issue certifications yet + exists = yield this.dal.existsNonChainableLink(cert.from, vHEAD_1, this.conf.sigStock); + if (!exists) { + // It does NOT already exists a similar certification written, which is not replayable yet + // Signatory must be a member + const isSignatoryAMember = yield this.dal.isMember(cert.from); + const isCertifiedANonLeavingMember = isSignatoryAMember && (yield this.dal.isMemberAndNonLeaver(cert.to)); + // Certified must be a member and non-leaver + if (isSignatoryAMember && isCertifiedANonLeavingMember) { + updatesToFrom[cert.to] = updatesToFrom[cert.to] || []; + updates[cert.to] = updates[cert.to] || []; + if (updatesToFrom[cert.to].indexOf(cert.from) == -1) { + updates[cert.to].push(cert); + updatesToFrom[cert.to].push(cert.from); + } + } + } + } + } + } } - } } - } - } - } + return updates; + }); + } + filterJoiners(preJoinData) { + return __awaiter(this, void 0, void 0, function* () { + const filtered = {}; + const filterings = []; + const filter = (pubkey) => __awaiter(this, void 0, void 0, function* () { + try { + // No manual filtering, takes all BUT already used UID or pubkey + let exists = yield global_rules_1.GLOBAL_RULES_HELPERS.checkExistsUserID(preJoinData[pubkey].identity.uid, this.dal); + if (exists && !preJoinData[pubkey].identity.wasMember) { + throw 'UID already taken'; + } + exists = yield global_rules_1.GLOBAL_RULES_HELPERS.checkExistsPubkey(pubkey, this.dal); + if (exists && !preJoinData[pubkey].identity.wasMember) { + throw 'Pubkey already taken'; + } + filtered[pubkey] = preJoinData[pubkey]; + } + catch (err) { + this.logger.warn(err); + } + }); + _.keys(preJoinData).forEach((joinPubkey) => filterings.push(filter(joinPubkey))); + yield Promise.all(filterings); + return filtered; + }); } - return updates; - }); - - this.filterJoiners = (preJoinData) => co(function*() { - const filtered = {}; - const filterings = []; - const filter = (pubkey) => co(function*() { - try { - // No manual filtering, takes all BUT already used UID or pubkey - let exists = yield GLOBAL_HELPERS.checkExistsUserID(preJoinData[pubkey].identity.uid, dal); - if (exists && !preJoinData[pubkey].identity.wasMember) { - throw 'UID already taken'; - } - exists = yield GLOBAL_HELPERS.checkExistsPubkey(pubkey, dal); - if (exists && !preJoinData[pubkey].identity.wasMember) { - throw 'Pubkey already taken'; - } - filtered[pubkey] = preJoinData[pubkey]; - } - catch (err) { - logger.warn(err); - } - }); - _.keys(preJoinData).forEach( (joinPubkey) => filterings.push(filter(joinPubkey))); - yield filterings; - return filtered; - }); } - /** * Class to implement strategy of manual selection of root members for root block. 
* @constructor */ -function ManualRootGenerator() { - - this.findNewCertsFromWoT = () => Promise.resolve({}); - - this.filterJoiners = (preJoinData) => co(function*() { - const filtered = {}; - const newcomers = _(preJoinData).keys(); - const uids = []; - newcomers.forEach((newcomer) => uids.push(preJoinData[newcomer].ms.userid)); - - if (newcomers.length > 0) { - const answers = yield inquirer.prompt([{ - type: "checkbox", - name: "uids", - message: "Newcomers to add", - choices: uids, - default: uids[0] - }]); - newcomers.forEach((newcomer) => { - if (~answers.uids.indexOf(preJoinData[newcomer].ms.userid)) - filtered[newcomer] = preJoinData[newcomer]; - }); - if (answers.uids.length == 0) - throw 'No newcomer selected'; - return filtered - } else { - throw 'No newcomer found'; +class ManualRootGenerator { + findNewCertsFromWoT() { + return Promise.resolve({}); + } + filterJoiners(preJoinData) { + return __awaiter(this, void 0, void 0, function* () { + const filtered = {}; + const newcomers = _(preJoinData).keys(); + const uids = []; + newcomers.forEach((newcomer) => uids.push(preJoinData[newcomer].ms.userid)); + if (newcomers.length > 0) { + const answers = yield inquirer.prompt([{ + type: "checkbox", + name: "uids", + message: "Newcomers to add", + choices: uids, + default: uids[0] + }]); + newcomers.forEach((newcomer) => { + if (~answers.uids.indexOf(preJoinData[newcomer].ms.userid)) + filtered[newcomer] = preJoinData[newcomer]; + }); + if (answers.uids.length == 0) + throw 'No newcomer selected'; + return filtered; + } + else { + throw 'No newcomer found'; + } + }); } - }); } +//# sourceMappingURL=blockGenerator.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/blockGenerator.ts b/app/modules/prover/lib/blockGenerator.ts new file mode 100644 index 0000000000000000000000000000000000000000..e0f1f44ac630ccd87e0cea4b11aa88f9fe67de5b --- /dev/null +++ b/app/modules/prover/lib/blockGenerator.ts @@ -0,0 +1,791 @@ +"use strict"; +import {ConfDTO} from "../../../lib/dto/ConfDTO" +import {BlockchainContext} from "../../../lib/computation/BlockchainContext" +import {TransactionDTO} from "../../../lib/dto/TransactionDTO" +import {GLOBAL_RULES_HELPERS} from "../../../lib/rules/global_rules" +import {LOCAL_RULES_HELPERS} from "../../../lib/rules/local_rules" +import {Indexer} from "../../../lib/indexer" +import {FileDAL} from "../../../lib/dal/fileDAL" +import {DBBlock} from "../../../lib/db/DBBlock" + +const _ = require('underscore'); +const moment = require('moment'); +const inquirer = require('inquirer'); +const common = require('duniter-common'); + +const keyring = common.keyring; +const hashf = common.hashf; +const rawer = common.rawer; +const Block = common.document.Block; +const Membership = common.document.Membership; +const Transaction = common.document.Transaction; +const Identity = common.document.Identity; +const Certification = common.document.Certification; +const constants = common.constants + +export class BlockGenerator { + + conf:ConfDTO + dal:any + mainContext:BlockchainContext + selfPubkey:string + logger:any + + constructor(private server:any) { + this.conf = server.conf; + this.dal = server.dal; + this.mainContext = server.BlockchainService.getContext(); + this.selfPubkey = (this.conf.pair && this.conf.pair.pub) || '' + this.logger = server.logger; + } + + nextBlock(manualValues:any, simulationValues:any = {}) { + return this.generateNextBlock(new NextBlockGenerator(this.mainContext, this.conf, this.dal, this.logger), manualValues, simulationValues) + } + 
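+  /*
+   * A minimal usage sketch, illustrative only: it assumes a fully wired `server`
+   * exposing conf, dal, logger and BlockchainService, as the constructor above
+   * expects, and `someFutureTime` is just a placeholder median time (unix timestamp).
+   *
+   *   const generator = new BlockGenerator(server)
+   *   // Automatic strategy: gathers pending certifications, memberships and transactions
+   *   const unsigned = await generator.nextBlock({}, { medianTime: someFutureTime })
+   *   // Interactive strategy: prompts for founding members; only works while no block exists yet
+   *   const root = await generator.manualRoot()
+   *
+   * Both entry points funnel into generateNextBlock() and differ only in the
+   * BlockGeneratorInterface strategy they pass to it.
+   */
+ 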
+ async manualRoot() { + let current = await this.dal.getCurrentBlockOrNull() + if (current) { + throw 'Cannot generate root block: it already exists.'; + } + return this.generateNextBlock(new ManualRootGenerator()); + } + + /** + * Generate next block, gathering both updates & newcomers + */ + private async generateNextBlock(generator:BlockGeneratorInterface, manualValues:any = null, simulationValues:any = null) { + const vHEAD_1 = await this.mainContext.getvHEAD_1() + if (simulationValues && simulationValues.medianTime) { + vHEAD_1.medianTime = simulationValues.medianTime + } + const current = await this.dal.getCurrentBlockOrNull(); + const revocations = await this.dal.getRevocatingMembers(); + const exclusions = await this.dal.getToBeKickedPubkeys(); + const newCertsFromWoT = await generator.findNewCertsFromWoT(current); + const newcomersLeavers = await this.findNewcomersAndLeavers(current, (joinersData:any) => generator.filterJoiners(joinersData)); + const transactions = await this.findTransactions(current); + const joinData = newcomersLeavers[2]; + const leaveData = newcomersLeavers[3]; + const newCertsFromNewcomers = newcomersLeavers[4]; + const certifiersOfNewcomers = _.uniq(_.keys(joinData).reduce((theCertifiers:any, newcomer:string) => { + return theCertifiers.concat(_.pluck(joinData[newcomer].certs, 'from')); + }, [])); + const certifiers:string[] = [].concat(certifiersOfNewcomers); + // Merges updates + _(newCertsFromWoT).keys().forEach(function(certified:string){ + newCertsFromWoT[certified] = newCertsFromWoT[certified].filter((cert:any) => { + // Must not certify a newcomer, since it would mean multiple certifications at same time from one member + const isCertifier = certifiers.indexOf(cert.from) != -1; + if (!isCertifier) { + certifiers.push(cert.from); + } + return !isCertifier; + }); + }); + _(newCertsFromNewcomers).keys().forEach((certified:string) => { + newCertsFromWoT[certified] = (newCertsFromWoT[certified] || []).concat(newCertsFromNewcomers[certified]); + }); + // Revocations + // Create the block + return this.createBlock(current, joinData, leaveData, newCertsFromWoT, revocations, exclusions, transactions, manualValues); + } + + private async findNewcomersAndLeavers(current:DBBlock, filteringFunc: (joinData: { [pub:string]: any }) => Promise<{ [pub:string]: any }>) { + const newcomers = await this.findNewcomers(current, filteringFunc); + const leavers = await this.findLeavers(current); + + const cur = newcomers.current; + const newWoTMembers = newcomers.newWotMembers; + const finalJoinData = newcomers.finalJoinData; + const updates = newcomers.updates; + + return [cur, newWoTMembers, finalJoinData, leavers, updates]; + } + + private async findTransactions(current:DBBlock) { + const versionMin = current ? 
Math.min(common.constants.LAST_VERSION_FOR_TX, current.version) : common.constants.DOCUMENTS_VERSION; + const txs = await this.dal.getTransactionsPending(versionMin); + const transactions = []; + const passingTxs:any[] = []; + for (const obj of txs) { + obj.currency = this.conf.currency + const tx = TransactionDTO.fromJSONObject(obj); + try { + await new Promise((resolve, reject) => { + LOCAL_RULES_HELPERS.checkBunchOfTransactions(passingTxs.concat(tx), (err:any, res:any) => { + if (err) return reject(err) + return resolve(res) + }) + }) + const nextBlockWithFakeTimeVariation = { medianTime: current.medianTime + 1 }; + await GLOBAL_RULES_HELPERS.checkSingleTransaction(tx, nextBlockWithFakeTimeVariation, this.conf, this.dal); + await GLOBAL_RULES_HELPERS.checkTxBlockStamp(tx, this.dal); + transactions.push(tx); + passingTxs.push(tx); + this.logger.info('Transaction %s added to block', tx.hash); + } catch (err) { + this.logger.error(err); + const currentNumber = (current && current.number) || 0; + const blockstamp = tx.blockstamp || (currentNumber + '-'); + const txBlockNumber = parseInt(blockstamp.split('-')[0]); + // 10 blocks before removing the transaction + if (currentNumber - txBlockNumber + 1 >= common.constants.TRANSACTION_MAX_TRIES) { + await this.dal.removeTxByHash(tx.hash); + } + } + } + return transactions; + } + + private async findLeavers(current:DBBlock) { + const leaveData: { [pub:string]: any } = {}; + const memberships = await this.dal.findLeavers(); + const leavers:string[] = []; + memberships.forEach((ms:any) => leavers.push(ms.issuer)); + for (const ms of memberships) { + const leave = { identity: null, ms: ms, key: null, idHash: '' }; + leave.idHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase(); + let block; + if (current) { + block = await this.dal.getBlock(ms.number); + } + else { + block = {}; + } + const identity = await this.dal.getIdentityByHashOrNull(leave.idHash); + const currentMembership = await this.dal.mindexDAL.getReducedMS(ms.issuer); + const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1; + if (identity && block && currentMSN < leave.ms.number && identity.member) { + // MS + matching cert are found + leave.identity = identity; + leaveData[identity.pubkey] = leave; + } + } + return leaveData; + } + + private async findNewcomers(current:DBBlock, filteringFunc: (joinData: { [pub:string]: any }) => Promise<{ [pub:string]: any }>) { + const updates = {}; + const preJoinData = await this.getPreJoinData(current); + const joinData = await filteringFunc(preJoinData); + const members = await this.dal.getMembers(); + const wotMembers = _.pluck(members, 'pubkey'); + // Checking step + const newcomers = _(joinData).keys(); + const nextBlockNumber = current ? 
current.number + 1 : 0; + try { + const realNewcomers = await this.iteratedChecking(newcomers, async (someNewcomers:string[]) => { + const nextBlock = { + number: nextBlockNumber, + joiners: someNewcomers, + identities: _.filter(newcomers.map((pub:string) => joinData[pub].identity), { wasMember: false }).map((idty:any) => idty.pubkey) + }; + const theNewLinks = await this.computeNewLinks(nextBlockNumber, someNewcomers, joinData, updates) + await this.checkWoTConstraints(nextBlock, theNewLinks, current); + }) + const newLinks = await this.computeNewLinks(nextBlockNumber, realNewcomers, joinData, updates); + const newWoT = wotMembers.concat(realNewcomers); + const finalJoinData: { [pub:string]: any } = {}; + realNewcomers.forEach((newcomer:string) => { + // Only keep membership of selected newcomers + finalJoinData[newcomer] = joinData[newcomer]; + // Only keep certifications from final members + const keptCerts:any[] = []; + joinData[newcomer].certs.forEach((cert:any) => { + const issuer = cert.from; + if (~newWoT.indexOf(issuer) && ~newLinks[cert.to].indexOf(issuer)) { + keptCerts.push(cert); + } + }); + joinData[newcomer].certs = keptCerts; + }); + return { + current: current, + newWotMembers: wotMembers.concat(realNewcomers), + finalJoinData: finalJoinData, + updates: updates + } + } catch(err) { + this.logger.error(err); + throw err; + } + } + + private async checkWoTConstraints(block:{ number:number, joiners:string[], identities:string[] }, newLinks:any, current:DBBlock) { + if (block.number < 0) { + throw 'Cannot compute WoT constraint for negative block number'; + } + const newcomers = block.joiners.map((inlineMS:string) => inlineMS.split(':')[0]); + const realNewcomers = block.identities; + for (const newcomer of newcomers) { + if (block.number > 0) { + try { + // Will throw an error if not enough links + await this.mainContext.checkHaveEnoughLinks(newcomer, newLinks); + // This one does not throw but returns a boolean + const isOut = await GLOBAL_RULES_HELPERS.isOver3Hops(newcomer, newLinks, realNewcomers, current, this.conf, this.dal); + if (isOut) { + throw 'Key ' + newcomer + ' is not recognized by the WoT for this block'; + } + } catch (e) { + this.logger.debug(e); + throw e; + } + } + } + } + + private async iteratedChecking(newcomers:string[], checkWoTForNewcomers: (someNewcomers:string[]) => Promise<void>): Promise<string[]> { + const passingNewcomers:string[] = [] + let hadError = false; + for (const newcomer of newcomers) { + try { + await checkWoTForNewcomers(passingNewcomers.concat(newcomer)); + passingNewcomers.push(newcomer); + } catch (err) { + hadError = hadError || err; + } + } + if (hadError) { + return await this.iteratedChecking(passingNewcomers, checkWoTForNewcomers); + } else { + return passingNewcomers; + } + } + + private async getPreJoinData(current:DBBlock) { + const preJoinData:any = {}; + const memberships = await this.dal.findNewcomers(current && current.medianTime) + const joiners:string[] = []; + memberships.forEach((ms:any) => joiners.push(ms.issuer)); + for (const ms of memberships) { + try { + if (ms.block !== common.constants.SPECIAL_BLOCK) { + let msBasedBlock = await this.dal.getBlockByBlockstampOrNull(ms.block); + if (!msBasedBlock) { + throw constants.ERRORS.BLOCKSTAMP_DOES_NOT_MATCH_A_BLOCK; + } + let age = current.medianTime - msBasedBlock.medianTime; + if (age > this.conf.msWindow) { + throw constants.ERRORS.TOO_OLD_MEMBERSHIP; + } + } + const idtyHash = (hashf(ms.userid + ms.certts + ms.issuer) + "").toUpperCase(); + const join:any = await 
this.getSinglePreJoinData(current, idtyHash, joiners); + join.ms = ms; + const currentMembership = await this.dal.mindexDAL.getReducedMS(ms.issuer); + const currentMSN = currentMembership ? parseInt(currentMembership.created_on) : -1; + if (!join.identity.revoked && currentMSN < parseInt(join.ms.number)) { + preJoinData[join.identity.pubkey] = join; + } + } catch (err) { + if (err && !err.uerr) { + this.logger.warn(err); + } + } + } + return preJoinData; + } + + private async computeNewLinks(forBlock:number, theNewcomers:any, joinData:any, updates:any) { + let newCerts = await this.computeNewCerts(forBlock, theNewcomers, joinData); + return this.newCertsToLinks(newCerts, updates); + } + + newCertsToLinks(newCerts:any, updates:any) { + let newLinks:any = {}; + _.mapObject(newCerts, function(certs:any, pubkey:string) { + newLinks[pubkey] = _.pluck(certs, 'from'); + }); + _.mapObject(updates, function(certs:any, pubkey:string) { + newLinks[pubkey] = (newLinks[pubkey] || []).concat(_.pluck(certs, 'pubkey')); + }); + return newLinks; + } + + async computeNewCerts(forBlock:number, theNewcomers:any, joinData:any) { + const newCerts:any = {}, certifiers = []; + const certsByKey = _.mapObject(joinData, function(val:any){ return val.certs; }); + for (const newcomer of theNewcomers) { + // New array of certifiers + newCerts[newcomer] = newCerts[newcomer] || []; + // Check wether each certification of the block is from valid newcomer/member + for (const cert of certsByKey[newcomer]) { + const isAlreadyCertifying = certifiers.indexOf(cert.from) !== -1; + if (!(isAlreadyCertifying && forBlock > 0)) { + if (~theNewcomers.indexOf(cert.from)) { + // Newcomer to newcomer => valid link + newCerts[newcomer].push(cert); + certifiers.push(cert.from); + } else { + let isMember = await this.dal.isMember(cert.from) + // Member to newcomer => valid link + if (isMember) { + newCerts[newcomer].push(cert); + certifiers.push(cert.from); + } + } + } + } + } + return newCerts; + } + + async getSinglePreJoinData(current:DBBlock, idHash:string, joiners:string[]) { + const identity = await this.dal.getIdentityByHashOrNull(idHash); + let foundCerts = []; + const vHEAD_1 = await this.mainContext.getvHEAD_1(); + if (!identity) { + throw 'Identity with hash \'' + idHash + '\' not found'; + } + if (current && identity.buid == common.constants.SPECIAL_BLOCK && !identity.wasMember) { + throw constants.ERRORS.TOO_OLD_IDENTITY; + } + else if (!identity.wasMember && identity.buid != common.constants.SPECIAL_BLOCK) { + const idtyBasedBlock = await this.dal.getBlock(identity.buid); + const age = current.medianTime - idtyBasedBlock.medianTime; + if (age > this.conf.idtyWindow) { + throw constants.ERRORS.TOO_OLD_IDENTITY; + } + } + const idty = Identity.fromJSON(identity); + idty.currency = this.conf.currency; + const createIdentity = idty.rawWithoutSig(); + const verified = keyring.verify(createIdentity, idty.sig, idty.pubkey); + if (!verified) { + throw constants.ERRORS.IDENTITY_WRONGLY_SIGNED; + } + const isIdentityLeaving = await this.dal.isLeaving(idty.pubkey); + if (!isIdentityLeaving) { + if (!current) { + // Look for certifications from initial joiners + const certs = await this.dal.certsNotLinkedToTarget(idHash); + foundCerts = _.filter(certs, function(cert:any){ + // Add 'joiners && ': special case when block#0 not written ANd not joiner yet (avoid undefined error) + return joiners && ~joiners.indexOf(cert.from); + }); + } else { + // Look for certifications from WoT members + let certs = await 
this.dal.certsNotLinkedToTarget(idHash); + const certifiers = []; + for (const cert of certs) { + try { + const basedBlock = await this.dal.getBlock(cert.block_number); + if (!basedBlock) { + throw 'Unknown timestamp block for identity'; + } + if (current) { + const age = current.medianTime - basedBlock.medianTime; + if (age > this.conf.sigWindow || age > this.conf.sigValidity) { + throw 'Too old certification'; + } + } + // Already exists a link not replayable yet? + let exists = await this.dal.existsNonReplayableLink(cert.from, cert.to); + if (exists) { + throw 'It already exists a similar certification written, which is not replayable yet'; + } + // Already exists a link not chainable yet? + exists = await this.dal.existsNonChainableLink(cert.from, vHEAD_1, this.conf.sigStock); + if (exists) { + throw 'It already exists a written certification from ' + cert.from + ' which is not chainable yet'; + } + const isMember = await this.dal.isMember(cert.from); + const doubleSignature = !!(~certifiers.indexOf(cert.from)) + if (isMember && !doubleSignature) { + const isValid = await GLOBAL_RULES_HELPERS.checkCertificationIsValidForBlock(cert, { number: current.number + 1, currency: current.currency }, async () => { + const idty = await this.dal.getIdentityByHashOrNull(idHash) + return idty + }, this.conf, this.dal); + if (isValid) { + certifiers.push(cert.from); + foundCerts.push(cert); + } + } + } catch (e) { + this.logger.debug(e.stack || e.message || e); + // Go on + } + } + } + } + return { + identity: identity, + key: null, + idHash: idHash, + certs: foundCerts + }; + } + + private async createBlock(current:DBBlock, joinData:any, leaveData:any, updates:any, revocations:any, exclusions:any, transactions:any, manualValues:any) { + + if (manualValues && manualValues.excluded) { + exclusions = manualValues.excluded; + } + if (manualValues && manualValues.revoked) { + revocations = []; + } + + const vHEAD = await this.mainContext.getvHeadCopy(); + const vHEAD_1 = await this.mainContext.getvHEAD_1(); + const maxLenOfBlock = Indexer.DUP_HELPERS.getMaxBlockSize(vHEAD); + let blockLen = 0; + // Revocations have an impact on exclusions + revocations.forEach((idty:any) => exclusions.push(idty.pubkey)); + // Prevent writing joins/updates for excluded members + exclusions = _.uniq(exclusions); + exclusions.forEach((excluded:any) => { + delete updates[excluded]; + delete joinData[excluded]; + delete leaveData[excluded]; + }); + _(leaveData).keys().forEach((leaver:any) => { + delete updates[leaver]; + delete joinData[leaver]; + }); + const block = new Block(); + block.number = current ? current.number + 1 : 0; + // Compute the new MedianTime + if (block.number == 0) { + block.medianTime = moment.utc().unix() - this.conf.rootoffset; + } + else { + block.medianTime = vHEAD.medianTime; + } + // Choose the version + block.version = (manualValues && manualValues.version) || (await LOCAL_RULES_HELPERS.getMaxPossibleVersionNumber(current)); + block.currency = current ? current.currency : this.conf.currency; + block.nonce = 0; + if (!this.conf.dtReeval) { + this.conf.dtReeval = this.conf.dt; + } + if (!this.conf.udTime0) { + this.conf.udTime0 = block.medianTime + this.conf.dt; + } + if (!this.conf.udReevalTime0) { + this.conf.udReevalTime0 = block.medianTime + this.conf.dtReeval; + } + block.parameters = block.number > 0 ? 
'' : [ + this.conf.c, this.conf.dt, this.conf.ud0, + this.conf.sigPeriod, this.conf.sigStock, this.conf.sigWindow, this.conf.sigValidity, + this.conf.sigQty, this.conf.idtyWindow, this.conf.msWindow, this.conf.xpercent, this.conf.msValidity, + this.conf.stepMax, this.conf.medianTimeBlocks, this.conf.avgGenTime, this.conf.dtDiffEval, + (this.conf.percentRot == 1 ? "1.0" : this.conf.percentRot), + this.conf.udTime0, + this.conf.udReevalTime0, + this.conf.dtReeval + ].join(':'); + block.previousHash = current ? current.hash : ""; + block.previousIssuer = current ? current.issuer : ""; + if (this.selfPubkey) { + block.issuer = this.selfPubkey + } + // Members merkle + const joiners = _(joinData).keys(); + joiners.sort() + const previousCount = current ? current.membersCount : 0; + if (joiners.length == 0 && !current) { + throw constants.ERRORS.CANNOT_ROOT_BLOCK_NO_MEMBERS; + } + + // Kicked people + block.excluded = exclusions; + + /***** + * Priority 1: keep the WoT sane + */ + // Certifications from the WoT, to the WoT + _(updates).keys().forEach((certifiedMember:any) => { + const certs = updates[certifiedMember] || []; + certs.forEach((cert:any) => { + if (blockLen < maxLenOfBlock) { + block.certifications.push(Certification.fromJSON(cert).inline()); + blockLen++; + } + }); + }); + // Renewed + joiners.forEach((joiner:any) => { + const data = joinData[joiner]; + // Join only for non-members + if (data.identity.member) { + if (blockLen < maxLenOfBlock) { + block.actives.push(Membership.fromJSON(data.ms).inline()); + blockLen++; + } + } + }); + // Leavers + const leavers = _(leaveData).keys(); + leavers.forEach((leaver:any) => { + const data = leaveData[leaver]; + // Join only for non-members + if (data.identity.member) { + if (blockLen < maxLenOfBlock) { + block.leavers.push(Membership.fromJSON(data.ms).inline()); + blockLen++; + } + } + }); + + /***** + * Priority 2: revoked identities + */ + revocations.forEach((idty:any) => { + if (blockLen < maxLenOfBlock) { + block.revoked.push([idty.pubkey, idty.revocation_sig].join(':')); + blockLen++; + } + }); + + /***** + * Priority 3: newcomers/renewcomers + */ + let countOfCertsToNewcomers = 0; + // Newcomers + // Newcomers + back people + joiners.forEach((joiner:any) => { + const data = joinData[joiner]; + // Identities only for never-have-been members + if (!data.identity.member && !data.identity.wasMember) { + block.identities.push(Identity.fromJSON(data.identity).inline()); + } + // Join only for non-members + if (!data.identity.member) { + block.joiners.push(Membership.fromJSON(data.ms).inline()); + } + }); + block.identities = _.sortBy(block.identities, (line:string) => { + const sp = line.split(':'); + return sp[2] + sp[3]; + }); + + // Certifications from the WoT, to newcomers + joiners.forEach((joiner:any) => { + const data = joinData[joiner] || []; + data.certs.forEach((cert:any) => { + countOfCertsToNewcomers++; + block.certifications.push(Certification.fromJSON(cert).inline()); + }); + }); + + // Eventually revert newcomers/renewcomer + if (block.number > 0 && Block.getLen(block) > maxLenOfBlock) { + for (let i = 0; i < block.identities.length; i++) { + block.identities.pop(); + block.joiners.pop(); + } + for (let i = 0; i < countOfCertsToNewcomers; i++) { + block.certifications.pop(); + } + } + + // Final number of members + block.membersCount = previousCount + block.joiners.length - block.excluded.length; + + vHEAD.membersCount = block.membersCount; + + /***** + * Priority 4: transactions + */ + block.transactions = []; + blockLen = 
Block.getLen(block); + if (blockLen < maxLenOfBlock) { + transactions.forEach((tx:any) => { + const txLen = Transaction.getLen(tx); + if (txLen <= common.constants.MAXIMUM_LEN_OF_COMPACT_TX && blockLen + txLen <= maxLenOfBlock && tx.version == common.constants.TRANSACTION_VERSION) { + block.transactions.push({ raw: tx.getCompactVersion() }); + } + blockLen += txLen; + }); + } + + /** + * Finally handle the Universal Dividend + */ + block.powMin = vHEAD.powMin; + + // Universal Dividend + if (vHEAD.new_dividend) { + + // BR_G13 + // Recompute according to block.membersCount + Indexer.prepareDividend(vHEAD, vHEAD_1, this.conf) + // BR_G14 + Indexer.prepareUnitBase(vHEAD) + + // Fix BR_G14 double call + vHEAD.unitBase = Math.min(vHEAD_1.unitBase + 1, vHEAD.unitBase); + + block.dividend = vHEAD.dividend; + block.unitbase = vHEAD.unitBase; + } else { + block.unitbase = block.number == 0 ? 0 : current.unitbase; + } + // Rotation + block.issuersCount = vHEAD.issuersCount; + block.issuersFrame = vHEAD.issuersFrame; + block.issuersFrameVar = vHEAD.issuersFrameVar; + // Manual values before hashing + if (manualValues) { + _.extend(block, _.omit(manualValues, 'time')); + } + // InnerHash + block.time = block.medianTime; + block.inner_hash = hashf(rawer.getBlockInnerPart(block)).toUpperCase(); + return block; + } +} + +export class BlockGeneratorWhichProves extends BlockGenerator { + + constructor(server:any, private prover:any) { + super(server) + } + + async makeNextBlock(block:DBBlock|null, trial:number, manualValues:any = null) { + const unsignedBlock = block || (await this.nextBlock(manualValues)) + const trialLevel = trial || (await this.mainContext.getIssuerPersonalizedDifficulty(this.selfPubkey)) + return this.prover.prove(unsignedBlock, trialLevel, (manualValues && manualValues.time) || null); + } +} + +interface BlockGeneratorInterface { + findNewCertsFromWoT(current:DBBlock): Promise<any> + filterJoiners(preJoinData:any): Promise<any> +} + +/** + * Class to implement strategy of automatic selection of incoming data for next block. + * @constructor + */ +class NextBlockGenerator implements BlockGeneratorInterface { + + constructor( + private mainContext:BlockchainContext, + private conf:ConfDTO, + private dal:FileDAL, + private logger:any) { + } + + async findNewCertsFromWoT(current:DBBlock) { + const updates:any = {}; + const updatesToFrom:any = {}; + const certs = await this.dal.certsFindNew(); + const vHEAD_1 = await this.mainContext.getvHEAD_1(); + for (const cert of certs) { + const targetIdty = await this.dal.getIdentityByHashOrNull(cert.target); + // The identity must be known + if (targetIdty) { + const certSig = cert.sig; + // Do not rely on certification block UID, prefer using the known hash of the block by its given number + const targetBlock = await this.dal.getBlock(cert.block_number); + // Check if writable + let duration = current && targetBlock ? current.medianTime - parseInt(targetBlock.medianTime) : 0; + if (targetBlock && duration <= this.conf.sigWindow) { + cert.sig = ''; + cert.currency = this.conf.currency; + cert.issuer = cert.from; + cert.idty_issuer = targetIdty.pubkey; + cert.idty_uid = targetIdty.uid; + cert.idty_buid = targetIdty.buid; + cert.idty_sig = targetIdty.sig; + cert.buid = current ? 
[cert.block_number, targetBlock.hash].join('-') : common.constants.SPECIAL_BLOCK; + const rawCert = Certification.fromJSON(cert).getRaw(); + if (keyring.verify(rawCert, certSig, cert.from)) { + cert.sig = certSig; + let exists = false; + if (current) { + // Already exists a link not replayable yet? + exists = await this.dal.existsNonReplayableLink(cert.from, cert.to); + } + if (!exists) { + // Already exists a link not chainable yet? + // No chainability block means absolutely nobody can issue certifications yet + exists = await this.dal.existsNonChainableLink(cert.from, vHEAD_1, this.conf.sigStock); + if (!exists) { + // It does NOT already exists a similar certification written, which is not replayable yet + // Signatory must be a member + const isSignatoryAMember = await this.dal.isMember(cert.from); + const isCertifiedANonLeavingMember = isSignatoryAMember && (await this.dal.isMemberAndNonLeaver(cert.to)); + // Certified must be a member and non-leaver + if (isSignatoryAMember && isCertifiedANonLeavingMember) { + updatesToFrom[cert.to] = updatesToFrom[cert.to] || []; + updates[cert.to] = updates[cert.to] || []; + if (updatesToFrom[cert.to].indexOf(cert.from) == -1) { + updates[cert.to].push(cert); + updatesToFrom[cert.to].push(cert.from); + } + } + } + } + } + } + } + } + return updates; + } + + async filterJoiners(preJoinData:any) { + const filtered:any = {}; + const filterings:any = []; + const filter = async (pubkey:string) => { + try { + // No manual filtering, takes all BUT already used UID or pubkey + let exists = await GLOBAL_RULES_HELPERS.checkExistsUserID(preJoinData[pubkey].identity.uid, this.dal); + if (exists && !preJoinData[pubkey].identity.wasMember) { + throw 'UID already taken'; + } + exists = await GLOBAL_RULES_HELPERS.checkExistsPubkey(pubkey, this.dal); + if (exists && !preJoinData[pubkey].identity.wasMember) { + throw 'Pubkey already taken'; + } + filtered[pubkey] = preJoinData[pubkey]; + } + catch (err) { + this.logger.warn(err); + } + } + _.keys(preJoinData).forEach( (joinPubkey:any) => filterings.push(filter(joinPubkey))); + await Promise.all(filterings) + return filtered; + } +} + +/** + * Class to implement strategy of manual selection of root members for root block. 
+ * @constructor + */ +class ManualRootGenerator implements BlockGeneratorInterface { + + findNewCertsFromWoT() { + return Promise.resolve({}) + } + + async filterJoiners(preJoinData:any) { + const filtered:any = {}; + const newcomers = _(preJoinData).keys(); + const uids:string[] = []; + newcomers.forEach((newcomer:string) => uids.push(preJoinData[newcomer].ms.userid)); + + if (newcomers.length > 0) { + const answers = await inquirer.prompt([{ + type: "checkbox", + name: "uids", + message: "Newcomers to add", + choices: uids, + default: uids[0] + }]); + newcomers.forEach((newcomer:string) => { + if (~answers.uids.indexOf(preJoinData[newcomer].ms.userid)) + filtered[newcomer] = preJoinData[newcomer]; + }); + if (answers.uids.length == 0) + throw 'No newcomer selected'; + return filtered + } else { + throw 'No newcomer found'; + } + } +} diff --git a/app/modules/prover/lib/blockProver.js b/app/modules/prover/lib/blockProver.js index c01019626590b4ac6f1eb28db4318a6f6966a016..b0049b1a2a94eb32718c2bacbc16c45454997dd6 100644 --- a/app/modules/prover/lib/blockProver.js +++ b/app/modules/prover/lib/blockProver.js @@ -1,176 +1,181 @@ "use strict"; -const co = require('co'); -const engine = require('./engine'); -const querablep = require('querablep'); -const common = require('duniter-common'); -const constants = require('./constants'); - -const Block = common.document.Block - +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const constants_1 = require("./constants"); +const engine_1 = require("./engine"); +const querablep = require('querablep'); +const common = require('duniter-common'); +const Block = common.document.Block; const POW_FOUND = true; const POW_NOT_FOUND_YET = false; - -module.exports = (server) => new BlockProver(server); - -function BlockProver(server) { - - let conf = server.conf; - let pair = conf.pair; - let logger = server.logger; - let waitResolve; - - let workerFarmPromise; - - function getWorker() { - return (workerFarmPromise || (workerFarmPromise = co(function*() { - return new WorkerFarm(); - }))); - } - - const debug = process.execArgv.toString().indexOf('--debug') !== -1; - if(debug) { - //Set an unused port number. 
- process.execArgv = []; - } - - this.cancel = (gottenBlock) => co(function*() { - // If no farm was instanciated, there is nothing to do yet - if (workerFarmPromise) { - let farm = yield getWorker(); - if (farm.isComputing() && !farm.isStopping()) { - yield farm.stopPoW(gottenBlock); - } - if (waitResolve) { - waitResolve(); - waitResolve = null; - } +class WorkerFarm { + constructor(server, logger) { + this.server = server; + this.logger = logger; + this.onAlmostPoW = null; + this.powPromise = null; + this.stopPromise = null; + this.checkPoWandNotify = null; + this.theEngine = new engine_1.PowEngine(server.conf, server.logger); + // An utility method to filter the pow notifications + this.checkPoWandNotify = (hash, block, found) => { + const matches = hash.match(/^(0{2,})[^0]/); + if (matches && this.onAlmostPoW) { + this.onAlmostPoW(hash, matches, block, found); + } + }; + // Keep track of PoW advancement + this.theEngine.setOnInfoMessage((message) => { + if (message.error) { + this.logger.error('Error in engine#%s:', this.theEngine.id, message.error); + } + else if (message.pow) { + // A message about the PoW + const msg = message.pow; + this.checkPoWandNotify(msg.pow, msg.block, POW_NOT_FOUND_YET); + } + }); } - }); - - this.prove = function (block, difficulty, forcedTime) { - - if (waitResolve) { - waitResolve(); - waitResolve = null; + changeCPU(cpu) { + return this.theEngine.setConf({ cpu }); } - - const remainder = difficulty % 16; - const nbZeros = (difficulty - remainder) / 16; - const highMark = common.constants.PROOF_OF_WORK.UPPER_BOUND[remainder]; - - return co(function*() { - - let powFarm = yield getWorker(); - - if (block.number == 0) { - // On initial block, difficulty is the one given manually - block.powMin = difficulty; - } - - // Start - powFarm.setOnAlmostPoW(function(pow, matches, aBlock, found) { - powEvent(found, pow); - if (matches && matches[1].length >= constants.MINIMAL_ZEROS_TO_SHOW_IN_LOGS) { - logger.info('Matched %s zeros %s with Nonce = %s for block#%s by %s', matches[1].length, pow, aBlock.nonce, aBlock.number, aBlock.issuer.slice(0,6)); - } - }); - - block.nonce = 0; - logger.info('Generating proof-of-work with %s leading zeros followed by [0-' + highMark + ']... (CPU usage set to %s%) for block#%s', nbZeros, (conf.cpu * 100).toFixed(0), block.number, block.issuer.slice(0,6)); - const start = Date.now(); - let result = yield powFarm.askNewProof({ - newPoW: { conf: conf, block: block, zeros: nbZeros, highMark: highMark, forcedTime: forcedTime, pair } - }); - if (!result) { - logger.info('GIVEN proof-of-work for block#%s with %s leading zeros followed by [0-' + highMark + ']! 
stop PoW for %s', block.number, nbZeros, pair.pub.slice(0,6)); - throw 'Proof-of-work computation canceled because block received'; - } else { - const proof = result.block; - const testsCount = result.testsCount; - const duration = (Date.now() - start); - const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2); - logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond); - logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros); - return Block.fromJSON(proof); - } - }); - }; - - this.changeCPU = (cpu) => co(function*() { - conf.cpu = cpu; - const farm = yield getWorker(); - return farm.changeCPU(cpu); - }); - - this.changePoWPrefix = (prefix) => co(function*() { - const farm = yield getWorker(); - return farm.changePoWPrefix(prefix); - }); - - function powEvent(found, hash) { - server && server.push({ pow: { found, hash } }); - } - - function WorkerFarm() { - // Create - const theEngine = engine(server.conf, server.logger) - - let onAlmostPoW - - // An utility method to filter the pow notifications - const checkPoWandNotify = (hash, block, found) => { - const matches = hash.match(/^(0{2,})[^0]/); - if (matches && onAlmostPoW) { - onAlmostPoW(hash, matches, block, found); - } + changePoWPrefix(prefix) { + return this.theEngine.setConf({ prefix }); + } + isComputing() { + return this.powPromise !== null && !this.powPromise.isResolved(); + } + isStopping() { + return this.stopPromise !== null && !this.stopPromise.isResolved(); } - - // Keep track of PoW advancement - theEngine.setOnInfoMessage((message) => { - if (message.error) { - logger.error('Error in engine#%s:', theEngine.id, message.error) - } else if (message.pow) { - // A message about the PoW - const msg = message.pow - checkPoWandNotify(msg.pow, msg.block, POW_NOT_FOUND_YET) - } - }) - - // We use as much cores as available, but not more than CORES_MAXIMUM_USE_IN_PARALLEL - - let powPromise = null - let stopPromise = null - - this.changeCPU = (cpu) => theEngine.setConf({ cpu }) - - this.changePoWPrefix = (prefix) => theEngine.setConf({ prefix }) - - this.isComputing = () => powPromise !== null && !powPromise.isResolved() - - this.isStopping = () => stopPromise !== null && !stopPromise.isResolved() - /** * Eventually stops the engine PoW if one was computing */ - this.stopPoW = (gottenBlock) => { - stopPromise = querablep(theEngine.cancel(gottenBlock)) - return stopPromise; - }; - + stopPoW() { + this.stopPromise = querablep(this.theEngine.cancel()); + return this.stopPromise; + } /** * Starts a new computation of PoW * @param stuff The necessary data for computing the PoW */ - this.askNewProof = (stuff) => co(function*() { - // Starts the PoW - powPromise = querablep(theEngine.prove(stuff)) - const res = yield powPromise - if (res) { - checkPoWandNotify(res.pow.pow, res.pow.block, POW_FOUND); - } - return res && res.pow - }) - - this.setOnAlmostPoW = (onPoW) => onAlmostPoW = onPoW - } + askNewProof(stuff) { + return __awaiter(this, void 0, void 0, function* () { + // Starts the PoW + this.powPromise = querablep(this.theEngine.prove(stuff)); + const res = yield this.powPromise; + if (res) { + this.checkPoWandNotify(res.pow.pow, res.pow.block, POW_FOUND); + } + return res && res.pow; + }); + } + setOnAlmostPoW(onPoW) { + this.onAlmostPoW = onPoW; + } +} +exports.WorkerFarm = WorkerFarm; +class BlockProver { + constructor(server) { + this.server = server; + this.conf = server.conf; + this.pair = 
this.conf.pair; + this.logger = server.logger; + const debug = process.execArgv.toString().indexOf('--debug') !== -1; + if (debug) { + //Set an unused port number. + process.execArgv = []; + } + } + getWorker() { + if (!this.workerFarmPromise) { + this.workerFarmPromise = (() => __awaiter(this, void 0, void 0, function* () { + return new WorkerFarm(this.server, this.logger); + }))(); + } + return this.workerFarmPromise; + } + cancel() { + return __awaiter(this, void 0, void 0, function* () { + // If no farm was instanciated, there is nothing to do yet + if (this.workerFarmPromise) { + let farm = yield this.getWorker(); + if (farm.isComputing() && !farm.isStopping()) { + yield farm.stopPoW(); + } + if (this.waitResolve) { + this.waitResolve(); + this.waitResolve = null; + } + } + }); + } + prove(block, difficulty, forcedTime = null) { + if (this.waitResolve) { + this.waitResolve(); + this.waitResolve = null; + } + const remainder = difficulty % 16; + const nbZeros = (difficulty - remainder) / 16; + const highMark = common.constants.PROOF_OF_WORK.UPPER_BOUND[remainder]; + return (() => __awaiter(this, void 0, void 0, function* () { + let powFarm = yield this.getWorker(); + if (block.number == 0) { + // On initial block, difficulty is the one given manually + block.powMin = difficulty; + } + // Start + powFarm.setOnAlmostPoW((pow, matches, aBlock, found) => { + this.powEvent(found, pow); + if (matches && matches[1].length >= constants_1.Constants.MINIMAL_ZEROS_TO_SHOW_IN_LOGS) { + this.logger.info('Matched %s zeros %s with Nonce = %s for block#%s by %s', matches[1].length, pow, aBlock.nonce, aBlock.number, aBlock.issuer.slice(0, 6)); + } + }); + block.nonce = 0; + this.logger.info('Generating proof-of-work with %s leading zeros followed by [0-' + highMark + ']... (CPU usage set to %s%) for block#%s', nbZeros, (this.conf.cpu * 100).toFixed(0), block.number, block.issuer.slice(0, 6)); + const start = Date.now(); + let result = yield powFarm.askNewProof({ + newPoW: { conf: this.conf, block: block, zeros: nbZeros, highMark: highMark, forcedTime: forcedTime, pair: this.pair } + }); + if (!result) { + this.logger.info('GIVEN proof-of-work for block#%s with %s leading zeros followed by [0-' + highMark + ']! 
stop PoW for %s', block.number, nbZeros, this.pair && this.pair.pub.slice(0, 6)); + throw 'Proof-of-work computation canceled because block received'; + } + else { + const proof = result.block; + const testsCount = result.testsCount; + const duration = (Date.now() - start); + const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2); + this.logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond); + this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros); + return Block.fromJSON(proof); + } + }))(); + } + ; + changeCPU(cpu) { + return __awaiter(this, void 0, void 0, function* () { + this.conf.cpu = cpu; + const farm = yield this.getWorker(); + return farm.changeCPU(cpu); + }); + } + changePoWPrefix(prefix) { + return __awaiter(this, void 0, void 0, function* () { + const farm = yield this.getWorker(); + return farm.changePoWPrefix(prefix); + }); + } + powEvent(found, hash) { + this.server && this.server.push({ pow: { found, hash } }); + } } +exports.BlockProver = BlockProver; +//# sourceMappingURL=blockProver.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/blockProver.ts b/app/modules/prover/lib/blockProver.ts new file mode 100644 index 0000000000000000000000000000000000000000..e092b93f7867a0f262aecb9a0d1932670fcd3836 --- /dev/null +++ b/app/modules/prover/lib/blockProver.ts @@ -0,0 +1,196 @@ +import {Constants} from "./constants" +import {ConfDTO, Keypair} from "../../../lib/dto/ConfDTO" +import {PowEngine} from "./engine" +import {DBBlock} from "../../../lib/db/DBBlock" + +const querablep = require('querablep'); +const common = require('duniter-common'); + +const Block = common.document.Block + +const POW_FOUND = true; +const POW_NOT_FOUND_YET = false; + +export class WorkerFarm { + + private theEngine:PowEngine + private onAlmostPoW:any = null + private powPromise:any = null + private stopPromise:any = null + private checkPoWandNotify:any = null + + constructor(private server:any, private logger:any) { + + this.theEngine = new PowEngine(server.conf, server.logger) + + // An utility method to filter the pow notifications + this.checkPoWandNotify = (hash:string, block:DBBlock, found:boolean) => { + const matches = hash.match(/^(0{2,})[^0]/); + if (matches && this.onAlmostPoW) { + this.onAlmostPoW(hash, matches, block, found); + } + } + + // Keep track of PoW advancement + this.theEngine.setOnInfoMessage((message:any) => { + if (message.error) { + this.logger.error('Error in engine#%s:', this.theEngine.id, message.error) + } else if (message.pow) { + // A message about the PoW + const msg = message.pow + this.checkPoWandNotify(msg.pow, msg.block, POW_NOT_FOUND_YET) + } + }) + } + + + changeCPU(cpu:any) { + return this.theEngine.setConf({ cpu }) + } + + changePoWPrefix(prefix:any) { + return this.theEngine.setConf({ prefix }) + } + + isComputing() { + return this.powPromise !== null && !this.powPromise.isResolved() + } + + isStopping() { + return this.stopPromise !== null && !this.stopPromise.isResolved() + } + + /** + * Eventually stops the engine PoW if one was computing + */ + stopPoW() { + this.stopPromise = querablep(this.theEngine.cancel()) + return this.stopPromise; + } + + /** + * Starts a new computation of PoW + * @param stuff The necessary data for computing the PoW + */ + async askNewProof(stuff:any) { + // Starts the PoW + this.powPromise = querablep(this.theEngine.prove(stuff)) + const res = await 
this.powPromise + if (res) { + this.checkPoWandNotify(res.pow.pow, res.pow.block, POW_FOUND); + } + return res && res.pow + } + + setOnAlmostPoW(onPoW:any) { + this.onAlmostPoW = onPoW + } +} + +export class BlockProver { + + conf:ConfDTO + pair:Keypair|null + logger:any + waitResolve:any + workerFarmPromise:any + + constructor(private server:any) { + this.conf = server.conf + this.pair = this.conf.pair + this.logger = server.logger + + const debug = process.execArgv.toString().indexOf('--debug') !== -1; + if(debug) { + //Set an unused port number. + process.execArgv = []; + } + } + + getWorker() { + if (!this.workerFarmPromise) { + this.workerFarmPromise = (async () => { + return new WorkerFarm(this.server, this.logger) + })() + } + return this.workerFarmPromise + } + + async cancel() { + // If no farm was instanciated, there is nothing to do yet + if (this.workerFarmPromise) { + let farm = await this.getWorker(); + if (farm.isComputing() && !farm.isStopping()) { + await farm.stopPoW() + } + if (this.waitResolve) { + this.waitResolve(); + this.waitResolve = null; + } + } + } + + prove(block:any, difficulty:any, forcedTime:any = null) { + + if (this.waitResolve) { + this.waitResolve(); + this.waitResolve = null; + } + + const remainder = difficulty % 16; + const nbZeros = (difficulty - remainder) / 16; + const highMark = common.constants.PROOF_OF_WORK.UPPER_BOUND[remainder]; + + return (async () => { + + let powFarm = await this.getWorker(); + + if (block.number == 0) { + // On initial block, difficulty is the one given manually + block.powMin = difficulty; + } + + // Start + powFarm.setOnAlmostPoW((pow:any, matches:any, aBlock:any, found:boolean) => { + this.powEvent(found, pow); + if (matches && matches[1].length >= Constants.MINIMAL_ZEROS_TO_SHOW_IN_LOGS) { + this.logger.info('Matched %s zeros %s with Nonce = %s for block#%s by %s', matches[1].length, pow, aBlock.nonce, aBlock.number, aBlock.issuer.slice(0,6)); + } + }); + + block.nonce = 0; + this.logger.info('Generating proof-of-work with %s leading zeros followed by [0-' + highMark + ']... (CPU usage set to %s%) for block#%s', nbZeros, (this.conf.cpu * 100).toFixed(0), block.number, block.issuer.slice(0,6)); + const start = Date.now(); + let result = await powFarm.askNewProof({ + newPoW: { conf: this.conf, block: block, zeros: nbZeros, highMark: highMark, forcedTime: forcedTime, pair: this.pair } + }); + if (!result) { + this.logger.info('GIVEN proof-of-work for block#%s with %s leading zeros followed by [0-' + highMark + ']! 
stop PoW for %s', block.number, nbZeros, this.pair && this.pair.pub.slice(0,6)); + throw 'Proof-of-work computation canceled because block received'; + } else { + const proof = result.block; + const testsCount = result.testsCount; + const duration = (Date.now() - start); + const testsPerSecond = (testsCount / (duration / 1000)).toFixed(2); + this.logger.info('Done: #%s, %s in %ss (%s tests, ~%s tests/s)', block.number, proof.hash, (duration / 1000).toFixed(2), testsCount, testsPerSecond); + this.logger.info('FOUND proof-of-work with %s leading zeros followed by [0-' + highMark + ']!', nbZeros); + return Block.fromJSON(proof); + } + })() + }; + + async changeCPU(cpu:number) { + this.conf.cpu = cpu; + const farm = await this.getWorker() + return farm.changeCPU(cpu) + } + + async changePoWPrefix(prefix:any) { + const farm = await this.getWorker() + return farm.changePoWPrefix(prefix) + } + + private powEvent(found:boolean, hash:string) { + this.server && this.server.push({ pow: { found, hash } }); + } +} diff --git a/app/modules/prover/lib/constants.js b/app/modules/prover/lib/constants.js index 4ed3b07654c3c470d8100596098b42bd419f41dc..22f27c1898633dfa4035ff6bcb75ecd3626ed787 100644 --- a/app/modules/prover/lib/constants.js +++ b/app/modules/prover/lib/constants.js @@ -1,20 +1,14 @@ "use strict"; - -module.exports = { - - PULLING_MAX_DURATION: 10 * 1000, // 10 seconds - - CORES_MAXIMUM_USE_IN_PARALLEL: 8, - - MINIMAL_ZEROS_TO_SHOW_IN_LOGS: 3, - - POW_MINIMAL_TO_SHOW: 2, - DEFAULT_CPU: 0.6, - - NONCE_RANGE: 1000 * 1000 * 1000 * 100, - - POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64, - - // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds. - POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000 +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Constants = { + PULLING_MAX_DURATION: 10 * 1000, + CORES_MAXIMUM_USE_IN_PARALLEL: 8, + MINIMAL_ZEROS_TO_SHOW_IN_LOGS: 3, + POW_MINIMAL_TO_SHOW: 2, + DEFAULT_CPU: 0.6, + NONCE_RANGE: 1000 * 1000 * 1000 * 100, + POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64, + // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds. + POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000 }; +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/constants.ts b/app/modules/prover/lib/constants.ts new file mode 100644 index 0000000000000000000000000000000000000000..a30bd0ed44537fc5bf4998d6b36a0ea178b33191 --- /dev/null +++ b/app/modules/prover/lib/constants.ts @@ -0,0 +1,18 @@ +export const Constants = { + + PULLING_MAX_DURATION: 10 * 1000, // 10 seconds + + CORES_MAXIMUM_USE_IN_PARALLEL: 8, + + MINIMAL_ZEROS_TO_SHOW_IN_LOGS: 3, + + POW_MINIMAL_TO_SHOW: 2, + DEFAULT_CPU: 0.6, + + NONCE_RANGE: 1000 * 1000 * 1000 * 100, + + POW_MAXIMUM_ACCEPTABLE_HANDICAP: 64, + + // When to trigger the PoW process again if no PoW is triggered for a while. In milliseconds. 
+ POW_SECURITY_RETRY_DELAY: 10 * 60 * 1000 +} diff --git a/app/modules/prover/lib/engine.js b/app/modules/prover/lib/engine.js index 1d19458ba7d9995995d0bc77352cff82302f0666..c4a71e69e97ab2eb0dcedaec427d59b35e5d113b 100644 --- a/app/modules/prover/lib/engine.js +++ b/app/modules/prover/lib/engine.js @@ -1,47 +1,53 @@ "use strict"; - -const os = require('os') -const co = require('co') -const querablep = require('querablep') -const powCluster = require('./powCluster') -const constants = require('./constants') - -module.exports = function (conf, logger) { - return new PowEngine(conf, logger); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); }; - -function PowEngine(conf, logger) { - - // Super important for Node.js debugging - const debug = process.execArgv.toString().indexOf('--debug') !== -1; - if(debug) { +Object.defineProperty(exports, "__esModule", { value: true }); +const constants_1 = require("./constants"); +const powCluster_1 = require("./powCluster"); +const os = require('os'); +// Super important for Node.js debugging +const debug = process.execArgv.toString().indexOf('--debug') !== -1; +if (debug) { //Set an unused port number. process.execArgv = []; - } - - const nbWorkers = (conf && conf.nbCores) || Math.min(constants.CORES_MAXIMUM_USE_IN_PARALLEL, require('os').cpus().length) - const cluster = powCluster(nbWorkers, logger) - - this.forceInit = () => cluster.initCluster() - - this.id = cluster.clusterId - - this.prove = (stuff) => co(function*() { - - if (cluster.hasProofPending) { - yield cluster.cancelWork() +} +class PowEngine { + constructor(conf, logger) { + this.conf = conf; + // We use as much cores as available, but not more than CORES_MAXIMUM_USE_IN_PARALLEL + this.nbWorkers = (conf && conf.nbCores) || Math.min(constants_1.Constants.CORES_MAXIMUM_USE_IN_PARALLEL, require('os').cpus().length); + this.cluster = new powCluster_1.Master(this.nbWorkers, logger); + this.id = this.cluster.clusterId; + } + forceInit() { + return this.cluster.initCluster(); + } + prove(stuff) { + return __awaiter(this, void 0, void 0, function* () { + if (this.cluster.hasProofPending) { + yield this.cluster.cancelWork(); + } + if (os.arch().match(/arm/)) { + stuff.conf.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2 + } + return yield this.cluster.proveByWorkers(stuff); + }); + } + cancel() { + return this.cluster.cancelWork(); + } + setConf(value) { + return this.cluster.changeConf(value); } - - if (os.arch().match(/arm/)) { - stuff.conf.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2 + setOnInfoMessage(callback) { + return this.cluster.onInfoMessage = callback; } - let res = yield cluster.proveByWorkers(stuff) - return res - }) - - this.cancel = () => cluster.cancelWork() - - this.setConf = (value) => cluster.changeConf(value) - - this.setOnInfoMessage = (callback) => cluster.onInfoMessage = callback } +exports.PowEngine = PowEngine; +//# sourceMappingURL=engine.js.map \ No newline at end of file 
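Aside, not part of the patch: the TypeScript source for the compiled engine above follows next, and its public surface is small. You construct a `PowEngine` with a conf and a logger, optionally `forceInit()` the worker cluster, then call `prove()`, `cancel()`, `setConf()` and `setOnInfoMessage()`. The sketch below shows how a caller such as `BlockProver` is expected to drive it; the conf shape, the import path and the `newPoW` payload values are illustrative assumptions based on how `WorkerFarm` builds that payload, not definitions taken from this diff.

```ts
// Sketch only: driving the PowEngine introduced by this patch.
// Paths, conf shape and payload values are illustrative assumptions.
import {PowEngine} from "./app/modules/prover/lib/engine"

async function proveOnce(): Promise<any> {
  const conf: any = { nbCores: 2, cpu: 0.6, prefix: 1 }
  const engine = new PowEngine(conf, console)

  // Progress and error messages are forwarded from the worker cluster
  engine.setOnInfoMessage((msg: any) => {
    if (msg.error) console.error('engine#%s error:', engine.id, msg.error)
  })

  // The payload mirrors the `newPoW` object assembled by BlockProver.prove()
  const result = await engine.prove({
    newPoW: {
      conf,
      block: { number: 1, nonce: 0 },        // hypothetical block being sealed
      zeros: 3,                               // required number of leading zeros
      highMark: 'A',                          // illustrative upper bound for the next digit
      forcedTime: null,
      pair: { pub: '<pub>', sec: '<sec>' }    // keypair placeholder
    }
  })

  await engine.cancel()                        // resolves any pending work
  return result                                // null when the proof was canceled
}
```

Note that `prove()` resolves with `null` when the work is canceled, which is exactly how `WorkerFarm.askNewProof()` and `BlockProver.prove()` detect a canceled proof further up in this patch.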
diff --git a/app/modules/prover/lib/engine.ts b/app/modules/prover/lib/engine.ts new file mode 100644 index 0000000000000000000000000000000000000000..2a6f83ddfd30caa77b73788c0d9f423843e87581 --- /dev/null +++ b/app/modules/prover/lib/engine.ts @@ -0,0 +1,55 @@ +import {Constants} from "./constants" +import {Master as PowCluster} from "./powCluster" +import {ConfDTO} from "../../../lib/dto/ConfDTO" + +const os = require('os') + +// Super important for Node.js debugging +const debug = process.execArgv.toString().indexOf('--debug') !== -1; +if(debug) { + //Set an unused port number. + process.execArgv = []; +} + +export class PowEngine { + + private nbWorkers:number + private cluster:PowCluster + readonly id:number + + constructor(private conf:ConfDTO, logger:any) { + + // We use as much cores as available, but not more than CORES_MAXIMUM_USE_IN_PARALLEL + this.nbWorkers = (conf && conf.nbCores) || Math.min(Constants.CORES_MAXIMUM_USE_IN_PARALLEL, require('os').cpus().length) + this.cluster = new PowCluster(this.nbWorkers, logger) + this.id = this.cluster.clusterId + } + + forceInit() { + return this.cluster.initCluster() + } + + async prove(stuff:any) { + + if (this.cluster.hasProofPending) { + await this.cluster.cancelWork() + } + + if (os.arch().match(/arm/)) { + stuff.conf.cpu /= 2; // Don't know exactly why is ARM so much saturated by PoW, so let's divide by 2 + } + return await this.cluster.proveByWorkers(stuff) + } + + cancel() { + return this.cluster.cancelWork() + } + + setConf(value:any) { + return this.cluster.changeConf(value) + } + + setOnInfoMessage(callback:any) { + return this.cluster.onInfoMessage = callback + } +} diff --git a/app/modules/prover/lib/permanentProver.js b/app/modules/prover/lib/permanentProver.js index 62d45039a68ae0c5ac90f50d29894cffc8d40337..a411b42318044953707b56c66ec1cf1edf6e5d14 100644 --- a/app/modules/prover/lib/permanentProver.js +++ b/app/modules/prover/lib/permanentProver.js @@ -1,209 +1,216 @@ "use strict"; - -const co = require('co'); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const blockGenerator_1 = require("./blockGenerator"); +const blockProver_1 = require("./blockProver"); +const constants_1 = require("./constants"); const querablep = require('querablep'); const common = require('duniter-common'); -const constants = require('./constants'); -const blockProver = require('./blockProver'); -const blockGenerator = require('./blockGenerator'); - -module.exports = (server) => new PermanentProver(server); - -function PermanentProver(server) { - - const dos2unix = common.dos2unix; - const parsers = common.parsers; - const logger = server.logger; - const conf = server.conf; - const prover = this.prover = blockProver(server); - const generator = blockGenerator(server, prover); - const that = this; - - let blockchainChangedResolver = null, - promiseOfWaitingBetween2BlocksOfOurs = null, - lastComputedBlock = null; - - // Promises triggering the prooving lopp - let resolveContinuePromise = null; - let continuePromise = new Promise((resolve) => resolveContinuePromise = resolve); - - let pullingResolveCallback = null; - let timeoutPullingCallback = null, timeoutPulling; - let pullingFinishedPromise = querablep(Promise.resolve()); - - this.allowedToStart = () => { - resolveContinuePromise(true); - }; - - // When we detected a pulling, we stop the PoW loop - this.pullingDetected = () => { - if (pullingFinishedPromise.isResolved()) { - pullingFinishedPromise = querablep(Promise.race([ - // We wait for end of pulling signal - new Promise((res) => pullingResolveCallback = res), - // Security: if the end of pulling signal is not emitted after some, we automatically trigger it - new Promise((res) => timeoutPullingCallback = () => { - logger.warn('Pulling not finished after %s ms, continue PoW', constants.PULLING_MAX_DURATION); - res(); - }) - ])); +const dos2unix = common.dos2unix; +const parsers = common.parsers; +class PermanentProver { + constructor(server) { + this.server = server; + this.permanenceStarted = false; + this.blockchainChangedResolver = null; + this.promiseOfWaitingBetween2BlocksOfOurs = null; + this.lastComputedBlock = null; + this.resolveContinuePromise = null; + this.continuePromise = null; + this.pullingResolveCallback = null; + this.timeoutPullingCallback = null; + this.pullingFinishedPromise = null; + this.timeoutPulling = null; + this.logger = server.logger; + this.conf = server.conf; + this.prover = new blockProver_1.BlockProver(server); + this.generator = new blockGenerator_1.BlockGeneratorWhichProves(server, this.prover); + // Promises triggering the prooving lopp + this.resolveContinuePromise = null; + this.continuePromise = new Promise((resolve) => this.resolveContinuePromise = resolve); + this.pullingResolveCallback = null; + this.timeoutPullingCallback = null; + this.pullingFinishedPromise = querablep(Promise.resolve()); + this.loops = 0; } - // Delay the triggering of pulling timeout - if (timeoutPulling) { - clearTimeout(timeoutPulling); + allowedToStart() { + if (!this.permanenceStarted) { + this.permanenceStarted = true; + this.startPermanence(); + } + this.resolveContinuePromise(true); } - timeoutPulling = setTimeout(timeoutPullingCallback, constants.PULLING_MAX_DURATION); - }; - - this.pullingFinished = () => pullingResolveCallback && pullingResolveCallback(); - - this.loops = 0; - - 
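Worth noting before the main loop below: both the old function and the new class gate that loop on `continuePromise`. `allowedToStart()` resolves it with `true`, so every `while (yield/await continuePromise)` check passes and the loop keeps running, while `stopEveryting()` swaps in a fresh, unresolved promise so the next check parks until the prover is allowed to start again. A standalone sketch of that gate pattern, with illustrative names that are not part of the patch:

```ts
// Sketch only: the "continue gate" pattern used by PermanentProver, in isolation.
class ContinueGate {
  private resolveContinue: ((v: boolean) => void) | null = null
  private continuePromise: Promise<boolean>

  constructor() {
    this.continuePromise = new Promise<boolean>(res => this.resolveContinue = res)
  }

  // Mirrors allowedToStart(): unblocks the waiting loop
  allow() {
    this.resolveContinue && this.resolveContinue(true)
  }

  // Mirrors stopEveryting(): the next await blocks until allow() is called again
  stop() {
    this.continuePromise = new Promise<boolean>(res => this.resolveContinue = res)
  }

  // Mirrors the main proof loop: runs continuously once allowed,
  // and parks on the unresolved promise after stop()
  async run(body: () => Promise<void>) {
    while (await this.continuePromise) {
      await body()
    }
  }
}
```

The swap trick keeps the loop responsive without polling: once stopped, it simply waits on a promise that nobody has resolved yet.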
/****************** - * Main proof loop - *****************/ - co(function*() { - while (yield continuePromise) { - try { - const waitingRaces = []; - - // By default, we do not make a new proof - let doProof = false; - - try { - const selfPubkey = server.keyPair.publicKey; - const dal = server.dal; - const theConf = server.conf; - if (!selfPubkey) { - throw 'No self pubkey found.'; - } - let current; - const isMember = yield dal.isMember(selfPubkey); - if (!isMember) { - throw 'Local node is not a member. Waiting to be a member before computing a block.'; - } - current = yield dal.getCurrentBlockOrNull(); - if (!current) { - throw 'Waiting for a root block before computing new blocks'; - } - const trial = yield server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); - checkTrialIsNotTooHigh(trial, current, selfPubkey); - const lastIssuedByUs = current.issuer == selfPubkey; - if (pullingFinishedPromise && !pullingFinishedPromise.isFulfilled()) { - logger.warn('Waiting for the end of pulling...'); - yield pullingFinishedPromise; - logger.warn('Pulling done. Continue proof-of-work loop.'); - } - if (lastIssuedByUs && !promiseOfWaitingBetween2BlocksOfOurs) { - promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => setTimeout(resolve, theConf.powDelay)); - logger.warn('Waiting ' + theConf.powDelay + 'ms before starting to compute next block...'); - } else { - // We have waited enough - promiseOfWaitingBetween2BlocksOfOurs = null; - // But under some conditions, we can make one - doProof = true; - } - } catch (e) { - logger.warn(e); + // When we detected a pulling, we stop the PoW loop + pullingDetected() { + if (this.pullingFinishedPromise.isResolved()) { + this.pullingFinishedPromise = querablep(Promise.race([ + // We wait for end of pulling signal + new Promise((res) => this.pullingResolveCallback = res), + // Security: if the end of pulling signal is not emitted after some, we automatically trigger it + new Promise((res) => this.timeoutPullingCallback = () => { + this.logger.warn('Pulling not finished after %s ms, continue PoW', constants_1.Constants.PULLING_MAX_DURATION); + res(); + }) + ])); + } + // Delay the triggering of pulling timeout + if (this.timeoutPulling) { + clearTimeout(this.timeoutPulling); } - - if (doProof) { - - /******************* - * COMPUTING A BLOCK - ******************/ - yield Promise.race([ - - // We still listen at eventual blockchain change - co(function*() { - // If the blockchain changes - yield new Promise((resolve) => blockchainChangedResolver = resolve); - // Then cancel the generation - yield prover.cancel(); - }), - - // The generation - co(function*() { - try { - const current = yield server.dal.getCurrentBlockOrNull(); - const selfPubkey = server.keyPair.publicKey; - const trial2 = yield server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); - checkTrialIsNotTooHigh(trial2, current, selfPubkey); - lastComputedBlock = yield generator.makeNextBlock(null, trial2); + this.timeoutPulling = setTimeout(this.timeoutPullingCallback, constants_1.Constants.PULLING_MAX_DURATION); + } + pullingFinished() { + return this.pullingResolveCallback && this.pullingResolveCallback(); + } + startPermanence() { + return __awaiter(this, void 0, void 0, function* () { + /****************** + * Main proof loop + *****************/ + while (yield this.continuePromise) { try { - const obj = parsers.parseBlock.syncWrite(dos2unix(lastComputedBlock.getRawSigned())); - yield server.singleWritePromise(obj); - } catch (err) { - logger.warn('Proof-of-work 
self-submission: %s', err.message || err); + const waitingRaces = []; + // By default, we do not make a new proof + let doProof = false; + try { + const selfPubkey = this.server.keyPair.publicKey; + const dal = this.server.dal; + const theConf = this.server.conf; + if (!selfPubkey) { + throw 'No self pubkey found.'; + } + let current; + const isMember = yield dal.isMember(selfPubkey); + if (!isMember) { + throw 'Local node is not a member. Waiting to be a member before computing a block.'; + } + current = yield dal.getCurrentBlockOrNull(); + if (!current) { + throw 'Waiting for a root block before computing new blocks'; + } + const trial = yield this.server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); + this.checkTrialIsNotTooHigh(trial, current, selfPubkey); + const lastIssuedByUs = current.issuer == selfPubkey; + if (this.pullingFinishedPromise && !this.pullingFinishedPromise.isFulfilled()) { + this.logger.warn('Waiting for the end of pulling...'); + yield this.pullingFinishedPromise; + this.logger.warn('Pulling done. Continue proof-of-work loop.'); + } + if (lastIssuedByUs && !this.promiseOfWaitingBetween2BlocksOfOurs) { + this.promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => setTimeout(resolve, theConf.powDelay)); + this.logger.warn('Waiting ' + theConf.powDelay + 'ms before starting to compute next block...'); + } + else { + // We have waited enough + this.promiseOfWaitingBetween2BlocksOfOurs = null; + // But under some conditions, we can make one + doProof = true; + } + } + catch (e) { + this.logger.warn(e); + } + if (doProof) { + /******************* + * COMPUTING A BLOCK + ******************/ + yield Promise.race([ + // We still listen at eventual blockchain change + (() => __awaiter(this, void 0, void 0, function* () { + // If the blockchain changes + yield new Promise((resolve) => this.blockchainChangedResolver = resolve); + // Then cancel the generation + yield this.prover.cancel(); + }))(), + // The generation + (() => __awaiter(this, void 0, void 0, function* () { + try { + const current = yield this.server.dal.getCurrentBlockOrNull(); + const selfPubkey = this.server.keyPair.publicKey; + const trial2 = yield this.server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); + this.checkTrialIsNotTooHigh(trial2, current, selfPubkey); + this.lastComputedBlock = yield this.generator.makeNextBlock(null, trial2); + try { + const obj = parsers.parseBlock.syncWrite(dos2unix(this.lastComputedBlock.getRawSigned())); + yield this.server.singleWritePromise(obj); + } + catch (err) { + this.logger.warn('Proof-of-work self-submission: %s', err.message || err); + } + } + catch (e) { + this.logger.warn('The proof-of-work generation was canceled: %s', (e && e.message) || e || 'unkonwn reason'); + } + }))() + ]); + } + else { + /******************* + * OR WAITING PHASE + ******************/ + if (this.promiseOfWaitingBetween2BlocksOfOurs) { + waitingRaces.push(this.promiseOfWaitingBetween2BlocksOfOurs); + } + let raceDone = false; + yield Promise.race(waitingRaces.concat([ + // The blockchain has changed! 
We or someone else found a proof, we must make a gnu one + new Promise((resolve) => this.blockchainChangedResolver = () => { + this.logger.warn('Blockchain changed!'); + resolve(); + }), + // Security: if nothing happens for a while, trigger the whole process again + new Promise((resolve) => setTimeout(() => { + if (!raceDone) { + this.logger.warn('Security trigger: proof-of-work process seems stuck'); + resolve(); + } + }, this.conf.powSecurityRetryDelay)) + ])); + raceDone = true; + } } - } catch (e) { - logger.warn('The proof-of-work generation was canceled: %s', (e && e.message) || e || 'unkonwn reason'); - } - }) - ]); - } else { - - /******************* - * OR WAITING PHASE - ******************/ - if (promiseOfWaitingBetween2BlocksOfOurs) { - waitingRaces.push(promiseOfWaitingBetween2BlocksOfOurs); - } - - let raceDone = false; - - yield Promise.race(waitingRaces.concat([ - - // The blockchain has changed! We or someone else found a proof, we must make a gnu one - new Promise((resolve) => blockchainChangedResolver = () => { - logger.warn('Blockchain changed!'); - resolve(); - }), - - // Security: if nothing happens for a while, trigger the whole process again - new Promise((resolve) => setTimeout(() => { - if (!raceDone) { - logger.warn('Security trigger: proof-of-work process seems stuck'); - resolve(); - } - }, conf.powSecurityRetryDelay)) - ])); - - raceDone = true; - } - } catch (e) { - logger.warn(e); - } - - that.loops++; - // Informative variable - logger.trace('PoW loops = %s', that.loops); + catch (e) { + this.logger.warn(e); + } + this.loops++; + // Informative variable + this.logger.trace('PoW loops = %s', this.loops); + } + }); } - }); - - this.blockchainChanged = (gottenBlock) => co(function*() { - if (server && (!gottenBlock || !lastComputedBlock || gottenBlock.hash !== lastComputedBlock.hash)) { - // Cancel any processing proof - yield prover.cancel(gottenBlock); - // If we were waiting, stop it and process the continuous generation - blockchainChangedResolver && blockchainChangedResolver(); + blockchainChanged(gottenBlock) { + return __awaiter(this, void 0, void 0, function* () { + if (this.server && (!gottenBlock || !this.lastComputedBlock || gottenBlock.hash !== this.lastComputedBlock.hash)) { + // Cancel any processing proof + yield this.prover.cancel(); + // If we were waiting, stop it and process the continuous generation + this.blockchainChangedResolver && this.blockchainChangedResolver(); + } + }); } - }); - - this.stopEveryting = () => co(function*() { - // First: avoid continuing the main loop - continuePromise = new Promise((resolve) => resolveContinuePromise = resolve); - // Second: stop any started proof - yield prover.cancel(); - // If we were waiting, stop it and process the continuous generation - blockchainChangedResolver && blockchainChangedResolver(); - }); - - function checkTrialIsNotTooHigh(trial, current, selfPubkey) { - if (trial > (current.powMin + conf.powMaxHandicap)) { - logger.debug('Trial = %s, powMin = %s, pubkey = %s', trial, current.powMin, selfPubkey.slice(0, 6)); - throw 'Too high difficulty: waiting for other members to write next block'; + stopEveryting() { + return __awaiter(this, void 0, void 0, function* () { + // First: avoid continuing the main loop + this.continuePromise = new Promise((resolve) => this.resolveContinuePromise = resolve); + // Second: stop any started proof + yield this.prover.cancel(); + // If we were waiting, stop it and process the continuous generation + this.blockchainChangedResolver && 
this.blockchainChangedResolver(); + }); + } + checkTrialIsNotTooHigh(trial, current, selfPubkey) { + if (trial > (current.powMin + this.conf.powMaxHandicap)) { + this.logger.debug('Trial = %s, powMin = %s, pubkey = %s', trial, current.powMin, selfPubkey.slice(0, 6)); + throw 'Too high difficulty: waiting for other members to write next block'; + } } - } } - +exports.PermanentProver = PermanentProver; +//# sourceMappingURL=permanentProver.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts new file mode 100644 index 0000000000000000000000000000000000000000..142449125f69dc6867f549c0632ef7ddfacd4086 --- /dev/null +++ b/app/modules/prover/lib/permanentProver.ts @@ -0,0 +1,230 @@ +import {BlockGeneratorWhichProves} from "./blockGenerator" +import {ConfDTO} from "../../../lib/dto/ConfDTO" +import {BlockProver} from "./blockProver" +import {Constants} from "./constants" +import {DBBlock} from "../../../lib/db/DBBlock" + +const querablep = require('querablep'); +const common = require('duniter-common'); +const dos2unix = common.dos2unix; +const parsers = common.parsers; + +export class PermanentProver { + + logger:any + conf:ConfDTO + prover:BlockProver + generator:BlockGeneratorWhichProves + loops:number + + private permanenceStarted = false + + private blockchainChangedResolver:any = null + private promiseOfWaitingBetween2BlocksOfOurs:any = null + private lastComputedBlock:any = null + private resolveContinuePromise:any = null + private continuePromise:any = null + private pullingResolveCallback:any = null + private timeoutPullingCallback:any = null + private pullingFinishedPromise:any = null + private timeoutPulling:any = null + + constructor(private server:any) { + this.logger = server.logger; + this.conf = server.conf; + this.prover = new BlockProver(server) + this.generator = new BlockGeneratorWhichProves(server, this.prover) + + // Promises triggering the prooving lopp + this.resolveContinuePromise = null; + this.continuePromise = new Promise((resolve) => this.resolveContinuePromise = resolve); + this.pullingResolveCallback = null + this.timeoutPullingCallback = null + this.pullingFinishedPromise = querablep(Promise.resolve()); + + this.loops = 0; + + + } + + allowedToStart() { + if (!this.permanenceStarted) { + this.permanenceStarted = true + this.startPermanence() + } + this.resolveContinuePromise(true); + } + + // When we detected a pulling, we stop the PoW loop + pullingDetected() { + if (this.pullingFinishedPromise.isResolved()) { + this.pullingFinishedPromise = querablep(Promise.race([ + // We wait for end of pulling signal + new Promise((res) => this.pullingResolveCallback = res), + // Security: if the end of pulling signal is not emitted after some, we automatically trigger it + new Promise((res) => this.timeoutPullingCallback = () => { + this.logger.warn('Pulling not finished after %s ms, continue PoW', Constants.PULLING_MAX_DURATION); + res(); + }) + ])); + } + // Delay the triggering of pulling timeout + if (this.timeoutPulling) { + clearTimeout(this.timeoutPulling); + } + this.timeoutPulling = setTimeout(this.timeoutPullingCallback, Constants.PULLING_MAX_DURATION); + } + + pullingFinished() { + return this.pullingResolveCallback && this.pullingResolveCallback() + } + + async startPermanence() { + /****************** + * Main proof loop + *****************/ + + while (await this.continuePromise) { + try { + const waitingRaces = []; + + // By default, we do not make a new proof + let doProof = false; 
+ + try { + const selfPubkey = this.server.keyPair.publicKey; + const dal = this.server.dal; + const theConf = this.server.conf; + if (!selfPubkey) { + throw 'No self pubkey found.'; + } + let current; + const isMember = await dal.isMember(selfPubkey); + if (!isMember) { + throw 'Local node is not a member. Waiting to be a member before computing a block.'; + } + current = await dal.getCurrentBlockOrNull(); + if (!current) { + throw 'Waiting for a root block before computing new blocks'; + } + const trial = await this.server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); + this.checkTrialIsNotTooHigh(trial, current, selfPubkey); + const lastIssuedByUs = current.issuer == selfPubkey; + if (this.pullingFinishedPromise && !this.pullingFinishedPromise.isFulfilled()) { + this.logger.warn('Waiting for the end of pulling...'); + await this.pullingFinishedPromise; + this.logger.warn('Pulling done. Continue proof-of-work loop.'); + } + if (lastIssuedByUs && !this.promiseOfWaitingBetween2BlocksOfOurs) { + this.promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) => setTimeout(resolve, theConf.powDelay)); + this.logger.warn('Waiting ' + theConf.powDelay + 'ms before starting to compute next block...'); + } else { + // We have waited enough + this.promiseOfWaitingBetween2BlocksOfOurs = null; + // But under some conditions, we can make one + doProof = true; + } + } catch (e) { + this.logger.warn(e); + } + + if (doProof) { + + /******************* + * COMPUTING A BLOCK + ******************/ + await Promise.race([ + + // We still listen at eventual blockchain change + (async () => { + // If the blockchain changes + await new Promise((resolve) => this.blockchainChangedResolver = resolve); + // Then cancel the generation + await this.prover.cancel(); + })(), + + // The generation + (async () => { + try { + const current = await this.server.dal.getCurrentBlockOrNull(); + const selfPubkey = this.server.keyPair.publicKey; + const trial2 = await this.server.getBcContext().getIssuerPersonalizedDifficulty(selfPubkey); + this.checkTrialIsNotTooHigh(trial2, current, selfPubkey); + this.lastComputedBlock = await this.generator.makeNextBlock(null, trial2); + try { + const obj = parsers.parseBlock.syncWrite(dos2unix(this.lastComputedBlock.getRawSigned())); + await this.server.singleWritePromise(obj); + } catch (err) { + this.logger.warn('Proof-of-work self-submission: %s', err.message || err); + } + } catch (e) { + this.logger.warn('The proof-of-work generation was canceled: %s', (e && e.message) || e || 'unkonwn reason'); + } + })() + ]) + } else { + + /******************* + * OR WAITING PHASE + ******************/ + if (this.promiseOfWaitingBetween2BlocksOfOurs) { + waitingRaces.push(this.promiseOfWaitingBetween2BlocksOfOurs); + } + + let raceDone = false; + + await Promise.race(waitingRaces.concat([ + + // The blockchain has changed! 
We or someone else found a proof, we must make a gnu one + new Promise((resolve) => this.blockchainChangedResolver = () => { + this.logger.warn('Blockchain changed!'); + resolve(); + }), + + // Security: if nothing happens for a while, trigger the whole process again + new Promise((resolve) => setTimeout(() => { + if (!raceDone) { + this.logger.warn('Security trigger: proof-of-work process seems stuck'); + resolve(); + } + }, this.conf.powSecurityRetryDelay)) + ])); + + raceDone = true; + } + } catch (e) { + this.logger.warn(e); + } + + this.loops++; + // Informative variable + this.logger.trace('PoW loops = %s', this.loops); + } + } + + async blockchainChanged(gottenBlock:any) { + if (this.server && (!gottenBlock || !this.lastComputedBlock || gottenBlock.hash !== this.lastComputedBlock.hash)) { + // Cancel any processing proof + await this.prover.cancel() + // If we were waiting, stop it and process the continuous generation + this.blockchainChangedResolver && this.blockchainChangedResolver(); + } + } + + async stopEveryting() { + // First: avoid continuing the main loop + this.continuePromise = new Promise((resolve) => this.resolveContinuePromise = resolve); + // Second: stop any started proof + await this.prover.cancel(); + // If we were waiting, stop it and process the continuous generation + this.blockchainChangedResolver && this.blockchainChangedResolver(); + } + + private checkTrialIsNotTooHigh(trial:number, current:DBBlock, selfPubkey:string) { + if (trial > (current.powMin + this.conf.powMaxHandicap)) { + this.logger.debug('Trial = %s, powMin = %s, pubkey = %s', trial, current.powMin, selfPubkey.slice(0, 6)); + throw 'Too high difficulty: waiting for other members to write next block'; + } + } +} + diff --git a/app/modules/prover/lib/powCluster.js b/app/modules/prover/lib/powCluster.js index c1eaf520f192ab7d6b844b0e9337ee35dc4aaeb3..2499cfe6e5712c1860e651b9aa1d0eb69b24a24e 100644 --- a/app/modules/prover/lib/powCluster.js +++ b/app/modules/prover/lib/powCluster.js @@ -1,230 +1,198 @@ "use strict"; - -const co = require('co'); -const _ = require('underscore') +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const constants_1 = require("./constants"); +const _ = require('underscore'); const nuuid = require('node-uuid'); const moment = require('moment'); -const cluster = require('cluster') -const querablep = require('querablep') -const constants = require('./constants') - -let clusterId = 0 - -if (cluster.isMaster) { - - // Super important for Node.js debugging - const debug = process.execArgv.toString().indexOf('--debug') !== -1; - if(debug) { - //Set an unused port number. - process.execArgv = []; - } - - /** - * Cluster controller, handles the messages between the main program and the PoW cluster. 
- */ - class Master { - +const cluster = require('cluster'); +const querablep = require('querablep'); +let clusterId = 0; +/** + * Cluster controller, handles the messages between the main program and the PoW cluster. + */ +class Master { constructor(nbCores, logger) { - this.clusterId = clusterId++ - this.nbCores = nbCores - this.logger = logger || Master.defaultLogger() - this.currentPromise = null - this.slaves = [] - this.slavesMap = {} - this.conf = {} - this.onInfoMessage = (message) => { - this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`) - } + this.nbCores = nbCores; + this.currentPromise = null; + this.slaves = []; + this.slavesMap = {}; + this.conf = {}; + this.clusterId = clusterId++; + this.logger = logger || Master.defaultLogger(); + this.onInfoMessage = (message) => { + this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`); + }; } - get nbWorkers() { - return this.slaves.length + return this.slaves.length; } - get hasProofPending() { - return !!this.currentPromise + return !!this.currentPromise; } - set onInfoMessage(callback) { - this.onInfoCallback = callback + this.onInfoCallback = callback; } - onWorkerMessage(worker, message) { - // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`) - if (message.pow && message.pow.pow) { - this.onInfoCallback && this.onInfoCallback(message) - } - if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) { - this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`) - this.currentPromise.extras.resolve(message.answer) - // Stop the slaves' current work - this.cancelWork() - } - // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message) - } - - initCluster() { - // Setup master - cluster.setupMaster({ - exec: __filename - }) - - this.slaves = Array.from({ length: this.nbCores }).map((value, index) => { - const worker = cluster.fork() - this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`) - this.slavesMap[worker.id] = { - - // The Node.js worker - worker, - - // Inner identifier - index, - - // Worker ready - online: (function onlinePromise() { - let resolve - const p = querablep(new Promise(res => resolve = res)) - p.extras = { resolve } - return p - })(), - - // Each worker has his own chunk of possible nonces - nonceBeginning: this.nbCores === 1 ? 
0 : (index + 1) * constants.NONCE_RANGE - } - return this.slavesMap[worker.id] - }) - - cluster.on('exit', (worker, code, signal) => { - this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`) - }) - - cluster.on('online', (worker) => { - // We just listen to the workers of this Master - if (this.slavesMap[worker.id]) { - this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`) - this.slavesMap[worker.id].online.extras.resolve() - worker.send({ - command: 'conf', - value: this.conf - }) + // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`) + if (message.pow && message.pow.pow) { + this.onInfoCallback && this.onInfoCallback(message); } - }) - - cluster.on('message', (worker, msg) => { - // Message for this cluster - if (this.slavesMap[worker.id]) { - this.onWorkerMessage(worker, msg) + if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) { + this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`); + this.currentPromise.extras.resolve(message.answer); + // Stop the slaves' current work + this.cancelWork(); } - }) - - this.workersOnline = this.slaves.map(s => s.online) - return this.workersOnline + // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message) + } + initCluster() { + // Setup master + cluster.setupMaster({ + exec: __filename + }); + this.slaves = Array.from({ length: this.nbCores }).map((value, index) => { + const worker = cluster.fork(); + this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`); + this.slavesMap[worker.id] = { + // The Node.js worker + worker, + // Inner identifier + index, + // Worker ready + online: (function onlinePromise() { + let resolve; + const p = querablep(new Promise(res => resolve = res)); + p.extras = { resolve }; + return p; + })(), + // Each worker has his own chunk of possible nonces + nonceBeginning: this.nbCores === 1 ? 
0 : (index + 1) * constants_1.Constants.NONCE_RANGE + }; + return this.slavesMap[worker.id]; + }); + cluster.on('exit', (worker, code, signal) => { + this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`); + }); + cluster.on('online', (worker) => { + // We just listen to the workers of this Master + if (this.slavesMap[worker.id]) { + this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`); + this.slavesMap[worker.id].online.extras.resolve(); + worker.send({ + command: 'conf', + value: this.conf + }); + } + }); + cluster.on('message', (worker, msg) => { + // Message for this cluster + if (this.slavesMap[worker.id]) { + this.onWorkerMessage(worker, msg); + } + }); + this.workersOnline = this.slaves.map((s) => s.online); + return Promise.all(this.workersOnline); } - changeConf(conf) { - this.logger.info(`Changing conf to: ${JSON.stringify(conf)} on PoW cluster`) - this.conf.cpu = this.conf.cpu || conf.cpu - this.conf.prefix = this.conf.prefix || conf.prefix - this.slaves.forEach(s => { - s.worker.send({ - command: 'conf', - value: this.conf - }) - }) - return Promise.resolve(_.clone(conf)) + this.logger.info(`Changing conf to: ${JSON.stringify(conf)} on PoW cluster`); + this.conf.cpu = this.conf.cpu || conf.cpu; + this.conf.prefix = this.conf.prefix || conf.prefix; + this.slaves.forEach(s => { + s.worker.send({ + command: 'conf', + value: this.conf + }); + }); + return Promise.resolve(_.clone(conf)); } - cancelWork() { - this.logger.info(`Cancelling the work on PoW cluster`) - this.slaves.forEach(s => { - s.worker.send({ - command: 'cancel' - }) - }) - - // Eventually force the end of current promise - if (this.currentPromise && !this.currentPromise.isFulfilled()) { - this.currentPromise.extras.resolve(null) - } - - // Current promise is done - this.currentPromise = null - - return Promise.resolve() + this.logger.info(`Cancelling the work on PoW cluster`); + this.slaves.forEach(s => { + s.worker.send({ + command: 'cancel' + }); + }); + // Eventually force the end of current promise + if (this.currentPromise && !this.currentPromise.isFulfilled()) { + this.currentPromise.extras.resolve(null); + } + // Current promise is done + this.currentPromise = null; + return Promise.resolve(); } - newPromise(uuid) { - let resolve - const p = querablep(new Promise(res => resolve = res)) - p.extras = { resolve, uuid } - return p + let resolve; + const p = querablep(new Promise(res => resolve = res)); + p.extras = { resolve, uuid }; + return p; } - proveByWorkers(stuff) { - - // Eventually spawn the workers - if (this.slaves.length === 0) { - this.initCluster() - } - - // Register the new proof uuid - const uuid = nuuid.v4() - this.currentPromise = this.newPromise(uuid) - - const that = this - - return co(function*() { - yield that.workersOnline - - if (!that.currentPromise) { - that.logger.info(`Proof canceled during workers' initialization`) - return null + // Eventually spawn the workers + if (this.slaves.length === 0) { + this.initCluster(); } - - // Start the salves' job - that.slaves.forEach(s => { - s.worker.send({ - uuid, - command: 'newPoW', - value: { - block: stuff.newPoW.block, - nonceBeginning: s.nonceBeginning, - zeros: stuff.newPoW.zeros, - highMark: stuff.newPoW.highMark, - pair: _.clone(stuff.newPoW.pair), - forcedTime: stuff.newPoW.forcedTime, - turnDuration: stuff.newPoW.turnDuration, - conf: { - medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks, - avgGenTime: stuff.newPoW.conf.avgGenTime, - cpu: stuff.newPoW.conf.cpu, - prefix: 
stuff.newPoW.conf.prefix - } + // Register the new proof uuid + const uuid = nuuid.v4(); + this.currentPromise = this.newPromise(uuid); + return (() => __awaiter(this, void 0, void 0, function* () { + yield Promise.all(this.workersOnline); + if (!this.currentPromise) { + this.logger.info(`Proof canceled during workers' initialization`); + return null; } - }) - }) - - let res = yield that.currentPromise - return res - }) + // Start the salves' job + this.slaves.forEach((s) => { + s.worker.send({ + uuid, + command: 'newPoW', + value: { + block: stuff.newPoW.block, + nonceBeginning: s.nonceBeginning, + zeros: stuff.newPoW.zeros, + highMark: stuff.newPoW.highMark, + pair: _.clone(stuff.newPoW.pair), + forcedTime: stuff.newPoW.forcedTime, + turnDuration: stuff.newPoW.turnDuration, + conf: { + medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks, + avgGenTime: stuff.newPoW.conf.avgGenTime, + cpu: stuff.newPoW.conf.cpu, + prefix: stuff.newPoW.conf.prefix + } + } + }); + }); + return yield this.currentPromise; + }))(); } - static defaultLogger() { - return { - info: (message) => {} - } + return { + info: (message) => { } + }; + } +} +exports.Master = Master; +if (cluster.isMaster) { + // Super important for Node.js debugging + const debug = process.execArgv.toString().indexOf('--debug') !== -1; + if (debug) { + //Set an unused port number. + process.execArgv = []; } - } - - module.exports = (nbCores, logger) => new Master(nbCores, logger) - -} else { - - process.on("SIGTERM", function() { - console.log(`SIGTERM received, closing worker ${process.pid}`); - process.exit(0) - }); - - require('./proof') } - +else { + process.on("SIGTERM", function () { + console.log(`SIGTERM received, closing worker ${process.pid}`); + process.exit(0); + }); + require('./proof'); +} +//# sourceMappingURL=powCluster.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/powCluster.ts b/app/modules/prover/lib/powCluster.ts new file mode 100644 index 0000000000000000000000000000000000000000..057cb48bd9c175282d5ef509c3b2fcab7c61b4bd --- /dev/null +++ b/app/modules/prover/lib/powCluster.ts @@ -0,0 +1,227 @@ +import {ConfDTO} from "../../../lib/dto/ConfDTO" +import {Constants} from "./constants" + +const _ = require('underscore') +const nuuid = require('node-uuid'); +const moment = require('moment'); +const cluster = require('cluster') +const querablep = require('querablep') + +let clusterId = 0 + +/** + * Cluster controller, handles the messages between the main program and the PoW cluster. 
+ */ +export class Master { + + clusterId:number + currentPromise:any|null = null + slaves:any[] = [] + slavesMap:any = {} + conf:any = {} + logger:any + onInfoCallback:any + workersOnline:Promise<any>[] + + constructor(private nbCores:number, logger:any) { + this.clusterId = clusterId++ + this.logger = logger || Master.defaultLogger() + this.onInfoMessage = (message:any) => { + this.logger.info(`${message.pow.pow} nonce = ${message.pow.block.nonce}`) + } + } + + get nbWorkers() { + return this.slaves.length + } + + get hasProofPending() { + return !!this.currentPromise + } + + set onInfoMessage(callback:any) { + this.onInfoCallback = callback + } + + onWorkerMessage(worker:any, message:any) { + // this.logger.info(`worker#${this.slavesMap[worker.id].index} sent message:${message}`) + if (message.pow && message.pow.pow) { + this.onInfoCallback && this.onInfoCallback(message) + } + if (this.currentPromise && message.uuid === this.currentPromise.extras.uuid && !this.currentPromise.isResolved() && message.answer) { + this.logger.info(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index} HAS FOUND A PROOF #${message.answer.pow.pow}`) + this.currentPromise.extras.resolve(message.answer) + // Stop the slaves' current work + this.cancelWork() + } + // this.logger.debug(`ENGINE c#${this.clusterId}#${this.slavesMap[worker.id].index}:`, message) + } + + initCluster() { + // Setup master + cluster.setupMaster({ + exec: __filename + }) + + this.slaves = Array.from({ length: this.nbCores }).map((value, index) => { + const worker = cluster.fork() + this.logger.info(`Creating worker c#${this.clusterId}#w#${worker.id}`) + this.slavesMap[worker.id] = { + + // The Node.js worker + worker, + + // Inner identifier + index, + + // Worker ready + online: (function onlinePromise() { + let resolve + const p = querablep(new Promise(res => resolve = res)) + p.extras = { resolve } + return p + })(), + + // Each worker has his own chunk of possible nonces + nonceBeginning: this.nbCores === 1 ? 
0 : (index + 1) * Constants.NONCE_RANGE + } + return this.slavesMap[worker.id] + }) + + cluster.on('exit', (worker:any, code:any, signal:any) => { + this.logger.info(`worker ${worker.process.pid} died with code ${code} and signal ${signal}`) + }) + + cluster.on('online', (worker:any) => { + // We just listen to the workers of this Master + if (this.slavesMap[worker.id]) { + this.logger.info(`[online] worker c#${this.clusterId}#w#${worker.id}`) + this.slavesMap[worker.id].online.extras.resolve() + worker.send({ + command: 'conf', + value: this.conf + }) + } + }) + + cluster.on('message', (worker:any, msg:any) => { + // Message for this cluster + if (this.slavesMap[worker.id]) { + this.onWorkerMessage(worker, msg) + } + }) + + this.workersOnline = this.slaves.map((s:any) => s.online) + return Promise.all(this.workersOnline) + } + + changeConf(conf:ConfDTO) { + this.logger.info(`Changing conf to: ${JSON.stringify(conf)} on PoW cluster`) + this.conf.cpu = this.conf.cpu || conf.cpu + this.conf.prefix = this.conf.prefix || conf.prefix + this.slaves.forEach(s => { + s.worker.send({ + command: 'conf', + value: this.conf + }) + }) + return Promise.resolve(_.clone(conf)) + } + + cancelWork() { + this.logger.info(`Cancelling the work on PoW cluster`) + this.slaves.forEach(s => { + s.worker.send({ + command: 'cancel' + }) + }) + + // Eventually force the end of current promise + if (this.currentPromise && !this.currentPromise.isFulfilled()) { + this.currentPromise.extras.resolve(null) + } + + // Current promise is done + this.currentPromise = null + + return Promise.resolve() + } + + newPromise(uuid:string) { + let resolve + const p = querablep(new Promise(res => resolve = res)) + p.extras = { resolve, uuid } + return p + } + + proveByWorkers(stuff:any) { + + // Eventually spawn the workers + if (this.slaves.length === 0) { + this.initCluster() + } + + // Register the new proof uuid + const uuid = nuuid.v4() + this.currentPromise = this.newPromise(uuid) + + return (async () => { + await Promise.all(this.workersOnline) + + if (!this.currentPromise) { + this.logger.info(`Proof canceled during workers' initialization`) + return null + } + + // Start the salves' job + this.slaves.forEach((s:any) => { + s.worker.send({ + uuid, + command: 'newPoW', + value: { + block: stuff.newPoW.block, + nonceBeginning: s.nonceBeginning, + zeros: stuff.newPoW.zeros, + highMark: stuff.newPoW.highMark, + pair: _.clone(stuff.newPoW.pair), + forcedTime: stuff.newPoW.forcedTime, + turnDuration: stuff.newPoW.turnDuration, + conf: { + medianTimeBlocks: stuff.newPoW.conf.medianTimeBlocks, + avgGenTime: stuff.newPoW.conf.avgGenTime, + cpu: stuff.newPoW.conf.cpu, + prefix: stuff.newPoW.conf.prefix + } + } + }) + }) + + return await this.currentPromise + })() + } + + static defaultLogger() { + return { + info: (message:any) => {} + } + } +} + +if (cluster.isMaster) { + + // Super important for Node.js debugging + const debug = process.execArgv.toString().indexOf('--debug') !== -1; + if(debug) { + //Set an unused port number. 
+ process.execArgv = []; + } + +} else { + + process.on("SIGTERM", function() { + console.log(`SIGTERM received, closing worker ${process.pid}`); + process.exit(0) + }); + + require('./proof') +} diff --git a/app/modules/prover/lib/proof.js b/app/modules/prover/lib/proof.js index 2046935080e58c4b558c804f05af29f57b7daa1a..9e9a2df658ddc53742b8745f62787b5b369093e0 100644 --- a/app/modules/prover/lib/proof.js +++ b/app/modules/prover/lib/proof.js @@ -1,293 +1,258 @@ "use strict"; -const co = require('co'); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const local_rules_1 = require("../../../lib/rules/local_rules"); +const common_1 = require("../../../lib/common"); +const constants_1 = require("./constants"); const moment = require('moment'); -const hashf = require('duniter-common').hashf; const dos2unix = require('duniter-common').dos2unix; const querablep = require('querablep'); -const constants = require('./constants'); const keyring = require('duniter-common').keyring; const rawer = require('duniter-common').rawer; -const LOCAL_HELPERS = require('../../../lib/rules/local_rules').LOCAL_RULES_HELPERS - const PAUSES_PER_TURN = 5; - // This value can be changed let TURN_DURATION_IN_MILLISEC = 100; - let computing = querablep(Promise.resolve(null)); let askedStop = false; - // By default, we do not prefix the PoW by any number let prefix = 0; - let signatureFunc, lastSecret, currentCPU = 1; - process.on('uncaughtException', (err) => { - console.error(err.stack || Error(err)); - process.send({error: err}); -}); - -process.on('message', (message) => co(function*() { - - switch (message.command) { - - case 'newPoW': - co(function*() { - askedStop = true - - // Very important: do not yield if the computation is already done, to keep the lock on JS engine - if (!computing.isFulfilled()) { - yield computing; - } - - const res = yield beginNewProofOfWork(message.value); - answer(message, res); - }); - break; - - case 'cancel': - if (!computing.isFulfilled()) { - askedStop = true; - } - break; - - case 'conf': - if (message.value.cpu !== undefined) { - currentCPU = message.value.cpu - } - if (message.value.prefix !== undefined) { - prefix = message.value.prefix - } - answer(message, { currentCPU, prefix }); - break; - } - -})); - -function beginNewProofOfWork(stuff) { - askedStop = false; - computing = querablep(co(function*() { - - /***************** - * PREPARE POW STUFF - ****************/ - - let nonce = 0; - const conf = stuff.conf; - const block = stuff.block; - const nonceBeginning = stuff.nonceBeginning; - const nbZeros = stuff.zeros; - const pair = stuff.pair; - const forcedTime = stuff.forcedTime; - currentCPU = conf.cpu || constants.DEFAULT_CPU; - prefix = parseInt(conf.prefix || prefix) * 10 * constants.NONCE_RANGE; - const highMark = stuff.highMark; - const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC - let sigFunc = null; - if (signatureFunc && lastSecret === pair.sec) { - sigFunc = 
signatureFunc; + console.error(err.stack || Error(err)); + if (process.send) { + process.send({ error: err }); } else { - lastSecret = pair.sec; - sigFunc = keyring.Key(pair.pub, pair.sec).signSync; + throw Error('process.send() is not defined'); } - signatureFunc = sigFunc; - let pow = "", sig = "", raw = ""; - - /***************** - * GO! - ****************/ - - let testsCount = 0; - let found = false; - let score = 0; - let turn = 0; - - while (!found && !askedStop) { - - /***************** - * A TURN - ****************/ - - yield Promise.race([ - - // I. Stop the turn if it exceeds `turnDuration` ms - countDown(turnDuration), - - // II. Process the turn's PoW - co(function*() { - - /***************** - * A TURN OF POW ~= 100ms by default - * -------------------- - * - * The concept of "turn" is required to limit the CPU usage. - * We need a time reference to have the speed = nb tests / period of time. - * Here we have: - * - * - speed = testsCount / turn - * - * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the - * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set. - ****************/ - - // Prove - let i = 0; - const thisTurn = turn; - const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn - // We limit the number of tests according to CPU usage - const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000 - - // Time is updated regularly during the proof - block.time = getBlockTime(block, conf, forcedTime) - if (block.number === 0) { - block.medianTime = block.time - } - block.inner_hash = getBlockInnerHash(block); - - /***************** - * Iterations of a turn - ****************/ - - while(!found && i < testsPerRound && thisTurn === turn && !askedStop) { - - // Nonce change (what makes the PoW change if the time field remains the same) - nonce++ - - /***************** - * A PROOF OF WORK - ****************/ - - // The final nonce is composed of 3 parts - block.nonce = prefix + nonceBeginning + nonce - raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n") - sig = dos2unix(sigFunc(raw)) - pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase() - - /***************** - * Check the POW result - ****************/ - - let j = 0, charOK = true; - while (j < nbZeros && charOK) { - charOK = pow[j] === '0'; - j++; - } - if (charOK) { - found = pow[nbZeros].match(new RegExp('[0-' + highMark + ']')); - } - if (!found && nbZeros > 0 && j - 1 >= constants.POW_MINIMAL_TO_SHOW) { - pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }}); +}); +process.on('message', (message) => __awaiter(this, void 0, void 0, function* () { + switch (message.command) { + case 'newPoW': + (() => __awaiter(this, void 0, void 0, function* () { + askedStop = true; + // Very important: do not await if the computation is already done, to keep the lock on JS engine + if (!computing.isFulfilled()) { + yield computing; + } + const res = yield beginNewProofOfWork(message.value); + answer(message, res); + }))(); + break; + case 'cancel': + if (!computing.isFulfilled()) { + askedStop = true; } - - /***************** - * - Update local vars - * - Allow to receive stop signal - ****************/ - - if (!found && !askedStop) { - i++; - testsCount++; - if (i % pausePeriod === 0) { - yield countDown(0); // Very low pause, just the time to process eventual end of the turn - } + break; + case 'conf': + if 
(message.value.cpu !== undefined) { + currentCPU = message.value.cpu; } - } - - /***************** - * Check the POW result - ****************/ - if (!found) { - - // CPU speed recording - if (turn > 0 && !score) { - score = testsCount; + if (message.value.prefix !== undefined) { + prefix = message.value.prefix; } - + answer(message, { currentCPU, prefix }); + break; + } +})); +function beginNewProofOfWork(stuff) { + askedStop = false; + computing = querablep((() => __awaiter(this, void 0, void 0, function* () { + /***************** + * PREPARE POW STUFF + ****************/ + let nonce = 0; + const conf = stuff.conf; + const block = stuff.block; + const nonceBeginning = stuff.nonceBeginning; + const nbZeros = stuff.zeros; + const pair = stuff.pair; + const forcedTime = stuff.forcedTime; + currentCPU = conf.cpu || constants_1.Constants.DEFAULT_CPU; + prefix = parseInt(conf.prefix || prefix) * 10 * constants_1.Constants.NONCE_RANGE; + const highMark = stuff.highMark; + const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC; + let sigFunc = null; + if (signatureFunc && lastSecret === pair.sec) { + sigFunc = signatureFunc; + } + else { + lastSecret = pair.sec; + sigFunc = keyring.Key(pair.pub, pair.sec).signSync; + } + signatureFunc = sigFunc; + let pow = "", sig = "", raw = ""; + /***************** + * GO! + ****************/ + let testsCount = 0; + let found = false; + let score = 0; + let turn = 0; + while (!found && !askedStop) { /***************** - * UNLOAD CPU CHARGE + * A TURN ****************/ - // We wait for a maximum time of `turnDuration`. - // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script - // just does nothing: this gives of a bit of breath to the CPU. Tthe amount of "breath" depends on the "cpu" - // parameter. - yield countDown(turnDuration); - } - }) - ]); - - // Next turn - turn++ - } - - /***************** - * POW IS OVER - * ----------- - * - * We either have found a valid POW or a stop event has been detected. - ****************/ - - if (askedStop) { - - // PoW stopped - askedStop = false; - return null - - } else { - - // PoW success - block.hash = pow - block.signature = sig - return { - pow: { - block: block, - testsCount: testsCount, - pow: pow + yield Promise.race([ + // I. Stop the turn if it exceeds `turnDuration` ms + countDown(turnDuration), + // II. Process the turn's PoW + (() => __awaiter(this, void 0, void 0, function* () { + /***************** + * A TURN OF POW ~= 100ms by default + * -------------------- + * + * The concept of "turn" is required to limit the CPU usage. + * We need a time reference to have the speed = nb tests / period of time. + * Here we have: + * + * - speed = testsCount / turn + * + * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the + * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set. + ****************/ + // Prove + let i = 0; + const thisTurn = turn; + const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn + // We limit the number of tests according to CPU usage + const testsPerRound = score ? 
Math.floor(score * currentCPU) : 1000 * 1000 * 1000; + // Time is updated regularly during the proof + block.time = getBlockTime(block, conf, forcedTime); + if (block.number === 0) { + block.medianTime = block.time; + } + block.inner_hash = getBlockInnerHash(block); + /***************** + * Iterations of a turn + ****************/ + while (!found && i < testsPerRound && thisTurn === turn && !askedStop) { + // Nonce change (what makes the PoW change if the time field remains the same) + nonce++; + /***************** + * A PROOF OF WORK + ****************/ + // The final nonce is composed of 3 parts + block.nonce = prefix + nonceBeginning + nonce; + raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n"); + sig = dos2unix(sigFunc(raw)); + pow = common_1.hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase(); + /***************** + * Check the POW result + ****************/ + let j = 0, charOK = true; + while (j < nbZeros && charOK) { + charOK = pow[j] === '0'; + j++; + } + if (charOK) { + found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']'))); + } + if (!found && nbZeros > 0 && j - 1 >= constants_1.Constants.POW_MINIMAL_TO_SHOW) { + pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros } }); + } + /***************** + * - Update local vars + * - Allow to receive stop signal + ****************/ + if (!found && !askedStop) { + i++; + testsCount++; + if (i % pausePeriod === 0) { + yield countDown(0); // Very low pause, just the time to process eventual end of the turn + } + } + } + /***************** + * Check the POW result + ****************/ + if (!found) { + // CPU speed recording + if (turn > 0 && !score) { + score = testsCount; + } + /***************** + * UNLOAD CPU CHARGE + ****************/ + // We wait for a maximum time of `turnDuration`. + // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script + // just does nothing: this gives a bit of breath to the CPU. The amount of "breath" depends on the "cpu" + // parameter. + yield countDown(turnDuration); + } + }))() + ]); + // Next turn + turn++; } + /***************** + * POW IS OVER + * ----------- + * + * We either have found a valid POW or a stop event has been detected. + ****************/ + if (askedStop) { + // PoW stopped + askedStop = false; + return null; + } + else { + // PoW success + block.hash = pow; + block.signature = sig; + return { + pow: { + block: block, + testsCount: testsCount, + pow: pow + } + }; + } + }))()); + return computing; } - function countDown(duration) { - return new Promise((resolve) => setTimeout(resolve, duration)); + return new Promise((resolve) => setTimeout(resolve, duration)); } - function getBlockInnerHash(block) { - const raw = rawer.getBlockInnerPart(block); - return hash(raw); + const raw = rawer.getBlockInnerPart(block); + return common_1.hashf(raw); } - -function hash(str) { - return hashf(str).toUpperCase(); -} - -function getBlockTime (block, conf, forcedTime) { - if (forcedTime) { - return forcedTime; - } - const now = moment.utc().unix(); - const maxAcceleration = LOCAL_HELPERS.maxAcceleration(conf); - const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0; - const medianTime = block.medianTime; - const upperBound = block.number === 0 ?
medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset); - return Math.max(medianTime, upperBound); +function getBlockTime(block, conf, forcedTime) { + if (forcedTime) { + return forcedTime; + } + const now = moment.utc().unix(); + const maxAcceleration = local_rules_1.LOCAL_RULES_HELPERS.maxAcceleration(conf); + const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0; + const medianTime = block.medianTime; + const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset); + return Math.max(medianTime, upperBound); } - function answer(message, theAnswer) { - return pSend({ - uuid: message.uuid, - answer: theAnswer - }) + return pSend({ + uuid: message.uuid, + answer: theAnswer + }); } - function pSend(stuff) { - return new Promise(function (resolve, reject) { - process.send(stuff, function (error) { - !error && resolve(); - error && reject(); + return new Promise(function (resolve, reject) { + if (process.send) { + process.send(stuff, function (error) { + !error && resolve(); + error && reject(); + }); + } + else { + reject('process.send() is not defined'); + } }); - }); } +//# sourceMappingURL=proof.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts new file mode 100644 index 0000000000000000000000000000000000000000..3bd033ad5db1a7cb2864c790555781d761124e4f --- /dev/null +++ b/app/modules/prover/lib/proof.ts @@ -0,0 +1,298 @@ +import {LOCAL_RULES_HELPERS} from "../../../lib/rules/local_rules" +import {hashf} from "../../../lib/common" +import {DBBlock} from "../../../lib/db/DBBlock" +import {ConfDTO} from "../../../lib/dto/ConfDTO" +import {Constants} from "./constants" + +const moment = require('moment'); +const dos2unix = require('duniter-common').dos2unix; +const querablep = require('querablep'); +const keyring = require('duniter-common').keyring; +const rawer = require('duniter-common').rawer; + +const PAUSES_PER_TURN = 5; + +// This value can be changed +let TURN_DURATION_IN_MILLISEC = 100; + +let computing = querablep(Promise.resolve(null)); +let askedStop = false; + +// By default, we do not prefix the PoW by any number +let prefix = 0; + +let signatureFunc:any, lastSecret:any, currentCPU = 1; + +process.on('uncaughtException', (err:any) => { + console.error(err.stack || Error(err)) + if (process.send) { + process.send({error: err}); + } else { + throw Error('process.send() is not defined') + } +}); + +process.on('message', async (message) => { + + switch (message.command) { + + case 'newPoW': + (async () => { + askedStop = true + + // Very important: do not await if the computation is already done, to keep the lock on JS engine + if (!computing.isFulfilled()) { + await computing; + } + + const res = await beginNewProofOfWork(message.value); + answer(message, res); + })() + break; + + case 'cancel': + if (!computing.isFulfilled()) { + askedStop = true; + } + break; + + case 'conf': + if (message.value.cpu !== undefined) { + currentCPU = message.value.cpu + } + if (message.value.prefix !== undefined) { + prefix = message.value.prefix + } + answer(message, { currentCPU, prefix }); + break; + } + +}) + +function beginNewProofOfWork(stuff:any) { + askedStop = false; + computing = querablep((async () => { + + /***************** + * PREPARE POW STUFF + ****************/ + + let nonce = 0; + const conf = stuff.conf; + const block = stuff.block; + const nonceBeginning = stuff.nonceBeginning; + const nbZeros = stuff.zeros; + const pair = 
stuff.pair; + const forcedTime = stuff.forcedTime; + currentCPU = conf.cpu || Constants.DEFAULT_CPU; + prefix = parseInt(conf.prefix || prefix) * 10 * Constants.NONCE_RANGE + const highMark = stuff.highMark; + const turnDuration = stuff.turnDuration || TURN_DURATION_IN_MILLISEC + let sigFunc = null; + if (signatureFunc && lastSecret === pair.sec) { + sigFunc = signatureFunc; + } + else { + lastSecret = pair.sec; + sigFunc = keyring.Key(pair.pub, pair.sec).signSync; + } + signatureFunc = sigFunc; + let pow = "", sig = "", raw = ""; + + /***************** + * GO! + ****************/ + + let testsCount = 0; + let found = false; + let score = 0; + let turn = 0; + + while (!found && !askedStop) { + + /***************** + * A TURN + ****************/ + + await Promise.race([ + + // I. Stop the turn if it exceeds `turnDuration` ms + countDown(turnDuration), + + // II. Process the turn's PoW + (async () => { + + /***************** + * A TURN OF POW ~= 100ms by default + * -------------------- + * + * The concept of "turn" is required to limit the CPU usage. + * We need a time reference to have the speed = nb tests / period of time. + * Here we have: + * + * - speed = testsCount / turn + * + * We have taken 1 turn = 100ms to control the CPU usage after 100ms of PoW. This means that during the + * very first 100ms of the PoW, CPU usage = 100%. Then it becomes controlled to the %CPU set. + ****************/ + + // Prove + let i = 0; + const thisTurn = turn; + const pausePeriod = score ? score / PAUSES_PER_TURN : 10; // number of pauses per turn + // We limit the number of tests according to CPU usage + const testsPerRound = score ? Math.floor(score * currentCPU) : 1000 * 1000 * 1000 + + // Time is updated regularly during the proof + block.time = getBlockTime(block, conf, forcedTime) + if (block.number === 0) { + block.medianTime = block.time + } + block.inner_hash = getBlockInnerHash(block); + + /***************** + * Iterations of a turn + ****************/ + + while(!found && i < testsPerRound && thisTurn === turn && !askedStop) { + + // Nonce change (what makes the PoW change if the time field remains the same) + nonce++ + + /***************** + * A PROOF OF WORK + ****************/ + + // The final nonce is composed of 3 parts + block.nonce = prefix + nonceBeginning + nonce + raw = dos2unix("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n") + sig = dos2unix(sigFunc(raw)) + pow = hashf("InnerHash: " + block.inner_hash + "\nNonce: " + block.nonce + "\n" + sig + "\n").toUpperCase() + + /***************** + * Check the POW result + ****************/ + + let j = 0, charOK = true; + while (j < nbZeros && charOK) { + charOK = pow[j] === '0'; + j++; + } + if (charOK) { + found = !!(pow[nbZeros].match(new RegExp('[0-' + highMark + ']'))) + } + if (!found && nbZeros > 0 && j - 1 >= Constants.POW_MINIMAL_TO_SHOW) { + pSend({ pow: { pow: pow, block: block, nbZeros: nbZeros }}); + } + + /***************** + * - Update local vars + * - Allow to receive stop signal + ****************/ + + if (!found && !askedStop) { + i++; + testsCount++; + if (i % pausePeriod === 0) { + await countDown(0); // Very low pause, just the time to process eventual end of the turn + } + } + } + + /***************** + * Check the POW result + ****************/ + if (!found) { + + // CPU speed recording + if (turn > 0 && !score) { + score = testsCount; + } + + /***************** + * UNLOAD CPU CHARGE + ****************/ + // We wait for a maximum time of `turnDuration`. 
+ // This will trigger the end of the turn by the concurrent race I. During that time, the proof.js script + // just does nothing: this gives a bit of breath to the CPU. The amount of "breath" depends on the "cpu" + // parameter. + await countDown(turnDuration); + } + })() + ]); + + // Next turn + turn++ + } + + /***************** + * POW IS OVER + * ----------- + * + * We either have found a valid POW or a stop event has been detected. + ****************/ + + if (askedStop) { + + // PoW stopped + askedStop = false; + return null + + } else { + + // PoW success + block.hash = pow + block.signature = sig + return { + pow: { + block: block, + testsCount: testsCount, + pow: pow + } + } + } + })()) + + return computing; +} + +function countDown(duration:number) { + return new Promise((resolve) => setTimeout(resolve, duration)); +} + +function getBlockInnerHash(block:DBBlock) { + const raw = rawer.getBlockInnerPart(block); + return hashf(raw) +} + +function getBlockTime (block:DBBlock, conf:ConfDTO, forcedTime:number|null) { + if (forcedTime) { + return forcedTime; + } + const now = moment.utc().unix(); + const maxAcceleration = LOCAL_RULES_HELPERS.maxAcceleration(conf); + const timeoffset = block.number >= conf.medianTimeBlocks ? 0 : conf.rootoffset || 0; + const medianTime = block.medianTime; + const upperBound = block.number === 0 ? medianTime : Math.min(medianTime + maxAcceleration, now - timeoffset); + return Math.max(medianTime, upperBound); +} + +function answer(message:any, theAnswer:any) { + return pSend({ + uuid: message.uuid, + answer: theAnswer + }) +} + +function pSend(stuff:any) { + return new Promise(function (resolve, reject) { + if (process.send) { + process.send(stuff, function (error:any) { + !error && resolve(); + error && reject(); + }) + } else { + reject('process.send() is not defined') + } + }); +} diff --git a/app/modules/prover/lib/prover.js b/app/modules/prover/lib/prover.js index 504ceb1a4fabab712386830fa2ecffc66c23bb6d..3621d83f7cede641414b9f7085742a67b240f5b2 100644 --- a/app/modules/prover/lib/prover.js +++ b/app/modules/prover/lib/prover.js @@ -1,44 +1,52 @@ "use strict"; - -const co = require('co'); -const util = require('util'); -const stream = require('stream'); -const permanentProver = require('./permanentProver'); - -module.exports = Prover; - -function Prover(server) { - - const permaProver = this.permaProver = permanentProver(server); - - stream.Transform.call(this, { objectMode: true }); - - this._write = function (obj, enc, done) { - // Never close the stream - if (obj && obj.membersCount) { - permaProver.blockchainChanged(obj); - } else if (obj.nodeIndexInPeers !== undefined) { - permaProver.prover.changePoWPrefix((obj.nodeIndexInPeers + 1) * 10); // We multiply by 10 to give room to computers with < 100 cores - } else if (obj.cpu !== undefined) { - permaProver.prover.changeCPU(obj.cpu); // We multiply by 10 to give room to computers with < 100 cores - } else if (obj.pulling !== undefined) { - if (obj.pulling === 'processing') { - permaProver.pullingDetected(); - } - else if (obj.pulling === 'finished') { - permaProver.pullingFinished(); - } +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ?
resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const permanentProver_1 = require("./permanentProver"); +const stream = require("stream"); +class Prover extends stream.Transform { + constructor(server) { + super({ objectMode: true }); + this.permaProver = this.permaProver = new permanentProver_1.PermanentProver(server); + } + _write(obj, enc, done) { + // Never close the stream + if (obj && obj.membersCount) { + this.permaProver.blockchainChanged(obj); + } + else if (obj.nodeIndexInPeers !== undefined) { + this.permaProver.prover.changePoWPrefix((obj.nodeIndexInPeers + 1) * 10); // We multiply by 10 to give room to computers with < 100 cores + } + else if (obj.cpu !== undefined) { + this.permaProver.prover.changeCPU(obj.cpu); // We multiply by 10 to give room to computers with < 100 cores + } + else if (obj.pulling !== undefined) { + if (obj.pulling === 'processing') { + this.permaProver.pullingDetected(); + } + else if (obj.pulling === 'finished') { + this.permaProver.pullingFinished(); + } + } + done && done(); + } + ; + startService() { + return __awaiter(this, void 0, void 0, function* () { + this.permaProver.allowedToStart(); + }); + } + stopService() { + return __awaiter(this, void 0, void 0, function* () { + this.permaProver.stopEveryting(); + }); } - done && done(); - }; - - this.startService = () => co(function*() { - permaProver.allowedToStart(); - }); - - this.stopService = () => co(function*() { - permaProver.stopEveryting(); - }); } - -util.inherits(Prover, stream.Transform); +exports.Prover = Prover; +//# sourceMappingURL=prover.js.map \ No newline at end of file diff --git a/app/modules/prover/lib/prover.ts b/app/modules/prover/lib/prover.ts new file mode 100644 index 0000000000000000000000000000000000000000..67856f76ad9d8a8fe676bf5a696cd5a1c1f3a4d1 --- /dev/null +++ b/app/modules/prover/lib/prover.ts @@ -0,0 +1,40 @@ +"use strict"; +import {PermanentProver} from "./permanentProver" +import * as stream from "stream" + +export class Prover extends stream.Transform { + + private permaProver:PermanentProver + + constructor(server:any) { + super({ objectMode: true }) + this.permaProver = this.permaProver = new PermanentProver(server) + } + + _write(obj:any, enc:any, done:any) { + // Never close the stream + if (obj && obj.membersCount) { + this.permaProver.blockchainChanged(obj); + } else if (obj.nodeIndexInPeers !== undefined) { + this.permaProver.prover.changePoWPrefix((obj.nodeIndexInPeers + 1) * 10); // We multiply by 10 to give room to computers with < 100 cores + } else if (obj.cpu !== undefined) { + this.permaProver.prover.changeCPU(obj.cpu); // We multiply by 10 to give room to computers with < 100 cores + } else if (obj.pulling !== undefined) { + if (obj.pulling === 'processing') { + this.permaProver.pullingDetected(); + } + else if (obj.pulling === 'finished') { + this.permaProver.pullingFinished(); + } + } + done && done(); + }; + + async startService() { + this.permaProver.allowedToStart(); + } + + async stopService() { + this.permaProver.stopEveryting(); + } +} diff --git a/index.js b/index.js index 4abf5004000cc016180543d0ed82ac9c618da933..07bc3b3454dc725b19ddd7fb3c9e061986c1c5ca 100644 --- a/index.js +++ b/index.js @@ -22,7 +22,7 @@ const daemonDependency = require('./app/modules/daemon'); const pSignalDependency = require('./app/modules/peersignal'); const 
routerDependency = require('./app/modules/router'); const pluginDependency = require('./app/modules/plugin'); -const proverDependency = require('./app/modules/prover'); +const proverDependency = require('./app/modules/prover').ProverDependency; const MINIMAL_DEPENDENCIES = [ { name: 'duniter-config', required: configDependency } diff --git a/test/fast/prover/pow-1-cluster.js b/test/fast/prover/pow-1-cluster.js index 130dffe2f6650a6262100b9d464f8c64d9f61b92..96d58c12b9f36dd1391c91d3b649bb4f12f25bbc 100644 --- a/test/fast/prover/pow-1-cluster.js +++ b/test/fast/prover/pow-1-cluster.js @@ -2,7 +2,7 @@ const co = require('co') const should = require('should') -const powCluster = require('../../../app/modules/prover/lib/powCluster') +const PowCluster = require('../../../app/modules/prover/lib/powCluster').Master const logger = require('../../../app/lib/logger').NewLogger() let master @@ -10,7 +10,7 @@ let master describe('PoW Cluster', () => { before(() => { - master = powCluster(1, logger) + master = new PowCluster(1, logger) }) it('should have an empty cluster if no PoW was asked', () => { diff --git a/test/fast/prover/pow-2-engine.js b/test/fast/prover/pow-2-engine.js index 2a2d301eebe537f8b0c05962716b9a9bb8d342ff..8238438d02c1866d4bf7acc0d26625f2f3549a32 100644 --- a/test/fast/prover/pow-2-engine.js +++ b/test/fast/prover/pow-2-engine.js @@ -2,18 +2,18 @@ const co = require('co'); const should = require('should'); -const engine = require('../../../app/modules/prover/lib/engine'); +const PowEngine = require('../../../app/modules/prover/lib/engine').PowEngine const logger = require('../../../app/lib/logger').NewLogger() describe('PoW Engine', () => { it('should be configurable', () => co(function*(){ - const e1 = engine({ nbCores: 1 }, logger); + const e1 = new PowEngine({ nbCores: 1 }, logger); (yield e1.setConf({ cpu: 0.2, prefix: '34' })).should.deepEqual({ cpu: 0.2, prefix: '34' }); })); it('should be able to make a proof', () => co(function*(){ - const e1 = engine({ nbCores: 1 }, logger); + const e1 = new PowEngine({ nbCores: 1 }, logger); const block = { number: 35 }; const zeros = 2; const highMark = 'A'; @@ -55,7 +55,7 @@ describe('PoW Engine', () => { })); it('should be able to stop a proof', () => co(function*(){ - const e1 = engine({ nbCores: 1 }, logger); + const e1 = new PowEngine({ nbCores: 1 }, logger); yield e1.forceInit() const block = { number: 26 }; const zeros = 10; // Requires hundreds of thousands of tries probably diff --git a/test/fast/prover/pow-3-prover.js b/test/fast/prover/pow-3-prover.js index 2568abd77efa518fb175326560ccbe5be9b909a5..9b0ce4cce7d8703be946a74ceb1efd6479a81f26 100644 --- a/test/fast/prover/pow-3-prover.js +++ b/test/fast/prover/pow-3-prover.js @@ -4,7 +4,7 @@ const co = require('co') const should = require('should') const moment = require('moment') const winston = require('winston') -const blockProver = require('../../../app/modules/prover/lib/blockProver'); +const BlockProver = require('../../../app/modules/prover/lib/blockProver').BlockProver // Mute logger winston.remove(winston.transports.Console) @@ -14,7 +14,7 @@ describe('PoW block prover', () => { let prover before(() => { - prover = blockProver({ + prover = new BlockProver({ conf: { nbCores: 1, medianTimeBlocks: 20, diff --git a/test/integration/branches_revert2.js b/test/integration/branches_revert2.js index 5c4c93f78877bb78b25051ec4c3f6286510f92b3..6845ac5a587ca08dd106738cff800405f6f3d2d6 100644 --- a/test/integration/branches_revert2.js +++ b/test/integration/branches_revert2.js @@ -9,7 
+9,7 @@ const rp = require('request-promise'); const httpTest = require('./tools/http'); const commit = require('./tools/commit'); -require('../../app/modules/prover/lib/constants').CORES_MAXIMUM_USE_IN_PARALLEL = 1 +require('../../app/modules/prover/lib/constants').Constants.CORES_MAXIMUM_USE_IN_PARALLEL = 1 require('duniter-bma').duniter.methods.noLimit(); // Disables the HTTP limiter const expectJSON = httpTest.expectJSON; diff --git a/test/integration/http_api.js b/test/integration/http_api.js index 19841dce716cf2b2c92d6809e3963578c145e6b7..84747eaa22e2dff429c88c99e5ad94be68c9bbc3 100644 --- a/test/integration/http_api.js +++ b/test/integration/http_api.js @@ -12,7 +12,7 @@ const constants = require('../../app/lib/constants'); const rp = require('request-promise'); const ws = require('ws'); -require('../../app/modules/prover/lib/constants').CORES_MAXIMUM_USE_IN_PARALLEL = 1 +require('../../app/modules/prover/lib/constants').Constants.CORES_MAXIMUM_USE_IN_PARALLEL = 1 const server = duniter( '/bb11', @@ -60,7 +60,7 @@ describe("HTTP API", function() { function makeBlockAndPost(theServer) { return function() { - return require('../../app/modules/prover').duniter.methods.generateAndProveTheNext(theServer) + return require('../../app/modules/prover').ProverDependency.duniter.methods.generateAndProveTheNext(theServer) .then(postBlock(theServer)); }; } diff --git a/test/integration/identity-expiry.js b/test/integration/identity-expiry.js index ad5ca6e28891c8ce09ca32d369e29295531d6fe4..17fc852da291f28188132e8a510d1ea448dfa5a7 100644 --- a/test/integration/identity-expiry.js +++ b/test/integration/identity-expiry.js @@ -5,7 +5,7 @@ const co = require('co'); const should = require('should'); const duniter = require('../../index'); const bma = require('duniter-bma').duniter.methods.bma; -const prover = require('../../app/modules/prover').duniter.methods; +const prover = require('../../app/modules/prover').ProverDependency.duniter.methods; const user = require('./tools/user'); const constants = require('../../app/lib/constants'); const rp = require('request-promise'); diff --git a/test/integration/identity-kicking.js b/test/integration/identity-kicking.js index 627ea188745d777d044f5e34b845f618383bcca7..9a1e996bb765c2c5b51d0e0cf5dc4d1a0c03b7ff 100644 --- a/test/integration/identity-kicking.js +++ b/test/integration/identity-kicking.js @@ -50,7 +50,7 @@ describe("Identities kicking", function() { const now = 1400000000 yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); - require('../../app/modules/prover').duniter.methods.hookServer(s1); + require('../../app/modules/prover').ProverDependency.duniter.methods.hookServer(s1); yield cat.createIdentity(); yield tac.createIdentity(); yield cat.cert(tac); diff --git a/test/integration/identity-test.js b/test/integration/identity-test.js index aa38506a516c9122402cf935b31a5e2179eb8fa6..bcb51094bad3fa2c65823c1efd28a528b8e6d811 100644 --- a/test/integration/identity-test.js +++ b/test/integration/identity-test.js @@ -54,7 +54,7 @@ describe("Identities collision", function() { return co(function *() { yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); - require('../../app/modules/prover').duniter.methods.hookServer(s1); + require('../../app/modules/prover').ProverDependency.duniter.methods.hookServer(s1); yield cat.createIdentity(); yield tac.createIdentity(); yield toc.createIdentity(); diff --git a/test/integration/proof-of-work.js b/test/integration/proof-of-work.js index 
70ce187f20ef53b74394362eeb3a9e2e1642c2ad..ce5fdb176bd78b0d89ab13cf76114c5cf122ba46 100644 --- a/test/integration/proof-of-work.js +++ b/test/integration/proof-of-work.js @@ -6,7 +6,7 @@ const toolbox = require('./tools/toolbox'); const Block = require('../../app/lib/entity/block'); const constants = require('../../app/lib/constants'); const logger = require('../../app/lib/logger').NewLogger(); -const blockProver = require('../../app/modules/prover').duniter.methods.blockProver; +const BlockProver = require('../../app/modules/prover/lib/blockProver').BlockProver /*** conf.medianTimeBlocks @@ -19,7 +19,7 @@ keyring from Key const intermediateProofs = []; const NB_CORES_FOR_COMPUTATION = 1 // For simple tests. Can be changed to test multiple cores. -const prover = blockProver({ +const prover = new BlockProver({ push: (data) => intermediateProofs.push(data), conf: { nbCores: NB_CORES_FOR_COMPUTATION, diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js index 3b471cde1aa885e8fcaf7c5930846a0b4f8196fd..4f1d8bb82b1f098b655a0eb72d3c05178c1aefbf 100644 --- a/test/integration/start_generate_blocks.js +++ b/test/integration/start_generate_blocks.js @@ -73,7 +73,7 @@ describe("Generation", function() { yield server.bma.openConnections(); require('../../app/modules/router').duniter.methods.routeToNetwork(server); yield server.PeeringService.generateSelfPeer(server.conf, 0); - const prover = require('../../app/modules/prover').duniter.methods.prover(server); + const prover = require('../../app/modules/prover').ProverDependency.duniter.methods.prover(server); server.startBlockComputation = () => prover.startService(); server.stopBlockComputation = () => prover.stopService(); } diff --git a/test/integration/tools/commit.js b/test/integration/tools/commit.js index 6b308127f68e84cad5aa063e18595622d703a5c2..95aa8f25a6a56d30039e24677db23e669a4ffa85 100644 --- a/test/integration/tools/commit.js +++ b/test/integration/tools/commit.js @@ -4,6 +4,7 @@ var _ = require('underscore'); var co = require('co'); var rp = require('request-promise'); var logger = require('../../../app/lib/logger').NewLogger('test'); +const BlockProver = require('../../../app/modules/prover/lib/blockProver').BlockProver module.exports = function makeBlockAndPost(theServer, extraProps) { return function(manualValues) { @@ -13,8 +14,8 @@ module.exports = function makeBlockAndPost(theServer, extraProps) { } return co(function *() { if (!theServer._utProver) { - theServer._utProver = require('../../../app/modules/prover').duniter.methods.blockProver(theServer) - theServer._utGenerator = require('../../../app/modules/prover').duniter.methods.blockGenerator(theServer, theServer._utProver) + theServer._utProver = new BlockProver(theServer) + theServer._utGenerator = require('../../../app/modules/prover').ProverDependency.duniter.methods.blockGenerator(theServer, theServer._utProver) } let proven = yield theServer._utGenerator.makeNextBlock(null, null, manualValues) const block = yield postBlock(theServer)(proven); diff --git a/test/integration/tools/node.js b/test/integration/tools/node.js index b753cd2dc16f24ae16141ff75fb8afc1a25e7eb3..b5823cfca8146f44d318c8360a43e02245a2cc81 100644 --- a/test/integration/tools/node.js +++ b/test/integration/tools/node.js @@ -77,9 +77,9 @@ function Node (dbName, options) { block: function(callback){ co(function *() { try { - const block2 = yield require('../../../app/modules/prover').duniter.methods.generateTheNextBlock(that.server, params); + const block2 = yield 
require('../../../app/modules/prover').ProverDependency.duniter.methods.generateTheNextBlock(that.server, params); const trial2 = yield that.server.getBcContext().getIssuerPersonalizedDifficulty(that.server.keyPair.publicKey); - const block = yield require('../../../app/modules/prover').duniter.methods.generateAndProveTheNext(that.server, block2, trial2, params); + const block = yield require('../../../app/modules/prover').ProverDependency.duniter.methods.generateAndProveTheNext(that.server, block2, trial2, params); callback(null, block); } catch (e) { callback(e); diff --git a/test/integration/tools/toolbox.js b/test/integration/tools/toolbox.js index ecc3e6d71ef6014355e9e11ecafc3225a5b6b811..ff23208633ce400554cddb02d455395a7c726099 100644 --- a/test/integration/tools/toolbox.js +++ b/test/integration/tools/toolbox.js @@ -255,7 +255,7 @@ module.exports = { }); server.makeNext = (overrideProps) => co(function*() { - const block = yield require('../../../app/modules/prover').duniter.methods.generateAndProveTheNext(server, null, null, overrideProps || {}); + const block = yield require('../../../app/modules/prover').ProverDependency.duniter.methods.generateAndProveTheNext(server, null, null, overrideProps || {}); return Block.statics.fromJSON(block); }); @@ -301,13 +301,13 @@ module.exports = { server.bma = bmaAPI; require('../../../app/modules/router').duniter.methods.routeToNetwork(server); // Extra: for /wot/requirements URL - require('../../../app/modules/prover').duniter.methods.hookServer(server); + require('../../../app/modules/prover').ProverDependency.duniter.methods.hookServer(server); }); let prover; server.startBlockComputation = () => { if (!prover) { - prover = require('../../../app/modules/prover').duniter.methods.prover(server); + prover = require('../../../app/modules/prover').ProverDependency.duniter.methods.prover(server); server.permaProver = prover.permaProver; server.pipe(prover); }