From 0fbfe814f7e9f18b03314e964a9d3d3d481c014e Mon Sep 17 00:00:00 2001 From: cgeek <cem.moreau@gmail.com> Date: Sat, 19 Aug 2017 11:07:08 +0200 Subject: [PATCH] [fix] #1076 New block & fork resolution --- app/lib/blockchain/DuniterBlockchain.ts | 6 +- app/lib/blockchain/Switcher.ts | 156 ++++--- app/lib/common-libs/constants.ts | 3 + app/lib/computation/BlockchainContext.ts | 16 +- app/lib/constants.ts | 3 +- app/lib/dal/fileDAL.ts | 8 + app/lib/dal/sqliteDAL/BlockDAL.ts | 9 + app/lib/indexer.ts | 7 +- app/lib/other_constants.ts | 4 + app/lib/rules/local_rules.ts | 8 +- app/modules/crawler/lib/crawler.ts | 17 +- app/modules/crawler/lib/sync.ts | 2 +- app/service/BlockchainService.ts | 307 ++++++------- app/service/FIFOService.ts | 7 +- doc/validator-guide.md | 4 + server.ts | 17 +- test/fast/fork-resolution-3-3.ts | 114 +++-- test/integration/branches2.js | 434 ++++++++---------- test/integration/branches2.ts | 247 ++++++++++ test/integration/branches_revert2.js | 1 + test/integration/branches_revert_balance.js | 1 + test/integration/branches_switch.js | 201 ++++---- test/integration/branches_switch.ts | 119 +++++ test/integration/continuous-proof.js | 10 + test/integration/documents-currency.ts | 2 +- test/integration/http_api.js | 15 +- test/integration/identity-kicking-by-certs.js | 17 +- test/integration/membership_chainability.ts | 4 +- test/integration/network-update.js | 1 + test/integration/peer-outdated.js | 1 + test/integration/peerings.js | 24 +- test/integration/peers-same-pubkey.js | 14 +- test/integration/register-fork-blocks.js | 17 +- test/integration/revocation-test.js | 1 + test/integration/single-document-treatment.js | 1 + test/integration/start_generate_blocks.js | 6 +- test/integration/tools/commit.js | 18 +- test/integration/tools/toolbox.ts | 57 ++- test/integration/v0.6-difficulties.js | 10 +- 39 files changed, 1169 insertions(+), 720 deletions(-) create mode 100644 app/lib/other_constants.ts create mode 100644 test/integration/branches2.ts create mode 100644 test/integration/branches_switch.ts diff --git a/app/lib/blockchain/DuniterBlockchain.ts b/app/lib/blockchain/DuniterBlockchain.ts index 1341b1fdd..a7b0fdc5a 100644 --- a/app/lib/blockchain/DuniterBlockchain.ts +++ b/app/lib/blockchain/DuniterBlockchain.ts @@ -228,8 +228,6 @@ export class DuniterBlockchain extends MiscIndexedBlockchain { const dbb = DBBlock.fromBlockDTO(block) this.updateBlocksComputedVars(current, dbb) - // Saves the block (DAL) - await dal.saveBlock(dbb); // --> Update links await dal.updateWotbLinks(indexes.cindex); @@ -244,6 +242,8 @@ export class DuniterBlockchain extends MiscIndexedBlockchain { await this.deleteTransactions(block, dal); await dal.trimSandboxes(block); + // Saves the block (DAL) + await dal.saveBlock(dbb); return block; } @@ -370,6 +370,8 @@ export class DuniterBlockchain extends MiscIndexedBlockchain { // Restore block's transaction as incoming transactions await this.undoDeleteTransactions(block, dal) + + return block } async undoMembersUpdate(blockstamp:string, dal:any) { diff --git a/app/lib/blockchain/Switcher.ts b/app/lib/blockchain/Switcher.ts index ea4514ebd..e5747a671 100644 --- a/app/lib/blockchain/Switcher.ts +++ b/app/lib/blockchain/Switcher.ts @@ -6,68 +6,94 @@ export interface SwitchBlock { medianTime:number } -export interface SwitcherDao { +export interface SwitcherDao<T extends SwitchBlock> { - getCurrent(): SwitchBlock - getPotentials(numberStart:number, timeStart:number): SwitchBlock[] - getBlockchainBlock(number:number, hash:string): SwitchBlock|null - 
getSandboxBlock(number:number, hash:string): SwitchBlock|null - revertTo(number:number): SwitchBlock[] - addBlock(block:SwitchBlock): SwitchBlock + getCurrent(): Promise<T> + getPotentials(numberStart:number, timeStart:number): Promise<T[]> + getBlockchainBlock(number:number, hash:string): Promise<T|null> + getSandboxBlock(number:number, hash:string): Promise<T|null> + revertTo(number:number): Promise<T[]> + addBlock(block:T): Promise<T> } -export class Switcher { +export class Switcher<T extends SwitchBlock> { - constructor(private dao:SwitcherDao, private avgGenTime:number, private forkWindowSize:number) {} + constructor( + private dao:SwitcherDao<T>, + private avgGenTime:number, + private forkWindowSize:number, + private switchOnHeadAdvance:number, + private logger:any = undefined) {} /** * Looks at known blocks in the sandbox and try to follow the longest resulting chain that has at least both 3 blocks of * advance and 3 * avgGenTime of medianTime advancce. - * @returns {SwitchBlock} */ - tryToFork() { - const current = this.dao.getCurrent() - const numberStart = current.number + 3 - const timeStart = current.medianTime + 3 * this.avgGenTime - // Phase 1: find potential chains - const suites = this.findPotentialSuites(current, numberStart, timeStart) - // Phase 2: select the best chain - let longestChain:null|SwitchBlock[] = this.findLongestChain(suites) - // Phase 3: a best exist? - if (longestChain) { - const chainHEAD = longestChain[longestChain.length - 1] - // apply it if it respects the 3-3 rule - if (chainHEAD.number >= numberStart && chainHEAD.medianTime >= timeStart) { - this.switchOnChain(longestChain) + async tryToFork() { + const current = await this.dao.getCurrent() + if (current) { + const numberStart = current.number + this.switchOnHeadAdvance + const timeStart = current.medianTime + this.switchOnHeadAdvance * this.avgGenTime + // Phase 1: find potential chains + const suites = await this.findPotentialSuites(current, numberStart, timeStart) + if (suites.length) { + this.logger && this.logger.info("Fork resolution: %s potential suite(s) found...", suites.length) + } + // Phase 2: select the best chain + let longestChain:null|T[] = await this.findLongestChain(current, suites) + // Phase 3: a best exist? + if (longestChain) { + const chainHEAD = longestChain[longestChain.length - 1] + // apply it if it respects the 3-3 rule + if (chainHEAD.number >= numberStart && chainHEAD.medianTime >= timeStart) { + await this.switchOnChain(longestChain) + return await this.dao.getCurrent() + } } } - return this.dao.getCurrent() + return null + } + + /** + * Find all the suites' HEAD that we could potentially fork on, in the current fork window. + * @param current + */ + async findPotentialSuitesHeads(current:T) { + const numberStart = current.number - this.forkWindowSize + const timeStart = current.medianTime - this.forkWindowSize * this.avgGenTime + const suites = await this.findPotentialSuites(current, numberStart, timeStart) + return suites.map(suite => suite[suite.length - 1]) } /** * Looks at the potential blocks that could form fork chains in the sandbox, and sort them to have a maximum of unique * chains. * @param {SwitchBlock} current HEAD of local blockchain. - * @param {number} numberStart The minimum number of a fork block. - * @param {number} timeStart The minimum medianTime of a fork block. + * @param numberStart The minimum number of a fork block. + * @param timeStart The minimum medianTime of a fork block. * @returns {SwitchBlock[][]} The suites found. 
*/
-  private findPotentialSuites(current:SwitchBlock, numberStart:number, timeStart:number) {
-    const suites:SwitchBlock[][] = []
-    const potentials:SwitchBlock[] = this.dao.getPotentials(numberStart, timeStart)
-    const invalids: { [hash:string]: SwitchBlock } = {}
+  private async findPotentialSuites(current:T, numberStart:number, timeStart:number) {
+    const suites:T[][] = []
+    const potentials:T[] = await this.dao.getPotentials(numberStart, timeStart)
+    const invalids: { [hash:string]: T } = {}
+    if (potentials.length) {
+      this.logger && this.logger.info("Fork resolution: %s potential block(s) found...", potentials.length)
+    }
     for (const candidate of potentials) {
-      const suite:SwitchBlock[] = []
+      const suite:T[] = []
       // Do not process the block if it is already known as invalid (has no fork point with current blockchain or misses
       // some blocks) or is already contained in a valid chain.
       if (!invalids[candidate.hash] && !Switcher.suitesContains(suites, candidate)) {
         // Tries to build up a full chain that is linked to current chain by a fork point.
-        let previous:SwitchBlock|null = candidate, commonRootFound = false
-        while (previous && previous.number > current.number - this.forkWindowSize) {
+        let previous:T|null = candidate, commonRootFound = false
+        let previousNumber:number = previous.number - 1
+        let previousHash:string = previous.previousHash
+        while (previous && previous.number > candidate.number - this.forkWindowSize) {
           suite.push(previous)
-          const previousNumber = previous.number - 1
-          const previousHash = previous.previousHash
-          previous = this.dao.getBlockchainBlock(previousNumber, previousHash)
+          previousNumber = previous.number - 1
+          previousHash = previous.previousHash
+          previous = await this.dao.getBlockchainBlock(previousNumber, previousHash)
           if (previous) {
             // Stop the loop: common block has been found
             previous = null
@@ -75,13 +101,19 @@ export class Switcher {
             commonRootFound = true
           } else {
             // Have a look in sandboxes
-            previous = this.dao.getSandboxBlock(previousNumber, previousHash)
+            previous = await this.dao.getSandboxBlock(previousNumber, previousHash)
           }
         }
         // Forget about invalid blocks
         if (!commonRootFound) {
-          for (const b of suite) {
-            invalids[b.hash] = b
+          if (!previous) {
+            this.logger && this.logger.debug("Suite -> %s-%s missing block#%s-%s", candidate.number, candidate.hash.substr(0, 8), previousNumber, previousHash.substr(0, 8))
+            for (const b of suite) {
+              invalids[b.hash] = b
+            }
+          } else {
+            // The chain would be too long; we could not revert it correctly.
+            this.logger && this.logger.debug("Suite #%s-%s -> %s-%s out of fork window", previousNumber, previousHash.substr(0, 8), candidate.number, candidate.hash.substr(0, 8))
           }
         }
       }
@@ -92,37 +124,52 @@
   /**
    * Find the longest chain among a suite of chains. Tests the validity of each block against the current blockchain.
    * The length of a chain is the number of blocks successfuly added to current blockchain.
+ * @param {SwitchBlock} current * @param {SwitchBlock[][]} suites * @returns {SwitchBlock[]} */ - private findLongestChain(suites:SwitchBlock[][]) { - let longestChain:null|SwitchBlock[] = null + private async findLongestChain(current:T, suites:T[][]) { + if (suites.length) { + this.logger && this.logger.info("Fork resolution: HEAD = block#%s", current.number) + } + let longestChain:null|T[] = null + let j = 0 for (const s of suites) { + j++ s.reverse() // Revert current blockchain to fork point - const reverted = this.dao.revertTo(s[0].number - 1) + const forkPoint = s[0].number - 1 + const forkHead = s[s.length - 1] + this.logger && this.logger.info("Fork resolution: suite %s/%s (-> #%s-%s) revert to fork point block#%s", j, suites.length, forkHead.number, forkHead.hash.substr(0, 6), forkPoint) + const reverted = await this.dao.revertTo(s[0].number - 1) // Try to add a maximum of blocks - let added = true, i = 0, successfulBlocks:SwitchBlock[] = [] - while (added) { + let added = true, i = 0, successfulBlocks:T[] = [] + while (added && i < s.length) { try { - this.dao.addBlock(s[i]) + await this.dao.addBlock(s[i]) + this.logger && this.logger.info("Fork resolution: suite %s/%s added block#%s-%s", j, suites.length, s[i].number, s[i].hash) successfulBlocks.push(s[i]) } catch (e) { + this.logger && this.logger.info("Fork resolution: suite %s/%s REFUSED block#%s: %s", j, suites.length, s[0].number + i, e && e.message) added = false } i++ } // Pop the successfuly added blocks if (successfulBlocks.length) { - this.dao.revertTo(this.dao.getCurrent().number - successfulBlocks.length) + const addedToHeadLevel = successfulBlocks[successfulBlocks.length-1].number - current.number + this.logger && this.logger.info("Fork resolution: suite %s/%s reached HEAD + %s. Now rolling back.", j, suites.length, addedToHeadLevel) + await this.dao.revertTo(forkPoint) } // Push back the initial blocks that were temporarily reverted reverted.reverse() for (const b of reverted) { - this.dao.addBlock(b) + await this.dao.addBlock(b) } - // Remember the chain if it is the longest among tested chains - if ((!longestChain && successfulBlocks.length > 0) || (longestChain && longestChain.length < successfulBlocks.length)) { + // Remember the chain if it is the longest (highest HEAD) among tested chains + const longestHEAD = longestChain && longestChain[longestChain.length - 1] + const successHEAD = successfulBlocks && successfulBlocks[successfulBlocks.length - 1] + if ((!longestHEAD && successHEAD) || (longestHEAD && successHEAD && longestHEAD.number < successHEAD.number)) { longestChain = successfulBlocks } } @@ -133,10 +180,10 @@ export class Switcher { * Switch current blockchain on another chain, by poping top blocks and replacing them by new ones. * @param {SwitchBlock[]} chain */ - private switchOnChain(chain:SwitchBlock[]) { - this.dao.revertTo(chain[0].number - 1) + private async switchOnChain(chain:T[]) { + await this.dao.revertTo(chain[0].number - 1) for (const b of chain) { - this.dao.addBlock(b) + await this.dao.addBlock(b) } } @@ -144,9 +191,8 @@ export class Switcher { * Checks if a suite of chains contains a particular block in one of its chains. 
* @param {SwitchBlock[][]} suites
    * @param {SwitchBlock} block
-   * @returns {boolean}
    */
-  static suitesContains(suites:SwitchBlock[][], block:SwitchBlock) {
+  static suitesContains<T extends SwitchBlock>(suites:T[][], block:T) {
     for (const suite of suites) {
       for (const b of suite) {
         if (b.number === block.number && b.hash === block.hash) {
diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts
index 2b982e6dd..28bbae7e1 100644
--- a/app/lib/common-libs/constants.ts
+++ b/app/lib/common-libs/constants.ts
@@ -138,6 +138,9 @@ export const CommonConstants = {
     TOO_OLD_MEMBERSHIP: { httpCode: 400, uerr: { ucode: 2029, message: "Too old membership." }},
     MAXIMUM_LEN_OF_OUTPUT: { httpCode: 400, uerr: { ucode: 2032, message: 'A transaction output has a maximum size of ' + MAXIMUM_LEN_OF_OUTPUT + ' characters' }},
     MAXIMUM_LEN_OF_UNLOCK: { httpCode: 400, uerr: { ucode: 2033, message: 'A transaction unlock has a maximum size of ' + MAXIMUM_LEN_OF_UNLOCK + ' characters' }},
+    WRONG_CURRENCY: { httpCode: 400, uerr: { ucode: 2500, message: 'Wrong currency' }},
+    WRONG_POW: { httpCode: 400, uerr: { ucode: 2501, message: 'Wrong proof-of-work' }},
+    OUT_OF_FORK_WINDOW: { httpCode: 400, uerr: { ucode: 2502, message: 'Out of fork window' }},
     WRONG_SIGNATURE_FOR_CERT: { httpCode: 400, uerr: { ucode: 3000, message: 'Wrong signature for certification' }},
   },
diff --git a/app/lib/computation/BlockchainContext.ts b/app/lib/computation/BlockchainContext.ts
index 43a9a2a4b..35be72791 100644
--- a/app/lib/computation/BlockchainContext.ts
+++ b/app/lib/computation/BlockchainContext.ts
@@ -3,6 +3,7 @@ import {BlockDTO} from "../dto/BlockDTO"
 import {DuniterBlockchain} from "../blockchain/DuniterBlockchain"
 import {QuickSynchronizer} from "./QuickSync"
 import {DBHead} from "../db/DBHead"
+
 const _ = require('underscore');
 const indexer = require('../indexer').Indexer
 const constants = require('../constants');
@@ -100,11 +101,11 @@ export class BlockchainContext {
     this.logger = require('../logger').NewLogger(this.dal.profile);
   }

-  checkBlock(block: BlockDTO, withPoWAndSignature:boolean): Promise<any> {
+  async checkBlock(block: BlockDTO, withPoWAndSignature:boolean): Promise<any> {
     return DuniterBlockchain.checkBlock(block, withPoWAndSignature, this.conf, this.dal)
   }

-  async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null): Promise<any> {
+  private async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null): Promise<any> {
     const block = await this.blockchain.pushTheBlock(obj, index, HEAD, this.conf, this.dal, this.logger)
     this.vHEAD_1 = this.vHEAD = this.HEADrefreshed = null
     return block
@@ -115,11 +116,10 @@ export class BlockchainContext {
     return dbb.toBlockDTO()
   }

-  async revertCurrentBlock(): Promise<any> {
+  async revertCurrentBlock(): Promise<BlockDTO> {
     const head_1 = await this.dal.bindexDAL.head(1);
     this.logger.debug('Reverting block #%s...', head_1.number);
     const res = await this.blockchain.revertBlock(head_1.number, head_1.hash, this.dal)
-    this.logger.debug('Reverted block #%s', head_1.number);
     // Invalidates the head, since it has changed.
await this.refreshHead(); return res; @@ -133,11 +133,15 @@ export class BlockchainContext { throw constants.ERRORS.NO_POTENTIAL_FORK_AS_NEXT; } const block = forks[0]; - const { index, HEAD } = await this.checkBlock(block, constants.WITH_SIGNATURES_AND_POW); - await this.addBlock(block, index, HEAD); + await this.checkAndAddBlock(block) this.logger.debug('Applied block #%s', block.number); } + async checkAndAddBlock(block:BlockDTO) { + const { index, HEAD } = await this.checkBlock(block, constants.WITH_SIGNATURES_AND_POW); + return await this.addBlock(block, index, HEAD); + } + current(): Promise<any> { return this.dal.getCurrentBlockOrNull() } diff --git a/app/lib/constants.ts b/app/lib/constants.ts index 15a743224..a2e7c32c0 100644 --- a/app/lib/constants.ts +++ b/app/lib/constants.ts @@ -1,5 +1,6 @@ "use strict"; import {CommonConstants} from "./common-libs/constants" +import {OtherConstants} from "./other_constants" const UDID2 = "udid2;c;([A-Z-]*);([A-Z-]*);(\\d{4}-\\d{2}-\\d{2});(e\\+\\d{2}\\.\\d{2}(\\+|-)\\d{3}\\.\\d{2});(\\d+)(;?)"; const PUBKEY = CommonConstants.FORMATS.PUBKEY @@ -160,7 +161,7 @@ module.exports = { SAFE_FACTOR: 3, BLOCKS_COLLECT_THRESHOLD: 30, // Blocks to collect from memory and persist - MUTE_LOGS_DURING_UNIT_TESTS: true, + MUTE_LOGS_DURING_UNIT_TESTS: OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS, SANDBOX_SIZE_TRANSACTIONS: 200, SANDBOX_SIZE_IDENTITIES: 5000, diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts index ff7fbc21d..d5bba1510 100644 --- a/app/lib/dal/fileDAL.ts +++ b/app/lib/dal/fileDAL.ts @@ -223,6 +223,10 @@ export class FileDAL { return this.getBlock(0) } + getPotentialRootBlocks() { + return this.blockDAL.getPotentialRoots() + } + lastBlockOfIssuer(issuer:string) { return this.blockDAL.lastBlockOfIssuer(issuer); } @@ -239,6 +243,10 @@ export class FileDAL { return this.blockDAL.getNextForkBlocks(current.number, current.hash) } + getPotentialForkBlocks(numberStart:number, medianTimeStart:number) { + return this.blockDAL.getPotentialForkBlocks(numberStart, medianTimeStart) + } + async getBlockCurrent() { const current = await this.blockDAL.getCurrent(); if (!current) diff --git a/app/lib/dal/sqliteDAL/BlockDAL.ts b/app/lib/dal/sqliteDAL/BlockDAL.ts index 0c8e6e4e2..380097744 100644 --- a/app/lib/dal/sqliteDAL/BlockDAL.ts +++ b/app/lib/dal/sqliteDAL/BlockDAL.ts @@ -1,6 +1,7 @@ import {AbstractSQLite} from "./AbstractSQLite" import {SQLiteDriver} from "../drivers/SQLiteDriver" import {DBBlock} from "../../db/DBBlock" + const constants = require('../../constants'); const IS_FORK = true; @@ -115,6 +116,14 @@ export class BlockDAL extends AbstractSQLite<DBBlock> { return this.query('SELECT * FROM block WHERE fork ORDER BY number'); } + getPotentialForkBlocks(numberStart:number, medianTimeStart:number) { + return this.query('SELECT * FROM block WHERE fork AND number >= ? AND medianTime >= ? 
ORDER BY number DESC', [numberStart, medianTimeStart]); + } + + getPotentialRoots() { + return this.query('SELECT * FROM block WHERE fork AND number = ?', [0]) + } + getDividendBlocks() { return this.query('SELECT * FROM block WHERE dividend IS NOT NULL ORDER BY number'); } diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts index 9ec4f90e5..ade897b1a 100644 --- a/app/lib/indexer.ts +++ b/app/lib/indexer.ts @@ -1066,7 +1066,8 @@ export class Indexer { nbPersonalBlocksInFrame = 0; medianOfBlocksInFrame = 1; } else { - const blocksInFrame = _.filter(await range(1, HEAD_1.issuersFrame), (b:BlockDTO) => b.number <= HEAD_1.number); + const ranged = await range(1, HEAD_1.issuersFrame) + const blocksInFrame = _.filter(ranged, (b:BlockDTO) => b.number <= HEAD_1.number); const issuersInFrame = blocksInFrame.map((b:BlockDTO) => b.issuer); blocksOfIssuer = _.filter(blocksInFrame, (entry:BlockDTO) => entry.issuer == HEAD.issuer); nbPersonalBlocksInFrame = count(blocksOfIssuer); @@ -1335,7 +1336,9 @@ export class Indexer { // BR_G72 static ruleCertificationSignature(cindex: CindexEntry[]) { for (const ENTRY of cindex) { - if (!ENTRY.sigOK) return false; + if (!ENTRY.sigOK) { + return false + } } return true } diff --git a/app/lib/other_constants.ts b/app/lib/other_constants.ts new file mode 100644 index 000000000..5eeec3809 --- /dev/null +++ b/app/lib/other_constants.ts @@ -0,0 +1,4 @@ +export const OtherConstants = { + + MUTE_LOGS_DURING_UNIT_TESTS: true +} \ No newline at end of file diff --git a/app/lib/rules/local_rules.ts b/app/lib/rules/local_rules.ts index fc1aafff7..b2cfb00c3 100644 --- a/app/lib/rules/local_rules.ts +++ b/app/lib/rules/local_rules.ts @@ -27,11 +27,15 @@ export const LOCAL_RULES_FUNCTIONS = { return true; }, - checkProofOfWork: async (block:BlockDTO) => { + isProofOfWorkCorrect: (block:BlockDTO) => { let remainder = block.powMin % 16; let nb_zeros = (block.powMin - remainder) / 16; const powRegexp = new RegExp('^0{' + nb_zeros + '}'); - if (!block.hash.match(powRegexp)) { + return !!block.hash.match(powRegexp) + }, + + checkProofOfWork: async (block:BlockDTO) => { + if (!LOCAL_RULES_FUNCTIONS.isProofOfWorkCorrect(block)) { throw Error('Not a proof-of-work'); } return true; diff --git a/app/modules/crawler/lib/crawler.ts b/app/modules/crawler/lib/crawler.ts index 93a33b858..ef1e29638 100644 --- a/app/modules/crawler/lib/crawler.ts +++ b/app/modules/crawler/lib/crawler.ts @@ -405,7 +405,7 @@ export class BlockCrawler { async applyMainBranch(block: BlockDTO): Promise<boolean> { const existing = await server.dal.getAbsoluteBlockByNumberAndHash(block.number, block.hash) if (!existing) { - let addedBlock = await server.writeBlock(block, false) + let addedBlock = await server.writeBlock(block, false, true) if (!this.lastDownloaded) { this.lastDownloaded = await dao.remoteCurrent(node); } @@ -417,9 +417,6 @@ export class BlockCrawler { server.streamPush(addedBlock); } } - } else { - this.crawler.logger && this.crawler.logger.info('Downloaded already known block#%s-%s, try to fork...', block.number, block.hash) - await server.BlockchainService.tryToFork() } return true } @@ -427,8 +424,7 @@ export class BlockCrawler { return true } async isMemberPeer(thePeer: PeerDTO): Promise<boolean> { - let idty = await server.dal.getWrittenIdtyByPubkey(thePeer.pubkey); - return (idty && idty.member) || false; + return true } async downloadBlocks(thePeer: any, fromNumber: number, count?: number | undefined): Promise<BlockDTO[]> { if (!count) { @@ -444,8 +440,7 @@ export class BlockCrawler { return 
blocks; } })(this) - - await dao.pull(server.conf, server.logger); + await dao.pull(server.conf, server.logger) } catch (e) { if (this.isConnectionError(e)) { this.logger && this.logger.info("Peer %s unreachable: now considered as DOWN.", p.pubkey); @@ -459,6 +454,12 @@ export class BlockCrawler { } } })) + + await this.server.BlockchainService.pushFIFO("crawlerResolution", async () => { + await server.BlockchainService.blockResolution() + await server.BlockchainService.forkResolution() + }) + this.pullingEvent(server, 'end', current.number); } this.logger && this.logger.info('Will pull blocks from the network in %s min %s sec', Math.floor(this.pullingActualIntervalDuration / 60), Math.floor(this.pullingActualIntervalDuration % 60)); diff --git a/app/modules/crawler/lib/sync.ts b/app/modules/crawler/lib/sync.ts index 8fe575f42..aa7e6e6a1 100644 --- a/app/modules/crawler/lib/sync.ts +++ b/app/modules/crawler/lib/sync.ts @@ -258,7 +258,7 @@ export class Synchroniser extends stream.Duplex { return block; } async applyMainBranch(block: BlockDTO): Promise<boolean> { - const addedBlock = await this.BlockchainService.submitBlock(block, true, CrawlerConstants.FORK_ALLOWED); + const addedBlock = await this.BlockchainService.submitBlock(block) this.server.streamPush(addedBlock); this.watcher.appliedPercent(Math.floor(block.number / to * 100)); return true diff --git a/app/service/BlockchainService.ts b/app/service/BlockchainService.ts index d3640f02f..faf238fb0 100644 --- a/app/service/BlockchainService.ts +++ b/app/service/BlockchainService.ts @@ -11,12 +11,13 @@ import {GLOBAL_RULES_HELPERS} from "../lib/rules/global_rules" import {parsers} from "../lib/common-libs/parsers/index" import {HttpIdentityRequirement} from "../modules/bma/lib/dtos" import {FIFOService} from "./FIFOService" +import {CommonConstants} from "../lib/common-libs/constants" +import {LOCAL_RULES_FUNCTIONS} from "../lib/rules/local_rules" +import {Switcher, SwitcherDao} from "../lib/blockchain/Switcher" const _ = require('underscore'); const constants = require('../lib/constants'); -const CHECK_ALL_RULES = true; - export class BlockchainService extends FIFOService { mainContext:BlockchainContext @@ -25,16 +26,70 @@ export class BlockchainService extends FIFOService { logger:any selfPubkey:string quickSynchronizer:QuickSynchronizer + switcherDao:SwitcherDao<BlockDTO> constructor(private server:any, fifoPromiseHandler:GlobalFifoPromise) { super(fifoPromiseHandler) this.mainContext = new BlockchainContext() + this.switcherDao = new (class ForkDao implements SwitcherDao<BlockDTO> { + + constructor(private bcService:BlockchainService) {} + + getCurrent(): Promise<BlockDTO> { + return this.bcService.current() + } + + async getPotentials(numberStart: number, timeStart: number): Promise<BlockDTO[]> { + const blocks = await this.bcService.dal.getPotentialForkBlocks(numberStart, timeStart) + return blocks.map((b:any) => BlockDTO.fromJSONObject(b)) + } + + async getBlockchainBlock(number: number, hash: string): Promise<BlockDTO | null> { + try { + return BlockDTO.fromJSONObject(await this.bcService.dal.getBlockByNumberAndHash(number, hash)) + } catch (e) { + return null + } + } + + async getSandboxBlock(number: number, hash: string): Promise<BlockDTO | null> { + const block = await this.bcService.dal.getAbsoluteBlockByNumberAndHash(number, hash) + if (block && block.fork) { + return BlockDTO.fromJSONObject(block) + } else { + return null + } + } + + async revertTo(number: number): Promise<BlockDTO[]> { + const blocks:BlockDTO[] = [] + 
const current = await this.bcService.current(); + for (let i = 0, count = current.number - number; i < count; i++) { + const reverted = await this.bcService.mainContext.revertCurrentBlock() + blocks.push(BlockDTO.fromJSONObject(reverted)) + } + if (current.number < number) { + throw "Already below this number" + } + return blocks + } + + async addBlock(block: BlockDTO): Promise<BlockDTO> { + return await this.bcService.mainContext.checkAndAddBlock(block) + } + + })(this) } + /** + * Mandatory with stream.Readable + * @private + */ + _read() {} + getContext() { return this.mainContext } - setConfDAL(newConf:ConfDTO, newDAL:FileDAL, newKeyPair:any) { this.dal = newDAL; @@ -62,196 +117,96 @@ export class BlockchainService extends FIFOService { } async branches() { - let forkBlocks = await this.dal.blockDAL.getForkBlocks(); - const current = await this.mainContext.current(); - forkBlocks = _.sortBy(forkBlocks, 'number'); - forkBlocks = _.filter(forkBlocks, (b:DBBlock) => current.number - b.number < this.conf.forksize) - // Get the blocks refering current blockchain - const forkables = []; - for (const block of forkBlocks) { - const refered = await this.dal.getAbsoluteBlockByNumberAndHash(block.number - 1, block.previousHash); - if (refered) { - forkables.push(block); - } else { - this.logger.info('Missing block #%s-%s', block.number - 1, block.previousHash.substr(0, 8)) - } - } - const branches = BlockchainService.getBranches(forkables, _.difference(forkBlocks, forkables)); - const forks = branches.map((branch) => branch[branch.length - 1]); - return forks.concat([current]); - } - - static getBranches(forkables:any[], others:any[]) { - // All starting branches - let branches = forkables.map((fork) => [fork]); - // For each "pending" block, we try to add it to all branches - for (const other of others) { - for (let j = 0, len2 = branches.length; j < len2; j++) { - const branch = branches[j]; - const last = branch[branch.length - 1]; - if (other.number == last.number + 1 && other.previousHash == last.hash) { - branch.push(other); - } else if (branch[1]) { - // We try to find out if another fork block can be forked - const diff = other.number - branch[0].number; - if (diff > 0 && branch[diff - 1] && branch[diff - 1].hash == other.previousHash) { - // We duplicate the branch, and we add the block to this second branch - branches.push(branch.slice()); - // First we remove the blocks this are not part of the fork - branch.splice(diff, branch.length - diff); - branch.push(other); - j++; - } - } - } - } - branches = _.sortBy(branches, (branch:any) => -branch.length); - if (branches.length) { - const maxSize = branches[0].length; - const longestsBranches = []; - for (const branch of branches) { - if (branch.length == maxSize) { - longestsBranches.push(branch); - } - } - return longestsBranches; - } - return []; + const current = await this.current() + const switcher = new Switcher(this.switcherDao, this.conf.avgGenTime, this.conf.forksize, this.conf.switchOnHeadAdvance, this.logger) + const heads = await switcher.findPotentialSuitesHeads(current) + return heads.concat([current]) } - submitBlock(obj:any, doCheck:boolean, forkAllowed:boolean): Promise<BlockDTO> { + submitBlock(blockToAdd:any, noResolution = false): Promise<BlockDTO> { + const obj = parsers.parseBlock.syncWrite(BlockDTO.fromJSONObject(blockToAdd).getRawSigned()) const dto = BlockDTO.fromJSONObject(obj) const hash = dto.getHash() - return this.pushFIFO(hash, () => { - return this.checkAndAddBlock(obj, doCheck, forkAllowed) - }) - } - - 
private async checkAndAddBlock(blockToAdd:any, doCheck:boolean, forkAllowed:boolean = false): Promise<BlockDTO> {
-    // Check global format, notably version number
-    const obj = parsers.parseBlock.syncWrite(BlockDTO.fromJSONObject(blockToAdd).getRawSigned());
-    // Force usage of local currency name, do not accept other currencies documents
-    if (this.conf.currency) {
-      obj.currency = this.conf.currency || obj.currency;
-    } else {
-      this.conf.currency = obj.currency;
-    }
-    let existing = await this.dal.getBlockByNumberAndHashOrNull(obj.number, obj.hash);
-    if (existing) {
-      throw constants.ERRORS.BLOCK_ALREADY_PROCESSED;
-    }
-    let current = await this.mainContext.current();
-    let followsCurrent = !current || (obj.number == current.number + 1 && obj.previousHash == current.hash);
-    if (followsCurrent) {
-      // try to add it on main blockchain
-      const dto = BlockDTO.fromJSONObject(obj)
-      if (doCheck) {
-        const { index, HEAD } = await this.mainContext.checkBlock(dto, constants.WITH_SIGNATURES_AND_POW);
-        return await this.mainContext.addBlock(dto, index, HEAD)
-      } else {
-        return await this.mainContext.addBlock(dto)
+    return this.pushFIFO(hash, async () => {
+      // Check basic fields:
+      // * currency relative to conf
+      if (this.conf && this.conf.currency && this.conf.currency !== dto.currency) {
+        throw CommonConstants.ERRORS.WRONG_CURRENCY
       }
-    } else {
-      // add it as side chain
-      if (parseInt(current.number) - parseInt(obj.number) + 1 >= this.conf.forksize) {
-        throw 'Block out of fork window';
+      // * hash relative to powMin
+      if (!LOCAL_RULES_FUNCTIONS.isProofOfWorkCorrect(dto)) {
+        throw CommonConstants.ERRORS.WRONG_POW
       }
-      let absolute = await this.dal.getAbsoluteBlockByNumberAndHash(obj.number, obj.hash)
-      let res = null;
+      // * number relative to fork window and current block
+      if (this.conf && this.conf.forksize !== undefined) {
+        const current = await this.current()
+        if (current && dto.number < current.number - this.conf.forksize) {
+          throw CommonConstants.ERRORS.OUT_OF_FORK_WINDOW
+        }
+      }
+      const absolute = await this.dal.getAbsoluteBlockByNumberAndHash(obj.number, obj.hash)
       if (!absolute) {
-        res = await this.mainContext.addSideBlock(obj)
-        // we eventually try to swith **only if** we do not already have this blocK. Otherwise the block will be
-        // spread again to the network, which can end in an infinite ping-pong.
-        if (forkAllowed) {
-          await this.eventuallySwitchOnSideChain(current);
+        // Save the block in the sandbox
+        await this.mainContext.addSideBlock(dto);
+        // Trigger the save + block resolution in an async way: this allows us to answer immediately that the submission
+        // was accepted, and that the document can be rerouted and is being processed.
+        // This enhances block propagation on the network, and thus helps prevent forks from emerging.
+ if (!noResolution) { + (() => { + return this.pushFIFO('resolution_' + dto.getHash(), async () => { + // Resolve the potential new HEAD + await this.blockResolution() + // Resolve the potential forks + await this.forkResolution() + }) + })() } } else { - throw "Fork block already known" + throw "Block already known" } - return res; - } - } - - async tryToFork() { - return this.pushFIFO("tryToFork", async () => { - const current = await this.mainContext.current() - await this.eventuallySwitchOnSideChain(current) + return dto }) } - private async eventuallySwitchOnSideChain(current:DBBlock) { - const branches = await this.branches() - const blocksAdvanceInBlocks = this.conf.switchOnHeadAdvance - const timeAdvance = this.conf.switchOnHeadAdvance * this.conf.avgGenTime - let potentials = _.without(branches, current); - // We switch only to blockchain with X_BLOCKS in advance considering both theoretical time by block / avgGenTime, + written time / avgGenTime - this.logger.trace('SWITCH: %s branches...', branches.length); - this.logger.trace('SWITCH: required is >= %s for blockDistance and %s for timeAdvance for both values to try to follow the fork', blocksAdvanceInBlocks, timeAdvance) - potentials.reverse() - potentials = _.filter(potentials, (p:DBBlock) => { - const effectiveBlockAdvance = p.number - current.number - const effectiveTimeAdvance = p.medianTime - current.medianTime - const retained = effectiveBlockAdvance >= blocksAdvanceInBlocks && effectiveTimeAdvance >= timeAdvance - this.logger.trace('SWITCH: found branch #%s-%s has blockDistance %s ; timeDistance %s ; retained: %s', p.number, p.hash.substr(0, 8), effectiveBlockAdvance, effectiveTimeAdvance, retained ? 'YES' : 'NO'); - return retained - }); - this.logger.trace('SWITCH: %s potential side chains...', potentials.length); - for (const potential of potentials) { - this.logger.info('SWITCH: get side chain #%s-%s...', potential.number, potential.hash); - const sideChain = await this.getWholeForkBranch(potential) - this.logger.info('SWITCH: revert main chain to block #%s...', sideChain[0].number - 1); - await this.revertToBlock(sideChain[0].number - 1) - try { - this.logger.info('SWITCH: apply side chain #%s-%s...', potential.number, potential.hash); - await this.applySideChain(sideChain) - } catch (e) { - this.logger.warn('SWITCH: error %s', e.stack || e); - // Revert the revert (so we go back to original chain) - const revertedChain = await this.getWholeForkBranch(current) - await this.revertToBlock(revertedChain[0].number - 1) - await this.applySideChain(revertedChain) - await this.markSideChainAsWrong(sideChain) + async blockResolution() { + let added = true + while (added) { + const current = await this.current() + let potentials = [] + if (current) { + potentials = await this.dal.getForkBlocksFollowing(current) + this.logger.info('Block resolution: %s potential blocks after current#%s...', potentials.length, current.number) + } else { + potentials = await this.dal.getPotentialRootBlocks() + this.logger.info('Block resolution: %s potential blocks for root block...', potentials.length) + } + added = false + let i = 0 + while (!added && i < potentials.length) { + const dto = BlockDTO.fromJSONObject(potentials[i]) + try { + await this.mainContext.checkAndAddBlock(dto) + added = true + } catch (e) { + this.logger.error(e) + added = false + this.push({ + blockResolutionError: e && e.message + }) + } + i++ } } } - private async getWholeForkBranch(topForkBlock:DBBlock) { - const fullBranch = []; - let isForkBlock = true; - let next 
= topForkBlock;
-    while (isForkBlock) {
-      fullBranch.push(next);
-      this.logger.trace('SWITCH: get absolute #%s-%s...', next.number - 1, next.previousHash);
-      next = await this.dal.getAbsoluteBlockByNumberAndHash(next.number - 1, next.previousHash);
-      isForkBlock = next.fork;
-    }
-    //fullBranch.push(next);
-    // Revert order so we have a crescending branch
-    return fullBranch.reverse();
-  }
-
-  private async revertToBlock(number:number) {
-    let nowCurrent = await this.current();
-    this.logger.trace('SWITCH: main chain current = #%s-%s...', nowCurrent.number, nowCurrent.hash);
-    while (nowCurrent.number > number) {
-      this.logger.trace('SWITCH: main chain revert #%s-%s...', nowCurrent.number, nowCurrent.hash);
-      await this.mainContext.revertCurrentBlock();
-      nowCurrent = await this.current();
-    }
-  }
-
-  private async applySideChain(chain:DBBlock[]) {
-    for (const block of chain) {
-      this.logger.trace('SWITCH: apply side block #%s-%s -> #%s-%s...', block.number, block.hash, block.number - 1, block.previousHash);
-      await this.checkAndAddBlock(block, CHECK_ALL_RULES);
-    }
-  }
-
-  private async markSideChainAsWrong(chain:DBBlock[]) {
-    for (const block of chain) {
-      block.wrong = true;
-      // Saves the block (DAL)
-      await this.dal.saveSideBlockInFile(block);
+  async forkResolution() {
+    const switcher = new Switcher(this.switcherDao, this.conf.avgGenTime, this.conf.forksize, this.conf.switchOnHeadAdvance, this.logger)
+    const newCurrent = await switcher.tryToFork()
+    if (newCurrent) {
+      this.push({
+        bcEvent: 'switched',
+        block: newCurrent
+      })
     }
   }
diff --git a/app/service/FIFOService.ts b/app/service/FIFOService.ts
index 8b7f6d467..77d41dd79 100644
--- a/app/service/FIFOService.ts
+++ b/app/service/FIFOService.ts
@@ -1,8 +1,11 @@
 import {GlobalFifoPromise} from "./GlobalFifoPromise";
+import * as stream from 'stream';

-export abstract class FIFOService {
+export abstract class FIFOService extends stream.Readable {

-  constructor(private fifoPromiseHandler:GlobalFifoPromise) {}
+  constructor(private fifoPromiseHandler:GlobalFifoPromise) {
+    super({ objectMode: true })
+  }

   async pushFIFO<T>(operationId: string, p: () => Promise<T>): Promise<T> {
     return this.fifoPromiseHandler.pushFIFOPromise(operationId, p)
diff --git a/doc/validator-guide.md b/doc/validator-guide.md
index 618a08e95..b7b37a267 100644
--- a/doc/validator-guide.md
+++ b/doc/validator-guide.md
@@ -42,6 +42,10 @@ Duniter should be able to start in a daemonized way with its UI available at `ht

 Duniter must respect a set of behaviors once started.

+### Memory consumption
+
+Duniter must have a memory footprint of about 150 MB. If this amount keeps growing, there is a memory leak.
+
 ### New blocks detection

 Duniter should detect eventual new blocks available on the network on its startup, pull and add them to its HEAD branch.
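
Note on the switch rule used throughout this patch: Switcher.tryToFork() abandons the local branch only for a fork whose HEAD is at least `switchOnHeadAdvance` blocks ahead of current AND at least `switchOnHeadAdvance * avgGenTime` seconds ahead in medianTime (the "3-3" rule, both values being 3 in the unit tests of this patch). Below is a minimal TypeScript sketch of that acceptance test; the `HeadInfo` shape and the constant values are illustrative, not taken verbatim from the patch:

    // Sketch of the "3-3" switch rule, assuming a simplified head shape.
    interface HeadInfo {
      number: number      // block height
      medianTime: number  // median time of the block, in seconds
    }

    const avgGenTime = 5 * 60        // example value: 5 minutes per block
    const switchOnHeadAdvance = 3    // the "3" of the 3-3 rule

    // A fork is worth switching to only if its HEAD is ahead of the local HEAD
    // both in number of blocks and in medianTime.
    function shouldSwitch(current: HeadInfo, forkHead: HeadInfo): boolean {
      const numberStart = current.number + switchOnHeadAdvance
      const timeStart = current.medianTime + switchOnHeadAdvance * avgGenTime
      return forkHead.number >= numberStart && forkHead.medianTime >= timeStart
    }

This is the same two-sided test performed in tryToFork() against `numberStart` and `timeStart` before switchOnChain() is called.
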
diff --git a/server.ts b/server.ts index 708e0c616..3a2f9ece8 100644 --- a/server.ts +++ b/server.ts @@ -12,7 +12,6 @@ import {KeyGen, randomKey} from "./app/lib/common-libs/crypto/keyring" import {parsers} from "./app/lib/common-libs/parsers/index" import {Cloneable} from "./app/lib/dto/Cloneable" import {DuniterDocument, duniterDocument2str} from "./app/lib/common-libs/constants" -import {CrawlerConstants} from "./app/modules/crawler/lib/constants" import {GlobalFifoPromise} from "./app/service/GlobalFifoPromise" import {BlockchainContext} from "./app/lib/computation/BlockchainContext" import {BlockDTO} from "./app/lib/dto/BlockDTO" @@ -37,6 +36,7 @@ const path = require('path'); const archiver = require('archiver'); const unzip = require('unzip2'); const fs = require('fs'); +const es = require('event-stream'); const daemonize = require("daemonize2") const constants = require('./app/lib/constants'); const jsonpckg = require('./package.json'); @@ -177,6 +177,13 @@ export class Server extends stream.Duplex implements HookableServer { this.PeeringService.setConfDAL(this.conf, this.dal, this.keyPair) this.BlockchainService.setConfDAL(this.conf, this.dal, this.keyPair) this.TransactionsService.setConfDAL(this.conf, this.dal) + + // Messages piping + this.BlockchainService + .pipe(es.mapSync((e:any) => { + this.streamPush(e) + })) + return this.conf; } @@ -192,8 +199,8 @@ export class Server extends stream.Duplex implements HookableServer { return await this.writeBlock(obj) } - async writeBlock(obj:any, notify = true) { - const res = await this.BlockchainService.submitBlock(obj, true, CrawlerConstants.FORK_ALLOWED) + async writeBlock(obj:any, notify = true, noResolution = false) { + const res = await this.BlockchainService.submitBlock(obj, noResolution) if (notify) { this.emitDocument(res, DuniterDocument.ENTITY_BLOCK) } @@ -297,6 +304,10 @@ export class Server extends stream.Duplex implements HookableServer { await this.revert(); } } + // Eventual block resolution + await this.BlockchainService.blockResolution() + // Eventual fork resolution + await this.BlockchainService.forkResolution() } recomputeSelfPeer() { diff --git a/test/fast/fork-resolution-3-3.ts b/test/fast/fork-resolution-3-3.ts index 2d951c3d7..2a8e3215b 100644 --- a/test/fast/fork-resolution-3-3.ts +++ b/test/fast/fork-resolution-3-3.ts @@ -1,14 +1,19 @@ import * as assert from 'assert' import {SwitchBlock, Switcher, SwitcherDao} from "../../app/lib/blockchain/Switcher" +import {NewLogger} from "../../app/lib/logger" + +const logger = NewLogger() + +const avgGenTime = 5 * 60 +const forkWindowSize = 5 +const switchOnHeadAdvance = 3 describe("Fork resolution 3-3 algo", () => { - it('should switch on a valid fork', () => { + it('should switch on a valid fork', async () => { // B10 -- B11 -- B12 -- B13 - // | `- C12 -- C13 -- C14 -- C15 -- C16 - // | - // `- (= B13 - ForkWindowSize) + // `- C12 -- C13 -- C14 -- C15 -- C16 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) @@ -22,8 +27,8 @@ describe("Fork resolution 3-3 algo", () => { Block.from("C15"), Block.from("C16") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, switchOnHeadAdvance, logger) + await switcher.tryToFork() assert.equal(bc.current.number, 16) assert.equal(bc.current.hash, "C16") }) @@ -31,9 +36,7 @@ describe("Fork resolution 3-3 algo", () => { it('should not switch if no fork block 
3-3 exist', async () => { // B10 -- B11 -- B12 -- B13 - // | `- C12 -- C13 -- C14 -- C15 - // | - // `- (= B13 - ForkWindowSize) + // `- C12 -- C13 -- C14 -- C15 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) @@ -46,8 +49,8 @@ describe("Fork resolution 3-3 algo", () => { Block.from("C14"), Block.from("C15") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, switchOnHeadAdvance) + await switcher.tryToFork() assert.equal(bc.current.number, 13) assert.equal(bc.current.hash, "B13") }) @@ -55,9 +58,7 @@ describe("Fork resolution 3-3 algo", () => { it('should eliminate a fork with missing blocks', async () => { // B10 -- B11 -- B12 -- B13 - // | `- C12 -- C13 -- C14 -- C15 -- C16 - // | - // `- (= B13 - ForkWindowSize) + // `- C12 -- C13 -- C14 -- C15 -- C16 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) @@ -68,8 +69,8 @@ describe("Fork resolution 3-3 algo", () => { Block.from("C14"), Block.from("C15") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, switchOnHeadAdvance) + await switcher.tryToFork() assert.equal(bc.current.number, 13) assert.equal(bc.current.hash, "B13") }) @@ -77,9 +78,7 @@ describe("Fork resolution 3-3 algo", () => { it('should eliminate a fork out of fork window', async () => { // B10 -- B11 -- B12 -- B13 - // + -- C11 -- C12 -- C13 -- C14 -- C15 -- C16 - // | - // `- (= B13 - ForkWindowSize) + // `- C11 -- C12 -- C13 -- C14 -- C15 -- C16 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) @@ -95,18 +94,16 @@ describe("Fork resolution 3-3 algo", () => { Block.from("C15"), Block.from("C16") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, switchOnHeadAdvance) + await switcher.tryToFork() assert.equal(bc.current.number, 13) assert.equal(bc.current.hash, "B13") }) - it('should accept a fork right on the limit of the fork window', async () => { + it('should refuse a fork right on the limit of the fork window', async () => { // B10 -- B11 -- B12 -- B13 - // |` -- C11 -- C12 -- C13 -- C14 -- C15 -- C16 - // | - // `- (= B13 - ForkWindowSize) + // `- C11 -- C12 -- C13 -- C14 -- C15 -- C16 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) @@ -121,18 +118,16 @@ describe("Fork resolution 3-3 algo", () => { Block.from("C15"), Block.from("C16") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() - assert.equal(bc.current.number, 16) - assert.equal(bc.current.hash, "C16") + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, switchOnHeadAdvance) + await switcher.tryToFork() + assert.equal(bc.current.number, 13) + assert.equal(bc.current.hash, "B13") }) it('should eliminate a fork whose 2nd block is invalid', async () => { // B10 -- B11 -- B12 -- B13 - // | `- C12 -- C13 -- C14 -- C15 -- C16 - // | - // `- (= B13 - ForkWindowSize) + // `- C12 -- C13 -- C14 -- C15 -- C16 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) @@ -146,72 +141,67 @@ describe("Fork resolution 3-3 algo", () => { 
Block.from("C15"), Block.from("C16") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, switchOnHeadAdvance) + await switcher.tryToFork() assert.equal(bc.current.number, 13) assert.equal(bc.current.hash, "B13") }) it('should select the longest fork', async () => { - // B10 -- B11 -- B12 -- B13 - // | `- C12 -- C13 -- C14 -- C15 -- C16 - // | `- D13 -- D14 -- D15 -- D16 -- D17 - // | - // `- (= B13 - ForkWindowSize) + // B10 -- B11 -- B12 -- B13 -- B14 + // ` `- C14 -- C15 <-- "length" 2 + // | `- D15 -- D16 <-- "length" 3 (should be selected) + // `- E12 -- E13 -- E14 -- E15 <-- "length" 4 const bc = new Blockchain(Block.from("B10")) bc.add(Block.from("B11")) bc.add(Block.from("B12")) bc.add(Block.from("B13")) - assert.equal(bc.current.number, 13) + bc.add(Block.from("B14")) + assert.equal(bc.current.number, 14) const sbx = new BlockSandbox([ - Block.from("C12", "B11"), - Block.from("C13"), - Block.from("C14"), + Block.from("C14", "B13"), Block.from("C15"), - Block.from("C16"), - Block.from("D13", "C12"), - Block.from("D14"), - Block.from("D15"), + Block.from("D15", "C14"), Block.from("D16"), - Block.from("D17") + Block.from("E12", "B11"), + Block.from("E13"), + Block.from("E14"), + Block.from("E15") ]) - const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize) - switcher.tryToFork() - assert.equal(bc.current.number, 17) - assert.equal(bc.current.hash, "D17") + const switcher = new Switcher(new TestingSwitcherDao(bc, sbx), avgGenTime, forkWindowSize, 1) + await switcher.tryToFork() + assert.equal(16, bc.current.number) + assert.equal("D16", bc.current.hash) }) }) -const avgGenTime = 5 * 60 -const forkWindowSize = 3 - -class TestingSwitcherDao implements SwitcherDao { +class TestingSwitcherDao implements SwitcherDao<Block> { - getCurrent(): SwitchBlock { + async getCurrent(): Promise<Block> { return this.bc.current } - getPotentials(numberStart:number, timeStart:number) { + async getPotentials(numberStart:number, timeStart:number) { return this.sbx.getPotentials(numberStart, timeStart) } - getBlockchainBlock(number: number, hash: string): SwitchBlock|null { + async getBlockchainBlock(number: number, hash: string): Promise<Block|null> { return this.bc.getBlock(number, hash) } - getSandboxBlock(number: number, hash: string): SwitchBlock | any { + async getSandboxBlock(number: number, hash: string): Promise<Block | any> { return this.sbx.getBlock(number, hash) } - revertTo(number: number): SwitchBlock[] { + async revertTo(number: number): Promise<Block[]> { return this.bc.revertTo(number) } - addBlock(block: Block): SwitchBlock { + async addBlock(block: Block): Promise<Block> { return this.bc.add(block) } diff --git a/test/integration/branches2.js b/test/integration/branches2.js index bed9f8a8f..3b13b99f6 100644 --- a/test/integration/branches2.js +++ b/test/integration/branches2.js @@ -1,246 +1,214 @@ "use strict"; - -const co = require('co'); -const _ = require('underscore'); -const duniter = require('../../index'); -const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma; -const user = require('./tools/user'); -const constants = require('../../app/lib/constants'); -const rp = require('request-promise'); -const httpTest = require('./tools/http'); -const commit = require('./tools/commit'); -const sync = require('./tools/sync'); -const shutDownEngine = 
require('./tools/shutDownEngine'); - -const expectJSON = httpTest.expectJSON; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const other_constants_1 = require("../../app/lib/other_constants"); +const logger_1 = require("../../app/lib/logger"); +const index_1 = require("../../app/modules/bma/index"); +const index_2 = require("../../app/modules/crawler/index"); +const toolbox_1 = require("./tools/toolbox"); +const co = require('co'); +const _ = require('underscore'); +const duniter = require('../../index'); +const bma = index_1.BmaDependency.duniter.methods.bma; +const user = require('./tools/user'); +const rp = require('request-promise'); +const httpTest = require('./tools/http'); +const commit = require('./tools/commit'); +const sync = require('./tools/sync'); +const shutDownEngine = require('./tools/shutDownEngine'); +const expectJSON = httpTest.expectJSON; const expectHttpCode = httpTest.expectHttpCode; - -if (constants.MUTE_LOGS_DURING_UNIT_TESTS) { - require('../../app/lib/logger').NewLogger().mute(); +if (other_constants_1.OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS) { + logger_1.NewLogger().mute(); } - // Trace these errors process.on('unhandledRejection', (reason) => { - console.error('Unhandled rejection: ' + reason); - console.error(reason); + console.error('Unhandled rejection: ' + reason); + console.error(reason); }); - const MEMORY_MODE = true; const commonConf = { - ipv4: '127.0.0.1', - currency: 'bb', - httpLogs: true, - forksize: 10, - switchOnHeadAdvance: 6, - avgGenTime: 30 * 60, - sigQty: 1 + ipv4: '127.0.0.1', + currency: 'bb', + httpLogs: true, + forksize: 10, + switchOnHeadAdvance: 6, + avgGenTime: 30 * 60, + sigQty: 1 }; - -let s1, s2, cat, toc - +let s1, s2, cat, toc; const now = Math.round(new Date().getTime() / 1000); - -describe("SelfFork", function() { - - before(() => co(function *() { - - s1 = duniter( - '/bb4', - MEMORY_MODE, - _.extend({ - port: '7781', - pair: { - pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', - sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' - } - }, commonConf)); - - s2 = duniter( - '/bb5', - MEMORY_MODE, - _.extend({ - port: '7782', - pair: { - pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', - sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' - } - }, commonConf)); - - cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 }); - toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 }); - - const commitS1 = commit(s1); - const commitS2 = commit(s2, { - time: now + 37180 - }); - - yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); - yield s2.initWithDAL().then(bma).then((bmapi) 
=> bmapi.openConnections()); - s1.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint - s2.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint - - // Server 1 - yield cat.createIdentity(); - yield toc.createIdentity(); - yield toc.cert(cat); - yield cat.cert(toc); - yield cat.join(); - yield toc.join(); - - yield commitS1({ - time: now - }); - yield commitS1(); - yield commitS1(); - yield commitS1(); - - // Server 2 - yield sync(0, 2, s1, s2); - yield function*() { - yield (cb) => setTimeout(cb, 1000); - }; - let s2p = yield s2.PeeringService.peer(); - - yield commitS2(); - yield commitS2(); - yield commitS2(); - yield commitS2(); - yield commitS2(); - yield commitS2(); - yield commitS2(); - - yield s1.writePeer(s2p); - - // Forking S1 from S2 - return require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey); - })); - - after(() => { - return Promise.all([ - shutDownEngine(s1), - shutDownEngine(s2) - ]) - }) - - describe("Server 1 /blockchain", function() { - - it('/block/0 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), { - number: 0, - issuersCount: 0, - issuersFrame: 1, - issuersFrameVar: 0 - }); - }); - - it('/block/1 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), { - number: 1, - issuersCount: 1, - issuersFrame: 1, - issuersFrameVar: 5 - }); - }); - - it('/block/2 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), { - number: 2, - issuersCount: 1, - issuersFrame: 2, - issuersFrameVar: 4 - }); - }); - - it('/block/3 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), { - number: 3, - issuersCount: 1, - issuersFrame: 3, - issuersFrameVar: 3 - }); - }); - - it('/block/4 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), { - number: 4, - issuersCount: 2, - issuersFrame: 4, - issuersFrameVar: 7 - }); - }); - - it('/block/5 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), { - number: 5, - issuersCount: 2, - issuersFrame: 5, - issuersFrameVar: 6 - }); - }); - - it('/block/6 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), { - number: 6, - issuersCount: 2, - issuersFrame: 6, - issuersFrameVar: 5 - }); - }); - - it('/block/7 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), { - number: 7, - issuersCount: 2, - issuersFrame: 7, - issuersFrameVar: 4 - }); - }); - - it('/block/88 should not exist', function() { - return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88')); - }); - - it('/current should exist', function() { - return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), { - number: 9 - }); - }); - - it('should have 2 branch', function() { - return s1.BlockchainService.branches() - .then(function(branches){ - branches.should.have.length(2); +describe("SelfFork", function () { + before(() => co(function* () { + s1 = duniter('/bb4', MEMORY_MODE, _.extend({ + port: '7781', + pair: { + pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', + sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' 
+ } + }, commonConf)); + s2 = duniter('/bb5', MEMORY_MODE, _.extend({ + port: '7782', + pair: { + pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', + sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' + } + }, commonConf)); + cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 }); + toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 }); + const commitS1 = commit(s1); + const commitS2 = commit(s2, { + time: now + 37180 }); - }); - }); - - describe("Server 2 /blockchain", function() { - - it('/block/0 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), { - number: 0 - }); - }); - - it('/block/1 should exist', function() { - return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), { - number: 1 - }); - }); - - it('/block/88 should not exist', function() { - return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88')); - }); - - it('/current should exist', function() { - return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), { - number: 9 - }); - }); - - it('should have 1 branch', () => co(function*() { - const branches = yield s2.BlockchainService.branches(); - branches.should.have.length(1); + yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); + yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); + s1.getMainEndpoint = index_1.BmaDependency.duniter.methods.getMainEndpoint; + s2.getMainEndpoint = index_1.BmaDependency.duniter.methods.getMainEndpoint; + // Server 1 + yield cat.createIdentity(); + yield toc.createIdentity(); + yield toc.cert(cat); + yield cat.cert(toc); + yield cat.join(); + yield toc.join(); + yield commitS1({ + time: now + }); + yield commitS1(); + yield commitS1(); + yield commitS1(); + // Server 2 + yield sync(0, 2, s1, s2); + yield toolbox_1.waitToHaveBlock(s2, 2); + let s2p = yield s2.PeeringService.peer(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield s1.writePeer(s2p); + // Forking S1 from S2 + yield Promise.all([ + toolbox_1.waitForkResolution(s1, 9), + index_2.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey) + ]); })); - }); + after(() => { + return Promise.all([ + shutDownEngine(s1), + shutDownEngine(s2) + ]); + }); + describe("Server 1 /blockchain", function () { + it('/block/0 should exist', function () { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), { + number: 0, + issuersCount: 0, + issuersFrame: 1, + issuersFrameVar: 0 + }); + }); + it('/block/1 should exist', function () { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), { + number: 1, + issuersCount: 1, + issuersFrame: 1, + issuersFrameVar: 5 + }); + }); + it('/block/2 should exist', function () { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), { + number: 2, + issuersCount: 1, + issuersFrame: 2, + issuersFrameVar: 4 + }); + }); + it('/block/3 should exist', function () { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), { + number: 3, + issuersCount: 1, + issuersFrame: 3, + 
issuersFrameVar: 3
+            });
+        });
+        it('/block/4 should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), {
+                number: 4,
+                issuersCount: 2,
+                issuersFrame: 4,
+                issuersFrameVar: 7
+            });
+        });
+        it('/block/5 should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), {
+                number: 5,
+                issuersCount: 2,
+                issuersFrame: 5,
+                issuersFrameVar: 6
+            });
+        });
+        it('/block/6 should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), {
+                number: 6,
+                issuersCount: 2,
+                issuersFrame: 6,
+                issuersFrameVar: 5
+            });
+        });
+        it('/block/7 should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), {
+                number: 7,
+                issuersCount: 2,
+                issuersFrame: 7,
+                issuersFrameVar: 4
+            });
+        });
+        it('/block/88 should not exist', function () {
+            return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88'));
+        });
+        it('/current should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), {
+                number: 9
+            });
+        });
+        it('should have 1 branch', () => __awaiter(this, void 0, void 0, function* () {
+            const branches = yield s1.BlockchainService.branches();
+            branches.should.have.length(1);
+        }));
+    });
+    describe("Server 2 /blockchain", function () {
+        it('/block/0 should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), {
+                number: 0
+            });
+        });
+        it('/block/1 should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), {
+                number: 1
+            });
+        });
+        it('/block/88 should not exist', function () {
+            return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88'));
+        });
+        it('/current should exist', function () {
+            return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), {
+                number: 9
+            });
+        });
+        it('should have 1 branch', () => co(function* () {
+            const branches = yield s2.BlockchainService.branches();
+            branches.should.have.length(1);
+        }));
+    });
 });
+//# sourceMappingURL=branches2.js.map
\ No newline at end of file
diff --git a/test/integration/branches2.ts b/test/integration/branches2.ts
new file mode 100644
index 000000000..a63e06007
--- /dev/null
+++ b/test/integration/branches2.ts
@@ -0,0 +1,247 @@
+import {OtherConstants} from "../../app/lib/other_constants"
+import {NewLogger} from "../../app/lib/logger"
+import {BmaDependency} from "../../app/modules/bma/index";
+import {CrawlerDependency} from "../../app/modules/crawler/index"
+import {waitForkResolution, waitToHaveBlock} from "./tools/toolbox"
+
+const co = require('co');
+const _ = require('underscore');
+const duniter = require('../../index');
+const bma = BmaDependency.duniter.methods.bma;
+const user = require('./tools/user');
+const rp = require('request-promise');
+const httpTest = require('./tools/http');
+const commit = require('./tools/commit');
+const sync = require('./tools/sync');
+const shutDownEngine = require('./tools/shutDownEngine');
+
+const expectJSON = httpTest.expectJSON;
+const expectHttpCode = httpTest.expectHttpCode;
+
+if (OtherConstants.MUTE_LOGS_DURING_UNIT_TESTS) {
+  NewLogger().mute();
+}
+
+// Trace these errors
+process.on('unhandledRejection', (reason) => {
+  console.error('Unhandled rejection: ' + reason);
+  console.error(reason);
+});
+
+const MEMORY_MODE = true;
+const commonConf = {
+  ipv4: '127.0.0.1',
+  
currency: 'bb', + httpLogs: true, + forksize: 10, + switchOnHeadAdvance: 6, + avgGenTime: 30 * 60, + sigQty: 1 +}; + +let s1:any, s2:any, cat, toc + +const now = Math.round(new Date().getTime() / 1000); + +describe("SelfFork", function() { + + before(() => co(function *() { + + s1 = duniter( + '/bb4', + MEMORY_MODE, + _.extend({ + port: '7781', + pair: { + pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', + sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' + } + }, commonConf)); + + s2 = duniter( + '/bb5', + MEMORY_MODE, + _.extend({ + port: '7782', + pair: { + pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', + sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' + } + }, commonConf)); + + cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 }); + toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 }); + + const commitS1 = commit(s1); + const commitS2 = commit(s2, { + time: now + 37180 + }); + + yield s1.initWithDAL().then(bma).then((bmapi:any) => bmapi.openConnections()); + yield s2.initWithDAL().then(bma).then((bmapi:any) => bmapi.openConnections()); + s1.getMainEndpoint = BmaDependency.duniter.methods.getMainEndpoint + s2.getMainEndpoint = BmaDependency.duniter.methods.getMainEndpoint + + // Server 1 + yield cat.createIdentity(); + yield toc.createIdentity(); + yield toc.cert(cat); + yield cat.cert(toc); + yield cat.join(); + yield toc.join(); + + yield commitS1({ + time: now + }); + yield commitS1(); + yield commitS1(); + yield commitS1(); + + // Server 2 + yield sync(0, 2, s1, s2); + yield waitToHaveBlock(s2, 2) + let s2p = yield s2.PeeringService.peer(); + + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + yield commitS2(); + + yield s1.writePeer(s2p); + // Forking S1 from S2 + yield Promise.all([ + waitForkResolution(s1, 9), + CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey) + ]) + })); + + after(() => { + return Promise.all([ + shutDownEngine(s1), + shutDownEngine(s2) + ]) + }) + + describe("Server 1 /blockchain", function() { + + it('/block/0 should exist', function() { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/0', { json: true }), { + number: 0, + issuersCount: 0, + issuersFrame: 1, + issuersFrameVar: 0 + }); + }); + + it('/block/1 should exist', function() { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/1', { json: true }), { + number: 1, + issuersCount: 1, + issuersFrame: 1, + issuersFrameVar: 5 + }); + }); + + it('/block/2 should exist', function() { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/2', { json: true }), { + number: 2, + issuersCount: 1, + issuersFrame: 2, + issuersFrameVar: 4 + }); + }); + + it('/block/3 should exist', function() { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/3', { json: true }), { + number: 3, + issuersCount: 1, + issuersFrame: 3, + issuersFrameVar: 3 + }); + }); + + it('/block/4 should exist', function() { + return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/4', { json: true }), { + number: 4, + issuersCount: 2, + issuersFrame: 4, + issuersFrameVar: 7 + }); + }); + + it('/block/5 should exist', function() { + return 
expectJSON(rp('http://127.0.0.1:7781/blockchain/block/5', { json: true }), {
+        number: 5,
+        issuersCount: 2,
+        issuersFrame: 5,
+        issuersFrameVar: 6
+      });
+    });
+
+    it('/block/6 should exist', function() {
+      return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/6', { json: true }), {
+        number: 6,
+        issuersCount: 2,
+        issuersFrame: 6,
+        issuersFrameVar: 5
+      });
+    });
+
+    it('/block/7 should exist', function() {
+      return expectJSON(rp('http://127.0.0.1:7781/blockchain/block/7', { json: true }), {
+        number: 7,
+        issuersCount: 2,
+        issuersFrame: 7,
+        issuersFrameVar: 4
+      });
+    });
+
+    it('/block/88 should not exist', function() {
+      return expectHttpCode(404, rp('http://127.0.0.1:7781/blockchain/block/88'));
+    });
+
+    it('/current should exist', function() {
+      return expectJSON(rp('http://127.0.0.1:7781/blockchain/current', { json: true }), {
+        number: 9
+      });
+    });
+
+    it('should have 1 branch', async () => {
+      const branches:any[] = await s1.BlockchainService.branches()
+      branches.should.have.length(1)
+    })
+  });
+
+  describe("Server 2 /blockchain", function() {
+
+    it('/block/0 should exist', function() {
+      return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/0', { json: true }), {
+        number: 0
+      });
+    });
+
+    it('/block/1 should exist', function() {
+      return expectJSON(rp('http://127.0.0.1:7782/blockchain/block/1', { json: true }), {
+        number: 1
+      });
+    });
+
+    it('/block/88 should not exist', function() {
+      return expectHttpCode(404, rp('http://127.0.0.1:7782/blockchain/block/88'));
+    });
+
+    it('/current should exist', function() {
+      return expectJSON(rp('http://127.0.0.1:7782/blockchain/current', { json: true }), {
+        number: 9
+      });
+    });
+
+    it('should have 1 branch', () => co(function*() {
+      const branches = yield s2.BlockchainService.branches();
+      branches.should.have.length(1);
+    }));
+  });
+});
diff --git a/test/integration/branches_revert2.js b/test/integration/branches_revert2.js
index 3d1289591..9800c3ad7 100644
--- a/test/integration/branches_revert2.js
+++ b/test/integration/branches_revert2.js
@@ -184,6 +184,7 @@ describe("Revert two blocks", function() {
   before(() => co(function*() {
     yield s1.dal.txsDAL.sqlDeleteAll()
     yield cat.sendP(19, toc);
+    yield s1.dal.blockDAL.exec('DELETE FROM block WHERE fork AND number = 3')
     yield commit(s1)({ time: now + 1 });
   }))
 
diff --git a/test/integration/branches_revert_balance.js b/test/integration/branches_revert_balance.js
index fc7c448f8..3a5fc3912 100644
--- a/test/integration/branches_revert_balance.js
+++ b/test/integration/branches_revert_balance.js
@@ -62,6 +62,7 @@ describe("Revert balance", () => {
 
   it('cat should be able to RE-send 60 units to tac', () => co(function*() {
     const txsPending = yield s1.dal.txsDAL.getAllPending(1)
+    yield s1.dal.blockDAL.exec('DELETE FROM block WHERE fork AND number = 3')
     txsPending.should.have.length(1)
     yield s1.commit({ time: now + 1 })
     yield s1.expect('/tx/sources/' + cat.pub, (res) => {
diff --git a/test/integration/branches_switch.js b/test/integration/branches_switch.js
index 52fe0bfa9..e85ffb4ef 100644
--- a/test/integration/branches_switch.js
+++ b/test/integration/branches_switch.js
@@ -1,119 +1,98 @@
 "use strict";
-
+Object.defineProperty(exports, "__esModule", { value: true });
+const index_1 = require("../../app/modules/crawler/index");
 const co = require('co');
-const _ = require('underscore');
-const duniter = require('../../index');
-const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma;
-const user = require('./tools/user');
-const rp = 
require('request-promise'); -const httpTest = require('./tools/http'); -const commit = require('./tools/commit'); -const sync = require('./tools/sync'); -const shutDownEngine = require('./tools/shutDownEngine'); -const constants = require('../../app/lib/constants'); - -const expectJSON = httpTest.expectJSON; - +const _ = require('underscore'); +const duniter = require('../../index'); +const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma; +const user = require('./tools/user'); +const rp = require('request-promise'); +const httpTest = require('./tools/http'); +const commit = require('./tools/commit'); +const sync = require('./tools/sync'); +const shutDownEngine = require('./tools/shutDownEngine'); +const expectJSON = httpTest.expectJSON; const MEMORY_MODE = true; const commonConf = { - ipv4: '127.0.0.1', - currency: 'bb', - httpLogs: true, - forksize: 30, - avgGenTime: 1, - sigQty: 1 + ipv4: '127.0.0.1', + currency: 'bb', + httpLogs: true, + forksize: 30, + avgGenTime: 1, + sigQty: 1 }; - -let s1, s2, cat, toc - -describe("Switch", function() { - - before(() => co(function *() { - - s1 = duniter( - '/bb11', - MEMORY_MODE, - _.extend({ - switchOnHeadAdvance: 0, - port: '7788', - pair: { - pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', - sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' - }, - rootoffset: 10, - sigQty: 1, dt: 1, ud0: 120 - }, commonConf)); - - s2 = duniter( - '/bb12', - MEMORY_MODE, - _.extend({ - switchOnHeadAdvance: 0, - port: '7789', - pair: { - pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', - sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' - } - }, commonConf)); - - cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 }); - toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 }); - - yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); - yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); - s1.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint - s2.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint - yield cat.createIdentity(); - yield toc.createIdentity(); - yield toc.cert(cat); - yield cat.cert(toc); - yield cat.join(); - yield toc.join(); - yield commit(s1)(); - yield commit(s1)(); - yield commit(s1)(); - yield sync(0, 2, s1, s2); - - let s2p = yield s2.PeeringService.peer(); - - yield commit(s1)(); - yield commit(s1)(); - yield commit(s2)(); - yield commit(s2)(); - yield commit(s2)(); - yield commit(s2)(); - yield commit(s2)(); - yield commit(s2)(); - yield commit(s2)(); - // So we now have: - // S1 01234 - // S2 `3456789 - yield s1.writePeer(s2p) - - // Forking S1 from S2 - yield require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey); - // S1 should have switched to the other branch - })); - - after(() => { - return Promise.all([ - shutDownEngine(s1), - shutDownEngine(s2) - ]) - }) - - describe("Server 1 /blockchain", function() { - - it('/block/8 should exist on S1', function() { - return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), { - number: 8 - }); +let s1, s2, cat, toc; 
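+// NOTE: both servers below run with switchOnHeadAdvance: 0, so S1 is expected
+// to adopt S2's longer branch as soon as pullBlocks lets it see a better head
+// (cf. "S1 should have switched to the other branch" at the end of before()).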
+describe("Switch", function () { + before(() => co(function* () { + s1 = duniter('/bb11', MEMORY_MODE, _.extend({ + switchOnHeadAdvance: 0, + port: '7788', + pair: { + pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', + sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' + }, + rootoffset: 10, + sigQty: 1, dt: 1, ud0: 120 + }, commonConf)); + s2 = duniter('/bb12', MEMORY_MODE, _.extend({ + switchOnHeadAdvance: 0, + port: '7789', + pair: { + pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', + sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' + } + }, commonConf)); + cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' }, { server: s1 }); + toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' }, { server: s1 }); + yield s1.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); + yield s2.initWithDAL().then(bma).then((bmapi) => bmapi.openConnections()); + s1.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint; + s2.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint; + yield cat.createIdentity(); + yield toc.createIdentity(); + yield toc.cert(cat); + yield cat.cert(toc); + yield cat.join(); + yield toc.join(); + yield commit(s1)(); + yield commit(s1)(); + yield commit(s1)(); + yield sync(0, 2, s1, s2); + let s2p = yield s2.PeeringService.peer(); + yield commit(s1)(); + yield commit(s1)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + // So we now have: + // S1 01234 + // S2 `3456789 + yield s1.writePeer(s2p); + // Forking S1 from S2 + yield index_1.CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey); + // S1 should have switched to the other branch + })); + after(() => { + return Promise.all([ + shutDownEngine(s1), + shutDownEngine(s2) + ]); }); - - it('/block/8 should exist on S2', function() { - return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), { - number: 8 - }); + describe("Server 1 /blockchain", function () { + it('/block/8 should exist on S1', function () { + return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), { + number: 8 + }); + }); + it('/block/8 should exist on S2', function () { + return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), { + number: 8 + }); + }); }); - }); }); +//# sourceMappingURL=branches_switch.js.map \ No newline at end of file diff --git a/test/integration/branches_switch.ts b/test/integration/branches_switch.ts new file mode 100644 index 000000000..934e4e1ec --- /dev/null +++ b/test/integration/branches_switch.ts @@ -0,0 +1,119 @@ +"use strict"; +import {CrawlerDependency} from "../../app/modules/crawler/index" + +const co = require('co'); +const _ = require('underscore'); +const duniter = require('../../index'); +const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma; +const user = require('./tools/user'); +const rp = require('request-promise'); +const httpTest = require('./tools/http'); +const commit = require('./tools/commit'); +const sync = require('./tools/sync'); +const shutDownEngine = 
require('./tools/shutDownEngine'); + +const expectJSON = httpTest.expectJSON; + +const MEMORY_MODE = true; +const commonConf = { + ipv4: '127.0.0.1', + currency: 'bb', + httpLogs: true, + forksize: 30, + avgGenTime: 1, + sigQty: 1 +}; + +let s1:any, s2:any, cat, toc + +describe("Switch", function() { + + before(() => co(function *() { + + s1 = duniter( + '/bb11', + MEMORY_MODE, + _.extend({ + switchOnHeadAdvance: 0, + port: '7788', + pair: { + pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', + sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP' + }, + rootoffset: 10, + sigQty: 1, dt: 1, ud0: 120 + }, commonConf)); + + s2 = duniter( + '/bb12', + MEMORY_MODE, + _.extend({ + switchOnHeadAdvance: 0, + port: '7789', + pair: { + pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', + sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F' + } + }, commonConf)); + + cat = user('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 }); + toc = user('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 }); + + yield s1.initWithDAL().then(bma).then((bmapi:any) => bmapi.openConnections()); + yield s2.initWithDAL().then(bma).then((bmapi:any) => bmapi.openConnections()); + s1.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint + s2.getMainEndpoint = require('../../app/modules/bma').BmaDependency.duniter.methods.getMainEndpoint + yield cat.createIdentity(); + yield toc.createIdentity(); + yield toc.cert(cat); + yield cat.cert(toc); + yield cat.join(); + yield toc.join(); + yield commit(s1)(); + yield commit(s1)(); + yield commit(s1)(); + yield sync(0, 2, s1, s2); + + let s2p = yield s2.PeeringService.peer(); + + yield commit(s1)(); + yield commit(s1)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + yield commit(s2)(); + // So we now have: + // S1 01234 + // S2 `3456789 + yield s1.writePeer(s2p) + + // Forking S1 from S2 + yield CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey); + // S1 should have switched to the other branch + })); + + after(() => { + return Promise.all([ + shutDownEngine(s1), + shutDownEngine(s2) + ]) + }) + + describe("Server 1 /blockchain", function() { + + it('/block/8 should exist on S1', function() { + return expectJSON(rp('http://127.0.0.1:7788/blockchain/block/8', { json: true }), { + number: 8 + }); + }); + + it('/block/8 should exist on S2', function() { + return expectJSON(rp('http://127.0.0.1:7789/blockchain/block/8', { json: true }), { + number: 8 + }); + }); + }); +}); diff --git a/test/integration/continuous-proof.js b/test/integration/continuous-proof.js index 845693fb8..c2481d7ab 100644 --- a/test/integration/continuous-proof.js +++ b/test/integration/continuous-proof.js @@ -1,6 +1,7 @@ "use strict"; const co = require('co'); +const es = require('event-stream'); const should = require('should'); const user = require('./tools/user'); const toolbox = require('./tools/toolbox'); @@ -112,6 +113,15 @@ describe("Continous proof-of-work", function() { s2.stopBlockComputation(); yield [ require('../../app/modules/crawler').CrawlerDependency.duniter.methods.pullBlocks(s3), + new Promise(res => { + 
s3.pipe(es.mapSync((e) => { + if (e.number === 15) { + res() + } + return e + })) + + }), s3.startBlockComputation() ]; const current = yield s3.get('/blockchain/current') diff --git a/test/integration/documents-currency.ts b/test/integration/documents-currency.ts index 1ff932ba0..e18173af5 100644 --- a/test/integration/documents-currency.ts +++ b/test/integration/documents-currency.ts @@ -130,7 +130,7 @@ describe("Document pool currency", function() { } catch (e) { should.exist(e.error); e.should.be.an.Object(); - e.error.message.should.match(/Wrong inner hash/); // Because currency is dynamically replaced + e.error.message.should.match(/Wrong currency/); } })); diff --git a/test/integration/http_api.js b/test/integration/http_api.js index 06d93ea75..df5a845e0 100644 --- a/test/integration/http_api.js +++ b/test/integration/http_api.js @@ -219,7 +219,20 @@ function postBlock(server2) { return function(block) { return post(server2, '/blockchain/block')({ block: typeof block == 'string' ? block : block.getRawSigned() - }); + }) + .then((result) => co(function*() { + const numberToReach = block.number + yield new Promise((res) => { + const interval = setInterval(() => co(function*() { + const current = yield server2.dal.getCurrentBlockOrNull() + if (current && current.number == numberToReach) { + res() + clearInterval(interval) + } + }), 1) + }) + return result + })) }; } diff --git a/test/integration/identity-kicking-by-certs.js b/test/integration/identity-kicking-by-certs.js index 995a76b59..199bf75e1 100644 --- a/test/integration/identity-kicking-by-certs.js +++ b/test/integration/identity-kicking-by-certs.js @@ -1,13 +1,8 @@ "use strict"; -const _ = require('underscore'); const co = require('co'); const assert = require('assert'); -const should = require('should'); -const duniter = require('../../index'); -const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma; const user = require('./tools/user'); -const constants = require('../../app/lib/constants'); const toolbox = require('./tools/toolbox'); const now = 1480000000; @@ -68,17 +63,7 @@ describe("Identities kicking by certs", function() { yield s1.commit({ time: now + 8 }); yield s1.commit({ time: now + 8 }); yield cat.revoke(); - let err; - try { - yield s1.commit({ time: now + 8, excluded: ['3conGDUXdrTGbQPMQQhEC4Ubu1MCAnFrAYvUaewbUhtk'] }); - } catch (e) { - err = e; - } - should.exist(err); - should.deepEqual(JSON.parse(err.error), { - "ucode": 1002, - "message": "ruleToBeKickedArePresent" - }); + yield s1.commitWaitError({ time: now + 8, excluded: ['3conGDUXdrTGbQPMQQhEC4Ubu1MCAnFrAYvUaewbUhtk'] }, "ruleToBeKickedArePresent") yield s1.commit({ time: now + 8 }); })); diff --git a/test/integration/membership_chainability.ts b/test/integration/membership_chainability.ts index e6eb0d7b4..af873b8be 100644 --- a/test/integration/membership_chainability.ts +++ b/test/integration/membership_chainability.ts @@ -62,10 +62,10 @@ describe("Membership chainability", function() { }) it('should refuse a block with a too early membership in it', async () => { - await toolbox.shouldFail(s1.commit({ + await s1.commitWaitError({ time: now + 20, actives: ['HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd:SiCD1MSyDiZKWLp/SP/2Vj5T3JMgjNnIIKMI//yvKRdWMzKjEn6/ZT+TCjyjnl85qRfmEuWv1jLmQSoe8GXSDg==:1-0DEE2A8EA05322FCC4355D5F0E7A2830F4A22ACEBDC4B62399484E091A5CCF27:0-E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855:cat'] - }), '500 - "{\\n \\"ucode\\": 1002,\\n \\"message\\": \\"ruleMembershipPeriod\\"\\n}"') + }, 
'ruleMembershipPeriod') }) it('should not be able to renew immediately', async () => { diff --git a/test/integration/network-update.js b/test/integration/network-update.js index c3f8c89df..7d533a097 100644 --- a/test/integration/network-update.js +++ b/test/integration/network-update.js @@ -84,6 +84,7 @@ describe("Network updating", function() { const b3 = yield s1.makeNext({}); yield s1.postBlock(b3); yield s2.postBlock(b3); + yield s2.waitToHaveBlock(b3.number); yield s1.recomputeSelfPeer(); // peer#1 yield s1.sharePeeringWith(s2); }); diff --git a/test/integration/peer-outdated.js b/test/integration/peer-outdated.js index eb921f6d0..31af4bc78 100644 --- a/test/integration/peer-outdated.js +++ b/test/integration/peer-outdated.js @@ -59,6 +59,7 @@ describe("Peer document expiry", function() { yield commitS1(); // block#2 yield s1.recomputeSelfPeer(); // peer#2 yield s2.syncFrom(s1, 0, 2); + yield s2.waitToHaveBlock(2) })); after(() => { diff --git a/test/integration/peerings.js b/test/integration/peerings.js index 3bda0b60d..9af8166e7 100644 --- a/test/integration/peerings.js +++ b/test/integration/peerings.js @@ -11,6 +11,7 @@ const rp = require('request-promise'); const httpTest = require('./tools/http'); const commit = require('./tools/commit'); const sync = require('./tools/sync'); +const toolbox = require('./tools/toolbox'); const contacter = require('../../app/modules/crawler').CrawlerDependency.duniter.methods.contacter; const until = require('./tools/until'); const shutDownEngine = require('./tools/shutDownEngine'); @@ -115,41 +116,44 @@ describe("Network", function() { yield commitS1(); // Server 2 syncs block 0 yield sync(0, 0, s1, s2); + yield toolbox.serverWaitBlock(s1, 0) // Server 3 syncs block 0 yield sync(0, 0, s1, s3); + yield toolbox.serverWaitBlock(s3, 0) yield nodeS1.getPeer().then((peer) => nodeS2.postPeer(PeerDTO.fromJSONObject(peer).getRawSigned())).catch(e => console.error(e)) yield nodeS2.getPeer().then((peer) => nodeS1.postPeer(PeerDTO.fromJSONObject(peer).getRawSigned())).catch(e => console.error(e)) yield nodeS3.getPeer().then((peer) => nodeS1.postPeer(PeerDTO.fromJSONObject(peer).getRawSigned())).catch(e => console.error(e)) yield commitS1(); yield [ - until(s2, 'block', 1), - until(s3, 'block', 1) + toolbox.serverWaitBlock(s2, 1), + toolbox.serverWaitBlock(s3, 1) ]; // A block was successfully spread accross the network yield s2.bma.closeConnections(); yield commitS1(); yield [ - until(s3, 'block', 1) + toolbox.serverWaitBlock(s3, 2) ]; // Server 2 syncs block number 2 (it did not have it) yield sync(2, 2, s1, s2); + yield toolbox.serverWaitBlock(s2, 2) yield s2.recomputeSelfPeer(); yield s2.bma.openConnections(); yield new Promise((resolve) => setTimeout(resolve, 1000)); yield [ - until(s2, 'block', 2), - until(s3, 'block', 2), + toolbox.serverWaitBlock(s2, 4), + toolbox.serverWaitBlock(s3, 4), commitS1() .then(commitS1) ]; yield [ - until(s1, 'block', 1), - until(s2, 'block', 1), + toolbox.serverWaitBlock(s1, 5), + toolbox.serverWaitBlock(s2, 5), commitS3() ]; yield [ - until(s1, 'block', 1), - until(s3, 'block', 1), + toolbox.serverWaitBlock(s1, 6), + toolbox.serverWaitBlock(s3, 6), commitS2() ]; }); @@ -179,7 +183,7 @@ describe("Network", function() { }); }); - it('/current should exist and be 1', function() { + it('/current should exist and be 6', function() { return expectJSON(rp('http://127.0.0.1:7784/blockchain/current', { json: true }), { number: 6 }); diff --git a/test/integration/peers-same-pubkey.js b/test/integration/peers-same-pubkey.js index 
312d3515d..6afed89fa 100644 --- a/test/integration/peers-same-pubkey.js +++ b/test/integration/peers-same-pubkey.js @@ -3,13 +3,11 @@ const co = require('co'); const _ = require('underscore'); const should = require('should'); -const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma; const user = require('./tools/user'); const commit = require('./tools/commit'); const sync = require('./tools/sync'); const until = require('./tools/until'); const toolbox = require('./tools/toolbox'); -const multicaster = require('../../app/lib/streams/multicaster'); const PeerDTO = require('../../app/lib/dto/PeerDTO').PeerDTO const catKeyPair = { @@ -54,6 +52,7 @@ describe("Peer document", function() { yield commitS1(); // block#2 // // s2 syncs from s1 yield sync(0, 2, s1, s2); + yield toolbox.serverWaitBlock(s1, 2) yield [ s1.get('/network/peering').then((peer) => s2.post('/network/peering/peers', { peer: PeerDTO.fromJSONObject(peer).getRawSigned() })), // peer#2 until(s2, 'peer', 1) @@ -61,10 +60,11 @@ describe("Peer document", function() { yield [ commitS2(), // block#3 - until(s1, 'block', 1) + toolbox.serverWaitBlock(s1, 3) ]; yield sync(0, 3, s1, s3); + yield toolbox.serverWaitBlock(s3, 3) const peer1 = yield s1.get('/network/peering'); peer1.should.have.property("block").match(/^2-/); @@ -77,14 +77,14 @@ describe("Peer document", function() { yield [ commitS2(), // block#4 - until(s1, 'block', 1), - until(s3, 'block', 1) + toolbox.serverWaitBlock(s1, 4), + toolbox.serverWaitBlock(s3, 4) ]; yield [ commitS1(), // block#5 - until(s2, 'block', 1), - until(s3, 'block', 1) + toolbox.serverWaitBlock(s2, 5), + toolbox.serverWaitBlock(s3, 5) ]; })); diff --git a/test/integration/register-fork-blocks.js b/test/integration/register-fork-blocks.js index 5cd514056..cf1faf59b 100644 --- a/test/integration/register-fork-blocks.js +++ b/test/integration/register-fork-blocks.js @@ -94,6 +94,8 @@ describe("Fork blocks", function() { yield s3.writeBlock(b0) yield s3.writeBlock(b1) yield s3.writeBlock(b2) + yield s2.waitToHaveBlock(2) + yield s3.waitToHaveBlock(2) })) it('should exist the same block on each node', () => co(function*() { @@ -110,6 +112,8 @@ describe("Fork blocks", function() { const b3b = yield s2.commit({ time: now + 33 }) yield s1.writeBlock(b3b) yield s2.writeBlock(b3a) + yield s1.waitToHaveBlock(3) + yield s2.waitToHaveBlock(3) })) it('should exist a different third block on each node', () => co(function*() { @@ -148,6 +152,7 @@ describe("Fork blocks", function() { b6a = yield s1.commit({time: now + 66}) b7a = yield s1.commit({time: now + 77}) b8a = yield s1.commit({time: now + 88}) + yield s1.waitToHaveBlock(8) })) it('should refuse known fork blocks', () => co(function*() { @@ -159,7 +164,7 @@ describe("Fork blocks", function() { const event = CommonConstants.DocumentError s2.on(event, (e) => { try { - assert.equal(e, 'Fork block already known') + assert.equal(e, 'Block already known') res() } catch (e) { rej(e) @@ -175,6 +180,8 @@ describe("Fork blocks", function() { yield s2.writeBlock(b6a) yield s2.writeBlock(b7a) yield s2.writeBlock(b8a) + yield s2.waitToHaveBlock(8) + yield s2.waitForkResolution(8) })) it('should exist a same current block on each node', () => co(function*() { @@ -192,18 +199,18 @@ describe("Fork blocks", function() { yield s1.expect('/blockchain/branches', (res) => { assert.equal(res.blocks.length, 3) assert.equal(res.blocks[0].number, 3) - assert.equal(res.blocks[0].hash, '2C3555F4009461C81F7209EAAD7DA831D8451708D06BB1173CCB40746CD0641B') // This is 
s2 fork! + assert.equal(res.blocks[0].hash, '9A0FA1F0899124444ADC5B2C0AB66AC5B4303A0D851BED2E7382BB57E10AA2C5') assert.equal(res.blocks[1].number, 3) - assert.equal(res.blocks[1].hash, '9A0FA1F0899124444ADC5B2C0AB66AC5B4303A0D851BED2E7382BB57E10AA2C5') + assert.equal(res.blocks[1].hash, '2C3555F4009461C81F7209EAAD7DA831D8451708D06BB1173CCB40746CD0641B') // This is s2 fork! assert.equal(res.blocks[2].number, 8) assert.equal(res.blocks[2].hash, 'B8D2AA2A5556F7A2837FB4B881FCF50595F855D0BF8F71C0B432E27216BBA40B') }) yield s2.expect('/blockchain/branches', (res) => { assert.equal(res.blocks.length, 3) assert.equal(res.blocks[0].number, 3) - assert.equal(res.blocks[0].hash, '2C3555F4009461C81F7209EAAD7DA831D8451708D06BB1173CCB40746CD0641B') // This is s2 fork! + assert.equal(res.blocks[0].hash, '9A0FA1F0899124444ADC5B2C0AB66AC5B4303A0D851BED2E7382BB57E10AA2C5') assert.equal(res.blocks[1].number, 3) - assert.equal(res.blocks[1].hash, '9A0FA1F0899124444ADC5B2C0AB66AC5B4303A0D851BED2E7382BB57E10AA2C5') + assert.equal(res.blocks[1].hash, '2C3555F4009461C81F7209EAAD7DA831D8451708D06BB1173CCB40746CD0641B') // This is s2 fork! assert.equal(res.blocks[2].number, 8) assert.equal(res.blocks[2].hash, 'B8D2AA2A5556F7A2837FB4B881FCF50595F855D0BF8F71C0B432E27216BBA40B') }) diff --git a/test/integration/revocation-test.js b/test/integration/revocation-test.js index 3e32e984c..63300af0e 100644 --- a/test/integration/revocation-test.js +++ b/test/integration/revocation-test.js @@ -185,6 +185,7 @@ describe("Revocation", function() { it('if we revert the commit, cat should not be revoked', () => co(function *() { yield s1.revert(); yield s1.revert(); + yield s1.dal.blockDAL.exec('DELETE FROM block WHERE fork AND number >= 2') return expectAnswer(rp('http://127.0.0.1:9964/wot/lookup/cat', { json: true }), function(res) { res.should.have.property('results').length(1); res.results[0].should.have.property('uids').length(1); diff --git a/test/integration/single-document-treatment.js b/test/integration/single-document-treatment.js index f164fcc21..a525dd8b5 100644 --- a/test/integration/single-document-treatment.js +++ b/test/integration/single-document-treatment.js @@ -64,6 +64,7 @@ describe("Single document treatment", function() { yield s2.writeBlock(b0) yield s2.writeBlock(b1) yield s2.writeBlock(b2) + yield toolbox.serverWaitBlock(s2, 2) })) it('should exist the same block on each node', () => co(function*() { diff --git a/test/integration/start_generate_blocks.js b/test/integration/start_generate_blocks.js index d389f877d..f25261ab3 100644 --- a/test/integration/start_generate_blocks.js +++ b/test/integration/start_generate_blocks.js @@ -9,7 +9,7 @@ const rp = require('request-promise'); const httpTest = require('./tools/http'); const commit = require('./tools/commit'); const until = require('./tools/until'); -const multicaster = require('../../app/lib/streams/multicaster'); +const toolbox = require('./tools/toolbox'); const PeerDTO = require('../../app/lib/dto/PeerDTO').PeerDTO const contacter = require('../../app/modules/crawler').CrawlerDependency.duniter.methods.contacter; const sync = require('./tools/sync'); @@ -104,8 +104,8 @@ describe("Generation", function() { s1.conf.powDelay = 2000; s2.conf.powDelay = 2000; yield [ - until(s1, 'block', 2), - until(s2, 'block', 2) + toolbox.serverWaitBlock(s1, 3), + toolbox.serverWaitBlock(s2, 3) ]; s1.stopBlockComputation(); s2.stopBlockComputation(); diff --git a/test/integration/tools/commit.js b/test/integration/tools/commit.js index 95aa8f25a..8501f0759 100644 --- 
a/test/integration/tools/commit.js +++ b/test/integration/tools/commit.js @@ -4,9 +4,10 @@ var _ = require('underscore'); var co = require('co'); var rp = require('request-promise'); var logger = require('../../../app/lib/logger').NewLogger('test'); +const until = require('./until') const BlockProver = require('../../../app/modules/prover/lib/blockProver').BlockProver -module.exports = function makeBlockAndPost(theServer, extraProps) { +module.exports = function makeBlockAndPost(theServer, extraProps, noWait) { return function(manualValues) { if (extraProps) { manualValues = manualValues || {}; @@ -18,7 +19,20 @@ module.exports = function makeBlockAndPost(theServer, extraProps) { theServer._utGenerator = require('../../../app/modules/prover').ProverDependency.duniter.methods.blockGenerator(theServer, theServer._utProver) } let proven = yield theServer._utGenerator.makeNextBlock(null, null, manualValues) - const block = yield postBlock(theServer)(proven); + const numberToReach = proven.number + const block = yield postBlock(theServer)(proven) + yield new Promise((res) => { + if (noWait) { + return res(block) + } + const interval = setInterval(() => co(function*() { + const current = yield theServer.dal.getCurrentBlockOrNull() + if (current && current.number == numberToReach) { + res() + clearInterval(interval) + } + }), 1) + }) return block }); }; diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts index d26842937..1b3833ab3 100644 --- a/test/integration/tools/toolbox.ts +++ b/test/integration/tools/toolbox.ts @@ -18,14 +18,13 @@ import {TransactionDTO} from "../../../app/lib/dto/TransactionDTO" const _ = require('underscore'); const rp = require('request-promise'); +const es = require('event-stream'); const httpTest = require('../tools/http'); const sync = require('../tools/sync'); const commit = require('../tools/commit'); const user = require('../tools/user'); const until = require('../tools/until'); const bma = require('../../../app/modules/bma').BmaDependency.duniter.methods.bma; -const multicaster = require('../../../app/lib/streams/multicaster'); -const dtos = require('../../../app/modules/bma').BmaDependency.duniter.methods.dtos; const logger = require('../../../app/lib/logger').NewLogger('toolbox'); require('../../../app/modules/bma').BmaDependency.duniter.methods.noLimit(); // Disables the HTTP limiter @@ -215,6 +214,34 @@ export const NewTestingServer = (conf:any) => { return new TestingServer(port, server) } +export const serverWaitBlock = async (server:Server, number:number) => { + await new Promise((res) => { + const interval = setInterval(async () => { + const current = await server.dal.getCurrentBlockOrNull() + if (current && current.number == number) { + res() + clearInterval(interval) + } + }, 1) + }) +} + +export const waitToHaveBlock = async (server:Server, number:number) => { + return serverWaitBlock(server, number) +} + +export const waitForkResolution = async (server:Server, number:number) => { + await new Promise(res => { + server.pipe(es.mapSync((e:any) => { + if (e.bcEvent === 'switched' && e.block.number === number) { + res() + } + return e + })) + + }) +} + export class TestingServer { private prover:Prover @@ -363,12 +390,28 @@ export class TestingServer { return until(this.server, type, count); } - async commit(options:any = null) { const raw = await commit(this.server)(options); return JSON.parse(raw); } + async commitWaitError(options:any, expectedError:string) { + const results = await Promise.all([ + new Promise(res => { + 
this.server.pipe(es.mapSync((e:any) => { + if (e.blockResolutionError === expectedError) { + res() + } + })) + }), + (async () => { + const raw = await commit(this.server, null, true)(options); + return JSON.parse(raw); + })() + ]) + return results[1] + } + async commitExpectError(options:any) { try { const raw = await commit(this.server)(options); @@ -413,6 +456,14 @@ export class TestingServer { return this.get('/network/peering') } + waitToHaveBlock(number:number) { + return waitToHaveBlock(this.server, number) + } + + waitForkResolution(number:number) { + return waitForkResolution(this.server, number) + } + postIdentity(idty:any) { return this.post('/wot/add', { identity: idty.getRawSigned() diff --git a/test/integration/v0.6-difficulties.js b/test/integration/v0.6-difficulties.js index 67912b395..ec8e5a65d 100644 --- a/test/integration/v0.6-difficulties.js +++ b/test/integration/v0.6-difficulties.js @@ -2,8 +2,6 @@ const co = require('co'); const should = require('should'); -const bma = require('../../app/modules/bma').BmaDependency.duniter.methods.bma; -const constants = require('../../app/lib/constants'); const toolbox = require('./tools/toolbox'); const conf = { @@ -38,13 +36,13 @@ describe("Protocol 0.6 Difficulties", function() { it('should be able to emit a block#1 by a different user', () => co(function*() { yield [ s1.commit({ time: now }), // medianOfBlocksInFrame = MEDIAN([1]) = 1 - s2.until('block', 1), - s1.until('block', 1) + toolbox.serverWaitBlock(s1, 1), + toolbox.serverWaitBlock(s2, 1) ]; yield [ s2.commit({ time: now }), // medianOfBlocksInFrame = MEDIAN([1]) = 1 - s2.until('block', 1), - s1.until('block', 1) + toolbox.serverWaitBlock(s1, 2), + toolbox.serverWaitBlock(s2, 2) ]; yield s1.expectJSON('/blockchain/current', { number: 2, -- GitLab
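Editor's appendix (not part of the patch itself): the test changes above converge on two
synchronization primitives added to test/integration/tools/toolbox.ts: a DAL-polling wait
until the main chain reaches a given block number (serverWaitBlock / waitToHaveBlock), and
a stream-based wait for the 'switched' blockchain event emitted once fork resolution has
actually changed the head (waitForkResolution). The TypeScript sketch below condenses both
patterns; ServerLike is a hypothetical reduction of the real Server class, introduced only
to keep the sketch self-contained.

// Hedged sketch of the two wait primitives this patch relies on.
// ServerLike is a hypothetical stand-in for the real Server class.
const es = require('event-stream')

interface BlockLike { number: number }
interface ServerLike {
  dal: { getCurrentBlockOrNull(): Promise<BlockLike | null> }
  pipe(dest: NodeJS.ReadWriteStream): NodeJS.ReadWriteStream
}

// Poll the DAL until the main chain reaches `number` (cf. serverWaitBlock).
function waitBlock(server: ServerLike, number: number): Promise<void> {
  return new Promise((res) => {
    const interval = setInterval(async () => {
      const current = await server.dal.getCurrentBlockOrNull()
      if (current && current.number === number) {
        clearInterval(interval)
        res()
      }
    }, 1)
  })
}

// Resolve once a 'switched' blockchain event for `number` flows through the
// server's event stream, i.e. once a fork has actually been resolved
// (cf. waitForkResolution).
function waitSwitch(server: ServerLike, number: number): Promise<void> {
  return new Promise((res) => {
    server.pipe(es.mapSync((e: any) => {
      if (e.bcEvent === 'switched' && e.block.number === number) {
        res()
      }
      return e // keep passing events through for any downstream consumer
    }))
  })
}

Usage then mirrors branches2.ts above: run the trigger and the wait concurrently, e.g.
await Promise.all([waitSwitch(s1, 9), CrawlerDependency.duniter.methods.pullBlocks(s1, s2p.pubkey)]),
so that assertions cannot run before the switch has actually happened.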