diff --git a/app/modules/crawler/index.ts b/app/modules/crawler/index.ts
index 2175e3eb1a1d203cf3791a849d479770682523da..b14d31075d7651eebc0c0e79d578b60df0b44c9e 100644
--- a/app/modules/crawler/index.ts
+++ b/app/modules/crawler/index.ts
@@ -27,6 +27,9 @@ import {RemoteSynchronizer} from "./lib/sync/RemoteSynchronizer"
 import {AbstractSynchronizer} from "./lib/sync/AbstractSynchronizer"
 import {LocalPathSynchronizer} from "./lib/sync/LocalPathSynchronizer"
 import {CommonConstants} from "../../lib/common-libs/constants"
+import {DataErrors} from "../../lib/common-libs/errors"
+import {NewLogger} from "../../lib/logger"
+import {CrawlerConstants} from "./lib/constants"
 
 export const CrawlerDependency = {
   duniter: {
@@ -136,6 +139,15 @@ export const CrawlerDependency = {
           return strategy.syncPeers(true)
         } else {
           const remote = new Synchroniser(server, strategy, interactive === true)
+
+          // If the sync fails, stop the program
+          process.on('unhandledRejection', (reason) => {
+            if (reason.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) {
+              NewLogger().error('Synchronization interrupted: no node was found to continue downloading after %s tries.', CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND)
+              process.exit(1)
+            }
+          })
+
           return remote.sync(upTo, chunkLength, askedCautious)
         }
       }
diff --git a/app/modules/crawler/lib/constants.ts b/app/modules/crawler/lib/constants.ts
index da99667c9e32c2969b25a4fce5cb25e00a6e72ce..bff0afbfd58eb8beff89655330b0ffdf49206a19 100644
--- a/app/modules/crawler/lib/constants.ts
+++ b/app/modules/crawler/lib/constants.ts
@@ -33,6 +33,7 @@ export const CrawlerConstants = {
   TEST_PEERS_INTERVAL: 10, // In seconds
   SYNC_PEERS_INTERVAL: 3, // Every 3 block average generation time
   SYNC_CHUNKS_IN_ADVANCE: 10, // We want to have that much chunks in advance when syncing
+  SYNC_MAX_FAIL_NO_NODE_FOUND: 20,
 
   DURATIONS: {
     TEN_SECONDS: 10,
diff --git a/app/modules/crawler/lib/sync.ts b/app/modules/crawler/lib/sync.ts
index 358bd1547f3965ba34e286c393541709531f3944..9b28381c932cb01ffe51a445f508d383f980068c 100644
--- a/app/modules/crawler/lib/sync.ts
+++ b/app/modules/crawler/lib/sync.ts
@@ -57,6 +57,7 @@ export class Synchroniser extends stream.Duplex {
     this.watcher.onEvent('wantToDownload', (data) => this.push({ p2pData: { name: 'wantToDownload', data }}))
     this.watcher.onEvent('wantToLoad', (data) => this.push({ p2pData: { name: 'wantToLoad', data }}))
     this.watcher.onEvent('beforeReadyNodes', (data) => this.push({ p2pData: { name: 'beforeReadyNodes', data }}))
+    this.watcher.onEvent('syncFailNoNodeFound', (data) => this.push({ p2pData: { name: 'syncFailNoNodeFound', data }}))
 
     this.syncStrategy.setWatcher(this.watcher)
 
diff --git a/app/modules/crawler/lib/sync/P2PSyncDownloader.ts b/app/modules/crawler/lib/sync/P2PSyncDownloader.ts
index b5b59a7f61e53cf4c26d12ebff0abd3937c46065..ffdf4f2bcb5ff71bec11aaa9b2f56dda429f6946 100644
--- a/app/modules/crawler/lib/sync/P2PSyncDownloader.ts
+++ b/app/modules/crawler/lib/sync/P2PSyncDownloader.ts
@@ -14,6 +14,7 @@ import {CommonConstants} from "../../../../lib/common-libs/constants"
 import {DataErrors} from "../../../../lib/common-libs/errors"
 import {ASyncDownloader} from "./ASyncDownloader"
 import {P2pCandidate} from "./p2p/p2p-candidate"
+import {CrawlerConstants} from "../constants"
 
 export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloader {
 
@@ -29,7 +30,7 @@ export class P2PSyncDownloader extends ASyncDownloade
   private nbDownloading = 0
   private downloads: { [chunk: number]: P2pCandidate } = {}
   private fifoPromise = new GlobalFifoPromise()
-  private nbWaitFailed = 0
+  private noNodeFoundCumulation = 0
 
   constructor(
     private currency: string,
@@ -152,9 +153,18 @@ export class P2PSyncDownloader extends ASyncDownloade
       count = this.nbBlocksToDownload % this.chunkSize || this.chunkSize;
     }
     try {
-      return await this.p2pDownload(from, count, index) as BlockDTO[]
+      const res = await this.p2pDownload(from, count, index) as BlockDTO[]
+      this.noNodeFoundCumulation = 0
+      return res
     } catch (e) {
       this.logger.error(e);
+      if (e.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) {
+        this.noNodeFoundCumulation++
+        if (this.noNodeFoundCumulation >= CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND) {
+          this.watcher.syncFailNoNodeFound()
+          throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK])
+        }
+      }
       await new Promise(res => setTimeout(res, 1000)) // Wait 1s before retrying
       return this.downloadChunk(index);
     }
diff --git a/app/modules/crawler/lib/sync/Watcher.ts b/app/modules/crawler/lib/sync/Watcher.ts
index 9e0c945a1baaef4bceb99ae8cd4376cc4404dace..3bc8cb0df67627821c178784022ac0607933275f 100644
--- a/app/modules/crawler/lib/sync/Watcher.ts
+++ b/app/modules/crawler/lib/sync/Watcher.ts
@@ -30,6 +30,8 @@ export interface Watcher {
   wantToLoad(chunkIndex: number): void
 
   beforeReadyNodes(p2pCandidates: P2pCandidate[]): void
+
+  syncFailNoNodeFound(): void
 }
 
 export type EventName = 'downloadChange'|'storageChange'|'appliedChange'|'sbxChange'|'peersChange'
@@ -42,6 +44,7 @@ export type EventName = 'downloadChange'|'storageChange'|'appliedChange'|'sbxCha
   | 'wantToDownload'
   | 'wantToLoad'
   | 'beforeReadyNodes'
+  | 'syncFailNoNodeFound'
 
 export class EventWatcher extends events.EventEmitter implements Watcher {
 
@@ -135,6 +138,10 @@ export class EventWatcher extends events.EventEmitter implements Watcher {
   beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {
     this.emit('beforeReadyNodes', { nodes: p2pCandidates })
   }
+
+  syncFailNoNodeFound(): void {
+    this.emit('syncFailNoNodeFound', {})
+  }
 }
 
 export class MultimeterWatcher implements Watcher {
@@ -272,6 +279,9 @@ export class MultimeterWatcher implements Watcher {
   beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {
   }
 
+  syncFailNoNodeFound(): void {
+  }
+
 }
 
 export class LoggerWatcher implements Watcher {
@@ -364,4 +374,7 @@ export class LoggerWatcher implements Watcher {
   beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {
   }
 
+  syncFailNoNodeFound(): void {
+  }
+
 }
diff --git a/app/modules/crawler/lib/sync/v2/DownloadStream.ts b/app/modules/crawler/lib/sync/v2/DownloadStream.ts
index 7c392d2d6318db9c78a6d003cd129f35c0b0e5ef..e3587d4a37eda09ed202e61f7773b3c62fcb5b09 100644
--- a/app/modules/crawler/lib/sync/v2/DownloadStream.ts
+++ b/app/modules/crawler/lib/sync/v2/DownloadStream.ts
@@ -114,9 +114,6 @@ export class DownloadStream extends Duplex {
           this.chunks[i] = chunk
           delete this.dowloading[i]
         })
-        .catch(err => {
-          throw err
-        })
      return this.dowloading[i] || this.chunks[i]
     }
     return this.dowloading[i] || this.chunks[i]
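
Reviewer note: the sketch below is not part of the patch; it only restates, in isolation, the retry cap this change gives P2PSyncDownloader.downloadChunk. Consecutive NO_NODE_FOUND_TO_DOWNLOAD_CHUNK failures are counted, any successful download resets the counter, and once SYNC_MAX_FAIL_NO_NODE_FOUND (20) consecutive failures are reached the watcher is notified and the error is rethrown, so the unhandledRejection handler added in index.ts can stop the process. The names downloadWithRetryCap, fetchChunk, notifyFailure and MAX_CONSECUTIVE_NO_NODE_FOUND are hypothetical stand-ins, not identifiers from the codebase.

// Hedged, self-contained TypeScript sketch of the consecutive-failure cap.
// fetchChunk stands in for P2PSyncDownloader.p2pDownload, notifyFailure for
// Watcher.syncFailNoNodeFound, MAX_CONSECUTIVE_NO_NODE_FOUND for
// CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND.
const NO_NODE_FOUND = 'NO_NODE_FOUND_TO_DOWNLOAD_CHUNK'
const MAX_CONSECUTIVE_NO_NODE_FOUND = 20

let noNodeFoundCumulation = 0

async function downloadWithRetryCap(
  index: number,
  fetchChunk: (index: number) => Promise<unknown[]>,
  notifyFailure: () => void
): Promise<unknown[]> {
  try {
    const blocks = await fetchChunk(index)
    noNodeFoundCumulation = 0 // any success resets the consecutive-failure counter
    return blocks
  } catch (e) {
    if (e instanceof Error && e.message === NO_NODE_FOUND) {
      noNodeFoundCumulation++
      if (noNodeFoundCumulation >= MAX_CONSECUTIVE_NO_NODE_FOUND) {
        notifyFailure() // corresponds to watcher.syncFailNoNodeFound()
        throw e         // bubbles up so a top-level handler can stop the program
      }
    }
    await new Promise(res => setTimeout(res, 1000)) // wait 1s before retrying
    return downloadWithRetryCap(index, fetchChunk, notifyFailure)
  }
}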