Commit eaf88870 authored by Cédric Moreau

[enh] #1325 Crash with a message when synchronization fails (or propagate it when running with UI)

parent 25592ec7
@@ -27,6 +27,9 @@ import {RemoteSynchronizer} from "./lib/sync/RemoteSynchronizer"
import {AbstractSynchronizer} from "./lib/sync/AbstractSynchronizer"
import {LocalPathSynchronizer} from "./lib/sync/LocalPathSynchronizer"
import {CommonConstants} from "../../lib/common-libs/constants"
import {DataErrors} from "../../lib/common-libs/errors"
import {NewLogger} from "../../lib/logger"
import {CrawlerConstants} from "./lib/constants"
export const CrawlerDependency = {
duniter: {
@@ -136,6 +139,15 @@ export const CrawlerDependency = {
return strategy.syncPeers(true)
} else {
const remote = new Synchroniser(server, strategy, interactive === true)
// If the sync fails, stop the program
process.on('unhandledRejection', (reason) => {
if (reason.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) {
NewLogger().error('Synchronization interrupted: no node was found to continue downloading after %s tries.', CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND)
process.exit(1)
}
})
return remote.sync(upTo, chunkLength, askedCautious)
}
}
......
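The hunk above is the CLI-side guard: when the downloader finally gives up, its error surfaces as an unhandled promise rejection, and the process-level hook matches the error message and exits with a failure code. A minimal standalone sketch of that pattern, with an illustrative error constant and message standing in for DataErrors and NewLogger:

// Hedged sketch: stop the process on one specific unhandled rejection.
// NO_NODE_FOUND and the log wording are placeholders, not the project's API.
const NO_NODE_FOUND = 'NO_NODE_FOUND_TO_DOWNLOAD_CHUNK'

process.on('unhandledRejection', (reason: any) => {
  if (reason && reason.message === NO_NODE_FOUND) {
    console.error('Synchronization interrupted: no node found to download the next chunk.')
    process.exit(1)
  }
})

// Any promise that rejects with that error and is never caught triggers the hook.
Promise.reject(new Error(NO_NODE_FOUND))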
@@ -33,6 +33,7 @@ export const CrawlerConstants = {
TEST_PEERS_INTERVAL: 10, // In seconds
SYNC_PEERS_INTERVAL: 3, // Every 3 block average generation time
SYNC_CHUNKS_IN_ADVANCE: 10, // We want to have that much chunks in advance when syncing
SYNC_MAX_FAIL_NO_NODE_FOUND: 20,
DURATIONS: {
TEN_SECONDS: 10,
......
@@ -57,6 +57,7 @@ export class Synchroniser extends stream.Duplex {
this.watcher.onEvent('wantToDownload', (data) => this.push({ p2pData: { name: 'wantToDownload', data }}))
this.watcher.onEvent('wantToLoad', (data) => this.push({ p2pData: { name: 'wantToLoad', data }}))
this.watcher.onEvent('beforeReadyNodes', (data) => this.push({ p2pData: { name: 'beforeReadyNodes', data }}))
this.watcher.onEvent('syncFailNoNodeFound', (data) => this.push({ p2pData: { name: 'syncFailNoNodeFound', data }}))
this.syncStrategy.setWatcher(this.watcher)
......
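When the synchronizer runs with a UI, it is a Duplex stream in object mode and each watcher event is pushed downstream as a { p2pData: { name, data } } object; the new 'syncFailNoNodeFound' event simply joins that list. A reduced sketch of the bridging pattern (the class and stream names here are illustrative, not the project's):

import {Duplex} from "stream"
import {EventEmitter} from "events"

// Hedged sketch: forward a watcher event to a stream consumer instead of crashing.
class SyncEventStream extends Duplex {
  constructor(watcher: EventEmitter) {
    super({ objectMode: true })
    watcher.on('syncFailNoNodeFound', (data) => this.push({ p2pData: { name: 'syncFailNoNodeFound', data } }))
  }
  _read() {} // data is pushed by the event listener, nothing to pull
  _write(_chunk: any, _enc: string, done: () => void) { done() }
}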
@@ -14,6 +14,7 @@ import {CommonConstants} from "../../../../lib/common-libs/constants"
import {DataErrors} from "../../../../lib/common-libs/errors"
import {ASyncDownloader} from "./ASyncDownloader"
import {P2pCandidate} from "./p2p/p2p-candidate"
import {CrawlerConstants} from "../constants"
export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloader {
@@ -29,7 +30,7 @@ export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloade
private nbDownloading = 0
private downloads: { [chunk: number]: P2pCandidate } = {}
private fifoPromise = new GlobalFifoPromise()
private nbWaitFailed = 0
private noNodeFoundCumulation = 0
constructor(
private currency: string,
@@ -152,9 +153,18 @@ export class P2PSyncDownloader extends ASyncDownloader implements ISyncDownloade
count = this.nbBlocksToDownload % this.chunkSize || this.chunkSize;
}
try {
return await this.p2pDownload(from, count, index) as BlockDTO[]
const res = await this.p2pDownload(from, count, index) as BlockDTO[]
this.noNodeFoundCumulation = 0
return res
} catch (e) {
this.logger.error(e);
if (e.message === DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]) {
this.noNodeFoundCumulation++
if (this.noNodeFoundCumulation >= CrawlerConstants.SYNC_MAX_FAIL_NO_NODE_FOUND) {
this.watcher.syncFailNoNodeFound()
throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK])
}
}
await new Promise(res => setTimeout(res, 1000)) // Wait 1s before retrying
return this.downloadChunk(index);
}
......
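The downloader change above keeps the existing retry-every-second loop but counts consecutive "no node found" failures and rethrows once the SYNC_MAX_FAIL_NO_NODE_FOUND threshold is reached, so the sync can fail for good instead of looping forever. A reduced sketch of that logic, with a hypothetical fetchChunk standing in for p2pDownload:

const MAX_CONSECUTIVE_FAILURES = 20 // mirrors SYNC_MAX_FAIL_NO_NODE_FOUND

let noNodeFoundCumulation = 0

// Hedged sketch: retry a chunk download, give up after N consecutive failures.
async function downloadChunk(index: number, fetchChunk: (i: number) => Promise<string[]>): Promise<string[]> {
  try {
    const res = await fetchChunk(index)
    noNodeFoundCumulation = 0 // any success resets the counter
    return res
  } catch (e) {
    noNodeFoundCumulation++
    if (noNodeFoundCumulation >= MAX_CONSECUTIVE_FAILURES) {
      throw e // let the caller (or the process-level hook) decide what to do
    }
    await new Promise(res => setTimeout(res, 1000)) // wait 1s before retrying
    return downloadChunk(index, fetchChunk)
  }
}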
@@ -30,6 +30,8 @@ export interface Watcher {
wantToLoad(chunkIndex: number): void
beforeReadyNodes(p2pCandidates: P2pCandidate[]): void
syncFailNoNodeFound(): void
}
export type EventName = 'downloadChange'|'storageChange'|'appliedChange'|'sbxChange'|'peersChange'
@@ -42,6 +44,7 @@ export type EventName = 'downloadChange'|'storageChange'|'appliedChange'|'sbxCha
| 'wantToDownload'
| 'wantToLoad'
| 'beforeReadyNodes'
| 'syncFailNoNodeFound'
export class EventWatcher extends events.EventEmitter implements Watcher {
@@ -135,6 +138,10 @@ export class EventWatcher extends events.EventEmitter implements Watcher {
beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {
this.emit('beforeReadyNodes', { nodes: p2pCandidates })
}
syncFailNoNodeFound(): void {
this.emit('syncFailNoNodeFound', {})
}
}
export class MultimeterWatcher implements Watcher {
@@ -272,6 +279,9 @@ export class MultimeterWatcher implements Watcher {
beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {
}
syncFailNoNodeFound(): void {
}
}
export class LoggerWatcher implements Watcher {
@@ -364,4 +374,7 @@ export class LoggerWatcher implements Watcher {
beforeReadyNodes(p2pCandidates: P2pCandidate[]): void {
}
syncFailNoNodeFound(): void {
}
}
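On the watcher side, only EventWatcher actually emits the new 'syncFailNoNodeFound' event; the multimeter and logger watchers implement it as a no-op. A minimal sketch of emitting and consuming such an event with Node's EventEmitter (the class name and listener body are illustrative):

import * as events from "events"

// Hedged sketch of the emitter side, mirroring EventWatcher's one-liner.
class MiniWatcher extends events.EventEmitter {
  syncFailNoNodeFound(): void {
    this.emit('syncFailNoNodeFound', {})
  }
}

const watcher = new MiniWatcher()
watcher.on('syncFailNoNodeFound', () => {
  // A UI consumer could display a message here instead of exiting the process.
  console.log('Sync failed: no node could provide the next chunk.')
})
watcher.syncFailNoNodeFound()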
@@ -114,9 +114,6 @@ export class DownloadStream extends Duplex {
this.chunks[i] = chunk
delete this.dowloading[i]
})
.catch(err => {
throw err
})
return this.dowloading[i] || this.chunks[i]
}
return this.dowloading[i] || this.chunks[i]
......
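The last hunk removes a .catch that only rethrew: on a promise nothing awaits, rethrowing inside .catch just turns the error back into an unhandled rejection, so the handler added nothing and dropping it lets the rejection reach the new process-level hook directly. A small sketch of that behaviour (the error message is illustrative):

process.on('unhandledRejection', (reason: any) => {
  console.log('still unhandled:', reason.message)
})

// Hedged sketch: the rethrow produces a new rejected promise that nobody handles,
// so the process-level hook fires exactly as if the .catch were not there.
Promise.reject(new Error('download failed'))
  .catch(err => { throw err })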