Commit 9c2e6ef5 authored by Cédric Moreau

[fix] #1037 Migrate modules "export-bc" "daemon"

parent 86488871
@@ -20,5 +20,7 @@ app/modules/reset.js
app/modules/reapply.js
app/modules/peersignal.js
app/modules/plugin.js
app/modules/daemon.js
app/modules/export-bc.js
test/*.js
test/**/*.js
\ No newline at end of file
@@ -58,4 +58,6 @@ app/modules/revert.js*
app/modules/reapply.js*
app/modules/reset.js*
app/modules/peersignal.js*
app/modules/plugin.js*
\ No newline at end of file
app/modules/plugin.js*
app/modules/export-bc.js*
app/modules/daemon.js*
\ No newline at end of file
@@ -6,6 +6,7 @@ export interface Keypair {
export class ConfDTO {
constructor(
public loglevel: string,
public currency: string,
public endpoints: string[],
public rmEndpoints: string[],
@@ -45,6 +46,6 @@ export class ConfDTO {
) {}
static mock() {
return new ConfDTO("", [], [], 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, 0, false, 0, 0, 0, 0, 0, null, 0, "", "", "")
return new ConfDTO("", "", [], [], 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, false, 0, false, 0, 0, 0, 0, 0, null, 0, "", "", "")
}
}
\ No newline at end of file
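For illustration only, a minimal sketch (not part of this commit) of what the ConfDTO change above means for call sites: the constructor now takes the currency name as its second positional argument, right after loglevel, which is why mock() gains one extra "" above. The value "test_currency" is purely a placeholder.
import {ConfDTO} from "../lib/dto/ConfDTO"   // path as used by the modules in this commit
const conf = ConfDTO.mock()        // loglevel and currency both start as ""
conf.currency = "test_currency"    // the new field introduced by this change
conf.loglevel = "debug"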
import {ConfDTO} from "../lib/dto/ConfDTO"
"use strict";
const qfs = require('q-io/fs');
const directory = require('../lib/system/directory');
const constants = require('../lib/constants');
const path = require('path');
const Tail = require("tail").Tail
module.exports = {
duniter: {
cliOptions: [
{ value: '--loglevel <level>', desc: 'Logs level, either [error,warning,info,debug,trace]. default to `info`.' }
],
service: {
process: (server:any) => ServerService(server)
},
config: {
/*****
* Tries to load a specific parameter `conf.loglevel`
*/
onLoading: async (conf:ConfDTO, program:any) => {
conf.loglevel = program.loglevel || conf.loglevel || 'info'
}
},
cli: [{
name: 'start',
desc: 'Starts Duniter as a daemon (background task).',
logs: false,
onConfiguredExecute: async (server:any, conf:ConfDTO, program:any, params:any) => {
await server.checkConfig()
const daemon = server.getDaemon('direct_start', 'start')
await startDaemon(daemon)
}
}, {
name: 'stop',
desc: 'Stops Duniter daemon if it is running.',
logs: false,
onConfiguredExecute: async (server:any, conf:ConfDTO, program:any, params:any) => {
const daemon = server.getDaemon()
await stopDaemon(daemon)
}
}, {
name: 'restart',
desc: 'Stops Duniter daemon and restart it.',
logs: false,
onConfiguredExecute: async (server:any, conf:ConfDTO, program:any, params:any) => {
await server.checkConfig()
const daemon = server.getDaemon('direct_start', 'restart')
await stopDaemon(daemon)
await startDaemon(daemon)
}
}, {
name: 'status',
desc: 'Get Duniter daemon status.',
logs: false,
onConfiguredExecute: async (server:any, conf:ConfDTO, program:any, params:any) => {
await server.checkConfig()
const pid = server.getDaemon().status()
if (pid) {
console.log('Duniter is running using PID %s.', pid)
} else {
console.log('Duniter is not running.')
}
}
}, {
name: 'logs',
desc: 'Follow duniter logs.',
logs: false,
onConfiguredExecute: async (server:any, conf:ConfDTO, program:any, params:any) => {
printTailAndWatchFile(directory.INSTANCE_HOMELOG_FILE, constants.NB_INITIAL_LINES_TO_SHOW)
// Never ending command
return new Promise(res => null)
}
}, {
name: 'direct_start',
desc: 'Start Duniter node with direct output, non-daemonized.',
onDatabaseExecute: async (server:any, conf:ConfDTO, program:any, params:any, startServices:any) => {
const logger = server.logger;
logger.info(">> Server starting...");
await server.checkConfig();
// Add signing & public key functions to PeeringService
logger.info('Node version: ' + server.version);
logger.info('Node pubkey: ' + server.conf.pair.pub);
// Services
await startServices();
logger.info('>> Server ready!');
return new Promise(() => null); // Never ending
}
}]
}
};
function ServerService(server:any) {
server.startService = () => Promise.resolve();
server.stopService = () => Promise.resolve();
return server;
}
function startDaemon(daemon:any) {
return new Promise((resolve, reject) => daemon.start((err:any) => {
if (err) return reject(err)
resolve()
}))
}
function stopDaemon(daemon:any) {
return new Promise((resolve, reject) => daemon.stop((err:any) => {
err && console.error(err);
if (err) return reject(err)
resolve()
}))
}
async function printTailAndWatchFile(file:any, tailSize:number) {
if (await qfs.exists(file)) {
const content = await qfs.read(file)
const lines = content.split('\n')
const from = Math.max(0, lines.length - tailSize)
const lastLines = lines.slice(from).join('\n')
console.log(lastLines)
}
watchFile(file)
}
function watchFile(file:any) {
const tail = new Tail(file);
// Specific errors handling
process.on('uncaughtException', (err:any) => {
if (err.code === "ENOENT") {
console.error('EXCEPTION: ', err.message);
setTimeout(() => watchFile(file), 1000) // Wait a second
}
});
// On new line
tail.on("line", function(data:any) {
console.log(data);
});
tail.on("error", function(error:any) {
console.error('ERROR: ', error);
});
}
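A small usage sketch of the config hook defined above, to show the precedence it implements. Assumptions: the module is loaded as daemonModule (the name and relative path are illustrative), and ConfDTO.mock() starts with an empty loglevel as in its definition above.
import {ConfDTO} from "../lib/dto/ConfDTO"
const daemonModule = require("./daemon")     // hypothetical relative path
async function demo() {
  const withFlag = ConfDTO.mock()
  await daemonModule.duniter.config.onLoading(withFlag, { loglevel: "trace" })
  // withFlag.loglevel === "trace": the --loglevel CLI option wins over the stored conf
  const withoutFlag = ConfDTO.mock()
  await daemonModule.duniter.config.onLoading(withoutFlag, {})
  // withoutFlag.loglevel === "info": no flag and an empty conf fall back to the default
}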
"use strict";
const co = require('co');
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", { value: true });
const _ = require('underscore');
const Block = require('../lib/entity/block');
module.exports = {
duniter: {
cli: [{
name: 'export-bc [upto]',
desc: 'Exports the whole blockchain as JSON array, up to [upto] block number (excluded).',
logs: false,
onDatabaseExecute: (server, conf, program, params) => co(function*() {
const upto = params[0];
const logger = server.logger;
try {
let CHUNK_SIZE = 500;
let jsoned = [];
let current = yield server.dal.getCurrentBlockOrNull();
let lastNumber = current ? current.number + 1 : -1;
if (upto !== undefined && upto.match(/\d+/)) {
lastNumber = Math.min(parseInt(upto), lastNumber);
}
let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
let chunks = [];
// Max-size chunks
for (let i = 0, len = chunksCount; i < len; i++) {
chunks.push({start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1});
}
// A last chunk
if (lastNumber > chunksCount * CHUNK_SIZE) {
chunks.push({start: chunksCount * CHUNK_SIZE, to: lastNumber});
}
for (const chunk of chunks) {
let blocks = yield server.dal.getBlocksBetween(chunk.start, chunk.to);
blocks.forEach(function (block) {
jsoned.push(_(new Block(block).json()).omit('raw'));
});
}
if (!program.nostdout) {
console.log(JSON.stringify(jsoned, null, " "));
}
yield server.disconnect();
return jsoned;
} catch(err) {
logger.warn(err.message || err);
yield server.disconnect();
}
})
}]
}
}
duniter: {
cli: [{
name: 'export-bc [upto]',
desc: 'Exports the whole blockchain as JSON array, up to [upto] block number (excluded).',
logs: false,
onDatabaseExecute: (server, conf, program, params) => __awaiter(this, void 0, void 0, function* () {
const upto = params[0];
const logger = server.logger;
try {
let CHUNK_SIZE = 500;
let jsoned = [];
let current = yield server.dal.getCurrentBlockOrNull();
let lastNumber = current ? current.number + 1 : -1;
if (upto !== undefined && upto.match(/\d+/)) {
lastNumber = Math.min(parseInt(upto), lastNumber);
}
let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
let chunks = [];
// Max-size chunks
for (let i = 0, len = chunksCount; i < len; i++) {
chunks.push({ start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1 });
}
// A last chunk
if (lastNumber > chunksCount * CHUNK_SIZE) {
chunks.push({ start: chunksCount * CHUNK_SIZE, to: lastNumber });
}
for (const chunk of chunks) {
let blocks = yield server.dal.getBlocksBetween(chunk.start, chunk.to);
blocks.forEach(function (block) {
jsoned.push(_(new Block(block).json()).omit('raw'));
});
}
if (!program.nostdout) {
console.log(JSON.stringify(jsoned, null, " "));
}
yield server.disconnect();
return jsoned;
}
catch (err) {
logger.warn(err.message || err);
yield server.disconnect();
}
})
}]
}
};
//# sourceMappingURL=export-bc.js.map
\ No newline at end of file
"use strict";
import {ConfDTO} from "../lib/dto/ConfDTO"
const _ = require('underscore');
const Block = require('../lib/entity/block');
module.exports = {
duniter: {
cli: [{
name: 'export-bc [upto]',
desc: 'Exports the whole blockchain as JSON array, up to [upto] block number (excluded).',
logs: false,
onDatabaseExecute: async (server:any, conf:ConfDTO, program:any, params:any) => {
const upto = params[0];
const logger = server.logger;
try {
let CHUNK_SIZE = 500;
let jsoned:any = [];
let current = await server.dal.getCurrentBlockOrNull();
let lastNumber = current ? current.number + 1 : -1;
if (upto !== undefined && upto.match(/\d+/)) {
lastNumber = Math.min(parseInt(upto), lastNumber);
}
let chunksCount = Math.floor(lastNumber / CHUNK_SIZE);
let chunks = [];
// Max-size chunks
for (let i = 0, len = chunksCount; i < len; i++) {
chunks.push({start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1});
}
// A last chunk
if (lastNumber > chunksCount * CHUNK_SIZE) {
chunks.push({start: chunksCount * CHUNK_SIZE, to: lastNumber});
}
for (const chunk of chunks) {
let blocks = await server.dal.getBlocksBetween(chunk.start, chunk.to);
blocks.forEach(function (block:any) {
jsoned.push(_(new Block(block).json()).omit('raw'));
});
}
if (!program.nostdout) {
console.log(JSON.stringify(jsoned, null, " "));
}
await server.disconnect();
return jsoned;
} catch(err) {
logger.warn(err.message || err);
await server.disconnect();
}
}
}]
}
}
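A standalone sketch of the chunking arithmetic used by export-bc above; the block numbers are only an example, not taken from a real chain.
const CHUNK_SIZE = 500
const lastNumber = 1200                                   // e.g. current.number + 1
const chunksCount = Math.floor(lastNumber / CHUNK_SIZE)   // 2 full chunks
const chunks: { start: number, to: number }[] = []
for (let i = 0; i < chunksCount; i++) {
  chunks.push({ start: i * CHUNK_SIZE, to: i * CHUNK_SIZE + CHUNK_SIZE - 1 })
}
if (lastNumber > chunksCount * CHUNK_SIZE) {
  chunks.push({ start: chunksCount * CHUNK_SIZE, to: lastNumber })
}
// chunks: [ {start: 0, to: 499}, {start: 500, to: 999}, {start: 1000, to: 1200} ]
// each range is then fetched with server.dal.getBlocksBetween(chunk.start, chunk.to)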