diff --git a/.gitignore b/.gitignore index b88a3dfca8d904c9c179e1d9f3e75aba5d5470e4..4974614429cd403bf3840450f723b36c520e07be 100644 --- a/.gitignore +++ b/.gitignore @@ -32,7 +32,8 @@ test/blockchain/*.js test/blockchain/*.js.map test/blockchain/lib/*.js test/blockchain/lib/*.js.map -server.js* +/index.js* +/server.js* app/cli.js* app/lib/*.js* app/lib/blockchain/*.js diff --git a/index.js b/index.js index 0b6ed6a4f5d4b45966a65c5d1806edeef28c6d44..6d7bd24367583ef28c4e5bc0718e49985c73fe16 100644 --- a/index.js +++ b/index.js @@ -1,503 +1,490 @@ "use strict"; - -const co = require('co'); +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const cli_1 = require("./app/cli"); +const stream = require("stream"); +const server_1 = require("./server"); const path = require('path'); -const util = require('util'); -const stream = require('stream'); const _ = require('underscore'); -const Server = require('./server').Server const directory = require('./app/lib/system/directory'); const constants = require('./app/lib/constants'); -const CLI = require('./app/cli').ExecuteCommand const logger = require('./app/lib/logger').NewLogger('duniter'); - -const configDependency = require('./app/modules/config'); -const wizardDependency = require('./app/modules/wizard'); -const resetDependency = require('./app/modules/reset'); +const configDependency = require('./app/modules/config'); +const wizardDependency = require('./app/modules/wizard'); +const resetDependency = require('./app/modules/reset'); const checkConfDependency = require('./app/modules/check-config'); -const exportBcDependency = require('./app/modules/export-bc'); -const reapplyDependency = require('./app/modules/reapply'); -const revertDependency = require('./app/modules/revert'); -const daemonDependency = require('./app/modules/daemon'); -const pSignalDependency = require('./app/modules/peersignal'); -const routerDependency = require('./app/modules/router'); -const pluginDependency = require('./app/modules/plugin'); -const proverDependency = require('./app/modules/prover').ProverDependency; - +const exportBcDependency = require('./app/modules/export-bc'); +const reapplyDependency = require('./app/modules/reapply'); +const revertDependency = require('./app/modules/revert'); +const daemonDependency = require('./app/modules/daemon'); +const pSignalDependency = require('./app/modules/peersignal'); +const routerDependency = require('./app/modules/router'); +const pluginDependency = require('./app/modules/plugin'); +const proverDependency = require('./app/modules/prover').ProverDependency; +class Stacks { + static quickRun(...args) { + return __awaiter(this, void 0, void 0, function* () { + const deps = Array.from(args).map((f, index) => { + const canonicalPath = path.resolve(f); + return { + name: 'duniter-quick-module-' + index, + required: require(canonicalPath) + }; + }); + const stack = Stacks.autoStack(deps); + let res; + try { + res = yield 
stack.executeStack(Stacks.quickRunGetArgs()); + } + catch (e) { + console.error(e); + } + Stacks.onRunDone(); + return res; + }); + } + static quickRunGetArgs() { + return process.argv.slice(); + } + static onRunDone() { + return Stacks.todoOnRunDone(); + } + static autoStack(priorityModules) { + const duniterModules = []; + let duniterDeps = []; + try { + const pjson = require(path.resolve('./package.json')); + // Look for compliant packages + const prodDeps = Object.keys(pjson.dependencies || {}); + const devDeps = Object.keys(pjson.devDependencies || {}); + duniterDeps = prodDeps.concat(devDeps); + } + catch (e) { } + for (const dep of duniterDeps) { + try { + const required = require(dep); + if (required.duniter) { + duniterModules.push({ + name: dep, + required + }); + } + } + catch (e) { } + } + // The final stack + return new Stack((priorityModules || []).concat(PRODUCTION_DEPENDENCIES).concat(duniterModules)); + } +} +Stacks.todoOnRunDone = () => process.exit(); const MINIMAL_DEPENDENCIES = [ - { name: 'duniter-config', required: configDependency } + { name: 'duniter-config', required: configDependency } ]; - const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([ - { name: 'duniter-wizard', required: wizardDependency }, - { name: 'duniter-reset', required: resetDependency }, - { name: 'duniter-chkconf', required: checkConfDependency }, - { name: 'duniter-exportbc', required: exportBcDependency }, - { name: 'duniter-reapply', required: reapplyDependency }, - { name: 'duniter-revert', required: revertDependency }, - { name: 'duniter-daemon', required: daemonDependency }, - { name: 'duniter-psignal', required: pSignalDependency }, - { name: 'duniter-router', required: routerDependency }, - { name: 'duniter-plugin', required: pluginDependency }, - { name: 'duniter-prover', required: proverDependency } -]); - -const PRODUCTION_DEPENDENCIES = DEFAULT_DEPENDENCIES.concat([ + { name: 'duniter-wizard', required: wizardDependency }, + { name: 'duniter-reset', required: resetDependency }, + { name: 'duniter-chkconf', required: checkConfDependency }, + { name: 'duniter-exportbc', required: exportBcDependency }, + { name: 'duniter-reapply', required: reapplyDependency }, + { name: 'duniter-revert', required: revertDependency }, + { name: 'duniter-daemon', required: daemonDependency }, + { name: 'duniter-psignal', required: pSignalDependency }, + { name: 'duniter-router', required: routerDependency }, + { name: 'duniter-plugin', required: pluginDependency }, + { name: 'duniter-prover', required: proverDependency } ]); - +const PRODUCTION_DEPENDENCIES = DEFAULT_DEPENDENCIES.concat([]); module.exports = function (home, memory, overConf) { - return new Server(home, memory, overConf); + return new server_1.Server(home, memory, overConf); }; - module.exports.statics = { - - logger: logger, - - /** - * Creates a new stack with minimal registrations only. - */ - minimalStack: () => new Stack(MINIMAL_DEPENDENCIES), - - /** - * Creates a new stack with core registrations only. 
- */ - simpleStack: () => new Stack(DEFAULT_DEPENDENCIES), - - /** - * Creates a new stack pre-registered with compliant modules found in package.json - */ - autoStack: (priorityModules) => { - const duniterModules = []; - let duniterDeps = [] - try { - const pjson = require(path.resolve('./package.json')) - // Look for compliant packages - const prodDeps = Object.keys(pjson.dependencies || {}); - const devDeps = Object.keys(pjson.devDependencies || {}); - duniterDeps = prodDeps.concat(devDeps) - } catch (e) { /* duniter as a dependency might not be run from an NPM project */ } - for(const dep of duniterDeps) { - try { - const required = require(dep); - if (required.duniter) { - duniterModules.push({ - name: dep, - required - }); - } - } catch (e) { /* Silent errors for packages that fail to load */ } - } - - // The final stack - return new Stack((priorityModules || []).concat(PRODUCTION_DEPENDENCIES).concat(duniterModules)); - }, - - quickRun: function() { - const deps = Array.from(arguments).map((f, index) => { - const canonicalPath = path.resolve(f) - return { - name: 'duniter-quick-module-' + index, - required: require(canonicalPath) - } - }) - const that = this - const stack = this.autoStack(deps) - return co(function*() { - let res - try { - res = yield stack.executeStack(that.quickRunGetArgs()) - } catch(e) { - console.error(e) - } - that.onRunDone() - return res - }) - }, - - quickRunGetArgs: () => process.argv.slice(), - onRunDone: () => process.exit() -}; - -function Stack(dependencies) { - - const that = this; - const cli = CLI(); - const configLoadingCallbacks = []; - const configBeforeSaveCallbacks = []; - const resetDataHooks = []; - const resetConfigHooks = []; - const INPUT = new InputStream(); - const PROCESS = new ProcessStream(); - const loaded = {}; - const wizardTasks = {}; - - const definitions = []; - const streams = { - input: [], - process: [], - output: [], - neutral: [] - }; - - // Part of modules API - this.getModule = (name) => loaded[name] - - this.registerDependency = (requiredObject, name) => { - if (name && loaded[name]) { - // Do not try to load it twice - return; - } - loaded[name] = requiredObject; - const def = requiredObject.duniter; - definitions.push(def); - for (const opt of (def.cliOptions || [])) { - cli.addOption(opt.value, opt.desc, opt.parser); - } - for (const command of (def.cli || [])) { - cli.addCommand({ - name: command.name, - desc: command.desc - }, (...args) => that.processCommand.apply(null, [command].concat(args))); - } - + logger: logger, /** - * Configuration injection - * ----------------------- + * Creates a new stack with minimal registrations only. */ - if (def.config) { - if (def.config.onLoading) { - configLoadingCallbacks.push(def.config.onLoading); - } - // Before the configuration is saved, the module can make some injection/cleaning - if (def.config.beforeSave) { - configBeforeSaveCallbacks.push(def.config.beforeSave); - } - } - + minimalStack: () => new Stack(MINIMAL_DEPENDENCIES), /** - * Reset data/config injection - * ----------------------- + * Creates a new stack with core registrations only. 
*/ - if (def.onReset) { - if (def.onReset.data) { - resetDataHooks.push(def.onReset.data); - } - // Before the configuration is saved, the module can make some injection/cleaning - if (def.onReset.config) { - resetConfigHooks.push(def.onReset.config); - } - } - + simpleStack: () => new Stack(DEFAULT_DEPENDENCIES), /** - * Wizard injection - * ----------------------- + * Creates a new stack pre-registered with compliant modules found in package.json */ - if (def.wizard) { - const tasks = Object.keys(def.wizard); - for (const name of tasks) { - wizardTasks[name] = def.wizard[name]; - } + autoStack: (...args) => { + return Stacks.autoStack.apply(null, args); + }, + quickRun: (path) => { + return Stacks.quickRun(path); + }, + setOnRunDone: (f) => { + return Stacks.todoOnRunDone = f; } - }; - - this.processCommand = (...args) => co(function*() { - const command = args[0]; - const program = args[1]; - const params = args.slice(2); - params.pop(); // Don't need the command argument - - const dbName = program.mdb; - const dbHome = program.home; - const home = directory.getHome(dbName, dbHome); - - if (command.logs === false) { - logger.mute(); +}; +class Stack { + constructor(dependencies) { + this.dependencies = dependencies; + this.definitions = []; + this.streams = { + input: [], + process: [], + output: [], + neutral: [] + }; + this.cli = cli_1.ExecuteCommand(); + this.configLoadingCallbacks = []; + this.configBeforeSaveCallbacks = []; + this.resetDataHooks = []; + this.resetConfigHooks = []; + this.INPUT = new InputStream(); + this.PROCESS = new ProcessStream(); + this.loaded = {}; + this.wizardTasks = {}; + // We register the initial dependencies right now. Others can be added thereafter. + for (const dep of dependencies) { + this.registerDependency(dep.required, dep.name); + } } - - // Add log files for this instance (non-memory instances only) - if (!program.memory) { - logger.addHomeLogs(home, program.loglevel); + // Part of modules API + getModule(name) { + return this.loaded[name]; } - - const server = new Server(home, program.memory === true, commandLineConf(program)); - - // If ever the process gets interrupted - let isSaving = false; - process.on('SIGINT', () => { - co(function*() { - if (!isSaving) { - isSaving = true; - // Save DB - try { - yield server.disconnect(); - process.exit(); - } catch (e) { - logger.error(e); - process.exit(3); - } + registerDependency(requiredObject, name) { + if (name && this.loaded[name]) { + // Do not try to load it twice + return; } - }); - }); - - // Config or Data reset hooks - server.resetDataHook = () => co(function*() { - for (const callback of resetDataHooks) { - yield callback(server.conf, program, logger, server.dal.confDAL); - } - }) - server.resetConfigHook = () => co(function*() { - for (const callback of resetConfigHooks) { - yield callback(server.conf, program, logger, server.dal.confDAL); - } - }) - - // Initialize server (db connection, ...) 
- try { - server.onPluggedFSHook = () => co(function*() { - - // Register the configuration hook for loading phase (overrides the loaded data) - server.dal.loadConfHook = (conf) => co(function*() { - // Loading injection - for (const callback of configLoadingCallbacks) { - yield callback(conf, program, logger, server.dal.confDAL); - } + this.loaded[name] = requiredObject; + const def = requiredObject.duniter; + this.definitions.push(def); + for (const opt of (def.cliOptions || [])) { + this.cli.addOption(opt.value, opt.desc, opt.parser); + } + for (const command of (def.cli || [])) { + this.cli.addCommand({ + name: command.name, + desc: command.desc + }, (...args) => this.processCommand.apply(this, [command].concat(args))); + } + /** + * Configuration injection + * ----------------------- + */ + if (def.config) { + if (def.config.onLoading) { + this.configLoadingCallbacks.push(def.config.onLoading); + } + // Before the configuration is saved, the module can make some injection/cleaning + if (def.config.beforeSave) { + this.configBeforeSaveCallbacks.push(def.config.beforeSave); + } + } + /** + * Reset data/config injection + * ----------------------- + */ + if (def.onReset) { + if (def.onReset.data) { + this.resetDataHooks.push(def.onReset.data); + } + // Before the configuration is saved, the module can make some injection/cleaning + if (def.onReset.config) { + this.resetConfigHooks.push(def.onReset.config); + } + } + /** + * Wizard injection + * ----------------------- + */ + if (def.wizard) { + const tasks = Object.keys(def.wizard); + for (const name of tasks) { + this.wizardTasks[name] = def.wizard[name]; + } + } + } + ; + processCommand(...args) { + return __awaiter(this, void 0, void 0, function* () { + const command = args[0]; + const program = args[1]; + const params = args.slice(2); + params.pop(); // Don't need the command argument + const dbName = program.mdb; + const dbHome = program.home; + const home = directory.getHome(dbName, dbHome); + if (command.logs === false) { + logger.mute(); + } + // Add log files for this instance (non-memory instances only) + if (!program.memory) { + logger.addHomeLogs(home, program.loglevel); + } + const server = new server_1.Server(home, program.memory === true, commandLineConf(program)); + // If ever the process gets interrupted + let isSaving = false; + process.on('SIGINT', () => __awaiter(this, void 0, void 0, function* () { + if (!isSaving) { + isSaving = true; + // Save DB + try { + yield server.disconnect(); + process.exit(); + } + catch (e) { + logger.error(e); + process.exit(3); + } + } + })); + // Config or Data reset hooks + server.resetDataHook = () => __awaiter(this, void 0, void 0, function* () { + for (const callback of this.resetDataHooks) { + yield callback(server.conf, program, logger, server.dal.confDAL); + } + }); + server.resetConfigHook = () => __awaiter(this, void 0, void 0, function* () { + for (const callback of this.resetConfigHooks) { + yield callback(server.conf, program, logger, server.dal.confDAL); + } + }); + // Initialize server (db connection, ...) 
+ try { + server.onPluggedFSHook = () => __awaiter(this, void 0, void 0, function* () { + // Register the configuration hook for loading phase (overrides the loaded data) + server.dal.loadConfHook = (conf) => __awaiter(this, void 0, void 0, function* () { + // Loading injection + for (const callback of this.configLoadingCallbacks) { + yield callback(conf, program, logger, server.dal.confDAL); + } + }); + // Register the configuration hook for saving phase (overrides the saved data) + server.dal.saveConfHook = (conf) => __awaiter(this, void 0, void 0, function* () { + const clonedConf = _.clone(conf); + for (const callback of this.configBeforeSaveCallbacks) { + yield callback(clonedConf, program, logger, server.dal.confDAL); + } + return clonedConf; + }); + }); + yield server.plugFileSystem(); + const conf = yield server.loadConf(); + // Eventually change the log level + // Add log files for this instance (non-memory instances only) + if (!program.memory) { + logger.addHomeLogs(home, conf.loglevel); + } + // Auto-configuration default + yield configure(program, server, server.conf || {}); + // Autosave conf + try { + yield server.dal.saveConf(conf); + logger.debug("Configuration saved."); + } + catch (e) { + logger.error("Configuration could not be saved: " + e); + throw Error(e); + } + const daemon = server.getDaemon(); + if (command.preventIfRunning && daemon.status()) { + throw 'Your node is currently running. Please stop it and relaunch your command.'; + } + // First possible class of commands: post-config + if (command.onConfiguredExecute) { + return yield command.onConfiguredExecute(server, conf, program, params, this.wizardTasks, this); + } + // Second possible class of commands: post-service + yield server.initDAL(conf); + /** + * Service injection + * ----------------- + */ + for (const def of this.definitions) { + if (def.service) { + // To feed data coming from some I/O (network, disk, other module, ...) 
+ if (def.service.input) { + this.streams.input.push(def.service.input(server, conf, logger)); + } + // To handle data this has been submitted by INPUT stream + if (def.service.process) { + this.streams.process.push(def.service.process(server, conf, logger)); + } + // To handle data this has been validated by PROCESS stream + if (def.service.output) { + this.streams.output.push(def.service.output(server, conf, logger)); + } + // Special service which does not stream anything particular (ex.: piloting the `server` object) + if (def.service.neutral) { + this.streams.neutral.push(def.service.neutral(server, conf, logger)); + } + } + } + // All inputs write to global INPUT stream + for (const module of this.streams.input) + module.pipe(this.INPUT); + // All processes read from global INPUT stream + for (const module of this.streams.process) + this.INPUT.pipe(module); + // All processes write to global PROCESS stream + for (const module of this.streams.process) + module.pipe(this.PROCESS); + // All ouputs read from global PROCESS stream + for (const module of this.streams.output) + this.PROCESS.pipe(module); + return yield command.onDatabaseExecute(server, conf, program, params, + // Start services and streaming between them + () => __awaiter(this, void 0, void 0, function* () { + const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral); + yield Promise.all(modules.map((module) => module.startService())); + }), + // Stop services and streaming between them + () => __awaiter(this, void 0, void 0, function* () { + const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral); + // Any streaming module must implement a `stopService` method + yield Promise.all(modules.map((module) => module.stopService())); + // // Stop reading inputs + // for (const module of streams.input) module.unpipe(); + // Stop reading from global INPUT + // INPUT.unpipe(); + // for (const module of streams.process) module.unpipe(); + // // Stop reading from global PROCESS + // PROCESS.unpipe(); + }), this); + } + catch (e) { + server.disconnect(); + throw e; + } }); - - // Register the configuration hook for saving phase (overrides the saved data) - server.dal.saveConfHook = (conf) => co(function*() { - const clonedConf = _.clone(conf); - for (const callback of configBeforeSaveCallbacks) { - yield callback(clonedConf, program, logger, server.dal.confDAL); - } - return clonedConf; + } + executeStack(argv) { + // Trace these errors + process.on('unhandledRejection', (reason) => { + logger.error('Unhandled rejection: ' + reason); + logger.error(reason); }); - }) - yield server.plugFileSystem(); - - const conf = yield server.loadConf(); - - // Eventually change the log level - // Add log files for this instance (non-memory instances only) - if (!program.memory) { - logger.addHomeLogs(home, conf.loglevel); - } - - // Auto-configuration default - yield configure(program, server, server.conf || {}); - // Autosave conf - try { - yield server.dal.saveConf(conf); - logger.debug("Configuration saved."); - } catch (e) { - logger.error("Configuration could not be saved: " + e); - throw Error(e); - } - - const daemon = server.getDaemon() - if (command.preventIfRunning && daemon.status()) { - throw 'Your node is currently running. Please stop it and relaunch your command.' 
- } - - // First possible class of commands: post-config - if (command.onConfiguredExecute) { - return yield command.onConfiguredExecute(server, conf, program, params, wizardTasks, that); - } - // Second possible class of commands: post-service - yield server.initDAL(conf); - - /** - * Service injection - * ----------------- - */ - for (const def of definitions) { - if (def.service) { - // To feed data coming from some I/O (network, disk, other module, ...) - if (def.service.input) { - streams.input.push(def.service.input(server, conf, logger)); - } - // To handle data that has been submitted by INPUT stream - if (def.service.process) { - streams.process.push(def.service.process(server, conf, logger)); - } - // To handle data that has been validated by PROCESS stream - if (def.service.output) { - streams.output.push(def.service.output(server, conf, logger)); - } - // Special service which does not stream anything particular (ex.: piloting the `server` object) - if (def.service.neutral) { - streams.neutral.push(def.service.neutral(server, conf, logger)); - } - } - } - // All inputs write to global INPUT stream - for (const module of streams.input) module.pipe(INPUT); - // All processes read from global INPUT stream - for (const module of streams.process) INPUT.pipe(module); - // All processes write to global PROCESS stream - for (const module of streams.process) module.pipe(PROCESS); - // All ouputs read from global PROCESS stream - for (const module of streams.output) PROCESS.pipe(module); - - return yield command.onDatabaseExecute(server, conf, program, params, - - // Start services and streaming between them - () => co(function*() { - // Any streaming module must implement a `startService` method - for (const m of streams.input) { - yield m.startService(); - } - const modules = [].concat(streams.process).concat(streams.output).concat(streams.neutral); - yield modules.map(module => module.startService()); - }), - - // Stop services and streaming between them - () => co(function*() { - const modules = streams.input.concat(streams.process).concat(streams.output); - // Any streaming module must implement a `stopService` method - yield modules.map(module => module.stopService()); - // // Stop reading inputs - // for (const module of streams.input) module.unpipe(); - // Stop reading from global INPUT - // INPUT.unpipe(); - // for (const module of streams.process) module.unpipe(); - // // Stop reading from global PROCESS - // PROCESS.unpipe(); - }), that); - } catch (e) { - server.disconnect(); - throw e; + // Executes the command + return this.cli.execute(argv); } - }); - - this.executeStack = (argv) => { - - // Trace these errors - process.on('unhandledRejection', (reason) => { - logger.error('Unhandled rejection: ' + reason); - logger.error(reason); - }); - - // Executes the command - return cli.execute(argv); - }; - - // We register the initial dependencies right now. Others can be added thereafter. 
- for (const dep of dependencies) { - that.registerDependency(dep.required, dep.name); - } } - -function commandLineConf(program, conf) { - - conf = conf || {}; - conf.sync = conf.sync || {}; - const cli = { - currency: program.currency, - cpu: program.cpu, - server: { - port: program.port, - }, - db: { - mport: program.mport, - mdb: program.mdb, - home: program.home - }, - logs: { - http: program.httplogs, - nohttp: program.nohttplogs - }, - endpoints: [], - rmEndpoints: [], - isolate: program.isolate, - forksize: program.forksize, - nofork: program.nofork, - timeout: program.timeout - }; - - // Update conf - if (cli.currency) conf.currency = cli.currency; - if (cli.server.port) conf.port = cli.server.port; - if (cli.cpu) conf.cpu = Math.max(0.01, Math.min(1.0, cli.cpu)); - if (cli.logs.http) conf.httplogs = true; - if (cli.logs.nohttp) conf.httplogs = false; - if (cli.db.mport) conf.mport = cli.db.mport; - if (cli.db.home) conf.home = cli.db.home; - if (cli.db.mdb) conf.mdb = cli.db.mdb; - if (cli.isolate) conf.isolate = cli.isolate; - if (cli.timeout) conf.timeout = cli.timeout; - if (cli.forksize != null) conf.forksize = cli.forksize; - - return conf; +function commandLineConf(program, conf = {}) { + conf = conf || {}; + conf.sync = conf.sync || {}; + const cli = { + currency: program.currency, + cpu: program.cpu, + server: { + port: program.port, + }, + db: { + mport: program.mport, + mdb: program.mdb, + home: program.home + }, + logs: { + http: program.httplogs, + nohttp: program.nohttplogs + }, + endpoints: [], + rmEndpoints: [], + isolate: program.isolate, + forksize: program.forksize, + nofork: program.nofork, + timeout: program.timeout + }; + // Update conf + if (cli.currency) + conf.currency = cli.currency; + if (cli.server.port) + conf.port = cli.server.port; + if (cli.cpu) + conf.cpu = Math.max(0.01, Math.min(1.0, cli.cpu)); + if (cli.logs.http) + conf.httplogs = true; + if (cli.logs.nohttp) + conf.httplogs = false; + if (cli.db.mport) + conf.mport = cli.db.mport; + if (cli.db.home) + conf.home = cli.db.home; + if (cli.db.mdb) + conf.mdb = cli.db.mdb; + if (cli.isolate) + conf.isolate = cli.isolate; + if (cli.timeout) + conf.timeout = cli.timeout; + if (cli.forksize != null) + conf.forksize = cli.forksize; + return conf; } - function configure(program, server, conf) { - return co(function *() { - if (typeof server == "string" || typeof conf == "string") { - throw constants.ERRORS.CLI_CALLERR_CONFIG; - } - // Try to add an endpoint if provided - if (program.addep) { - if (conf.endpoints.indexOf(program.addep) === -1) { - conf.endpoints.push(program.addep); - } - // Remove it from "to be removed" list - const indexInRemove = conf.rmEndpoints.indexOf(program.addep); - if (indexInRemove !== -1) { - conf.rmEndpoints.splice(indexInRemove, 1); - } - } - // Try to remove an endpoint if provided - if (program.remep) { - if (conf.rmEndpoints.indexOf(program.remep) === -1) { - conf.rmEndpoints.push(program.remep); - } - // Remove it from "to be added" list - const indexInToAdd = conf.endpoints.indexOf(program.remep); - if (indexInToAdd !== -1) { - conf.endpoints.splice(indexInToAdd, 1); - } - } - }); + return __awaiter(this, void 0, void 0, function* () { + if (typeof server == "string" || typeof conf == "string") { + throw constants.ERRORS.CLI_CALLERR_CONFIG; + } + // Try to add an endpoint if provided + if (program.addep) { + if (conf.endpoints.indexOf(program.addep) === -1) { + conf.endpoints.push(program.addep); + } + // Remove it from "to be removed" list + const indexInRemove = 
conf.rmEndpoints.indexOf(program.addep); + if (indexInRemove !== -1) { + conf.rmEndpoints.splice(indexInRemove, 1); + } + } + // Try to remove an endpoint if provided + if (program.remep) { + if (conf.rmEndpoints.indexOf(program.remep) === -1) { + conf.rmEndpoints.push(program.remep); + } + // Remove it from "to be added" list + const indexInToAdd = conf.endpoints.indexOf(program.remep); + if (indexInToAdd !== -1) { + conf.endpoints.splice(indexInToAdd, 1); + } + } + }); } - /** - * InputStream is a special stream that filters what passes in. + * InputStream is a special stream this filters what passes in. * Only DUP-like documents should be treated by the processing tools, to avoid JSON injection and save CPU cycles. * @constructor */ -function InputStream() { - - const that = this; - - stream.Transform.call(this, { objectMode: true }); - - this._write = function (str, enc, done) { - if (typeof str === 'string') { - // Keep only strings - const matches = str.match(/Type: (.*)\n/); - if (matches && matches[1].match(/(Block|Membership|Identity|Certification|Transaction|Peer)/)) { - const type = matches[1].toLowerCase(); - that.push({ type, doc: str }); - } +class InputStream extends stream.Transform { + constructor() { + super({ objectMode: true }); + } + _write(str, enc, done) { + if (typeof str === 'string') { + // Keep only strings + const matches = str.match(/Type: (.*)\n/); + if (matches && matches[1].match(/(Block|Membership|Identity|Certification|Transaction|Peer)/)) { + const type = matches[1].toLowerCase(); + this.push({ type, doc: str }); + } + } + done && done(); } - done && done(); - }; + ; } - -function ProcessStream() { - - const that = this; - - stream.Transform.call(this, { objectMode: true }); - - this._write = function (obj, enc, done) { - // Never close the stream - if (obj !== undefined && obj !== null) { - that.push(obj); +class ProcessStream extends stream.Transform { + constructor() { + super({ objectMode: true }); + } + _write(obj, enc, done) { + // Never close the stream + if (obj !== undefined && obj !== null) { + this.push(obj); + } + done && done(); } - done && done(); - }; + ; } - -util.inherits(InputStream, stream.Transform); -util.inherits(ProcessStream, stream.Transform); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/index.ts b/index.ts new file mode 100644 index 0000000000000000000000000000000000000000..8ac403be553fadedf84fb792a1b9d6433c4ae527 --- /dev/null +++ b/index.ts @@ -0,0 +1,536 @@ +import {ExecuteCommand} from "./app/cli" +import * as stream from "stream" +import {Server} from "./server" +import {ConfDTO} from "./app/lib/dto/ConfDTO" + +const path = require('path'); +const _ = require('underscore'); +const directory = require('./app/lib/system/directory'); +const constants = require('./app/lib/constants'); +const logger = require('./app/lib/logger').NewLogger('duniter'); + +const configDependency = require('./app/modules/config'); +const wizardDependency = require('./app/modules/wizard'); +const resetDependency = require('./app/modules/reset'); +const checkConfDependency = require('./app/modules/check-config'); +const exportBcDependency = require('./app/modules/export-bc'); +const reapplyDependency = require('./app/modules/reapply'); +const revertDependency = require('./app/modules/revert'); +const daemonDependency = require('./app/modules/daemon'); +const pSignalDependency = require('./app/modules/peersignal'); +const routerDependency = require('./app/modules/router'); +const pluginDependency = 
require('./app/modules/plugin'); +const proverDependency = require('./app/modules/prover').ProverDependency; + +class Stacks { + + static todoOnRunDone:() => any = () => process.exit() + + static async quickRun(...args:any[]) { + const deps = Array.from(args).map((f, index) => { + const canonicalPath = path.resolve(f) + return { + name: 'duniter-quick-module-' + index, + required: require(canonicalPath) + } + }) + const stack = Stacks.autoStack(deps) + let res + try { + res = await stack.executeStack(Stacks.quickRunGetArgs()) + } catch(e) { + console.error(e) + } + Stacks.onRunDone() + return res + } + + static quickRunGetArgs() { + return process.argv.slice() + } + + static onRunDone() { + return Stacks.todoOnRunDone() + } + + static autoStack(priorityModules:any) { + + const duniterModules = []; + let duniterDeps:any = [] + + try { + const pjson = require(path.resolve('./package.json')) + // Look for compliant packages + const prodDeps = Object.keys(pjson.dependencies || {}); + const devDeps = Object.keys(pjson.devDependencies || {}); + duniterDeps = prodDeps.concat(devDeps) + } catch (e) { /* duniter as a dependency might not be run from an NPM project */ } + + for(const dep of duniterDeps) { + try { + const required = require(dep); + if (required.duniter) { + duniterModules.push({ + name: dep, + required + }); + } + } catch (e) { /* Silent errors for packages that fail to load */ } + } + + // The final stack + return new Stack((priorityModules || []).concat(PRODUCTION_DEPENDENCIES).concat(duniterModules)); + } +} + +const MINIMAL_DEPENDENCIES = [ + { name: 'duniter-config', required: configDependency } +]; + +const DEFAULT_DEPENDENCIES = MINIMAL_DEPENDENCIES.concat([ + { name: 'duniter-wizard', required: wizardDependency }, + { name: 'duniter-reset', required: resetDependency }, + { name: 'duniter-chkconf', required: checkConfDependency }, + { name: 'duniter-exportbc', required: exportBcDependency }, + { name: 'duniter-reapply', required: reapplyDependency }, + { name: 'duniter-revert', required: revertDependency }, + { name: 'duniter-daemon', required: daemonDependency }, + { name: 'duniter-psignal', required: pSignalDependency }, + { name: 'duniter-router', required: routerDependency }, + { name: 'duniter-plugin', required: pluginDependency }, + { name: 'duniter-prover', required: proverDependency } +]); + +const PRODUCTION_DEPENDENCIES = DEFAULT_DEPENDENCIES.concat([ +]); + +module.exports = function (home:string, memory:boolean, overConf:any) { + return new Server(home, memory, overConf); +} + +module.exports.statics = { + + logger: logger, + + /** + * Creates a new stack with minimal registrations only. + */ + minimalStack: () => new Stack(MINIMAL_DEPENDENCIES), + + /** + * Creates a new stack with core registrations only.
+ */ + simpleStack: () => new Stack(DEFAULT_DEPENDENCIES), + + /** + * Creates a new stack pre-registered with compliant modules found in package.json + */ + autoStack: (...args:any[]) => { + return Stacks.autoStack.apply(null, args) + }, + + quickRun: (path:string) => { + return Stacks.quickRun(path) + }, + + setOnRunDone: (f:()=>any) => { + return Stacks.todoOnRunDone = f + } +}; + +interface StreamingDuniterModule extends stream.Readable { + startService: () => Promise<any> + stopService: () => Promise<any> +} + +class Stack { + + private cli:any + private configLoadingCallbacks:any[] + private configBeforeSaveCallbacks:any[] + private resetDataHooks:any[] + private resetConfigHooks:any[] + private INPUT:any + private PROCESS:any + private loaded:any + private wizardTasks:any + private definitions:any[] = [] + private streams: { + input: stream.Readable[] + process: stream.Transform[] + output: stream.Transform[] + neutral: stream.Transform[] + } = { + input: [], + process: [], + output: [], + neutral: [] + } + + constructor(private dependencies:any[]) { + this.cli = ExecuteCommand() + this.configLoadingCallbacks = [] + this.configBeforeSaveCallbacks = [] + this.resetDataHooks = [] + this.resetConfigHooks = [] + this.INPUT = new InputStream() + this.PROCESS = new ProcessStream() + this.loaded = {} + this.wizardTasks = {} + + // We register the initial dependencies right now. Others can be added thereafter. + for (const dep of dependencies) { + this.registerDependency(dep.required, dep.name); + } + } + + // Part of modules API + getModule(name:string) { + return this.loaded[name] + } + + registerDependency(requiredObject:any, name:string) { + if (name && this.loaded[name]) { + // Do not try to load it twice + return; + } + this.loaded[name] = requiredObject; + const def = requiredObject.duniter; + this.definitions.push(def); + for (const opt of (def.cliOptions || [])) { + this.cli.addOption(opt.value, opt.desc, opt.parser); + } + for (const command of (def.cli || [])) { + this.cli.addCommand({ + name: command.name, + desc: command.desc + }, (...args:any[]) => this.processCommand.apply(this, [command].concat(args))); + } + + /** + * Configuration injection + * ----------------------- + */ + if (def.config) { + if (def.config.onLoading) { + this.configLoadingCallbacks.push(def.config.onLoading); + } + // Before the configuration is saved, the module can make some injection/cleaning + if (def.config.beforeSave) { + this.configBeforeSaveCallbacks.push(def.config.beforeSave); + } + } + + /** + * Reset data/config injection + * ----------------------- + */ + if (def.onReset) { + if (def.onReset.data) { + this.resetDataHooks.push(def.onReset.data); + } + // Before the configuration is saved, the module can make some injection/cleaning + if (def.onReset.config) { + this.resetConfigHooks.push(def.onReset.config); + } + } + + /** + * Wizard injection + * ----------------------- + */ + if (def.wizard) { + const tasks = Object.keys(def.wizard); + for (const name of tasks) { + this.wizardTasks[name] = def.wizard[name]; + } + } + }; + + async processCommand (...args:any[]) { + const command = args[0]; + const program = args[1]; + const params = args.slice(2); + params.pop(); // Don't need the command argument + + const dbName = program.mdb; + const dbHome = program.home; + const home = directory.getHome(dbName, dbHome); + + if (command.logs === false) { + logger.mute(); + } + + // Add log files for this instance (non-memory instances only) + if (!program.memory) { + logger.addHomeLogs(home, 
program.loglevel); + } + + const server = new Server(home, program.memory === true, commandLineConf(program)); + + // If ever the process gets interrupted + let isSaving = false; + process.on('SIGINT', async () => { + if (!isSaving) { + isSaving = true; + // Save DB + try { + await server.disconnect(); + process.exit(); + } catch (e) { + logger.error(e); + process.exit(3); + } + } + }); + + // Config or Data reset hooks + server.resetDataHook = async () => { + for (const callback of this.resetDataHooks) { + await callback(server.conf, program, logger, server.dal.confDAL); + } + } + server.resetConfigHook = async () => { + for (const callback of this.resetConfigHooks) { + await callback(server.conf, program, logger, server.dal.confDAL); + } + } + + // Initialize server (db connection, ...) + try { + server.onPluggedFSHook = async () => { + + // Register the configuration hook for loading phase (overrides the loaded data) + server.dal.loadConfHook = async (conf:ConfDTO) => { + // Loading injection + for (const callback of this.configLoadingCallbacks) { + await callback(conf, program, logger, server.dal.confDAL); + } + } + + // Register the configuration hook for saving phase (overrides the saved data) + server.dal.saveConfHook = async (conf:ConfDTO) => { + const clonedConf = _.clone(conf); + for (const callback of this.configBeforeSaveCallbacks) { + await callback(clonedConf, program, logger, server.dal.confDAL); + } + return clonedConf; + } + } + await server.plugFileSystem(); + + const conf = await server.loadConf(); + + // Eventually change the log level + // Add log files for this instance (non-memory instances only) + if (!program.memory) { + logger.addHomeLogs(home, conf.loglevel); + } + + // Auto-configuration default + await configure(program, server, server.conf || {}); + // Autosave conf + try { + await server.dal.saveConf(conf); + logger.debug("Configuration saved."); + } catch (e) { + logger.error("Configuration could not be saved: " + e); + throw Error(e); + } + + const daemon = server.getDaemon() + if (command.preventIfRunning && daemon.status()) { + throw 'Your node is currently running. Please stop it and relaunch your command.' + } + + // First possible class of commands: post-config + if (command.onConfiguredExecute) { + return await command.onConfiguredExecute(server, conf, program, params, this.wizardTasks, this); + } + // Second possible class of commands: post-service + await server.initDAL(conf); + + /** + * Service injection + * ----------------- + */ + for (const def of this.definitions) { + if (def.service) { + // To feed data coming from some I/O (network, disk, other module, ...) 
+ if (def.service.input) { + this.streams.input.push(def.service.input(server, conf, logger)); + } + // To handle data that has been submitted by INPUT stream + if (def.service.process) { + this.streams.process.push(def.service.process(server, conf, logger)); + } + // To handle data that has been validated by PROCESS stream + if (def.service.output) { + this.streams.output.push(def.service.output(server, conf, logger)); + } + // Special service which does not stream anything particular (ex.: piloting the `server` object) + if (def.service.neutral) { + this.streams.neutral.push(def.service.neutral(server, conf, logger)); + } + } + } + // All inputs write to global INPUT stream + for (const module of this.streams.input) module.pipe(this.INPUT); + // All processes read from global INPUT stream + for (const module of this.streams.process) this.INPUT.pipe(module); + // All processes write to global PROCESS stream + for (const module of this.streams.process) module.pipe(this.PROCESS); + // All outputs read from global PROCESS stream + for (const module of this.streams.output) this.PROCESS.pipe(module); + + return await command.onDatabaseExecute(server, conf, program, params, + + // Start services and streaming between them + async () => { + const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral); + await Promise.all(modules.map((module:StreamingDuniterModule) => module.startService())) + }, + + // Stop services and streaming between them + async () => { + const modules = this.streams.input.concat(this.streams.process).concat(this.streams.output).concat(this.streams.neutral); + // Any streaming module must implement a `stopService` method + await Promise.all(modules.map((module:StreamingDuniterModule) => module.stopService())) + // // Stop reading inputs + // for (const module of streams.input) module.unpipe(); + // Stop reading from global INPUT + // INPUT.unpipe(); + // for (const module of streams.process) module.unpipe(); + // // Stop reading from global PROCESS + // PROCESS.unpipe(); + }, + + this); + + } catch (e) { + server.disconnect(); + throw e; + } + } + + executeStack(argv:string[]) { + + // Trace these errors + process.on('unhandledRejection', (reason) => { + logger.error('Unhandled rejection: ' + reason); + logger.error(reason); + }); + + // Executes the command + return this.cli.execute(argv); + } +} + +function commandLineConf(program:any, conf:any = {}) { + + conf = conf || {}; + conf.sync = conf.sync || {}; + const cli = { + currency: program.currency, + cpu: program.cpu, + server: { + port: program.port, + }, + db: { + mport: program.mport, + mdb: program.mdb, + home: program.home + }, + logs: { + http: program.httplogs, + nohttp: program.nohttplogs + }, + endpoints: [], + rmEndpoints: [], + isolate: program.isolate, + forksize: program.forksize, + nofork: program.nofork, + timeout: program.timeout + }; + + // Update conf + if (cli.currency) conf.currency = cli.currency; + if (cli.server.port) conf.port = cli.server.port; + if (cli.cpu) conf.cpu = Math.max(0.01, Math.min(1.0, cli.cpu)); + if (cli.logs.http) conf.httplogs = true; + if (cli.logs.nohttp) conf.httplogs = false; + if (cli.db.mport) conf.mport = cli.db.mport; + if (cli.db.home) conf.home = cli.db.home; + if (cli.db.mdb) conf.mdb = cli.db.mdb; + if (cli.isolate) conf.isolate = cli.isolate; + if (cli.timeout) conf.timeout = cli.timeout; + if (cli.forksize != null) conf.forksize = cli.forksize; + + return conf; +} + +async function configure(program:any, 
server:Server, conf:ConfDTO) { + if (typeof server == "string" || typeof conf == "string") { + throw constants.ERRORS.CLI_CALLERR_CONFIG; + } + // Try to add an endpoint if provided + if (program.addep) { + if (conf.endpoints.indexOf(program.addep) === -1) { + conf.endpoints.push(program.addep); + } + // Remove it from "to be removed" list + const indexInRemove = conf.rmEndpoints.indexOf(program.addep); + if (indexInRemove !== -1) { + conf.rmEndpoints.splice(indexInRemove, 1); + } + } + // Try to remove an endpoint if provided + if (program.remep) { + if (conf.rmEndpoints.indexOf(program.remep) === -1) { + conf.rmEndpoints.push(program.remep); + } + // Remove it from "to be added" list + const indexInToAdd = conf.endpoints.indexOf(program.remep); + if (indexInToAdd !== -1) { + conf.endpoints.splice(indexInToAdd, 1); + } + } +} + +/** + * InputStream is a special stream that filters what passes in. + * Only DUP-like documents should be treated by the processing tools, to avoid JSON injection and save CPU cycles. + * @constructor + */ +class InputStream extends stream.Transform { + + constructor() { + super({ objectMode: true }) + } + + _write(str:string, enc:any, done:any) { + if (typeof str === 'string') { + // Keep only strings + const matches = str.match(/Type: (.*)\n/); + if (matches && matches[1].match(/(Block|Membership|Identity|Certification|Transaction|Peer)/)) { + const type = matches[1].toLowerCase(); + this.push({ type, doc: str }); + } + } + done && done(); + }; +} +class ProcessStream extends stream.Transform { + + constructor() { + super({ objectMode: true }) + } + + _write(obj:any, enc:any, done:any) { + // Never close the stream + if (obj !== undefined && obj !== null) { + this.push(obj); + } + done && done(); + }; +} \ No newline at end of file diff --git a/server.ts b/server.ts index dc5a7e472b29b275306423ee0f6c1a436a26bf9e..2ea5ac885b4000279a73a9bba01821cbdba1a95c 100644 --- a/server.ts +++ b/server.ts @@ -416,7 +416,7 @@ export class Server extends stream.Duplex implements HookableServer { * @param insteadOfCmd The current command to be replaced by `overrideCommand` command. * @returns {*} The daemon handle. */ - getDaemon(overrideCommand:string, insteadOfCmd:string) { + getDaemon(overrideCommand:string = "", insteadOfCmd:string = "") { const mainModule = process.argv[1] const cwd = path.resolve(mainModule, '../..') const argv = this.getCommand(overrideCommand, insteadOfCmd) @@ -449,7+449,7 @@ export class Server extends stream.Duplex implements HookableServer { * @param insteadOfCmd * @returns {*} */ - private getCommand(cmd:string, insteadOfCmd:string) { + private getCommand(cmd:string = "", insteadOfCmd:string = "") { if (insteadOfCmd) { // Return the same command args, except the command `insteadOfCmd` which is replaced by `cmd` return process.argv.slice(2).map((arg) => { @@ -480,49 +480,49 @@ export class Server extends stream.Duplex implements HookableServer { /** * Default endpoint. To be overriden by a module to specify another endpoint value (for ex. BMA). */ - getMainEndpoint() { + getMainEndpoint(): Promise<any> { return Promise.resolve('DEFAULT_ENDPOINT') } /** * Default WoT incoming data for new block. To be overriden by a module. */ - generatorGetJoinData() { + generatorGetJoinData(): Promise<any> { return Promise.resolve({}) } /** * Default WoT incoming certifications for new block, filtering wrong certs. To be overriden by a module. 
*/ - generatorComputeNewCerts() { + generatorComputeNewCerts(): Promise<any> { return Promise.resolve({}) } /** * Default WoT transforming method for certs => links. To be overriden by a module. */ - generatorNewCertsToLinks() { + generatorNewCertsToLinks(): Promise<any> { return Promise.resolve({}) } /** * Default hook on file system plugging. To be overriden by module system. */ - onPluggedFSHook() { + onPluggedFSHook(): Promise<any> { return Promise.resolve({}) } /** * Default hook on data reset. To be overriden by module system. */ - resetDataHook() { + resetDataHook(): Promise<any> { return Promise.resolve({}) } /** * Default hook on data reset. To be overriden by module system. */ - resetConfigHook() { + resetConfigHook(): Promise<any> { return Promise.resolve({}) } } \ No newline at end of file diff --git a/test/integration/v1.0-modules-api.js b/test/integration/v1.0-modules-api.js index 8b5c04395c8c9c5be4447b50152b0ee369e3dac7..79a13af03099411c1a189185e4d5a98d5619cb91 100644 --- a/test/integration/v1.0-modules-api.js +++ b/test/integration/v1.0-modules-api.js @@ -13,9 +13,9 @@ const querablep = require('querablep'); describe("v1.0 Module API", () => { it('should be able to execute `hello` command with quickRun', () => co(function*() { - duniter.statics.quickRunGetArgs = () => ['', '', 'hello-world'] - duniter.statics.onRunDone = () => { /* Do not exit the process */ } + duniter.statics.setOnRunDone(() => { /* Do not exit the process */ }) const absolutePath = path.join(__dirname, './scenarios/hello-plugin.js') + process.argv = ['', absolutePath, 'hello-world'] const res = yield duniter.statics.quickRun(absolutePath) res.should.equal('Hello world! from within Duniter.') })) diff --git a/tsconfig.json b/tsconfig.json index cb18c28f5f5a9efaf873956191ade38cb90277d2..171c2148ef6a63c041f1d14927ce671f18ea5740 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -11,6 +11,7 @@ }, "include": [ "server.ts", + "index.ts", "app", "bin", "test"