Commit ae3c7840 authored by Cédric Moreau

[enh] Refactoring: migrated remaining .js files to .ts

parent 3f7dd79a
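
The diffs below apply one recurring pattern to the remaining .js test files: require() calls become typed ES imports, co-wrapped generator callbacks become async functions, and yield becomes await. A minimal sketch of that pattern, modelled on the Source DAL test further down (the test title here is illustrative; Mocha's global before/it and the should library are assumed, as in these tests):

import {FileDAL} from "../../app/lib/dal/fileDAL"
import {Directory} from "../../app/lib/system/directory"
const should = require('should')

let dal: FileDAL

// Before: before(() => co(function *() { dal = new FileDAL(yield dir.getHomeParams(true, 'db0')); yield dal.init(); }))
before(async () => {
  dal = new FileDAL(await Directory.getHomeParams(true, 'db0'))
  await dal.init({} as any)
})

// Before: it('...', () => co(function *() { (yield dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(4); }))
it('finds the inserted sources', async () => {
  (await dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(4);
})
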
@@ -35,78 +35,11 @@ vagrant/duniter
.nyc_output
coverage/
# TS migration
test/blockchain/*.js*
test/blockchain/*.d.ts
test/blockchain/lib/*.js*
test/blockchain/lib/*.d.ts
/index.js*
/index.d.ts
/server.js*
/server.d.ts
app/**/*.js*
app/**/*.d.ts
test/integration/revoked_pubkey_replay.js
test/integration/server-shutdown.js
test/integration/transactions-csv-cltv-sig.js
test/integration/ws2p*js
test/integration/*.js.map
test/integration/*.d.ts
test/integration/membership_chainability.js*
test/integration/membership_chainability.d.ts
test/integration/tools/toolbox.js*
test/integration/tools/toolbox.d.ts
test/integration/tools/TestUser.js*
test/integration/tools/TestUser.d.ts
test/integration/documents-currency.js*
test/integration/documents-currency.d.ts
test/integration/forwarding.js
test/integration/branches_switch.js
test/integration/branches2.js
test/integration/transactions-chaining.js
test/unit-tools.js*
test/unit-tools.d.ts
test/fast/modules/crawler/block_pulling.js*
test/fast/modules/crawler/block_pulling.d.ts
test/fast/fork*.js*
test/fast/fork*.d.ts
test/fast/proxies*.js*
test/fast/proxies*.d.ts
test/fast/modules/ws2p/*.js*
test/fast/modules/ws2p/*.d.ts
test/fast/modules/common/grammar.js*
test/fast/modules/common/grammar.d.ts
test/fast/prover/pow-1-cluster.d.ts
test/fast/prover/pow-1-cluster.js
test/fast/prover/pow-1-cluster.js.map
test/fast/protocol-local-rule-chained-tx-depth.js
test/fast/protocol-local-rule-chained-tx-depth.js.map
test/fast/protocol-local-rule-chained-tx-depth.d.ts
test/fast/dal/*-loki.d.ts
test/fast/dal/*-loki.js*
test/dal/loki.d.ts
test/dal/loki.js*
test/dal/blockchain-archive.d.ts
test/dal/blockchain-archive.js*
test/dal/basic-dal-tests.d.ts
test/dal/basic-dal-tests.js*
# In test folders
test/integration/wot/*.d.ts
test/integration/wot/*.js*
test/integration/transactions/*.d.ts
test/integration/transactions/*.js*
test/integration/network/*.d.ts
test/integration/network/*.js*
test/integration/misc/*.d.ts
test/integration/misc/*.js*
test/integration/identity/*.d.ts
test/integration/identity/*.js*
test/integration/certification/*.d.ts
test/integration/certification/*.js*
test/integration/branches/*.d.ts
test/integration/branches/*.js*
test/integration/blocks/*.d.ts
test/integration/blocks/*.js*
test/fast/index/*.d.ts
test/fast/index/*.js*
\ No newline at end of file
test/**/*.d.ts
test/**/*.js*
\ No newline at end of file
export enum DataErrors {
BLOCK_WASNT_COMMITTED,
CANNOT_ARCHIVE_CHUNK_WRONG_SIZE,
CORRUPTED_DATABASE,
BLOCKCHAIN_NOT_INITIALIZED_YET,
@@ -117,7 +117,7 @@ export class BlockchainContext {
return DuniterBlockchain.checkBlock(block, withPoWAndSignature, this.conf, this.dal)
}
private async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null): Promise<any> {
private async addBlock(obj: BlockDTO, index: any = null, HEAD: DBHead | null = null): Promise<BlockDTO> {
const block = await DuniterBlockchain.pushTheBlock(obj, index, HEAD, this.conf, this.dal, this.logger)
this.vHEAD_1 = this.vHEAD = null
return block
@@ -30,4 +30,8 @@ export interface TxsDAO extends GenericDAO<DBTx> {
removeAll(): Promise<void>
sandbox:SandBox<{ issuers: string[], output_base:number, output_amount:number }>
getSandboxRoom(): Promise<number>
setSandboxSize(size:number): void
}
@@ -156,6 +156,6 @@ logger.addHomeLogs(Directory.INSTANCE_HOME)
/**
* Convenience function to get logger directly
*/
export function NewLogger() {
export function NewLogger(name?:string) {
return logger
}
@@ -35,7 +35,7 @@ export const CrawlerDependency = {
contacter: (host:string, port:number, opts?:any) => new Contacter(host, port, opts),
pullBlocks: async (server:Server, pubkey:string) => {
pullBlocks: async (server:Server, pubkey = "") => {
const crawler = new Crawler(server, server.conf, server.logger);
return crawler.pullBlocks(server, pubkey);
},
@@ -51,7 +51,7 @@ export class Crawler extends stream.Transform implements DuniterService {
this.sandboxCrawler = new SandboxCrawler(server, conf, logger)
}
pullBlocks(server:Server, pubkey:string) {
pullBlocks(server:Server, pubkey = "") {
return this.blockCrawler.pullBlocks(server, pubkey)
}
@@ -344,7 +344,7 @@ export class BlockCrawler {
this.syncBlockFifo.kill();
}
pullBlocks(server:Server, pubkey:string) {
pullBlocks(server:Server, pubkey = "") {
return this.syncBlock(server, pubkey)
}
@@ -691,7 +691,7 @@ export class BlockGeneratorWhichProves extends BlockGenerator {
super(server)
}
async makeNextBlock(block:DBBlock|null, trial:number, manualValues:any = null) {
async makeNextBlock(block:DBBlock|null, trial?:number|null, manualValues:any = null) {
const unsignedBlock = block || (await this.nextBlock(manualValues))
const trialLevel = trial || (await this.mainContext.getIssuerPersonalizedDifficulty(this.selfPubkey))
return this.prover.prove(unsignedBlock, trialLevel, (manualValues && manualValues.time) || null);
@@ -209,7 +209,7 @@ export class PermanentProver {
permanenceResolve()
}
async blockchainChanged(gottenBlock:any) {
async blockchainChanged(gottenBlock?:any) {
if (this.server && (!gottenBlock || !this.lastComputedBlock || gottenBlock.hash !== this.lastComputedBlock.hash)) {
// Cancel any processing proof
await this.prover.cancel()
@@ -223,9 +223,10 @@ export class BlockchainService extends FIFOService {
})
}
async blockResolution() {
let added = true
while (added) {
async blockResolution(): Promise<BlockDTO|null> {
let lastAdded:BlockDTO|null = null
let added:BlockDTO|null
do {
const current = await this.current()
let potentials = []
if (current) {
@@ -235,7 +236,7 @@
potentials = await this.dal.getPotentialRootBlocks()
this.logger.info('Block resolution: %s potential blocks for root block...', potentials.length)
}
added = false
added = null
let i = 0
while (!added && i < potentials.length) {
const dto = BlockDTO.fromJSONObject(potentials[i])
@@ -245,17 +246,16 @@
await this.dal.removeTxByHash(tx.hash);
}
}
const addedBlock = await this.mainContext.checkAndAddBlock(dto)
added = true
lastAdded = added = await this.mainContext.checkAndAddBlock(dto)
this.push({
bcEvent: OtherConstants.BC_EVENT.HEAD_CHANGED,
block: addedBlock
block: added
})
// Clear invalid forks' cache
this.invalidForks.splice(0, this.invalidForks.length)
} catch (e) {
this.logger.error(e)
added = false
added = null
const theError = e && (e.message || e)
this.push({
blockResolutionError: theError
@@ -263,7 +263,8 @@
}
i++
}
}
} while (added)
return lastAdded
}
async forkResolution() {
@@ -11,41 +11,39 @@
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
"use strict";
const co = require('co');
import {FileDAL} from "../../app/lib/dal/fileDAL"
import {Directory} from "../../app/lib/system/directory"
const should = require('should');
const FileDAL = require('../../app/lib/dal/fileDAL').FileDAL
const dir = require('../../app/lib/system/directory').Directory
const indexer = require('../../app/lib/indexer').Indexer
let dal;
let dal:FileDAL
describe("Source DAL", function(){
before(() => co(function *() {
dal = new FileDAL(yield dir.getHomeParams(true, 'db0'));
yield dal.init();
}));
before(async () => {
dal = new FileDAL(await Directory.getHomeParams(true, 'db0'));
await dal.init({} as any)
})
it('should be able to feed the sindex with unordered rows', () => co(function *() {
yield dal.sindexDAL.insertBatch([
it('should be able to feed the sindex with unordered rows', async () => {
await dal.sindexDAL.insertBatch([
{ op: 'UPDATE', tx: null, identifier: 'SOURCE_1', pos: 4, written_on: '139-H', writtenOn: 139, written_time: 4500, consumed: true, conditions: 'SIG(ABC)' },
{ op: 'CREATE', tx: null, identifier: 'SOURCE_1', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'SIG(ABC)' },
{ op: 'CREATE', tx: null, identifier: 'SOURCE_2', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'SIG(ABC)' },
{ op: 'CREATE', tx: null, identifier: 'SOURCE_3', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'SIG(DEF)' }
]);
(yield dal.sindexDAL.findRaw({ identifier: 'SOURCE_1' })).should.have.length(2);
(yield dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(4);
] as any);
(await dal.sindexDAL.findRaw({ identifier: 'SOURCE_1' })).should.have.length(2);
(await dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(4);
// Source availability
const sourcesOfDEF = yield dal.sindexDAL.getAvailableForPubkey('DEF');
const sourcesOfDEF = await dal.sindexDAL.getAvailableForPubkey('DEF');
sourcesOfDEF.should.have.length(1);
const sourcesOfABC = yield dal.sindexDAL.getAvailableForPubkey('ABC');
const sourcesOfABC = await dal.sindexDAL.getAvailableForPubkey('ABC');
sourcesOfABC.should.have.length(1);
const source1 = yield dal.sindexDAL.getSource('SOURCE_1', 4);
const source1 = await dal.sindexDAL.getSource('SOURCE_1', 4) as any
source1.should.have.property('consumed').equal(true);
const udSources = yield dal.sindexDAL.getUDSources('ABC');
const udSources = await dal.sindexDAL.getUDSources('ABC');
udSources.should.have.length(2);
udSources[0].should.have.property('consumed').equal(false);
udSources[1].should.have.property('consumed').equal(true);
}));
});
})
})
@@ -11,132 +11,131 @@
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Affero General Public License for more details.
"use strict";
const co = require('co');
import {FileDAL} from "../../app/lib/dal/fileDAL"
import {Directory} from "../../app/lib/system/directory"
import {Indexer} from "../../app/lib/indexer"
import {simpleNodeWith2Users} from "../integration/tools/toolbox"
const should = require('should');
const FileDAL = require('../../app/lib/dal/fileDAL').FileDAL
const dir = require('../../app/lib/system/directory').Directory
const indexer = require('../../app/lib/indexer').Indexer
const toolbox = require('../integration/tools/toolbox');
let dal;
let dal:FileDAL
describe("Triming", function(){
before(() => co(function *() {
dal = new FileDAL(yield dir.getHomeParams(true, 'db0'));
yield dal.init();
}));
before(async () => {
dal = new FileDAL(await Directory.getHomeParams(true, 'db0'));
await dal.init({} as any)
})
it('should be able to feed the bindex', () => co(function *() {
yield dal.bindexDAL.insertBatch([
it('should be able to feed the bindex', async () => {
await dal.bindexDAL.insertBatch([
{ number: 121, version: 6, bsize: 0, hash: "HASH", issuer: "ISSUER", time: 0, membersCount: 3, issuersCount: 2, issuersFrame: 1, issuersFrameVar: 2, avgBlockSize: 0, medianTime: 1482500000, dividend: 100, mass: 300, massReeval: 300, unitBase: 2, powMin: 70, udTime: 0, udReevalTime: 0, diffNumber: 5, speed: 1.0 },
{ number: 122, version: 6, bsize: 0, hash: "HASH", issuer: "ISSUER", time: 0, membersCount: 3, issuersCount: 2, issuersFrame: 1, issuersFrameVar: 2, avgBlockSize: 0, medianTime: 1482500000, dividend: 100, mass: 300, massReeval: 300, unitBase: 2, powMin: 70, udTime: 0, udReevalTime: 0, diffNumber: 5, speed: 1.0 },
{ number: 123, version: 6, bsize: 0, hash: "HASH", issuer: "ISSUER", time: 0, membersCount: 3, issuersCount: 2, issuersFrame: 1, issuersFrameVar: 2, avgBlockSize: 0, medianTime: 1482500000, dividend: 100, mass: 300, massReeval: 300, unitBase: 2, powMin: 70, udTime: 0, udReevalTime: 0, diffNumber: 5, speed: 1.0 },
{ number: 124, version: 6, bsize: 0, hash: "HASH", issuer: "ISSUER", time: 0, membersCount: 3, issuersCount: 2, issuersFrame: 1, issuersFrameVar: 2, avgBlockSize: 0, medianTime: 1482500000, dividend: 100, mass: 300, massReeval: 300, unitBase: 2, powMin: 70, udTime: 0, udReevalTime: 0, diffNumber: 5, speed: 1.0 },
{ number: 125, version: 6, bsize: 0, hash: "HASH", issuer: "ISSUER", time: 0, membersCount: 3, issuersCount: 2, issuersFrame: 1, issuersFrameVar: 2, avgBlockSize: 0, medianTime: 1482500000, dividend: 100, mass: 300, massReeval: 300, unitBase: 2, powMin: 70, udTime: 0, udReevalTime: 0, diffNumber: 5, speed: 1.0 }
]);
}));
] as any);
})
it('should have bindex head(1) = 125', () => co(function *() {
const head = yield dal.bindexDAL.head(1);
it('should have bindex head(1) = 125', async () => {
const head = await dal.bindexDAL.head(1);
head.should.have.property('number').equal(125);
}));
})
it('should have bindex range(1, 3) = 125, 124, 123', () => co(function *() {
const range = yield dal.bindexDAL.range(1,3);
it('should have bindex range(1, 3) = 125, 124, 123', async () => {
const range = await dal.bindexDAL.range(1,3);
range.should.have.length(3);
range[0].should.have.property('number').equal(125);
range[1].should.have.property('number').equal(124);
range[2].should.have.property('number').equal(123);
}));
})
it('should be able to feed the iindex', () => co(function *() {
yield dal.iindexDAL.insertBatch([
it('should be able to feed the iindex', async () => {
await dal.iindexDAL.insertBatch([
{ op: 'CREATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: 'cat', created_on: '121-H', written_on: '122-H', writtenOn: 122, member: true, wasMember: true, kick: false },
{ op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: null, created_on: '121-H', written_on: '123-H', writtenOn: 123, member: null, wasMember: null, kick: true },
{ op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', uid: null, created_on: '121-H', written_on: '124-H', writtenOn: 124, member: false, wasMember: null, kick: false }
]);
let lignes = yield dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
] as any);
let lignes = await dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
lignes.should.have.length(3);
indexer.DUP_HELPERS.reduce(lignes).should.have.property('member').equal(false);
}));
Indexer.DUP_HELPERS.reduce(lignes).should.have.property('member').equal(false);
})
it('should be able to trim the iindex', () => co(function *() {
it('should be able to trim the iindex', async () => {
// Triming
yield dal.trimIndexes(124);
const lignes = yield dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
await dal.trimIndexes(124);
const lignes = await dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
lignes.should.have.length(2);
indexer.DUP_HELPERS.reduce(lignes).should.have.property('member').equal(false);
}));
Indexer.DUP_HELPERS.reduce(lignes).should.have.property('member').equal(false);
})
it('triming again the iindex should have no effet', () => co(function *() {
it('triming again the iindex should have no effet', async () => {
// Triming
yield dal.trimIndexes(124);
const lignes = yield dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
await dal.trimIndexes(124);
const lignes = await dal.iindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
lignes.should.have.length(2);
indexer.DUP_HELPERS.reduce(lignes).should.have.property('member').equal(false);
}));
Indexer.DUP_HELPERS.reduce(lignes).should.have.property('member').equal(false);
})
it('should be able to feed the mindex', () => co(function *() {
yield dal.mindexDAL.insertBatch([
it('should be able to feed the mindex', async () => {
await dal.mindexDAL.insertBatch([
{ op: 'CREATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '122-H', writtenOn: 122, expires_on: 1000, expired_on: null },
{ op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '123-H', writtenOn: 123, expires_on: 1200, expired_on: null },
{ op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '124-H', writtenOn: 124, expires_on: null, expired_on: null },
{ op: 'UPDATE', pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', created_on: '121-H', written_on: '125-H', writtenOn: 125, expires_on: 1400, expired_on: null }
]);
const lignes = yield dal.mindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
] as any);
const lignes = await dal.mindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
lignes.should.have.length(4);
indexer.DUP_HELPERS.reduce(lignes).should.have.property('expires_on').equal(1400);
}));
Indexer.DUP_HELPERS.reduce(lignes).should.have.property('expires_on').equal(1400);
})
it('should be able to trim the mindex', () => co(function *() {
it('should be able to trim the mindex', async () => {
// Triming
yield dal.trimIndexes(124);
const lignes = yield dal.mindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
await dal.trimIndexes(124);
const lignes = await dal.mindexDAL.reducable('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
lignes.should.have.length(3);
indexer.DUP_HELPERS.reduce(lignes).should.have.property('expires_on').equal(1400);
}));
Indexer.DUP_HELPERS.reduce(lignes).should.have.property('expires_on').equal(1400);
})
it('should be able to feed the cindex', () => co(function *() {
yield dal.cindexDAL.insertBatch([
it('should be able to feed the cindex', async () => {
await dal.cindexDAL.insertBatch([
{ op: 'CREATE', issuer: 'HgTT', receiver: 'DNan', created_on: '121-H', written_on: '126-H', writtenOn: 126, expires_on: 1000, expired_on: null },
{ op: 'UPDATE', issuer: 'HgTT', receiver: 'DNan', created_on: '121-H', written_on: '126-H', writtenOn: 126, expires_on: null, expired_on: 3000 },
{ op: 'CREATE', issuer: 'DNan', receiver: 'HgTT', created_on: '125-H', written_on: '126-H', writtenOn: 126, expires_on: null, expired_on: null }
]);
(yield dal.cindexDAL.findRaw({ issuer: 'HgTT' })).should.have.length(2);
(yield dal.cindexDAL.findRaw({ issuer: 'DNan' })).should.have.length(1);
}));
] as any);
(await dal.cindexDAL.findRaw({ issuer: 'HgTT' })).should.have.length(2);
(await dal.cindexDAL.findRaw({ issuer: 'DNan' })).should.have.length(1);
})
it('should be able to trim the cindex', () => co(function *() {
it('should be able to trim the cindex', async () => {
// Triming
yield dal.trimIndexes(127);
(yield dal.cindexDAL.findRaw({ issuer: 'HgTT' })).should.have.length(0);
await dal.trimIndexes(127);
(await dal.cindexDAL.findRaw({ issuer: 'HgTT' })).should.have.length(0);
// { op: 'UPDATE', issuer: 'DNan', receiver: 'HgTT', created_on: '125-H', written_on: '126-H', writtenOn: 126, expires_on: 3600, expired_on: null },/**/
(yield dal.cindexDAL.findRaw({ issuer: 'DNan' })).should.have.length(1);
}));
(await dal.cindexDAL.findRaw({ issuer: 'DNan' })).should.have.length(1);
})
it('should be able to feed the sindex', () => co(function *() {
yield dal.sindexDAL.insertBatch([
it('should be able to feed the sindex', async () => {
await dal.sindexDAL.insertBatch([
{ op: 'CREATE', identifier: 'SOURCE_1', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false },
{ op: 'UPDATE', identifier: 'SOURCE_1', pos: 4, written_on: '139-H', writtenOn: 139, written_time: 4500, consumed: true },
{ op: 'CREATE', identifier: 'SOURCE_2', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false },
{ op: 'CREATE', identifier: 'SOURCE_3', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false }
]);
(yield dal.sindexDAL.findRaw({ identifier: 'SOURCE_1' })).should.have.length(2);
(yield dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(4);
}));
] as any);
(await dal.sindexDAL.findRaw({ identifier: 'SOURCE_1' })).should.have.length(2);
(await dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(4);
})
it('should be able to trim the sindex', () => co(function *() {
it('should be able to trim the sindex', async () => {
// Triming
yield dal.trimIndexes(140);
(yield dal.sindexDAL.findRaw({ identifier: 'SOURCE_1' })).should.have.length(0);
(yield dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(2);
}));
await dal.trimIndexes(140);
(await dal.sindexDAL.findRaw({ identifier: 'SOURCE_1' })).should.have.length(0);
(await dal.sindexDAL.findRaw({ pos: 4 })).should.have.length(2);
})
it('should be able to trim the bindex', () => co(function *() {
it('should be able to trim the bindex', async () => {
// Triming
const server = (yield toolbox.simpleNodeWith2Users({
const server = (await simpleNodeWith2Users({
forksize: 9,
sigQty: 1,
dtDiffEval: 2,
@@ -144,13 +143,13 @@ describe("Triming", function(){
})).s1;
// const s1 = server.s1;
for (let i = 0; i < 13; i++) {
yield server.commit();
await server.commit();
}
(yield server.dal.bindexDAL.head(1)).should.have.property('number').equal(12);
(yield server.dal.bindexDAL.head(13)).should.have.property('number').equal(0);
yield server.commit();
should.not.exists(yield server.dal.bindexDAL.head(14)); // Trimed
yield server.closeCluster()
}));
});
(await server.dal.bindexDAL.head(1)).should.have.property('number').equal(12);
(await server.dal.bindexDAL.head(13)).should.have.property('number').equal(0);
await server.commit();
should.not.exists(await server.dal.bindexDAL.head(14)); // Trimed
await server.closeCluster()
})
})
@@ -12,7 +12,7 @@
// GNU Affero General Public License for more details.
module.exports = {
export const BLOCK_TEST_DATA = {
WRONG_SIGNATURE:
"Version: 10\n" +
"Type: Block\n" +
@@ -25,7 +25,7 @@ describe('Linting', () => {
];
// Specify style of output
const options = {};
const options:any = {};
options.formatter = 'stylish';
// Run the tests
@@ -12,31 +12,27 @@
// GNU Affero General Public License for more details.
"use strict";
const should = require('should');
const co = require('co');
const nacl = require('tweetnacl');
const base58 = require('../../../app/lib/common-libs/crypto/base58')
const naclUtil = require('../../../app/lib/common-libs/crypto/nacl-util')
const keyring = require('../../../app/lib/common-libs/crypto/keyring')
import {Base58decode, Base58encode} from "../../../app/lib/common-libs/crypto/base58"
import {decodeBase64, decodeUTF8, encodeBase64, encodeUTF8} from "../../../app/lib/common-libs/crypto/nacl-util"
import {KeyGen, verify} from "../../../app/lib/common-libs/crypto/keyring"
const Base58decode = base58.Base58decode
const Base58encode = base58.Base58encode
const should = require('should');
const enc = naclUtil.encodeBase64
const dec = naclUtil.decodeBase64
const enc = encodeBase64
const dec = decodeBase64
let pub, sec, rawPub, rawSec;
let pub:Uint8Array, sec:Uint8Array, rawPub:string, rawSec:string
describe('ed25519 tests:', function(){
before(() => co(function*() {
before(async () => {
// Generate the keypair
const keyPair = keyring.KeyGen('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP');
const k