nodes / typescript / modules / duniter-currency-monit · Commits

Commit 062e0b7b, authored Apr 19, 2020 by Cédric Moreau
[mod] Monit now uses a local blockchain storage periodically indexed
Parent: 0863718a
Showing 12 changed files, with 468 additions and 83 deletions (+468 / -83):
  lib/DataFinder.ts              +300  -41
  lib/SqliteBlockchain.ts        +105   -0
  lib/main.js                      +1   -1
  lib/updateCache2.ts             +13   -6
  lib/webserver2.ts               +36  -22
  routes/blockCount2.ts            +2   -2
  routes/gaussianWotQuality2.ts    +1   -1
  routes/members2.ts               +4   -4
  routes/membersCount2.ts          +2   -2
  routes/monetaryMass2.ts          +2   -2
  routes/willMembers2.ts           +1   -1
  routes/wotex2.ts                 +1   -1
lib/DataFinder.ts (+300, -41)

This diff is collapsed.
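The route and cache changes below all swap per-request construction (`new DataFinder(duniterServer)`) for a shared instance obtained with `DataFinder.getInstanceReindexedIfNecessary()`, and `lib/webserver2.ts` now calls `initMonitDB(duniterServer, resetData)` at startup. Since the DataFinder diff itself is collapsed, here is only a minimal sketch of the shape those call sites imply; the names `DataFinder`, `getInstanceReindexedIfNecessary` and `initMonitDB` come from the diff, everything inside the bodies is an illustrative assumption:

// Sketch only: lib/DataFinder.ts's own diff is collapsed above, so this is the shape the
// call sites imply, not the commit's implementation.
import { Server } from "duniter/server";

export class DataFinder {
  static instance: DataFinder | undefined;

  constructor(private readonly duniterServer: Server) {}

  // Routes now call this instead of `new DataFinder(duniterServer)`: one shared instance,
  // re-indexed into the local SQLite copy only when it lags behind the node.
  static async getInstanceReindexedIfNecessary(): Promise<DataFinder> {
    if (!DataFinder.instance) {
      throw Error("initMonitDB() must be called before the routes use DataFinder");
    }
    // assumption: run an incremental indexation here if new blocks have arrived
    return DataFinder.instance;
  }
}

// Called once at startup by lib/webserver2.ts (see its diff below).
export async function initMonitDB(duniterServer: Server, resetData: boolean = false): Promise<void> {
  if (resetData) {
    // assumption: wipe the locally stored blocks before the first full indexation
  }
  DataFinder.instance = new DataFinder(duniterServer);
}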
lib/SqliteBlockchain.ts (new file, mode 100644, +105)
import { SqliteTable } from "duniter/app/lib/dal/indexDAL/sqlite/SqliteTable";
import { SQLiteDriver } from "duniter/app/lib/dal/drivers/SQLiteDriver";
import { SqlNotNullableFieldDefinition, SqlNullableFieldDefinition } from "duniter/app/lib/dal/indexDAL/sqlite/SqlFieldDefinition";
import { MonitorExecutionTime } from "./MonitorExecutionTime";
import { DBBlock } from "duniter/app/lib/db/DBBlock";

// Local SQLite storage of blocks for Monit. The extra "archived" flag is managed by
// setArchived() / trimNonArchived() below.
export class SqliteBlockchain extends SqliteTable<MonitDBBlock> {

  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
    super(
      'monit',
      {
        'archived':       new SqlNotNullableFieldDefinition('BOOLEAN', true),
        'fork':           new SqlNotNullableFieldDefinition('BOOLEAN', true),
        'hash':           new SqlNotNullableFieldDefinition('VARCHAR', false, 64),
        'inner_hash':     new SqlNotNullableFieldDefinition('VARCHAR', false, 64),
        'signature':      new SqlNotNullableFieldDefinition('VARCHAR', false, 100),
        'currency':       new SqlNotNullableFieldDefinition('VARCHAR', false, 50),
        'issuer':         new SqlNotNullableFieldDefinition('VARCHAR', false, 50),
        'version':        new SqlNotNullableFieldDefinition('INT', false),
        'membersCount':   new SqlNotNullableFieldDefinition('INT', false),
        'medianTime':     new SqlNotNullableFieldDefinition('INT', true), // DATETIME?
        'time':           new SqlNotNullableFieldDefinition('INT', false), // DATETIME?
        'powMin':         new SqlNotNullableFieldDefinition('INT', false),
        'number':         new SqlNotNullableFieldDefinition('INT', false),
        'nonce':          new SqlNotNullableFieldDefinition('INT', false),
        'issuersCount':   new SqlNotNullableFieldDefinition('INT', false),
        'parameters':     new SqlNullableFieldDefinition('VARCHAR', false, 255),
        'previousHash':   new SqlNullableFieldDefinition('VARCHAR', false, 64),
        'previousIssuer': new SqlNullableFieldDefinition('VARCHAR', false, 50),
        'monetaryMass':   new SqlNullableFieldDefinition('VARCHAR', false, 100),
        'UDTime':         new SqlNullableFieldDefinition('INT', false), // DATETIME
        'dividend':       new SqlNullableFieldDefinition('INT', false), // DEFAULT \'0\'
        'unitbase':       new SqlNullableFieldDefinition('INT', false),
        'transactions':   new SqlNullableFieldDefinition('TEXT', false),
        'certifications': new SqlNullableFieldDefinition('TEXT', false),
        'identities':     new SqlNullableFieldDefinition('TEXT', false),
        'joiners':        new SqlNullableFieldDefinition('TEXT', false),
        'actives':        new SqlNullableFieldDefinition('TEXT', false),
        'leavers':        new SqlNullableFieldDefinition('TEXT', false),
        'revoked':        new SqlNullableFieldDefinition('TEXT', false),
        'excluded':       new SqlNullableFieldDefinition('TEXT', false),
      },
      getSqliteDB
    );
    this.name = 'block'
  }

  @MonitorExecutionTime()
  async insertBatch(records: MonitDBBlock[]): Promise<void> {
    // Array fields are serialized to JSON strings before insertion.
    records.forEach((b: any) => {
      for (const prop of ['joiners', 'actives', 'leavers', 'identities', 'certifications', 'transactions', 'revoked', 'excluded']) {
        b[prop] = JSON.stringify(b[prop]);
      }
      return b
    });
    if (records.length) {
      return this.insertBatchInTable(this.driver, records)
    }
  }

  @MonitorExecutionTime()
  async query(sql: string, params?: any[]): Promise<any> {
    return this.driver.sqlRead(sql, params || [])
  }

  async getBlock(number: number): Promise<MonitDBBlock|null> {
    const blocks = await this.driver.sqlRead('SELECT * FROM block WHERE number = ?', [number])
    return blocks.length ? blocks[0] : null
  }

  async getHighestBlock(): Promise<MonitDBBlock|null> {
    const blocks = await this.driver.sqlRead('SELECT * FROM block ORDER BY number DESC LIMIT 1', [])
    return blocks.length ? blocks[0] : null
  }

  async getHighestBlockNumber(): Promise<number> {
    const block = await this.getHighestBlock()
    return block && block.number || -1
  }

  async getHighestArchivedBlockNumber(): Promise<number> {
    const block = await this.driver.sqlRead('SELECT * FROM block WHERE archived ORDER BY number DESC LIMIT 1', [])
    return block.length && block[0].number || -1
  }

  trimNonArchived() {
    return this.driver.sqlWrite('DELETE FROM block WHERE NOT archived', [])
  }

  setArchived(currentCeil: number) {
    return this.driver.sqlWrite('UPDATE block SET archived = ? WHERE number <= ? AND NOT archived', [true, currentCeil])
  }

  deleteAll() {
    return this.driver.sqlWrite('DELETE FROM block', [])
  }
}

export interface MonitDBBlock extends DBBlock {
  archived: boolean
}
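The indexation that drives this table lives in the collapsed lib/DataFinder.ts diff. Purely as an illustration of how the methods above fit together, here is a minimal sketch of one indexing pass; `fetchBlocks` and `archiveCeil` are assumed inputs for the sketch, not names from this commit:

// Illustrative sketch only, not the commit's indexer. `fetchBlocks` stands in for whatever
// provides new blocks (e.g. the Duniter node's own DAL); `archiveCeil` is an assumed cut-off.
import { SqliteBlockchain, MonitDBBlock } from "./SqliteBlockchain";

async function indexOnce(
  blockchain: SqliteBlockchain,
  fetchBlocks: (fromNumber: number) => Promise<MonitDBBlock[]>,
  archiveCeil: number,
): Promise<void> {
  // Drop non-archived blocks left by a previous run (they might sit on a fork),
  // then resume right after the highest archived block (-1 when the table is empty).
  await blockchain.trimNonArchived();
  const resumeAt = (await blockchain.getHighestArchivedBlockNumber()) + 1;

  // insertBatch() JSON-stringifies the array fields (joiners, transactions, ...) before writing.
  await blockchain.insertBatch(await fetchBlocks(resumeAt));

  // Flag everything up to archiveCeil as archived so the next run keeps it.
  await blockchain.setArchived(archiveCeil);
}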
lib/main.js
@@ -4,7 +4,7 @@ const co = require('co');
 const os = require('os');
 const fs = require('fs');
-const webserver = require(__dirname + '/webserver.js');
+const webserver = require(__dirname + '/webserver2.js');
 const timestampToDatetime = require(__dirname + '/timestampToDatetime.js');
 /****************************
lib/updateCache2.ts
@@ -3,6 +3,7 @@
 import { DataFinder } from "./DataFinder";
 import { DBBlock } from "duniter/app/lib/db/DBBlock";
 import { MonitConstants } from "./constants2";
+import { Server } from "duniter/server";
 const co = require('co');
@@ -12,9 +13,9 @@ const co = require('co');
  */
 module.exports = async (req: any, res: any, next: any) => {
-  var { duniterServer, cache } = req.app.locals
+  var { duniterServer, cache } = req.app.locals as { duniterServer: Server, cache: MonitCache };
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // Define the constants
@@ -110,7 +111,7 @@ module.exports = async (req:any, res:any, next:any) => {
     else if (req.query.begin > cache.endBlock[0].number) {
       let beginTime = cache.endBlock[0].medianTime - (parseInt(cache.step) * unitTime * MonitConstants.STEP_COUNT_MIN);
-      cache.beginBlock = [await dataFinder.getBlockWhereMedianTimeGte(beginTime)];
+      cache.beginBlock = await dataFinder.getBlockWhereMedianTimeGte(beginTime);
     } else {
       cache.beginBlock = [await dataFinder.getBlock(req.query.begin)];
     }
@@ -125,20 +126,22 @@ module.exports = async (req:any, res:any, next:any) => {
     } else {
       cache.adaptMaxPoints = "begin";
     }
+    if (!cache.beginBlock || !cache.beginBlock[0]) {
+      throw Error("No begin block")
+    }
     // Apply nbMaxPoints and adaptMaxPoints
     if (cache.adaptMaxPoints == "begin") {
       if (Math.ceil((cache.endBlock[0].medianTime - cache.beginBlock[0].medianTime) / (cache.step * unitTime)) > cache.nbMaxPoints) {
         let newBeginTime = cache.endBlock[0].medianTime - cache.step * cache.nbMaxPoints * unitTime;
-        cache.beginBlock = [await dataFinder.getBlockWhereMedianTimeGte(newBeginTime)];
+        cache.beginBlock = await dataFinder.getBlockWhereMedianTimeGte(newBeginTime);
       }
     } else if (cache.adaptMaxPoints == "step") {
       cache.step = Math.ceil((cache.endBlock[0].medianTime - cache.beginBlock[0].medianTime) / (MonitConstants.STEP_COUNT_MAX * unitTime));
     } else {
       let newEndTime = cache.beginBlock[0].medianTime + cache.step * cache.nbMaxPoints * unitTime;
-      cache.endBlock = [await dataFinder.getBlockWhereMedianTimeLte(newEndTime)];
+      cache.endBlock = await dataFinder.getBlockWhereMedianTimeLte(newEndTime);
     }
     // Calculate stepTime
@@ -293,3 +296,7 @@ module.exports = async (req:any, res:any, next:any) => {
   }
 }
+
+interface MonitCache {
+  [k: string]: any
+  beginBlock: null | DBBlock[]
+}
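The dropped `[ ... ]` wrappers and the new `MonitCache.beginBlock: null | DBBlock[]` field suggest that the median-time range queries now return their matching rows directly. A minimal sketch of the signatures this implies, inferred only from the call sites above (the real declarations are in the collapsed lib/DataFinder.ts diff):

// Inferred from how updateCache2.ts consumes the results; not copied from the commit.
import { DBBlock } from "duniter/app/lib/db/DBBlock";

export interface DataFinderRangeQueries {
  // Assigned straight to cache.beginBlock / cache.endBlock, which are typed null | DBBlock[],
  // hence the removed [ ... ] wrappers in the hunks above.
  getBlockWhereMedianTimeGte(medianTime: number): Promise<DBBlock[]>;
  getBlockWhereMedianTimeLte(medianTime: number): Promise<DBBlock[]>;
  // Still wrapped in an array by the caller, so presumably a single block (or null).
  getBlock(blockNumber: number): Promise<DBBlock | null>;
}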
lib/webserver.js → lib/webserver2.ts
"
use strict
"
;
import
{
Server
}
from
"
duniter/server
"
;
import
{
initMonitDB
}
from
"
./DataFinder
"
;
const
fs
=
require
(
'
fs
'
);
//const util = require('util');
const
Q
=
require
(
'
q
'
);
...
...
@@ -13,13 +16,13 @@ const bodyParser = require('body-parser');
const
routes
=
require
(
__dirname
+
'
/../routes
'
);
const
tpl
=
require
(
__dirname
+
'
/tplit.js
'
);
module
.
exports
=
(
host
,
port
,
appParente
,
duniterServer
,
monitDatasPath
,
offset
,
cache
)
=>
{
module
.
exports
=
(
host
:
any
,
port
:
any
,
appParente
:
any
,
duniterServer
:
Server
,
monitDatasPath
:
any
,
offset
:
any
,
cache
:
any
,
resetData
:
boolean
=
false
)
=>
{
var
app
=
express
();
app
.
use
(
morgan
(
'
\
x1b[90m:remote-addr :remote-user [:date[clf]] :method :url HTTP/:http-version :status :res[content-length] - :response-time ms
\
x1b[0m
'
,
{
stream
:
{
write
:
function
(
message
){
write
:
function
(
message
:
any
){
message
&&
console
.
log
(
message
.
replace
(
/
\n
$/
,
''
));
}
}
...
...
@@ -44,7 +47,7 @@ module.exports = (host, port, appParente, duniterServer, monitDatasPath, offset,
/***************************************
* CSV des membres calculants
***************************************/
app
.
get
(
'
/csvCalculatorsRank
'
,
function
(
req
,
res
)
{
app
.
get
(
'
/csvCalculatorsRank
'
,
function
(
req
:
any
,
res
:
any
)
{
let
files
=
fs
.
readdirSync
(
monitDatasPath
+
'
/calculators_rank/
'
)
let
maxTimestamp
=
0
for
(
let
file
of
files
)
{
...
...
@@ -61,35 +64,46 @@ module.exports = (host, port, appParente, duniterServer, monitDatasPath, offset,
if
(
appParente
==
null
)
{
let
httpServer
=
http
.
createServer
(
app
);
httpServer
.
on
(
'
error
'
,
function
(
err
)
{
httpServer
.
on
(
'
error
'
,
function
(
err
:
any
)
{
httpServer
.
errorPropagates
(
err
);
});
return
{
openConnection
:
()
=>
co
(
function
*
()
{
try
{
yield
Q
.
Promise
((
resolve
,
reject
)
=>
{
// Weird the need of such a hack to catch an exception...
httpServer
.
errorPropagates
=
function
(
err
)
{
reject
(
err
);
};
openConnection
:
async
()
=>
{
try
{
await
Q
.
Promise
((
resolve
:
any
,
reject
:
any
)
=>
{
// Weird the need of such a hack to catch an exception...
httpServer
.
errorPropagates
=
function
(
err
:
any
)
{
reject
(
err
);
};
httpServer
.
listen
(
port
,
host
,
(
err
:
any
)
=>
{
if
(
err
)
return
reject
(
err
);
resolve
(
httpServer
);
});
});
// Init + first incremental indexation
await
initMonitDB
(
duniterServer
,
resetData
)
console
.
log
(
'
Server listening on http://
'
+
host
+
'
:
'
+
port
);
httpServer
.
listen
(
port
,
host
,
(
err
)
=>
{
if
(
err
)
return
reject
(
err
);
resolve
(
httpServer
);
});
});
console
.
log
(
'
Server listening on http://
'
+
host
+
'
:
'
+
port
);
}
catch
(
e
)
{
console
.
warn
(
'
Could NOT listen to http://
'
+
host
+
'
:
'
+
port
);
console
.
warn
(
e
);
}
}),
}
catch
(
e
)
{
console
.
warn
(
'
Could NOT listen to http://
'
+
host
+
'
:
'
+
port
);
console
.
warn
(
e
);
}
},
};
}
else
{
appParente
.
use
(
"
/currency-monit
"
,
app
);
return
{
openConnection
:
async
()
=>
{
console
.
log
(
'
No connection to open
'
)
}
};
}
...
...
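The converted module keeps its signature compatible: the new `resetData` parameter defaults to `false`. A hypothetical caller (the real call site is not among the hunks shown here) could thread the flag through like this:

// Hypothetical caller, for illustration only; host, port and paths are placeholder values.
import { Server } from "duniter/server";

declare const duniterServer: Server;  // provided by the running Duniter node
const startWebServer = require(__dirname + '/webserver2.js');

async function start(resetData: boolean): Promise<void> {
  const srv = startWebServer('127.0.0.1', 8119, null, duniterServer, '/var/lib/monit', 0, {}, resetData);
  // openConnection() now awaits initMonitDB(duniterServer, resetData) right after listen(),
  // so the first incremental indexation completes before "Server listening" is logged.
  await srv.openConnection();
}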
routes/blockCount2.ts

@@ -15,9 +15,9 @@ var previousBlockchainTime= 0;
 module.exports = async (req: any, res: any, next: any) => {
-  var { duniterServer, monitDatasPath } = req.app.locals
+  var { monitDatasPath } = req.app.locals
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // get GET parameters
routes/gaussianWotQuality2.ts

@@ -11,7 +11,7 @@ module.exports = async (req:any, res:any, next:any) => {
   var { duniterServer } = req.app.locals
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // get GET parameters
routes/members2.ts

@@ -45,7 +45,7 @@ module.exports = async (req: any, res: any, next: any) => {
   var { duniterServer } = req.app.locals
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // Initialize the constants
@@ -181,7 +181,7 @@ module.exports = async (req: any, res: any, next: any) => {
   for (let m = 0; m < membersList.length; m++) {
     // Get the write blockstamp and expiry date of member m's current membership
-    let tmpQueryResult = await dataFinder.membershipWrittenOnExpiresOn(membersList[m].pub);
+    let tmpQueryResult = [await dataFinder.membershipWrittenOnExpiresOn(membersList[m].pub)];
     membershipsExpireTimeList.push(tmpQueryResult[0].expires_on);
     // Extract the block number from the write blockstamp of the current membership
@@ -274,11 +274,11 @@ module.exports = async (req: any, res: any, next: any) => {
   let tmpQueryGetUidProtagonistCert
   if (mode == 'emitted') {
-    tmpQueryGetUidProtagonistCert = await dataFinder.getProtagonist(tmpQueryCertifsList[i].receiver)
+    tmpQueryGetUidProtagonistCert = [await dataFinder.getProtagonist(tmpQueryCertifsList[i].receiver)]
   } else {
-    tmpQueryGetUidProtagonistCert = await dataFinder.getProtagonist(tmpQueryCertifsList[i].issuer)
+    tmpQueryGetUidProtagonistCert = [await dataFinder.getProtagonist(tmpQueryCertifsList[i].issuer)]
   }
   let tmpBlockWrittenOn = tmpQueryCertifsList[i].written_on.split("-");
routes/membersCount2.ts

@@ -11,9 +11,9 @@ const getLang = require(__dirname + '/../lib/getLang')
 module.exports = async (req: any, res: any, next: any) => {
-  var { duniterServer, cache } = req.app.locals
+  var { cache } = req.app.locals
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // get GET parameters
routes/monetaryMass2.ts

@@ -9,8 +9,8 @@ module.exports = async (req:any, res:any, next:any) => {
   var { duniterServer } = req.app.locals
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // get GET parameters
     var begin = req.query.begin >= 2 && req.query.begin || 2; // Default Value
routes/willMembers2.ts

@@ -27,7 +27,7 @@ module.exports = async (req: any, res: any, next: any) => {
   const locals: { duniterServer: Server } = req.app.locals
   const duniterServer = locals.duniterServer
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // get blockchain timestamp
routes/wotex2.ts

@@ -10,7 +10,7 @@ module.exports = async (req:any, res:any, next:any) => {
   var { duniterServer } = req.app.locals
-  const dataFinder = new DataFinder(duniterServer)
+  const dataFinder = await DataFinder.getInstanceReindexedIfNecessary()
   try {
     // get GET parameters