diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index c970de17b3adb23edb8ba055453abec23f341c7e..bb52cf4419879e2db75d534de5a3040a7f538947 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -1,7 +1,6 @@
 stages:
   - tests
   - package
-  - quality
   - integration
   - prerelease
   - release
@@ -95,7 +94,7 @@ audit_dependencies:
   script:
     - cargo deny --workspace check
 
-.integration_rules:
+.integration_rules: &integration_rules
   allow_failure: true
   rules:
     - if: $CI_COMMIT_TAG
@@ -165,11 +164,9 @@ releases:x64:
   before_script:
     - docker info
   script:
-    - docker pull $CI_REGISTRY_IMAGE:$IMAGE_TAG || true
     - docker build --cache-from $CI_REGISTRY_IMAGE:$IMAGE_TAG --pull -t "$CI_REGISTRY_IMAGE:$IMAGE_TAG" --build-arg="INSTALL_DEX=$INSTALL_DEX" -f release/docker/Dockerfile .
-    #- docker login -u "gitlab-ci-token" -p "$CI_BUILD_TOKEN" $CI_REGISTRY
-    #- docker push "$CI_REGISTRY_IMAGE:$IMAGE_TAG"
-    # Temporary push on dockerhub 
+    - docker login -u "gitlab-ci-token" -p "$CI_JOB_TOKEN" $CI_REGISTRY
+    - docker push "$CI_REGISTRY_IMAGE:$IMAGE_TAG"
     - docker login -u "duniterteam" -p "$DUNITERTEAM_PASSWD"
     - docker tag "$CI_REGISTRY_IMAGE:$IMAGE_TAG" "duniter/duniter:$IMAGE_TAG"
     - docker push "duniter/duniter:$IMAGE_TAG"
@@ -208,16 +205,15 @@ package:prod:docker:
     - docker:18.06-dind
   script:
     - docker build --pull -t "$CI_REGISTRY_IMAGE:$CI_COMMIT_TAG" -f release/docker/Dockerfile .
-    - docker login -u "gitlab-ci-token" -p "$CI_BUILD_TOKEN" $CI_REGISTRY
+    - docker login -u "gitlab-ci-token" -p "$CI_JOB_TOKEN" $CI_REGISTRY
     - docker push "$CI_REGISTRY_IMAGE:$CI_COMMIT_TAG"
     - docker login -u "duniterteam" -p "$DUNITERTEAM_PASSWD"
-    - docker tag "$CI_REGISTRY_IMAGE:$CI_BUILD_TAG" "duniter/duniter:$CI_BUILD_TAG"
+    - docker tag "$CI_REGISTRY_IMAGE:$CI_COMMIT_TAG" "duniter/duniter:$CI_COMMIT_TAG"
     - docker push "duniter/duniter:$CI_COMMIT_TAG"
-    - docker tag "$CI_REGISTRY_IMAGE:$CI_BUILD_TAG" duniter/duniter
+    - docker tag "$CI_REGISTRY_IMAGE:$CI_COMMIT_TAG" duniter/duniter
     - docker push duniter/duniter
 
-
-.release_jobs:
+.release_jobs: &release_jobs
   image: rdadev/jinja2:py3.6
   tags:
     - redshift
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 40be38deb2beae36fdeb60a3ea2b1ab0db68a641..21d478534e539455f96ebb3942634748b1ba2cda 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -4,6 +4,18 @@
 
 ## [Unreleased] - ReleaseDate
 
+## [1.9.1] - 2024-11-03
+
+### Highlights
+- Merged BMA optimizations from version 1.8
+- Maintained Rust oxidation and GVA API
+- Improved overall performance
+
+### Features
+- Full support for BMA and GVA APIs
+- Progressive migration to Rust (oxidation)
+- BMA query optimizations
+
 ## [1.8.0] - 2020-03-12
 
 ### Highlights
diff --git a/Cargo.lock b/Cargo.lock
index 3aba3cfaafe4093196822cd645e87363a569e77f..c0d38570765ffef5187e7b980f60cf1e231cd267 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -36,21 +36,6 @@ dependencies = [
  "memchr",
 ]
 
-[[package]]
-name = "ansi_term"
-version = "0.11.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
-dependencies = [
- "winapi",
-]
-
-[[package]]
-name = "anyhow"
-version = "1.0.40"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b"
-
 [[package]]
 name = "arrayref"
 version = "0.3.6"
@@ -64,14 +49,20 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b"
 
 [[package]]
-name = "arrayvec"
-version = "0.7.0"
+name = "autocfg"
+version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5a2f58b0bb10c380af2b26e57212856b8c9a59e0925b4c20f4a174a49734eaf7"
+checksum = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
 dependencies = [
- "serde",
+ "winapi",
 ]
 
+[[package]]
+name = "anyhow"
+version = "1.0.40"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "28b2cd92db5cbd74e8e5028f7e27dd7aa3090e89e4f2a197cc7c8dfb69c7063b"
+
 [[package]]
 name = "async-bincode"
 version = "0.6.1"
@@ -313,7 +304,7 @@ dependencies = [
 name = "block-buffer"
 version = "0.7.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b"
+checksum = "c0940dc441f31689269e10ac70eb1002a3a1d3ad1390e030043662eb7fe4688b"
 dependencies = [
  "block-padding",
  "byte-tools",
@@ -339,6 +330,28 @@ dependencies = [
  "byte-tools",
 ]
 
+[[package]]
+name = "bitflags"
+version = "1.3.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a"
+
+[[package]]
+name = "blake3"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "b64485778c4f16a6a5a9d335e80d449ac6c70cdd6a06d2af18a6f6f775a125b3"
+dependencies = [
+ "arrayref",
+ "arrayvec",
+ "cc",
+ "cfg-if 0.1.10",
+ "constant_time_eq",
+ "crypto-mac",
+ "digest",
+ "rayon",
+]
+
 [[package]]
 name = "bs58"
 version = "0.3.1"
@@ -393,9 +406,9 @@ checksum = "b700ce4376041dcd0a327fd0097c41095743c4c8af8887265942faf1100bd040"
 
 [[package]]
 name = "cc"
-version = "1.0.67"
+version = "1.0.73"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e3c69b077ad434294d3ce9f1f6143a2a4b89a8a2d54ef813d85003a4fd1137fd"
+checksum = "2fff2a6927b3bb87f9595d67196a70493f627687a71d87a0d692242c33f58c11"
 
 [[package]]
 name = "cfg-if"
@@ -409,6 +422,18 @@ version = "1.0.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
+[[package]]
+name = "constant_time_eq"
+version = "0.1.5"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "245097e9a4535ee1e3e3931fcfcd55a796a44c643e8596ff6566d68f09b87bbc"
+
+[[package]]
+name = "crc32fast"
+version = "1.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
+
 [[package]]
 name = "chrono"
 version = "0.4.19"
@@ -520,7 +545,8 @@ version = "0.9.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "2584f639eb95fea8c798496315b297cf81b9b58b6d30ab066a75455333cf4b12"
 dependencies = [
- "cfg-if 1.0.0",
+ "autocfg",
+ "cfg-if 0.1.10",
  "crossbeam-utils",
  "lazy_static",
  "memoffset",
@@ -533,9 +559,8 @@ version = "0.8.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e7e9d99fa91428effe99c5c6d4634cdeba32b8cf784fc428a2a687f61a952c49"
 dependencies = [
- "autocfg",
- "cfg-if 1.0.0",
- "lazy_static",
+ "cfg-if 0.1.10",
+ "crossbeam-utils",
 ]
 
 [[package]]
@@ -544,8 +569,8 @@ version = "0.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7c36c10130df424b2f3552fcc2ddcd9b28a27b1e54b358b45874f88d1ca6888c"
 dependencies = [
- "bitflags",
- "crossterm_winapi",
+ "autocfg",
+ "cfg-if 0.1.10",
  "lazy_static",
  "libc",
  "mio",
@@ -575,9 +600,9 @@ dependencies = [
 
 [[package]]
 name = "cryptoxide"
-version = "0.3.2"
+version = "0.3.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "46212f5d1792f89c3e866fb10636139464060110c568edd7f73ab5e9f736c26d"
+checksum = "42014d4c82e74bc17aaccc4bd75d3615d2b8236198de81c51bed5ddefaae6435"
 
 [[package]]
 name = "cslice"
@@ -730,11 +755,17 @@ dependencies = [
  "syn",
 ]
 
+[[package]]
+name = "cryptoxide"
+version = "0.3.6"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "42014d4c82e74bc17aaccc4bd75d3615d2b8236198de81c51bed5ddefaae6435"
+
 [[package]]
 name = "digest"
-version = "0.8.1"
+version = "0.9.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f3d0c8c8752312f9713efd397ff63acb9f85585afbf179282e720e7704954dd5"
+checksum = "d3dd60d1080a57a05ab032377049e0591415d2b31afd7028356dbf3cc6dcb066"
 dependencies = [
  "generic-array 0.12.4",
 ]
@@ -875,7 +906,8 @@ dependencies = [
 [[package]]
 name = "dubp-wot"
 version = "0.11.1"
-source = "git+https://git.duniter.org/nodes/rust/duniter-core#8687791c838050406871439f1ff832a9b56a6e24"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "59f62bdc1abf2da65794c9fd7fe46f7328834a97a2346048d46b0b6676302de0"
 dependencies = [
  "log",
  "once_cell",
@@ -1101,26 +1133,9 @@ version = "0.1.0"
 source = "git+https://git.duniter.org/nodes/rust/modules/duniter-gva#c8d178fd8981995f7077dfc910d00a00367e757f"
 dependencies = [
  "bincode",
- "chrono",
- "duniter-core",
- "parking_lot",
- "paste",
- "serde",
- "serde_json",
- "uninit",
- "zerocopy",
-]
-
-[[package]]
-name = "duniter-gva-dbs-reader"
-version = "0.1.0"
-source = "git+https://git.duniter.org/nodes/rust/modules/duniter-gva#c8d178fd8981995f7077dfc910d00a00367e757f"
-dependencies = [
- "anyhow",
- "arrayvec 0.7.0",
- "bincode",
- "duniter-core",
- "duniter-gva-db",
+ "bs58 0.3.1",
+ "dubp-wot",
+ "dup-crypto",
  "flate2",
  "rand 0.8.3",
  "resiter",
@@ -1553,15 +1568,12 @@ name = "generic-array"
 version = "0.12.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ffdf9f34f1447443d37393cc6c2b8313aebddcd96906caf34e54c68d8e57d7bd"
-dependencies = [
- "typenum",
-]
 
 [[package]]
 name = "generic-array"
-version = "0.14.4"
+version = "0.14.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "501466ecc8a30d1d3b7fc9229b122b2ce8ed6e9d9223f1138d4babb253e51817"
+checksum = "bff49e947297f3312447abdca79f45f4738097cc82b06e72054d2223f601f1b9"
 dependencies = [
  "typenum",
  "version_check",
@@ -1686,6 +1698,21 @@ version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
 
+[[package]]
+name = "instant"
+version = "0.1.12"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7a5bbe824c507c5da5956355e86a746d82e0e1464f65d862cc5e71da70e94b2c"
+dependencies = [
+ "cfg-if 1.0.0",
+]
+
+[[package]]
+name = "js-sys"
+version = "0.3.37"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "7f24254aa9a54b5c858eaee2f5bccdb46aaf0e486a595ed5fd8f86ba55232a70"
+
 [[package]]
 name = "http"
 version = "0.2.4"
@@ -2051,34 +2078,33 @@ dependencies = [
 
 [[package]]
 name = "neon"
-version = "0.4.0"
+version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6cac4691701b686e6c07b2eb5b51a9f26f5c11179c5d7924b78100dd387fc99d"
+checksum = "28e15415261d880aed48122e917a45e87bb82cf0260bb6db48bbab44b7464373"
 dependencies = [
- "cslice",
  "neon-build",
  "neon-runtime",
  "semver 0.9.0",
+ "smallvec",
 ]
 
 [[package]]
 name = "neon-build"
-version = "0.4.0"
+version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "f9ed332afd4711b84f4f83d334428a1fd9ce53620b62b87595934297c5ede2ed"
+checksum = "8bac98a702e71804af3dacfde41edde4a16076a7bbe889ae61e56e18c5b1c811"
 dependencies = [
- "cfg-if 0.1.10",
  "neon-sys",
 ]
 
 [[package]]
 name = "neon-runtime"
-version = "0.4.0"
+version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2beea093a60c08463f65e1da4cda68149986f60d8d2177489b44589463c782a6"
+checksum = "4676720fa8bb32c64c3d9f49c47a47289239ec46b4bdb66d0913cc512cb0daca"
 dependencies = [
- "cfg-if 0.1.10",
  "neon-sys",
+ "smallvec",
 ]
 
 [[package]]
@@ -2096,9 +2122,9 @@ dependencies = [
 
 [[package]]
 name = "neon-sys"
-version = "0.4.0"
+version = "0.10.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "69a6c1ba6b926746f4d3f596de18ce49d062d78fd9f35f636080232aa77a0e16"
+checksum = "a5ebc923308ac557184455b4aaa749470554cbac70eb4daa8b18cdc16bef7df6"
 dependencies = [
  "cc",
  "regex",
@@ -2423,11 +2449,11 @@ checksum = "bc881b2c22681370c6a780e47af9840ef841837bc98118431d4e1868bd0c1086"
 
 [[package]]
 name = "proc-macro2"
-version = "1.0.26"
+version = "1.0.43"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a152013215dca273577e18d2bf00fa862b89b24169fb78c4c95aeb07992c9cec"
+checksum = "0a2ca2c61bc9f3d74d2886294ab7b9853abd9c1ad903a3ac7815c58989bb7bab"
 dependencies = [
- "unicode-xid",
+ "unicode-ident",
 ]
 
 [[package]]
@@ -2585,13 +2611,20 @@ dependencies = [
 name = "read_input"
 version = "0.8.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b57518cc6538a2eb7dce826e24fa51d0b7cf8e744ee10c7f56259cdec40050e5"
+checksum = "d9b39be18770d11421cdb1b9947a45dd3f37e93092cbf377614828a319d5fee8"
+dependencies = [
+ "futures",
+ "tokio",
+]
 
 [[package]]
 name = "redox_syscall"
 version = "0.1.57"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "41cc0f7e4d5d4544e8861606a285bb08d3e70712ccc7d2b84d7c0ccfaf4b05ce"
+dependencies = [
+ "bitflags",
+]
 
 [[package]]
 name = "redox_syscall"
@@ -2776,18 +2809,18 @@ dependencies = [
 
 [[package]]
 name = "serde"
-version = "1.0.125"
+version = "1.0.143"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "558dc50e1a5a5fa7112ca2ce4effcb321b0300c0d4ccf0776a9f60cd89031171"
+checksum = "53e8e5d5b70924f74ff5c6d64d9a5acd91422117c60f48c4e07855238a254553"
 dependencies = [
  "serde_derive",
 ]
 
 [[package]]
 name = "serde_derive"
-version = "1.0.125"
+version = "1.0.143"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b093b7a2bb58203b5da3056c05b4ec1fed827dcfdb37347a8841695263b3d06d"
+checksum = "d3d8e8de557aee63c26b85b947f5e59b690d0454c753f3adeb5cd7835ab88391"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3013,13 +3046,13 @@ checksum = "1e81da0851ada1f3e9d4312c704aa4f8806f0f9d69faaf8df2f3464b4a9437c2"
 
 [[package]]
 name = "syn"
-version = "1.0.70"
+version = "1.0.99"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b9505f307c872bab8eb46f77ae357c8eba1fdacead58ee5a850116b1d7f82883"
+checksum = "58dbef6ec655055e20b86b15a8cc6d439cca19b667537ac6a1369572d151ab13"
 dependencies = [
  "proc-macro2",
  "quote",
- "unicode-xid",
+ "unicode-ident",
 ]
 
 [[package]]
@@ -3079,18 +3112,18 @@ dependencies = [
 
 [[package]]
 name = "thiserror"
-version = "1.0.24"
+version = "1.0.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e0f4a65597094d4483ddaed134f409b2cb7c1beccf25201a9f73c719254fa98e"
+checksum = "f5f6586b7f764adc0231f4c79be7b920e766bb2f3e51b3661cdb263828f19994"
 dependencies = [
  "thiserror-impl",
 ]
 
 [[package]]
 name = "thiserror-impl"
-version = "1.0.24"
+version = "1.0.32"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7765189610d8241a44529806d6fd1f2e0a08734313a35d5b3a556f92b381f3c0"
+checksum = "12bafc5b54507e0149cdf1b145a5d80ab80a90bcd9275df43d4fff68460f6c21"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3201,7 +3234,7 @@ dependencies = [
 name = "tower-service"
 version = "0.3.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6"
+checksum = "360dfd1d6d30e05fda32ace2c8c70e9c0a9da713275777f5a4dbb8a1893930c6"
 
 [[package]]
 name = "tracing"
@@ -3382,6 +3415,12 @@ version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9337591893a19b88d8d87f2cec1e73fad5cdfd10e5a6f349f498ad6ea2ffb1e3"
 
+[[package]]
+name = "unicode-ident"
+version = "1.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4f5b37a154999a8f3f98cc23a628d850e154479cd94decf3414696e12e31aaf"
+
 [[package]]
 name = "unicode-xid"
 version = "0.2.1"
@@ -3401,10 +3440,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a"
 
 [[package]]
-name = "unwrap"
-version = "1.2.1"
+name = "version_check"
+version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e33648dd74328e622c7be51f3b40a303c63f93e6fa5f08778b6203a4c25c20f"
+checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
 [[package]]
 name = "url"
@@ -3505,9 +3544,9 @@ dependencies = [
 
 [[package]]
 name = "wasi"
-version = "0.9.0+wasi-snapshot-preview1"
+version = "0.11.0+wasi-snapshot-preview1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cccddf32554fecc6acb585f82a32a72e28b48f8c4c1883ddfeeeaa96f7d8e519"
+checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423"
 
 [[package]]
 name = "wasi"
@@ -3517,9 +3556,9 @@ checksum = "1a143597ca7c7793eff794def352d41792a93c481eb1042423ff7ff72ba2c31f"
 
 [[package]]
 name = "wasm-bindgen"
-version = "0.2.73"
+version = "0.2.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "83240549659d187488f91f33c0f8547cbfef0b2088bc470c116d1d260ef623d9"
+checksum = "fc7652e3f6c4706c8d9cd54832c4a4ccb9b5336e2c3bd154d5cccfbf1c1f5f7d"
 dependencies = [
  "cfg-if 1.0.0",
  "wasm-bindgen-macro",
@@ -3527,13 +3566,13 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-backend"
-version = "0.2.73"
+version = "0.2.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ae70622411ca953215ca6d06d3ebeb1e915f0f6613e3b495122878d7ebec7dae"
+checksum = "662cd44805586bd52971b9586b1df85cdbbd9112e4ef4d8f41559c334dc6ac3f"
 dependencies = [
  "bumpalo",
- "lazy_static",
  "log",
+ "once_cell",
  "proc-macro2",
  "quote",
  "syn",
@@ -3542,9 +3581,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro"
-version = "0.2.73"
+version = "0.2.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "3e734d91443f177bfdb41969de821e15c516931c3c3db3d318fa1b68975d0f6f"
+checksum = "b260f13d3012071dfb1512849c033b1925038373aea48ced3012c09df952c602"
 dependencies = [
  "quote",
  "wasm-bindgen-macro-support",
@@ -3552,9 +3591,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-macro-support"
-version = "0.2.73"
+version = "0.2.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d53739ff08c8a68b0fdbcd54c372b8ab800b1449ab3c9d706503bc7dd1621b2c"
+checksum = "5be8e654bdd9b79216c2929ab90721aa82faf65c48cdf08bdc4e7f51357b80da"
 dependencies = [
  "proc-macro2",
  "quote",
@@ -3565,9 +3604,9 @@ dependencies = [
 
 [[package]]
 name = "wasm-bindgen-shared"
-version = "0.2.73"
+version = "0.2.82"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d9a543ae66aa233d14bb765ed9af4a33e81b8b58d1584cf1b47ff8cd0b9e4489"
+checksum = "6598dd0bd3c7d51095ff6531a5b23e02acdc81804e30d8f07afb77b7215a140a"
 
 [[package]]
 name = "web-sys"
@@ -3624,9 +3663,9 @@ dependencies = [
 
 [[package]]
 name = "zerocopy-derive"
-version = "0.2.1"
+version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "dc9c39e6d503229ffa00cc2954af4a751e6bbedf2a2c18e856eb3ece93d32495"
+checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb"
 dependencies = [
  "proc-macro2",
  "syn",
@@ -3635,18 +3674,18 @@ dependencies = [
 
 [[package]]
 name = "zeroize"
-version = "1.3.0"
+version = "1.5.7"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4756f7db3f7b5574938c3eb1c117038b8e07f95ee6718c0efad4ac21508f1efd"
+checksum = "c394b5bd0c6f669e7275d9c20aa90ae064cb22e75a1cad54e1b34088034b149f"
 dependencies = [
  "zeroize_derive",
 ]
 
 [[package]]
 name = "zeroize_derive"
-version = "1.1.0"
+version = "1.3.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a2c1e130bebaeab2f23886bf9acbaca14b092408c452543c857f66399cd6dab1"
+checksum = "3f8f187641dad4f680d25c4bfc4225b418165984179f26ca76ec4fb6441d3a17"
 dependencies = [
  "proc-macro2",
  "quote",
diff --git a/app/cli.ts b/app/cli.ts
index f0bb646c87ba360e32d03acb4d5493591e70cd9f..ca3e8db1c04f8d3d3617d28545af0c0f54691621 100644
--- a/app/cli.ts
+++ b/app/cli.ts
@@ -34,10 +34,10 @@ export const ExecuteCommand = () => {
 
       // Callback for command rejection
       let onReject: any = () =>
-        Promise.reject(Error("Uninitilized rejection throw"));
+        Promise.reject(Error("Uninitialized rejection throw"));
 
       // Command execution promise
-      const currentCommand = new Promise((resolve, reject) => {
+      const currentCommand = new Promise<void>((resolve, reject) => {
         onResolve = resolve;
         onReject = reject;
       });
diff --git a/app/lib/blockchain/DuniterBlockchain.ts b/app/lib/blockchain/DuniterBlockchain.ts
index 17e113f94b23b81f6d57a94ad22f488d6b616e0a..b336785677f6664efc833e23c7d0bd1cb52d6ed0 100644
--- a/app/lib/blockchain/DuniterBlockchain.ts
+++ b/app/lib/blockchain/DuniterBlockchain.ts
@@ -319,10 +319,10 @@ export class DuniterBlockchain {
     await this.removeMembershipsFromSandbox(block, dal);
     // Compute to be revoked members
     await this.computeToBeRevoked(indexes.mindex, dal);
-    // Delete eventually present transactions
-    await this.deleteTransactions(block, dal);
 
-    await dal.trimSandboxes(block);
+    if (trim) {
+      await dal.trimSandboxes(block);
+    }
 
     // Saves the block (DAL)
     await dal.saveBlock(dbb, conf);
@@ -550,6 +550,7 @@ export class DuniterBlockchain {
     if (block) {
       await this.undoDeleteTransactions(block, dal);
     }
+    NewLogger().info("Reverted block #%s", blockstamp);
   }
 
   static async undoMembersUpdate(blockstamp: string, dal: FileDAL) {
@@ -687,8 +688,7 @@ export class DuniterBlockchain {
     block.fork = true;
     try {
       // Saves the block (DAL)
-      block.wrong = false;
-      await dal.saveSideBlockInFile(block);
+      await dal.saveSideBlock(block);
       logger.info(
         "SIDE Block #%s-%s added to the blockchain in %s ms",
         block.number,
@@ -709,6 +709,7 @@ export class DuniterBlockchain {
     const TAIL = await dal.bindexDAL.tail();
     const MAX_BINDEX_SIZE = requiredBindexSizeForTail(TAIL, conf);
     const currentSize = HEAD.number - TAIL.number + 1;
+
     if (currentSize > MAX_BINDEX_SIZE) {
       await dal.trimIndexes(HEAD.number - MAX_BINDEX_SIZE);
     }
@@ -719,13 +720,13 @@ export function requiredBindexSizeForTail(
   TAIL: { issuersCount: number; issuersFrame: number },
   conf: { medianTimeBlocks: number; dtDiffEval: number; forksize: number }
 ) {
-  const bindexSize = [
-    TAIL.issuersCount,
-    TAIL.issuersFrame,
-    conf.medianTimeBlocks,
-    conf.dtDiffEval,
-  ].reduce((max, value) => {
-    return Math.max(max, value);
-  }, 0);
-  return conf.forksize + bindexSize;
+  return (
+    conf.forksize +
+    [
+      TAIL.issuersCount,
+      TAIL.issuersFrame,
+      conf.medianTimeBlocks,
+      conf.dtDiffEval,
+    ].reduce((max, value) => Math.max(max, value), 0)
+  );
 }
diff --git a/app/lib/blockchain/Switcher.ts b/app/lib/blockchain/Switcher.ts
index f600aeedfa7bde0e465df80714c2d3cc6d942250..ebeb59acc1395b63c3c9a908182fe70ad5130163 100644
--- a/app/lib/blockchain/Switcher.ts
+++ b/app/lib/blockchain/Switcher.ts
@@ -277,6 +277,11 @@ export class Switcher<T extends SwitchBlock> {
               s[0].number + i,
               e && e.message
             );
+          if (e.type === "NotFoundError" && this.logger) {
+            this.logger.error(
+              "CRITICAL: LevelDB has inconsistent state: " + e.stack
+            );
+          }
           added = false;
         }
         i++;
diff --git a/app/lib/common-libs/array-prune.ts b/app/lib/common-libs/array-prune.ts
index 42884cbb246b76a54876c3ce39690c12b5141b81..8e9c64a701cd0138ac886dce2612567125182424 100644
--- a/app/lib/common-libs/array-prune.ts
+++ b/app/lib/common-libs/array-prune.ts
@@ -12,7 +12,7 @@ export function arrayPruneAll<T>(array: T[], value: T) {
 }
 
 /**
- * Returs a copy of given array WITHOUT any record of `value`.
+ * Returns a copy of the given array WITHOUT any record of `value`.
  * @param original The array we want records, with `value` being excluded.
  * @param value The value we don't want to see in our copy array.
  */
diff --git a/app/lib/common-libs/constants.ts b/app/lib/common-libs/constants.ts
index 8a7e3f4a2f14b540381b2aabf961cc386d7eda49..08c495d137dd87fdb0ba00a301bb99de2b390eb5 100755
--- a/app/lib/common-libs/constants.ts
+++ b/app/lib/common-libs/constants.ts
@@ -53,8 +53,8 @@ const CONDITIONS =
   "\\)|CSV\\(" +
   CSV_INTEGER +
   "\\))))*";
-
-const BMA_REGEXP = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/;
+const CONDITION_SIG_PUBKEY = "SIG\\((" + PUBKEY + ")\\)";
+const BMA_REGEXP = /^BASIC_MERKLED_API( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))( (\/.+))?$/;
 const BMAS_REGEXP = /^BMAS( ([a-z_][a-z0-9-_.]*))?( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))( (\/.+))?$/;
 const BMATOR_REGEXP = /^BMATOR( ([a-z0-9]{16})\.onion)( ([0-9.]+))?( ([0-9a-f:]+))?( ([0-9]+))$/;
 const WS2P_REGEXP = /^WS2P (?:[1-9][0-9]* )?([a-f0-9]{8}) ([a-z_][a-z0-9-_.]*|[0-9.]+|[0-9a-f:]+) ([0-9]+)(?: (.+))?$/;
@@ -535,6 +535,8 @@ export const CommonConstants = {
     LOCKTIME: find("Locktime: (" + INTEGER + ")"),
     INLINE_COMMENT: exact(COMMENT),
     OUTPUT_CONDITION: exact(CONDITIONS),
+    OUTPUT_CONDITION_SIG_PUBKEY: find(CONDITION_SIG_PUBKEY),
+    OUTPUT_CONDITION_SIG_PUBKEY_UNIQUE: exact(CONDITION_SIG_PUBKEY),
   },
   PEER: {
     BLOCK: find("Block: (" + INTEGER + "-" + FINGERPRINT + ")"),
@@ -548,6 +550,7 @@ export const CommonConstants = {
   MILESTONES_PER_PAGE: 50,
   CHUNK_PREFIX: "chunk_",
   BLOCKS_IN_MEMORY_MAX: 288 * 60, // 288 = 1 day
+  MAX_SQLITE_WRITE_PENDINGS: 200,
 
   MAX_AGE_OF_PEER_IN_BLOCKS: 200, // blocks
   INITIAL_DOWNLOAD_SLOTS: 1, // 1 peer
diff --git a/app/lib/common-libs/manual-promise.ts b/app/lib/common-libs/manual-promise.ts
index 24d6382aa5f259c7d778a1f7d94f3a5312209ef8..2a7187ff8f29dbc55ad2873a3e6534fb36ba8c71 100644
--- a/app/lib/common-libs/manual-promise.ts
+++ b/app/lib/common-libs/manual-promise.ts
@@ -14,7 +14,7 @@ export interface ManualPromise<T> extends Querable<T> {
 export function newManualPromise<T>() {
   let resolveCb: (data: T) => void = () => {};
   let rejectCb: (error: Error) => void = () => {};
-  const p = new Promise((res, rej) => {
+  const p = new Promise<T>((res, rej) => {
     resolveCb = res;
     rejectCb = rej;
   });
diff --git a/app/lib/common-libs/timeout-promise.ts b/app/lib/common-libs/timeout-promise.ts
index eab7d91f93ab0e2efe82905358e71b44d1b10694..08c6cce2ba2489e70e24336f46f77e01d782fe47 100644
--- a/app/lib/common-libs/timeout-promise.ts
+++ b/app/lib/common-libs/timeout-promise.ts
@@ -12,7 +12,7 @@
 // GNU Affero General Public License for more details.
 
 export function newRejectTimeoutPromise(timeout: number) {
-  return new Promise((res, rej) => {
+  return new Promise<void>((res, rej) => {
     setTimeout(rej, timeout);
   });
 }
@@ -21,7 +21,7 @@ export function newResolveTimeoutPromise<T>(
   timeout: number,
   value: T
 ): Promise<T> {
-  return new Promise((res) => {
+  return new Promise<T>((res) => {
     setTimeout(() => res(value), timeout);
   });
 }
diff --git a/app/lib/common-libs/underscore.ts b/app/lib/common-libs/underscore.ts
index 3f567e3d81296ec7161300d45a01d39a0fd080af..a9d9073af7e0ef38892de5785edb02844999ccb8 100644
--- a/app/lib/common-libs/underscore.ts
+++ b/app/lib/common-libs/underscore.ts
@@ -92,8 +92,8 @@ export const Underscore = {
     return _underscore_.extend(t1, t2);
   },
 
-  range: (count: number, end?: number): number[] => {
-    return _underscore_.range(count, end);
+  range: (count: number, end?: number, step?: number): number[] => {
+    return _underscore_.range(count, end, step);
   },
 
   chain: <T>(element: T[]): UnderscoreClass<T> => {
diff --git a/app/lib/computation/BlockchainContext.ts b/app/lib/computation/BlockchainContext.ts
index e31fdbe08dd78ff166a0e3d0e5b60ab6fc34ddfc..acaa3640c62da98ebd0aeed81b3fabfb80d6ff70 100644
--- a/app/lib/computation/BlockchainContext.ts
+++ b/app/lib/computation/BlockchainContext.ts
@@ -165,7 +165,7 @@ export class BlockchainContext {
 
   async revertCurrentBlock(): Promise<DBBlock> {
     const head_1 = await this.dal.bindexDAL.head(1);
-    this.logger.debug("Reverting block #%s...", head_1.number);
+    this.logger.debug("Reverting block #%s-%s...", head_1.number, head_1.hash);
     const block = await this.dal.getAbsoluteValidBlockInForkWindow(
       head_1.number,
       head_1.hash
diff --git a/app/lib/constants.ts b/app/lib/constants.ts
index fc3d9a200082bb5ef05fa086d4df0422eb1d3c8f..02505d6a4b6bf9a5bf3840e151dd4663562a575b 100644
--- a/app/lib/constants.ts
+++ b/app/lib/constants.ts
@@ -218,7 +218,7 @@ module.exports = {
 
   PEER: CommonConstants.PEER,
 
-  CURRENT_DB_VERSION: 26,
+  CURRENT_DB_VERSION: 28, // Should be set with 'max(keys(MetaDAL.migration)) + 1'
 
   NETWORK: {
     MAX_MEMBERS_TO_FORWARD_TO_FOR_SELF_DOCUMENTS: 10,
diff --git a/app/lib/dal/drivers/LevelDBDriver.ts b/app/lib/dal/drivers/LevelDBDriver.ts
index 7959c3f1e632a97cb6ea8e069ace16587cb8816f..c6f1a461ec1f1add98db6afad47e0189c4e44806 100644
--- a/app/lib/dal/drivers/LevelDBDriver.ts
+++ b/app/lib/dal/drivers/LevelDBDriver.ts
@@ -13,14 +13,13 @@
 
 import * as levelup from "levelup";
 import { LevelUp } from "levelup";
-import { AbstractLevelDOWN, ErrorCallback } from "abstract-leveldown";
 import * as leveldown from "leveldown";
 import * as memdown from "memdown";
 
 export const LevelDBDriver = {
   newMemoryInstance: (): Promise<LevelUp> => {
     const impl: any = memdown.default();
-    return new Promise((res, rej) => {
+    return new Promise<LevelUp<any>>((res, rej) => {
       const db: LevelUp = levelup.default(impl, undefined, (err: Error) => {
         if (err) return rej(err);
         res(db);
@@ -30,7 +29,7 @@ export const LevelDBDriver = {
 
   newFileInstance: (path: string): Promise<LevelUp> => {
     const impl: any = leveldown.default(path);
-    return new Promise((res, rej) => {
+    return new Promise<LevelUp<any>>((res, rej) => {
       const db: LevelUp = levelup.default(impl, undefined, (err: Error) => {
         if (err) return rej(err);
         res(db);
diff --git a/app/lib/dal/drivers/SQLiteDriver.ts b/app/lib/dal/drivers/SQLiteDriver.ts
index 5a8b0bb068129563c43142329b44216a8d26838c..2145d965ac44d01b2cf29c8a73c65a7f9868dcaa 100644
--- a/app/lib/dal/drivers/SQLiteDriver.ts
+++ b/app/lib/dal/drivers/SQLiteDriver.ts
@@ -29,10 +29,15 @@ export class SQLiteDriver {
   getDB(): Promise<any> {
     if (!this.dbPromise) {
       this.dbPromise = (async () => {
-        this.logger.debug('Opening SQLite database "%s"...', this.path);
+        this.logger.trace('Opening SQLite database "%s"...', this.path);
+
         let sqlite = new sqlite3.Database(this.path);
         await new Promise<any>((resolve) => sqlite.once("open", resolve));
+
         // Database is opened
+        //this.logger.debug('Database "%s" opened', this.path);
+
+        // Trace SQL queries
         if (OtherConstants.SQL_TRACES) {
           sqlite.on("trace", (trace: any) => {
             this.logger.trace(trace);
@@ -41,7 +46,7 @@ export class SQLiteDriver {
 
         // Force case sensitiveness on LIKE operator
         const sql = "PRAGMA case_sensitive_like=ON";
-        await new Promise<any>((resolve, reject) =>
+        await new Promise<void>((resolve, reject) =>
           sqlite.exec(sql, (err: any) => {
             if (err)
               return reject(
@@ -95,12 +100,16 @@ export class SQLiteDriver {
   }
 
   async destroyDatabase(): Promise<void> {
-    this.logger.debug("Removing SQLite database...");
+    this.logger.debug('Removing SQLite database "%s"...', this.path);
     await this.closeConnection();
     if (this.path !== MEMORY_PATH) {
       await RealFS().fsUnlink(this.path);
     }
-    this.logger.debug("Database removed");
+    this.logger.debug('Database "%s" removed', this.path);
+  }
+
+  isClosed() {
+    return !this.dbPromise;
   }
 
   async closeConnection(): Promise<void> {
@@ -111,10 +120,10 @@ export class SQLiteDriver {
     if (process.platform === "win32") {
       db.open; // For an unknown reason, we need this line.
     }
-    await new Promise((resolve, reject) => {
-      this.logger.debug("Trying to close SQLite...");
+    await new Promise<void>((resolve, reject) => {
+      this.logger.trace('Closing SQLite database "%s"...', this.path);
       db.on("close", () => {
-        this.logger.info("Database closed.");
+        //this.logger.info('Database "%s" closed.', this.path);
         this.dbPromise = null;
         resolve();
       });
diff --git a/app/lib/dal/fileDAL.ts b/app/lib/dal/fileDAL.ts
index 8f4848f7728e300f02cde1a83940b6f80d3cb16c..54ae6f627cf310d75ceabd667d9e11e5933a69c3 100644
--- a/app/lib/dal/fileDAL.ts
+++ b/app/lib/dal/fileDAL.ts
@@ -50,7 +50,6 @@ import { MIndexDAO } from "./indexDAL/abstract/MIndexDAO";
 import { SIndexDAO } from "./indexDAL/abstract/SIndexDAO";
 import { CIndexDAO } from "./indexDAL/abstract/CIndexDAO";
 import { IdentityForRequirements } from "../../service/BlockchainService";
-import { NewLogger } from "../logger";
 import { BlockchainDAO } from "./indexDAL/abstract/BlockchainDAO";
 import { WalletDAO } from "./indexDAL/abstract/WalletDAO";
 import { PeerDAO } from "./indexDAL/abstract/PeerDAO";
@@ -132,8 +131,7 @@ export class FileDAL implements ServerDAO {
   sindexDAL: SIndexDAO;
   cindexDAL: CIndexDAO;
   dividendDAL: DividendDAO;
-  newDals: { [k: string]: Initiable };
-  private dals: (PeerDAO | WalletDAO | GenericDAO<any>)[];
+  dals: { [k: string]: Initiable };
 
   loadConfHook: (conf: ConfDTO) => Promise<void>;
   saveConfHook: (conf: ConfDTO) => Promise<ConfDTO>;
@@ -153,7 +151,8 @@ export class FileDAL implements ServerDAO {
     this.powDAL = new PowDAL(this.rootPath, params.fs);
     this.confDAL = new ConfDAL(this.rootPath, params.fs);
     this.metaDAL = new (require("./sqliteDAL/MetaDAL").MetaDAL)(
-      this.sqliteDriver
+      this.sqliteDriver,
+      getSqliteDB
     );
     this.idtyDAL = new (require("./sqliteDAL/IdentityDAL").IdentityDAL)(
       this.sqliteDriver
@@ -175,7 +174,7 @@ export class FileDAL implements ServerDAO {
     this.cindexDAL = new LevelDBCindex(getLevelDB);
     this.dividendDAL = new LevelDBDividend(getLevelDB);
 
-    this.newDals = {
+    this.dals = {
       powDAL: this.powDAL,
       metaDAL: this.metaDAL,
       blockDAL: this.blockDAL,
@@ -197,25 +196,9 @@ export class FileDAL implements ServerDAO {
   async init(conf: ConfDTO, commandName: string | null = null) {
     // wotb
     this.wotb = this.params.wotbf();
-
-    // DALs
-    this.dals = [
-      this.blockDAL,
-      this.peerDAL,
-      this.walletDAL,
-      this.bindexDAL,
-      this.mindexDAL,
-      this.iindexDAL,
-      this.sindexDAL,
-      this.cindexDAL,
-      this.dividendDAL,
-    ];
-    for (const indexDAL of this.dals) {
-      indexDAL.triggerInit();
-    }
-    const dalNames = Underscore.keys(this.newDals);
+    const dalNames = Underscore.keys(this.dals);
     for (const dalName of dalNames) {
-      const dal = this.newDals[dalName];
+      const dal = this.dals[dalName];
       await dal.init();
     }
     // Rust server
@@ -267,14 +250,6 @@ export class FileDAL implements ServerDAO {
     return this.metaDAL.getVersion();
   }
 
-  writeFileOfBlock(block: DBBlock) {
-    return this.blockDAL.saveBlock(block);
-  }
-
-  writeSideFileOfBlock(block: DBBlock) {
-    return this.blockDAL.saveSideBlock(block);
-  }
-
   listAllPeers() {
     return this.peerDAL.listAll();
   }
@@ -470,8 +445,10 @@ export class FileDAL implements ServerDAO {
   }
 
   async getAvailableSourcesByPubkey(pubkey: string): Promise<HttpSource[]> {
-    const txAvailable = await this.sindexDAL.getAvailableForPubkey(pubkey);
-    const sources: UDSource[] = await this.dividendDAL.getUDSources(pubkey);
+    const [txAvailable, sources] = await Promise.all([
+      this.sindexDAL.getAvailableForPubkey(pubkey),
+      this.dividendDAL.getUDSources(pubkey),
+    ]);
     return sources
       .map((d) => {
         return {
@@ -668,7 +645,7 @@ export class FileDAL implements ServerDAO {
         expires_on: membership.expires_on,
         created_on: idty.created_on,
         revoked: !!membership.revoked_on,
-        revocation_sig: membership.revocation,
+        revocation_sig: membership.revocation || null,
       };
     }
     return {
@@ -680,7 +657,7 @@ export class FileDAL implements ServerDAO {
       member: pending.member,
       wasMember: pending.wasMember,
       revoked: pending.revoked,
-      revocation_sig: pending.revocation_sig,
+      revocation_sig: pending.revocation_sig || null,
     };
   }
 
@@ -741,6 +718,17 @@ export class FileDAL implements ServerDAO {
     return await this.iindexDAL.getFromPubkeyOrUid(search);
   }
 
+  async getWrittenIdtyByPubkeyForHashingAndIsMember(
+    pub: string
+  ): Promise<{
+    uid: string;
+    created_on: string;
+    pub: string;
+    member: boolean;
+  } | null> {
+    return await this.iindexDAL.getFromPubkey(pub);
+  }
+
   async getWrittenIdtyByPubkeyForRevocationCheck(
     pubkey: string
   ): Promise<{
@@ -851,17 +839,20 @@ export class FileDAL implements ServerDAO {
   async getTxByHash(hash: string): Promise<DBTx | null> {
     let tx = this.rustServer.getTxByHash(hash);
     if (tx === null) {
-      return null;
-    } else {
-      let writtenBlock = tx.writtenBlock ? tx.writtenBlock : null;
-      let writtenTime = tx.writtenTime ? tx.writtenTime : tx.receivedTime;
-      let dbTx = DBTx.fromTransactionDTO(
-        await this.computeTxBlockstampTime(TransactionDTO.fromJSONObject(tx))
-      );
-      dbTx.block_number = writtenBlock;
-      dbTx.time = writtenTime || 0;
-      return dbTx;
+      tx = await this.txDAL.getTxByHash(hash);
+      if (!tx) {
+        return null;
+      }
     }
+
+    let writtenBlock = tx.writtenBlock ? tx.writtenBlock : null;
+    let writtenTime = tx.writtenTime ? tx.writtenTime : tx.receivedTime;
+    let dbTx = DBTx.fromTransactionDTO(
+      await this.computeTxBlockstampTime(TransactionDTO.fromJSONObject(tx))
+    );
+    dbTx.block_number = writtenBlock;
+    dbTx.time = writtenTime || 0;
+    return dbTx;
   }
 
   removePendingTxByHash(hash: string) {
@@ -916,8 +907,32 @@ export class FileDAL implements ServerDAO {
         return i;
       })
     );
+    return this.fillIdentitiesRevocation(found);
+  }
+
+  async searchJustIdentitiesByPubkey(pubkey: string): Promise<DBIdentity[]> {
+    const pendings = await this.idtyDAL.findByPub(pubkey);
+    const writtenIdty = await this.iindexDAL.getOldFromPubkey(pubkey);
+    const nonPendings =
+      writtenIdty &&
+      Underscore.where(pendings, { pubkey: writtenIdty.pub }).length === 0
+        ? [writtenIdty]
+        : [];
+    const found = pendings.concat(
+      nonPendings.map((i: any) => {
+        // Use the correct field
+        i.pubkey = i.pub;
+        return i;
+      })
+    );
+    return this.fillIdentitiesRevocation(found);
+  }
+
+  private async fillIdentitiesRevocation(
+    identities: DBIdentity[]
+  ): Promise<DBIdentity[]> {
     return await Promise.all<DBIdentity>(
-      found.map(async (f) => {
+      identities.map(async (f) => {
         const ms = await this.mindexDAL.getReducedMSForImplicitRevocation(
           f.pubkey
         );
@@ -1261,12 +1276,20 @@ export class FileDAL implements ServerDAO {
   }
 
   async saveBlock(block: DBBlock, conf: ConfDTO) {
-    block.wrong = false;
-    try {
-      this.rustServer.applyBlock(block.toBlockDTO());
-      await this.saveBlockInFile(block);
-    } catch (err) {
-      throw err;
+    this.rustServer.saveBlock(block);
+
+    for (const entry of cindex) {
+      const from = await this.getWrittenIdtyByPubkeyForWotbID(entry.issuer);
+      const to = await this.getWrittenIdtyByPubkeyForWotbID(entry.receiver);
+      if (entry.op == CommonConstants.IDX_CREATE) {
+        wotb.addLink(from.wotb_id, to.wotb_id);
+      } else {
+        wotb.removeLink(from.wotb_id, to.wotb_id);
+      }
+    }
+
+    if (conf.storage?.blocks !== false) {
+      await this.blockDAL.saveBlock(block);
     }
   }
 
@@ -1324,11 +1347,8 @@ export class FileDAL implements ServerDAO {
       const from = await this.getWrittenIdtyByPubkeyForWotbID(entry.issuer);
       const to = await this.getWrittenIdtyByPubkeyForWotbID(entry.receiver);
       if (entry.op == CommonConstants.IDX_CREATE) {
-        // NewLogger().trace('addLink %s -> %s', from.wotb_id, to.wotb_id)
         wotb.addLink(from.wotb_id, to.wotb_id);
       } else {
-        // Update = removal
-        NewLogger().trace("removeLink %s -> %s", from.wotb_id, to.wotb_id);
         wotb.removeLink(from.wotb_id, to.wotb_id);
       }
     }
@@ -1336,6 +1356,7 @@ export class FileDAL implements ServerDAO {
 
   @MonitorExecutionTime()
   async trimIndexes(maxNumber: number) {
+    logger.trace("Trim indexes below block #%s", maxNumber);
     if (!cliprogram.notrim) {
       await this.bindexDAL.trimBlocks(maxNumber);
       await this.iindexDAL.trimRecords(maxNumber);
@@ -1349,6 +1370,7 @@ export class FileDAL implements ServerDAO {
   }
 
   async trimSandboxes(block: { medianTime: number }) {
+    logger.trace("Trim sandboxes below median time %s", block.medianTime);
     await this.certDAL.trimExpiredCerts(block.medianTime);
     await this.msDAL.trimExpiredMemberships(block.medianTime);
     await this.idtyDAL.trimExpiredIdentities(block.medianTime);
@@ -1362,12 +1384,62 @@ export class FileDAL implements ServerDAO {
     return this.msDAL.savePendingMembership(ms);
   }
 
-  async saveBlockInFile(block: DBBlock) {
-    await this.writeFileOfBlock(block);
+  /**
+   * Maps transaction DTOs into DBTx records
+   * @param txs
+   * @param block_number
+   * @param medianTime
+   * @private
+   */
+  private async mapToDBTxs(
+    txs: TransactionDTO[],
+    block_number: number,
+    medianTime: number
+  ): Promise<DBTx[]> {
+    return Promise.all(
+      txs.map(async (tx) => {
+        const sp = tx.blockstamp.split("-", 2);
+        const basedBlock = (await this.getAbsoluteBlockByNumberAndHash(
+          parseInt(sp[0]),
+          sp[1]
+        )) as DBBlock;
+        tx.blockstampTime = basedBlock.medianTime;
+        const txEntity = TransactionDTO.fromJSONObject(tx);
+        if (!txEntity.hash) txEntity.computeAllHashes();
+        const dbTx = DBTx.fromTransactionDTO(txEntity);
+        dbTx.written = true;
+        dbTx.block_number = block_number;
+        dbTx.time = medianTime;
+        return dbTx;
+      })
+    );
+  }
+
+  async saveTxsInFiles(
+    txs: TransactionDTO[],
+    block_number: number,
+    medianTime: number
+  ) {
+    if (!txs.length) return [];
+    const records = await this.mapToDBTxs(txs, block_number, medianTime);
+    await this.txsDAL.saveBatch(records);
+    return records;
   }
 
-  saveSideBlockInFile(block: DBBlock) {
-    return this.writeSideFileOfBlock(block);
+  async insertTxsInFiles(
+    txs: TransactionDTO[],
+    block_number: number,
+    medianTime: number
+  ): Promise<DBTx[]> {
+    if (!txs.length) return [];
+    const records = await this.mapToDBTxs(txs, block_number, medianTime);
+    await this.txsDAL.insertBatch(records);
+    return records;
+  }
+
+  removeAllTxs() {
+    logger.debug("Removing all existing txs...");
+    return this.txsDAL.removeAll();
   }
 
   async merkleForPeers() {
@@ -1434,32 +1506,20 @@ export class FileDAL implements ServerDAO {
     return db_tx;
   }
 
-  async getTransactionsHistory(pubkey: string) {
-    const history: {
-      sent: DBTx[];
-      received: DBTx[];
-      sending: DBTx[];
-      pending: DBTx[];
-    } = {
-      sent: [],
-      received: [],
-      sending: [],
-      pending: [],
-    };
-    const res = this.rustServer.getTransactionsHistory(pubkey);
-    history.sent = await Promise.all(
-      res.sent.map(async (tx) => this.RustDbTxToDbTx(tx))
-    );
-    history.received = await Promise.all(
-      res.received.map(async (tx) => this.RustDbTxToDbTx(tx))
-    );
-    history.sending = await Promise.all(
-      res.sending.map(async (tx) => this.RustPendingTxToDbTx(tx))
-    );
-    history.pending = await Promise.all(
-      res.pending.map(async (tx) => this.RustPendingTxToDbTx(tx))
-    );
-    return history;
+  async getTxHistoryByPubkey(pubkey: string) {
+    return this.txsDAL.getTxHistoryByPubkey(pubkey);
+  }
+
+  getTxHistoryByPubkeyBetweenBlocks(pubkey: string, from: number, to: number) {
+    return this.txsDAL.getTxHistoryByPubkeyBetweenBlocks(pubkey, +from, +to);
+  }
+
+  getTxHistoryByPubkeyBetweenTimes(pubkey: string, from: number, to: number) {
+    return this.txsDAL.getTxHistoryByPubkeyBetweenTimes(pubkey, +from, +to);
+  }
+
+  getTxHistoryMempool(pubkey: string) {
+    return this.txsDAL.getTxHistoryMempool(pubkey);
   }
 
   async getUDHistory(pubkey: string): Promise<{ history: HttpUD[] }> {
@@ -1594,17 +1654,19 @@ export class FileDAL implements ServerDAO {
     }
   }
 
-  async cleanCaches() {
-    await Underscore.values(this.newDals).map(
-      (dal: Initiable) => dal.cleanCache && dal.cleanCache()
+  cleanCaches() {
+    return Promise.all(
+      Underscore.values(this.dals)
+        .filter((dal: Initiable) => typeof dal.cleanCache === "function")
+        .map((dal: Initiable) => dal.cleanCache())
     );
   }
 
   async close() {
     await Promise.all(
-      Underscore.values(this.newDals).map(async (dal: Initiable) => {
+      Underscore.values(this.dals).map((dal: Initiable) => {
         dal.cleanCache();
-        await dal.close();
+        return dal.close();
       })
     );
     await this.sqliteDriver.closeConnection();
@@ -1617,7 +1679,7 @@ export class FileDAL implements ServerDAO {
   }
 
   getLogContent(linesQuantity: number) {
-    return new Promise((resolve, reject) => {
+    return new Promise<string[]>((resolve, reject) => {
       try {
         let lines: string[] = [],
           i = 0;
diff --git a/app/lib/dal/indexDAL/abstract/GenericDAO.ts b/app/lib/dal/indexDAL/abstract/GenericDAO.ts
index b7df7ad288e7130df8bfee6d5f89bd0ffc7a9f69..f3f9651e6f95ac696bc564c3b2c6195bc5834750 100644
--- a/app/lib/dal/indexDAL/abstract/GenericDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/GenericDAO.ts
@@ -1,11 +1,6 @@
 import { Initiable } from "../../sqliteDAL/Initiable";
 
 export interface GenericDAO<T> extends Initiable {
-  /**
-   * Trigger the initialization of the DAO. Called when the underlying DB is ready.
-   */
-  triggerInit(): void;
-
   /**
    * Make a generic find with some ordering.
    * @param criterion Criterion object, LokiJS's find object format.
diff --git a/app/lib/dal/indexDAL/abstract/IIndexDAO.ts b/app/lib/dal/indexDAL/abstract/IIndexDAO.ts
index 1c1a08cd8052ec1a003e478b563ea2ef5562dc15..e6ec8965ae19f0848afb1af2ebbde887d27caec6 100644
--- a/app/lib/dal/indexDAL/abstract/IIndexDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/IIndexDAO.ts
@@ -19,6 +19,8 @@ export interface IIndexDAO extends ReduceableDAO<IindexEntry> {
 
   searchThoseMatching(search: string): Promise<OldIindexEntry[]>;
 
+  getOldFromPubkey(pub: string): Promise<OldIindexEntry | null>;
+
   getFullFromUID(uid: string): Promise<FullIindexEntry>;
 
   getFullFromPubkey(pub: string): Promise<FullIindexEntry>;
diff --git a/app/lib/dal/indexDAL/abstract/PeerDAO.ts b/app/lib/dal/indexDAL/abstract/PeerDAO.ts
index 0432e2c15d551ab0944e7cd8e18aaec610915470..b85f893e540c63fc1d357ee633127e6ae602d539 100644
--- a/app/lib/dal/indexDAL/abstract/PeerDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/PeerDAO.ts
@@ -2,11 +2,6 @@ import { DBPeer } from "../../../db/DBPeer";
 import { Initiable } from "../../sqliteDAL/Initiable";
 
 export interface PeerDAO extends Initiable {
-  /**
-   * Trigger the initialization of the DAO. Called when the underlying DB is ready.
-   */
-  triggerInit(): void;
-
   listAll(): Promise<DBPeer[]>;
 
   withUPStatus(): Promise<DBPeer[]>;
diff --git a/app/lib/dal/indexDAL/abstract/TxsDAO.ts b/app/lib/dal/indexDAL/abstract/TxsDAO.ts
new file mode 100644
index 0000000000000000000000000000000000000000..44a414484fe4d972288305869ef05fe89883835c
--- /dev/null
+++ b/app/lib/dal/indexDAL/abstract/TxsDAO.ts
@@ -0,0 +1,79 @@
+import { GenericDAO } from "./GenericDAO";
+import { TransactionDTO } from "../../../dto/TransactionDTO";
+import { SandBox } from "../../sqliteDAL/SandBox";
+import { DBTx } from "../../../db/DBTx";
+
+export interface TxsDAO extends GenericDAO<DBTx> {
+  disableCheckConstraints(): Promise<void>;
+
+  enableCheckConstraints(): Promise<void>;
+
+  trimExpiredNonWrittenTxs(limitTime: number): Promise<void>;
+
+  getAllPending(versionMin: number): Promise<DBTx[]>;
+
+  getTX(hash: string): Promise<DBTx>;
+
+  /**
+   * Make a batch insert or update.
+   * @param records The records to insert or update as a batch.
+   */
+  saveBatch(records: DBTx[]): Promise<void>;
+
+  addLinked(
+    tx: TransactionDTO,
+    block_number: number,
+    time: number
+  ): Promise<DBTx>;
+
+  addPending(dbTx: DBTx): Promise<DBTx>;
+
+  getTxHistoryByPubkey(
+    pubkey: string
+  ): Promise<{
+    sent: DBTx[];
+    received: DBTx[];
+    sending: DBTx[];
+    pending: DBTx[];
+  }>;
+
+  getTxHistoryByPubkeyBetweenBlocks(
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<{ sent: DBTx[]; received: DBTx[] }>;
+
+  getTxHistoryByPubkeyBetweenTimes(
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<{ sent: DBTx[]; received: DBTx[] }>;
+
+  getTxHistoryMempool(
+    pubkey: string
+  ): Promise<{ sending: DBTx[]; pending: DBTx[] }>;
+
+  getLinkedWithIssuer(pubkey: string): Promise<DBTx[]>;
+
+  getLinkedWithRecipient(pubkey: string): Promise<DBTx[]>;
+
+  getPendingWithIssuer(pubkey: string): Promise<DBTx[]>;
+
+  getPendingWithRecipient(pubkey: string): Promise<DBTx[]>;
+
+  removeByHash(hash: string): Promise<void>;
+
+  removeByHashBatch(hashArray: string[]): Promise<void>;
+
+  removeAll(): Promise<void>;
+
+  sandbox: SandBox<{
+    issuers: string[];
+    output_base: number;
+    output_amount: number;
+  }>;
+
+  getSandboxRoom(): Promise<number>;
+
+  setSandboxSize(size: number): void;
+}
diff --git a/app/lib/dal/indexDAL/abstract/WalletDAO.ts b/app/lib/dal/indexDAL/abstract/WalletDAO.ts
index 3e076f2ebcffe622680cf1654d32bbf0ee120d3d..00e047aba65430f7cec16844e4f79bc6c8d66706 100644
--- a/app/lib/dal/indexDAL/abstract/WalletDAO.ts
+++ b/app/lib/dal/indexDAL/abstract/WalletDAO.ts
@@ -2,11 +2,6 @@ import { Initiable } from "../../sqliteDAL/Initiable";
 import { DBWallet } from "../../../db/DBWallet";
 
 export interface WalletDAO extends Initiable {
-  /**
-   * Trigger the initialization of the DAO. Called when the underlying DB is ready.
-   */
-  triggerInit(): void;
-
   /**
    * Saves a wallet.
    * @param {DBWallet} wallet
diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts b/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts
index e0101e8bbc79ee670eb043e51493fb21f6986c2c..6a76cb67d2b9308e8257edcb17f3e1241f95b793 100644
--- a/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts
+++ b/app/lib/dal/indexDAL/leveldb/LevelDBBlockchain.ts
@@ -106,8 +106,10 @@ export class LevelDBBlockchain extends LevelDBTable<DBBlock>
 
   async close(): Promise<void> {
     await super.close();
-    await this.forks.close();
-    await Promise.all(this.indexers.map((i) => i.close()));
+    if (this.indexers.length) {
+      await this.forks.close();
+      await Promise.all(this.indexers.map((i) => i.close()));
+    }
   }
 
   /**
diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts b/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts
index 4d1afc8e8beab4867fc4c5287da5e7b156854a72..896abf561440c210d60192c5d15e11b78d327752 100644
--- a/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts
+++ b/app/lib/dal/indexDAL/leveldb/LevelDBDividend.ts
@@ -31,10 +31,6 @@ export class LevelDBDividend extends LevelDBTable<DividendEntry>
    * TECHNICAL
    */
 
-  cleanCache(): void {}
-
-  triggerInit(): void {}
-
   async init(): Promise<void> {
     await super.init();
     this.indexForTrimming = new LevelDBTable<string[]>(
diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts
index c53f3dfc48a11b38ca1e4d721a5d9ef17a59e0c8..becb73a5af5af1eb78572bd5c5b3392ec183a2ea 100644
--- a/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts
+++ b/app/lib/dal/indexDAL/leveldb/LevelDBIindex.ts
@@ -61,10 +61,10 @@ export class LevelDBIindex extends LevelDBTable<IindexEntry[]>
 
   async close(): Promise<void> {
     await super.close();
-    await this.indexForHash.close();
-    await this.indexForUid.close();
-    await this.indexForKick.close();
-    await this.indexForWrittenOn.close();
+    await this.indexForHash?.close();
+    await this.indexForUid?.close();
+    await this.indexForKick?.close();
+    await this.indexForWrittenOn?.close();
   }
 
   /**
@@ -280,4 +280,12 @@ export class LevelDBIindex extends LevelDBTable<IindexEntry[]>
       .filter((u) => u.pub)
       .concat(pubIdentities.filter((p) => p.pub));
   }
+
+  async getOldFromPubkey(pub: string): Promise<OldIindexEntry | null> {
+    const identities = await this.findByPub(pub);
+    if (!identities.length) {
+      return null;
+    }
+    return OldTransformers.toOldIindexEntry(reduce(identities));
+  }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts
index dcadf4ad721d46e668bc6809c47dd91f68334672..6def894d35697ea8a48f73330cf7b4622758d8e8 100644
--- a/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts
+++ b/app/lib/dal/indexDAL/leveldb/LevelDBMindex.ts
@@ -52,9 +52,9 @@ export class LevelDBMindex extends LevelDBTable<MindexEntry[]>
 
   async close(): Promise<void> {
     await super.close();
-    await this.indexForExpiresOn.close();
-    await this.indexForRevokesOn.close();
-    await this.indexForWrittenOn.close();
+    await this.indexForExpiresOn?.close();
+    await this.indexForRevokesOn?.close();
+    await this.indexForWrittenOn?.close();
   }
 
   /**
diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts b/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts
index a8b4aceba9bf7314135e8a1a47c41b7ad96f7ffd..a32ed465681498589915aafc9860ae17d29c4b9d 100644
--- a/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts
+++ b/app/lib/dal/indexDAL/leveldb/LevelDBSindex.ts
@@ -12,12 +12,18 @@ import { SIndexDAO } from "../abstract/SIndexDAO";
 import { Underscore } from "../../../common-libs/underscore";
 import { pint } from "../../../common-libs/pint";
 import { arrayPruneAllCopy } from "../../../common-libs/array-prune";
+import { CommonConstants } from "../../../common-libs/constants";
 
 export class LevelDBSindex extends LevelDBTable<SindexEntry>
   implements SIndexDAO {
+  // Remembers what sources (identifier-pos, e.g. E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855-0) were created and/or consumed at a given block number
   private indexForTrimming: LevelDBTable<string[]>;
+  // Remembers what sources (identifier-pos, e.g. E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855-0) were consumed at a given block number
   private indexForConsumed: LevelDBTable<string[]>;
+  // Remembers what sources (identifier-pos, e.g. E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855-0) are involved in a simple condition (e.g. "SIG(pubkey)")
   private indexForConditions: LevelDBTable<string[]>;
+  // Remembers what sources (identifier-pos, e.g. E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855-0) are involved in a complex condition (e.g. "SIG(pubkey) OR CSV(1000)")
+  private indexOfComplexeConditionForPubkeys: LevelDBTable<string[]>;
 
   constructor(protected getLevelDB: (dbName: string) => Promise<LevelUp>) {
     super("level_sindex", getLevelDB);
@@ -41,9 +47,14 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
       "level_sindex/conditions",
       this.getLevelDB
     );
+    this.indexOfComplexeConditionForPubkeys = new LevelDBTable<string[]>(
+      "level_sindex/complex_condition_pubkeys",
+      this.getLevelDB
+    );
     await this.indexForTrimming.init();
     await this.indexForConsumed.init();
     await this.indexForConditions.init();
+    await this.indexOfComplexeConditionForPubkeys.init();
   }
 
   async close(): Promise<void> {
@@ -51,6 +62,7 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
     await this.indexForTrimming.close();
     await this.indexForConsumed.close();
     await this.indexForConditions.close();
+    await this.indexOfComplexeConditionForPubkeys.close();
   }
 
   /**
@@ -127,14 +139,14 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
       pos: number;
     }[]
   > {
-    // TODO: very costly: needs a full scan, would be better to change this implementatio
-    const entries = await this.findWhere((e) =>
-      e.conditions.includes(`SIG(${pubkey})`)
+    const forSimpleConditions = await this.getForConditions(`SIG(${pubkey})`);
+    const forComplexConditions = await this.getForComplexeConditionPubkey(
+      pubkey
+    );
+    const reduced = Indexer.DUP_HELPERS.reduceBy(
+      forSimpleConditions.concat(forComplexConditions),
+      ["identifier", "pos"]
     );
-    const reduced = Indexer.DUP_HELPERS.reduceBy(entries, [
-      "identifier",
-      "pos",
-    ]);
     return reduced.filter((r) => !r.consumed);
   }
 
@@ -163,17 +175,25 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
     return entries;
   }
 
+  /**
+   * Trims (i.e. removes definitively) some sources from sindex.
+   * N.B: we only trim *consumed sources* because a source is exclusively a two-entry operation: CREATE then UPDATE.
+   * So a source without UPDATE is not to be trimmed, because it has never been consumed.
+   * Looking into `indexForConsumed` is therefore an optimisation to avoid checking `CREATE` sources which cannot be trimmed.
+   * @param belowNumber Trim all the UPDATE sources written below `belowNumber` block.
+   */
   async trimConsumedSource(belowNumber: number): Promise<void> {
     let belowNumberIds: string[] = [];
     const mapIds: {
       [k: string]: {
+        createdOn: number;
+        updatedOn: number;
         conditions: string;
-        writtenOn: number;
       };
     } = {};
     const mapIds2WrittenOn: { [k: string]: number } = {};
 
-    // First: we look at what was written before `belowNumber`
+    // First: we look at what was consumed before `belowNumber`
     await this.indexForConsumed.readAllKeyValue(
       async (kv) => {
         belowNumberIds = belowNumberIds.concat(kv.value);
@@ -191,26 +211,34 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
       // Remove consumed sources
       const identifier = id.split("-")[0];
       const pos = pint(id.split("-")[1]);
-      const entry = await this.getOrNull(
-        LevelDBSindex.trimKey(identifier, pos, true)
-      );
-      if (entry && entry.writtenOn < belowNumber) {
+      const createKey = LevelDBSindex.trimKey(identifier, pos, false);
+      const createRecord = await this.getOrNull(createKey);
+      const updateKey = LevelDBSindex.trimKey(identifier, pos, true);
+      const updateRecord = await this.getOrNull(updateKey);
+      if (
+        createRecord &&
+        updateRecord &&
+        updateRecord.writtenOn < belowNumber
+      ) {
         // We remember the trimmed source id to remove it from the writtenOn and conditions index
         mapIds[id] = {
-          writtenOn: mapIds2WrittenOn[id],
-          conditions: entry.conditions,
+          createdOn: createRecord.writtenOn,
+          updatedOn: updateRecord.writtenOn,
+          conditions: updateRecord.conditions,
         };
-        await this.del(LevelDBSindex.trimKey(identifier, pos, false));
-        await this.del(LevelDBSindex.trimKey(identifier, pos, true));
+        // Remove both the source CREATE (availability) and UPDATE (consumption) from sindex
+        await this.del(createKey);
+        await this.del(updateKey);
       }
     }
 
-    // We update indexes
+    // We update sub-indexes
     for (const id of Underscore.keys(mapIds).map(String)) {
       const map = mapIds[id];
       await this.trimConditions(map.conditions, id);
-      await this.trimConsumed(map.writtenOn, id);
-      await this.trimWrittenOn(map.writtenOn, id);
+      await this.trimConsumed(map.updatedOn, id);
+      await this.trimWrittenOn(map.createdOn, id);
+      await this.trimWrittenOn(map.updatedOn, id);
     }
   }
 
@@ -269,6 +297,20 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
     return found;
   }
 
+  async getForComplexeConditionPubkey(pubkey: string): Promise<SindexEntry[]> {
+    const ids =
+      (await this.indexOfComplexeConditionForPubkeys.getOrNull(pubkey)) || [];
+    const found: SindexEntry[] = [];
+    for (const id of ids) {
+      const entries = await this.findByIdentifierAndPos(
+        id.split("-")[0],
+        pint(id.split("-")[1])
+      );
+      entries.forEach((e) => found.push(e));
+    }
+    return found;
+  }
+
   async removeBlock(blockstamp: string): Promise<void> {
     const writtenOn = pint(blockstamp);
     // We look at records written on this blockstamp: `indexForTrimming` allows to get them
@@ -288,7 +330,8 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
       const updateRecord = await this.getOrNull(updateKey);
       // Undo consumption
       if (updateRecord && updateRecord.writtenOn === writtenOn) {
-        conditions.push(updateRecord.conditions);
+        // Delete from condition index only if no createRecord exists - fix #1446
+        if (!createRecord) conditions.push(updateRecord.conditions);
         await this.del(updateKey);
       }
       // Undo creation?
@@ -296,11 +339,10 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
         conditions.push(createRecord.conditions);
         await this.del(createKey);
       }
-      // Update balance
-      // 1. Conditions
+      // Update condition index
       const uniqConditions = Underscore.uniq(conditions);
       for (const condition of uniqConditions) {
-        // Remove this source from the balance
+        // Remove this source from the condition
         await this.trimConditions(condition, id);
       }
     }
@@ -315,33 +357,55 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
     }
   }
 
+  /**
+   * Allows access to the internal indexes, for unit tests (see test triming-dal.ts)
+   */
+  getInternalIndexes(): LevelDBTable<any>[] {
+    return [
+      this.indexForTrimming,
+      this.indexForConditions,
+      this.indexForConsumed,
+      this.indexOfComplexeConditionForPubkeys,
+    ];
+  }
+  getIndexForTrimming(): LevelDBTable<any> {
+    return this.indexForTrimming;
+  }
+  getIndexForConditions(): LevelDBTable<any> {
+    return this.indexForConditions;
+  }
+  getIndexForConsumed(): LevelDBTable<any> {
+    return this.indexForConsumed;
+  }
+
   private async trimConditions(condition: string, id: string) {
-    // Get all the account's TX sources
+    // Get all the condition's sources
     const existing = (await this.indexForConditions.getOrNull(condition)) || [];
-    // Prune the source from the account
+    // Prune the source from the condition
     const trimmed = arrayPruneAllCopy(existing, id);
     if (trimmed.length) {
-      // If some sources are left for this "account", persist what remains
+      // If some sources are left for this "condition", persist what remains
       await this.indexForConditions.put(condition, trimmed);
     } else {
-      // Otherwise just delete the "account"
+      // Otherwise just delete the "condition"
       await this.indexForConditions.del(condition);
     }
+
+    // If complex conditions
+    if (this.isComplexCondition(condition)) {
+      const pubkeys = this.getDistinctPubkeysFromCondition(condition);
+      await this.trimComplexeConditionPubkeys(pubkeys, id);
+    }
   }
 
-  /**
-   * Duplicate with trimConditions?!
-   * @param writtenOn
-   * @param id
-   */
   private async trimWrittenOn(writtenOn: number, id: string) {
     const k = LevelDBSindex.trimWrittenOnKey(writtenOn);
     const existing = await this.getWrittenOnSourceIds(writtenOn);
     const trimmed = arrayPruneAllCopy(existing, id);
     if (trimmed.length) {
-      await this.indexForConditions.put(k, trimmed);
+      await this.indexForTrimming.put(k, trimmed);
     } else {
-      await this.indexForConditions.del(k);
+      await this.indexForTrimming.del(k);
     }
   }
 
@@ -356,6 +420,28 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
     }
   }
 
+  private async trimComplexeConditionPubkeys(pubkeys: string[], id: string) {
+    if (!pubkeys || !pubkeys.length) return;
+    for (const p of pubkeys) {
+      await this.trimComplexeConditionPubkey(p, id);
+    }
+  }
+
+  private async trimComplexeConditionPubkey(pubkey: string, id: string) {
+    // Get all the condition's sources
+    const existing =
+      (await this.indexOfComplexeConditionForPubkeys.getOrNull(pubkey)) || [];
+    // Prune the source from the condition
+    const trimmed = arrayPruneAllCopy(existing, id);
+    if (trimmed.length) {
+      // If some sources are left for this "condition", persist what remains
+      await this.indexOfComplexeConditionForPubkeys.put(pubkey, trimmed);
+    } else {
+      // Otherwise just delete the "pubkey" entry
+      await this.indexOfComplexeConditionForPubkeys.del(pubkey);
+    }
+  }
+
   private async getWrittenOnSourceIds(writtenOn: number) {
     const indexForTrimmingId = LevelDBSindex.trimWrittenOnKey(writtenOn);
     return (await this.indexForTrimming.getOrNull(indexForTrimmingId)) || [];
@@ -393,6 +479,7 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
     const byConsumed: { [k: number]: SindexEntry[] } = {};
     const byWrittenOn: { [k: number]: SindexEntry[] } = {};
     const byConditions: { [k: string]: SindexEntry[] } = {};
+    const byPubkeys: { [k: string]: SindexEntry[] } = {};
     records
       .filter((r) => r.consumed)
       .forEach((r) => {
@@ -410,12 +497,24 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
         arrWO = byWrittenOn[r.writtenOn] = [];
       }
       arrWO.push(r);
-      // Conditiosn
+      // Conditions
       let arrCN = byConditions[r.conditions];
       if (!arrCN) {
         arrCN = byConditions[r.conditions] = [];
       }
       arrCN.push(r);
+
+      // If complex condition
+      if (this.isComplexCondition(r.conditions)) {
+        const pubkeys = this.getDistinctPubkeysFromCondition(r.conditions);
+        pubkeys.forEach((pub) => {
+          let arrPub = byPubkeys[pub];
+          if (!arrPub) {
+            arrPub = byPubkeys[pub] = [];
+          }
+          arrPub.push(r);
+        });
+      }
     });
     // Index consumed => (identifier + pos)[]
     for (const k of Underscore.keys(byConsumed)) {
@@ -446,5 +545,47 @@ export class LevelDBSindex extends LevelDBTable<SindexEntry>
         Underscore.uniq(existing.concat(newSources))
       );
     }
+    // Index pubkeys => (identifier + pos)[]
+    for (const k of Underscore.keys(byPubkeys).map(String)) {
+      const existing =
+        (await this.indexOfComplexeConditionForPubkeys.getOrNull(k)) || [];
+      const newSources = byPubkeys[k].map((r) =>
+        LevelDBSindex.trimPartialKey(r.identifier, r.pos)
+      );
+      await this.indexOfComplexeConditionForPubkeys.put(
+        k,
+        Underscore.uniq(existing.concat(newSources))
+      );
+    }
+  }
+
+  private isComplexCondition(condition: string): boolean {
+    return (
+      (condition &&
+        !CommonConstants.TRANSACTION.OUTPUT_CONDITION_SIG_PUBKEY_UNIQUE.test(
+          condition
+        )) ||
+      false
+    );
+  }
+  /**
+   * Get all pubkeys used by an output condition (e.g. 'SIG(A) && SIG(B)' will return ['A', 'B'])
+   * @param condition
+   * @private
+   */
+  private getDistinctPubkeysFromCondition(condition: string): string[] {
+    const pubKeys: string[] = [];
+    if (!condition) return pubKeys;
+    let match: RegExpExecArray | null;
+    while (
+      (match = CommonConstants.TRANSACTION.OUTPUT_CONDITION_SIG_PUBKEY.exec(
+        condition
+      )) !== null
+    ) {
+      pubKeys.push(match[1]);
+      condition = condition.substring(match.index + match[0].length);
+    }
+
+    return Underscore.uniq(pubKeys);
   }
 }
diff --git a/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts b/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts
index c6b1f6de2c9dab34d2780dae2bc64ce8af44d1af..668351f06334fa0c03faa66834ecc3e01e0c60e6 100644
--- a/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts
+++ b/app/lib/dal/indexDAL/leveldb/LevelDBTable.ts
@@ -12,14 +12,13 @@ export class LevelDBTable<T> {
 
   cleanCache(): void {}
 
-  triggerInit(): void {}
-
-  async close() {
-    await this.db.close();
-  }
-
   async init(): Promise<void> {
     this.db = await this.getLevelDB(`${this.name}`);
+    await this.db.open();
+  }
+
+  async close() {
+    await this.db?.close();
   }
 
   public async get(k: string): Promise<T> {
@@ -74,7 +73,7 @@ export class LevelDBTable<T> {
 
   public async count(options?: AbstractIteratorOptions) {
     let count = 0;
-    await new Promise((res) => {
+    await new Promise<void>((res) => {
       this.db
         .createReadStream(options)
         .on("data", () => count++)
@@ -87,7 +86,7 @@ export class LevelDBTable<T> {
     callback: (entry: T) => void,
     options?: AbstractIteratorOptions
   ) {
-    await new Promise((res) => {
+    await new Promise<void>((res) => {
       this.db
         .createReadStream(options)
         .on("data", (data) => callback(JSON.parse(String(data.value))))
@@ -99,7 +98,7 @@ export class LevelDBTable<T> {
     callback: (entry: { key: string; value: T }) => void,
     options?: AbstractIteratorOptions
   ) {
-    await new Promise((res) => {
+    await new Promise<void>((res) => {
       this.db
         .createReadStream(options)
         .on("data", (data) =>
@@ -117,7 +116,7 @@ export class LevelDBTable<T> {
     options?: AbstractIteratorOptions
   ) {
     const ops: Promise<void>[] = [];
-    await new Promise((res) => {
+    await new Promise<void>((res) => {
       this.db
         .createReadStream(options)
         .on("data", (data) =>
diff --git a/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts b/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts
index eee21312ec9ce3647f9c6efa7d915010d7850e08..ddefe40a1ab26149a8ca0c5f66b80bcd7e7e89e3 100644
--- a/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts
+++ b/app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer.ts
@@ -87,7 +87,7 @@ export class LevelMIndexExpiresOnIndexer extends LevelDBDataIndex<
         }
       })
     );
-    // Case 2: expiration REVERT
+    // Case 2: REVERT expired = put back the value of `expires_on`
     const values: MindexEntry[] = Underscore.values(
       newStateByPub
     ).map((entries) => reduce(entries));
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts b/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts
new file mode 100644
index 0000000000000000000000000000000000000000..d64cc48b342ad5194c37e5d23a6e0948abc06c0d
--- /dev/null
+++ b/app/lib/dal/indexDAL/sqlite/SqliteDividend.ts
@@ -0,0 +1,264 @@
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { SqliteTable } from "./SqliteTable";
+import { SqlNotNullableFieldDefinition } from "./SqlFieldDefinition";
+import { DividendDAO, DividendEntry, UDSource } from "../abstract/DividendDAO";
+import {
+  IindexEntry,
+  SimpleTxInput,
+  SimpleUdEntryForWallet,
+  SindexEntry,
+} from "../../../indexer";
+import { DividendDaoHandler } from "../common/DividendDaoHandler";
+import { DataErrors } from "../../../common-libs/errors";
+
+export class SqliteDividend extends SqliteTable<DividendEntry>
+  implements DividendDAO {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
+    super(
+      "dividend",
+      {
+        pub: new SqlNotNullableFieldDefinition("VARCHAR", true, 50),
+        member: new SqlNotNullableFieldDefinition("BOOLEAN", true),
+        availables: new SqlNotNullableFieldDefinition("JSON", false),
+        consumed: new SqlNotNullableFieldDefinition("JSON", false),
+        consumedUDs: new SqlNotNullableFieldDefinition("JSON", false),
+        dividends: new SqlNotNullableFieldDefinition("JSON", false),
+      },
+      getSqliteDB
+    );
+  }
+
+  /**
+   * TECHNICAL
+   */
+
+  cleanCache(): void {}
+
+  /**
+   * INSERT
+   */
+
+  @MonitorExecutionTime()
+  async insert(record: DividendEntry): Promise<void> {
+    await this.insertInTable(this.driver, record);
+  }
+
+  @MonitorExecutionTime()
+  async insertBatch(records: DividendEntry[]): Promise<void> {
+    if (records.length) {
+      return this.insertBatchInTable(this.driver, records);
+    }
+  }
+
+  private async find(sql: string, params: any[]): Promise<DividendEntry[]> {
+    return (await this.driver.sqlRead(sql, params)).map((r) => {
+      return {
+        pub: r.pub,
+        member: r.member,
+        availables:
+          r.availables == null ? null : JSON.parse(r.availables as any),
+        consumed: r.consumed == null ? null : JSON.parse(r.consumed as any),
+        consumedUDs:
+          r.consumedUDs == null ? null : JSON.parse(r.consumedUDs as any),
+        dividends: r.dividends == null ? null : JSON.parse(r.dividends as any),
+      };
+    });
+  }
+
+  async consume(filter: SindexEntry[]): Promise<void> {
+    for (const dividendToConsume of filter) {
+      const row = (
+        await this.find("SELECT * FROM dividend WHERE pub = ?", [
+          dividendToConsume.identifier,
+        ])
+      )[0];
+      DividendDaoHandler.consume(row, dividendToConsume);
+      await this.update(
+        this.driver,
+        row,
+        ["consumed", "consumedUDs", "availables", "dividends"],
+        ["pub"]
+      );
+    }
+  }
+
+  async createMember(pub: string): Promise<void> {
+    const existing = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [pub])
+    )[0];
+    if (!existing) {
+      await this.insert(DividendDaoHandler.getNewDividendEntry(pub));
+    } else {
+      await this.setMember(true, pub);
+    }
+  }
+
+  deleteMember(pub: string): Promise<void> {
+    return this.driver.sqlWrite("DELETE FROM dividend WHERE pub = ?", [pub]);
+  }
+
+  async findForDump(criterion: any): Promise<SindexEntry[]> {
+    return DividendDaoHandler.toDump(
+      await this.find("SELECT * FROM dividend", [])
+    );
+  }
+
+  findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<DividendEntry[]> {
+    let sql = `SELECT * FROM dividend ${criterion.pub ? "WHERE pub = ?" : ""}`;
+    if (sort.length) {
+      sql += ` ORDER BY ${sort
+        .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
+        .join(", ")}`;
+    }
+    return this.find(sql, criterion.pub ? [criterion.pub] : []);
+  }
+
+  async findUdSourceByIdentifierPosAmountBase(
+    identifier: string,
+    pos: number,
+    amount: number,
+    base: number
+  ): Promise<SimpleTxInput[]> {
+    const member = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [identifier])
+    )[0];
+    return DividendDaoHandler.getUDSourceByIdPosAmountBase(
+      member,
+      identifier,
+      pos,
+      amount,
+      base
+    );
+  }
+
+  async getUDSource(
+    identifier: string,
+    pos: number
+  ): Promise<SimpleTxInput | null> {
+    const member = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [identifier])
+    )[0];
+    return DividendDaoHandler.getUDSource(member, identifier, pos);
+  }
+
+  async getUDSources(pub: string): Promise<UDSource[]> {
+    const member = (
+      await this.find("SELECT * FROM dividend WHERE pub = ?", [pub])
+    )[0];
+    if (!member) {
+      return [];
+    }
+    return DividendDaoHandler.udSources(member);
+  }
+
+  getWrittenOn(blockstamp: string): Promise<DividendEntry[]> {
+    throw Error(
+      DataErrors[
+        DataErrors.DIVIDEND_GET_WRITTEN_ON_SHOULD_NOT_BE_USED_DIVIDEND_DAO
+      ]
+    );
+  }
+
+  async getWrittenOnUDs(number: number): Promise<SimpleUdEntryForWallet[]> {
+    const res: SimpleUdEntryForWallet[] = [];
+    const rows = await this.find("SELECT * FROM dividend WHERE member", []);
+    for (const row of rows) {
+      DividendDaoHandler.getWrittenOnUDs(row, number, res);
+    }
+    return res;
+  }
+
+  async produceDividend(
+    blockNumber: number,
+    dividend: number,
+    unitbase: number,
+    local_iindex: IindexEntry[]
+  ): Promise<SimpleUdEntryForWallet[]> {
+    const dividends: SimpleUdEntryForWallet[] = [];
+    const rows = await this.find("SELECT * FROM dividend WHERE member", []);
+    for (const row of rows) {
+      DividendDaoHandler.produceDividend(
+        row,
+        blockNumber,
+        dividend,
+        unitbase,
+        dividends
+      );
+      await this.update(this.driver, row, ["availables", "dividends"], ["pub"]);
+    }
+    return dividends;
+  }
+
+  removeBlock(blockstamp: string): Promise<void> {
+    throw Error(
+      DataErrors[
+        DataErrors.DIVIDEND_REMOVE_BLOCK_SHOULD_NOT_BE_USED_BY_DIVIDEND_DAO
+      ]
+    );
+  }
+
+  async revertUDs(
+    number: number
+  ): Promise<{
+    createdUDsDestroyedByRevert: SimpleUdEntryForWallet[];
+    consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[];
+  }> {
+    const createdUDsDestroyedByRevert: SimpleUdEntryForWallet[] = [];
+    const consumedUDsRecoveredByRevert: SimpleUdEntryForWallet[] = [];
+    // Remove produced dividends at this block
+    const rows = await this.find(
+      "SELECT * FROM dividend WHERE availables like ? or dividends like ?",
+      ["%" + number + "%", "%" + number + "%"]
+    );
+    for (const row of rows.filter((row) => row.availables.includes(number))) {
+      DividendDaoHandler.removeDividendsProduced(
+        row,
+        number,
+        createdUDsDestroyedByRevert
+      );
+      await this.update(this.driver, row, ["availables", "dividends"], ["pub"]);
+    }
+    // Unconsumed dividends consumed at this block
+    for (const row of rows.filter((row) => row.consumed.includes(number))) {
+      DividendDaoHandler.unconsumeDividends(
+        row,
+        number,
+        consumedUDsRecoveredByRevert
+      );
+      await this.update(this.driver, row, ["consumed", "consumedUDs", "availables", "dividends"], ["pub"]);
+    }
+    return {
+      createdUDsDestroyedByRevert,
+      consumedUDsRecoveredByRevert,
+    };
+  }
+
+  async setMember(member: boolean, pub: string): Promise<void> {
+    await this.driver.sqlWrite("UPDATE dividend SET member = ? WHERE pub = ?", [
+      member,
+      pub,
+    ]);
+  }
+
+  async trimConsumedUDs(belowNumber: number): Promise<void> {
+    const rows = await this.find("SELECT * FROM dividend", []);
+    for (const row of rows) {
+      if (DividendDaoHandler.trimConsumed(row, belowNumber)) {
+        await this.update(
+          this.driver,
+          row,
+          ["consumed", "consumedUDs"],
+          ["pub"]
+        );
+      }
+    }
+  }
+
+  listAll(): Promise<DividendEntry[]> {
+    return this.find("SELECT * FROM dividend", []);
+  }
+}
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts
index fdbadbdaf23fdb168c1bfa76fee797d7e226e7b4..a50fdb0416792dc09621fd0459c942a2364ae0cd 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteIIndex.ts
@@ -1,4 +1,9 @@
-import { FullIindexEntry, IindexEntry, Indexer } from "../../../indexer";
+import {
+  FullIindexEntry,
+  IindexEntry,
+  Indexer,
+  reduce,
+} from "../../../indexer";
 import { SQLiteDriver } from "../../drivers/SQLiteDriver";
 import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
 import { IIndexDAO } from "../abstract/IIndexDAO";
@@ -18,11 +23,11 @@ export class SqliteIIndex extends SqliteTable<IindexEntry>
       {
         op: new SqlNotNullableFieldDefinition("CHAR", false, 6),
         pub: new SqlNotNullableFieldDefinition("VARCHAR", true, 50),
-        written_on: new SqlNotNullableFieldDefinition("VARCHAR", false, 80),
+        written_on: new SqlNotNullableFieldDefinition("VARCHAR", true, 80),
         writtenOn: new SqlNotNullableFieldDefinition("INT", true),
         created_on: new SqlNullableFieldDefinition("VARCHAR", false, 80),
         uid: new SqlNullableFieldDefinition("VARCHAR", true, 100),
-        hash: new SqlNullableFieldDefinition("VARCHAR", false, 70),
+        hash: new SqlNullableFieldDefinition("VARCHAR", true, 70),
         sig: new SqlNullableFieldDefinition("VARCHAR", false, 100),
         member: new SqlNullableFieldDefinition("BOOLEAN", true),
         wasMember: new SqlNullableFieldDefinition("BOOLEAN", true),
@@ -39,8 +44,6 @@ export class SqliteIIndex extends SqliteTable<IindexEntry>
 
   cleanCache(): void {}
 
-  triggerInit(): void {}
-
   /**
    * INSERT
    */
@@ -212,6 +215,18 @@ export class SqliteIIndex extends SqliteTable<IindexEntry>
     return (await this.getFromUID(uid)) as FullIindexEntry;
   }
 
+  @MonitorExecutionTime()
+  async getOldFromPubkey(pub: string): Promise<OldIindexEntry | null> {
+    const entries = await this.find(
+      "SELECT * FROM iindex WHERE pub = ? order by writtenOn ASC",
+      [pub]
+    );
+    if (!entries.length) {
+      return null;
+    }
+    return OldTransformers.toOldIindexEntry(reduce(entries));
+  }
+
   @MonitorExecutionTime()
   async getMembers(): Promise<{ pubkey: string; uid: string | null }[]> {
     const members = await this.find(
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts
index 6fe3b6230a545088d50a2cdcb03099cfdb1f80bb..db86d71f1e3fa2f5b5deff54b085fd4cf7f1d9e6 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteMIndex.ts
@@ -74,7 +74,10 @@ export class SqliteMIndex extends SqliteTable<MindexEntry>
     `);
   }
 
-  triggerInit(): void {}
+  async close(): Promise<void> {
+    await super.close();
+    await this.d2?.close();
+  }
 
   /**
    * INSERT
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts b/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts
index be65c3308f6d23dc9121f2016eb30bd2b4191a32..915cf21800683c5a2fb0ebbd5855649b6a3a6785 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteNodeIOManager.ts
@@ -2,9 +2,11 @@ import { SQLiteDriver } from "../../drivers/SQLiteDriver";
 import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
 import { NewLogger } from "../../../logger";
 import { ExitCodes } from "../../../common-libs/exit-codes";
+import { CommonConstants } from "../../../common-libs/constants";
 
 export class SqliteNodeIOManager<T> {
   private writePromise: Promise<any> | null = null;
+  private writePendingCount = 0;
 
   constructor(private driver: SQLiteDriver, private id: string) {}
 
@@ -13,12 +15,19 @@ export class SqliteNodeIOManager<T> {
     await this.writePromise;
     // We no more need to wait
     this.writePromise = null;
+    this.writePendingCount = 0;
   }
 
   public async sqlWrite(sql: string, params: any[]) {
-    // // Just promise that the writing will be done
+    if (this.writePendingCount >= CommonConstants.MAX_SQLITE_WRITE_PENDINGS) {
+      await this.wait4writing();
+    }
+
+    this.writePendingCount++;
+    // Just promise that the writing will be done
     this.writePromise = (this.writePromise || Promise.resolve())
       .then(() => this.driver.executeAll(sql, params))
+      .then(() => this.writePendingCount--)
       .catch((e) => {
         NewLogger().error(e);
         process.exit(ExitCodes.MINDEX_WRITING_ERROR);
@@ -42,6 +51,10 @@ export class SqliteNodeIOManager<T> {
   }
 
   async close() {
+    if (this.writePromise) {
+      // Wait for current writings to be done
+      await this.wait4writing();
+    }
     await this.driver.closeConnection();
   }
 }
diff --git a/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts b/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts
index a3e0bfe766cd0f9fb517747c2b3998b0b64cc88a..50ce4109382fa961e2a42531fbd6b8cbdfd7c525 100644
--- a/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqlitePeers.ts
@@ -17,13 +17,13 @@ export class SqlitePeers extends SqliteTable<DBPeer> implements PeerDAO {
         hash: new SqlNullableFieldDefinition("VARCHAR", false, 70),
         first_down: new SqlNullableFieldDefinition("INT", false),
         last_try: new SqlNullableFieldDefinition("INT", true),
-        lastContact: new SqlNullableFieldDefinition("INT", false),
+        lastContact: new SqlNullableFieldDefinition("INT", true),
         pubkey: new SqlNullableFieldDefinition("VARCHAR", true, 50),
         block: new SqlNullableFieldDefinition("VARCHAR", false, 100),
         signature: new SqlNullableFieldDefinition("VARCHAR", false, 100),
         endpoints: new SqlNullableFieldDefinition("JSON", true),
         raw: new SqlNullableFieldDefinition("TEXT", false),
-        nonWoT: new SqlNullableFieldDefinition("BOOLEAN", false),
+        nonWoT: new SqlNullableFieldDefinition("BOOLEAN", true),
       },
       getSqliteDB
     );
@@ -95,8 +95,6 @@ export class SqlitePeers extends SqliteTable<DBPeer> implements PeerDAO {
     return peer;
   }
 
-  triggerInit(): void {}
-
   withUPStatus(): Promise<DBPeer[]> {
     return this.findEntities("SELECT * FROM peers WHERE status = ?", ["UP"]);
   }
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts b/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts
new file mode 100644
index 0000000000000000000000000000000000000000..9fdc1eb5ff5ee2de3c56a1e16b646759d39c4a20
--- /dev/null
+++ b/app/lib/dal/indexDAL/sqlite/SqliteSIndex.ts
@@ -0,0 +1,231 @@
+import {
+  FullSindexEntry,
+  Indexer,
+  SimpleTxEntryForWallet,
+  SimpleTxInput,
+  SindexEntry,
+} from "../../../indexer";
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { SqliteTable } from "./SqliteTable";
+import {
+  SqlNotNullableFieldDefinition,
+  SqlNullableFieldDefinition,
+} from "./SqlFieldDefinition";
+import { SIndexDAO } from "../abstract/SIndexDAO";
+
+export class SqliteSIndex extends SqliteTable<SindexEntry>
+  implements SIndexDAO {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
+    super(
+      "sindex",
+      {
+        op: new SqlNotNullableFieldDefinition("CHAR", false, 6),
+        written_on: new SqlNotNullableFieldDefinition("VARCHAR", false, 80),
+        writtenOn: new SqlNotNullableFieldDefinition("INT", true),
+        srcType: new SqlNotNullableFieldDefinition("CHAR", true, 1),
+        tx: new SqlNullableFieldDefinition("VARCHAR", true, 70),
+        identifier: new SqlNotNullableFieldDefinition("VARCHAR", true, 70),
+        pos: new SqlNotNullableFieldDefinition("INT", true),
+        created_on: new SqlNullableFieldDefinition("VARCHAR", false, 100),
+        written_time: new SqlNotNullableFieldDefinition("INT", true),
+        locktime: new SqlNullableFieldDefinition("INT", false),
+        unlock: new SqlNullableFieldDefinition("VARCHAR", false, 255),
+        amount: new SqlNotNullableFieldDefinition("INT", false),
+        base: new SqlNotNullableFieldDefinition("INT", false),
+        conditions: new SqlNotNullableFieldDefinition("VARCHAR", true, 1000),
+        consumed: new SqlNullableFieldDefinition("BOOLEAN", true),
+      },
+      getSqliteDB
+    );
+  }
+
+  /**
+   * TECHNICAL
+   */
+
+  cleanCache(): void {}
+
+  /**
+   * INSERT
+   */
+
+  @MonitorExecutionTime()
+  async insert(record: SindexEntry): Promise<void> {
+    await this.insertInTable(this.driver, record);
+  }
+
+  @MonitorExecutionTime()
+  async insertBatch(records: SindexEntry[]): Promise<void> {
+    if (records.length) {
+      return this.insertBatchInTable(this.driver, records);
+    }
+  }
+
+  /**
+   * DELETE
+   */
+
+  @MonitorExecutionTime()
+  async removeBlock(blockstamp: string): Promise<void> {
+    await this.driver.sqlWrite(`DELETE FROM sindex WHERE written_on = ?`, [
+      blockstamp,
+    ]);
+  }
+
+  @MonitorExecutionTime()
+  async trimRecords(belowNumber: number): Promise<void> {
+    await this.trimConsumedSource(belowNumber);
+  }
+
+  /**
+   * FIND
+   */
+
+  @MonitorExecutionTime()
+  async getWrittenOn(blockstamp: string): Promise<SindexEntry[]> {
+    return this.find("SELECT * FROM sindex WHERE written_on = ?", [blockstamp]);
+  }
+
+  @MonitorExecutionTime()
+  async findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<SindexEntry[]> {
+    let sql = `SELECT * FROM sindex ${criterion.pub ? "WHERE pub = ?" : ""}`;
+    if (sort.length) {
+      sql += ` ORDER BY ${sort
+        .map((s) => `${s[0]} ${s[1] ? "DESC" : "ASC"}`)
+        .join(", ")}`;
+    }
+    return this.find(sql, criterion.pub ? [criterion.pub] : []);
+  }
+
+  private async find(sql: string, params: any[]): Promise<SindexEntry[]> {
+    return (await this.driver.sqlRead(sql, params)).map((r) => {
+      return {
+        index: "SINDEX",
+        op: r.op,
+        written_on: r.written_on,
+        writtenOn: r.writtenOn,
+        srcType: r.srcType,
+        tx: r.tx,
+        identifier: r.identifier,
+        pos: r.pos,
+        created_on: r.created_on,
+        written_time: r.written_time,
+        locktime: r.locktime,
+        unlock: r.unlock,
+        amount: r.amount,
+        base: r.base,
+        conditions: r.conditions,
+        consumed: r.consumed,
+        txObj: null as any,
+        age: 0,
+      };
+    });
+  }
+
+  /**
+   * OTHER
+   */
+
+  findByIdentifier(identifier: string): Promise<SindexEntry[]> {
+    return this.find("SELECT * FROM sindex WHERE identifier = ?", [identifier]);
+  }
+
+  findByPos(pos: number): Promise<SindexEntry[]> {
+    return this.find("SELECT * FROM sindex WHERE pos = ?", [pos]);
+  }
+
+  findTxSourceByIdentifierPosAmountBase(
+    identifier: string,
+    pos: number,
+    amount: number,
+    base: number
+  ): Promise<SimpleTxInput[]> {
+    return this.find(
+      "SELECT * FROM sindex " +
+        "WHERE identifier = ? " +
+        "AND pos = ? " +
+        "AND amount = ? " +
+        "AND base = ?",
+      [identifier, pos, amount, base]
+    );
+  }
+
+  getAvailableForConditions(conditionsStr: string): Promise<SindexEntry[]> {
+    return this.find(
+      "SELECT * FROM sindex s1 " +
+        "WHERE s1.conditions LIKE ? " +
+        "AND NOT s1.consumed " +
+        "AND NOT EXISTS (" +
+        "  SELECT * FROM sindex s2" +
+        "  WHERE s1.identifier = s2.identifier" +
+        "  AND s1.pos = s2.pos" +
+        "  AND s2.consumed" +
+        ")",
+      [conditionsStr]
+    );
+  }
+
+  async getAvailableForPubkey(
+    pubkey: string
+  ): Promise<
+    {
+      amount: number;
+      base: number;
+      conditions: string;
+      identifier: string;
+      pos: number;
+    }[]
+  > {
+    return this.getAvailableForConditions(`SIG(${pubkey})`); // TODO: maybe %SIG(...)%
+  }
+
+  async getTxSource(
+    identifier: string,
+    pos: number
+  ): Promise<FullSindexEntry | null> {
+    const entries = await this.find(
+      "SELECT * FROM sindex WHERE identifier = ? AND pos = ? ORDER BY writtenOn",
+      [identifier, pos]
+    );
+    return Indexer.DUP_HELPERS.reduceOrNull(entries);
+  }
+
+  async getWrittenOnTxs(blockstamp: string): Promise<SimpleTxEntryForWallet[]> {
+    const entries = await this.find(
+      "SELECT * FROM sindex WHERE written_on = ?",
+      [blockstamp]
+    );
+    const res: SimpleTxEntryForWallet[] = [];
+    entries.forEach((s) => {
+      res.push({
+        srcType: "T",
+        op: s.op,
+        conditions: s.conditions,
+        amount: s.amount,
+        base: s.base,
+        identifier: s.identifier,
+        pos: s.pos,
+      });
+    });
+    return res;
+  }
+
+  async trimConsumedSource(belowNumber: number): Promise<void> {
+    const sources = await this.find(
+      "SELECT * FROM sindex WHERE consumed AND writtenOn < ?",
+      [belowNumber]
+    );
+    await Promise.all(
+      sources.map(async (s) =>
+        this.driver.sqlWrite(
+          "DELETE FROM sindex " + "WHERE identifier = ? " + "AND pos = ?",
+          [s.identifier, s.pos]
+        )
+      )
+    );
+  }
+}
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts
index dcd909b299658e2eae8442ad462b7d1099d4ac2a..18738529666ce4e2f8da1f1b87235b031b39063f 100644
--- a/app/lib/dal/indexDAL/sqlite/SqliteTable.ts
+++ b/app/lib/dal/indexDAL/sqlite/SqliteTable.ts
@@ -4,7 +4,7 @@ import { SqliteNodeIOManager } from "./SqliteNodeIOManager";
 import { SQLiteDriver } from "../../drivers/SQLiteDriver";
 
 export class SqliteTable<T> {
-  private readonly pdriver: Promise<SQLiteDriver>;
+  private readonly _driverPromise: Promise<SQLiteDriver>;
   protected driver: SqliteNodeIOManager<T>;
 
   protected constructor(
@@ -14,28 +14,38 @@ export class SqliteTable<T> {
     },
     getSqliteDB: (dbName: string) => Promise<SQLiteDriver>
   ) {
-    this.pdriver = getSqliteDB(`${name}.db`);
+    this._driverPromise = getSqliteDB(`${name}.db`);
   }
 
   async init(): Promise<void> {
-    this.driver = new SqliteNodeIOManager(await this.pdriver, "sindex");
+    this.driver = new SqliteNodeIOManager(await this._driverPromise, this.name);
     await this.driver.sqlExec(`
     BEGIN;
     ${this.generateCreateTable()};
-    ${this.generateCreateIndexes()};
+    ${this.generateUpgradeSql()}
+    ${this.generateCreateIndexes()}
     COMMIT;
     `);
   }
 
   async close(): Promise<void> {
-    await this.driver.close();
+    await this.driver?.close();
+  }
+
+  async disableCheckConstraints(): Promise<void> {
+    await this.driver.sqlExec("PRAGMA ignore_check_constraints = true;");
+  }
+
+  async enableCheckConstraints(): Promise<void> {
+    await this.driver.sqlExec("PRAGMA ignore_check_constraints = false;");
   }
 
   generateCreateTable() {
     let sql = `CREATE TABLE IF NOT EXISTS ${this.name} (`;
     const fields = this.keys()
-      .map((fieldName) => {
-        const f = this.fields[fieldName] as SqlFieldDefinition;
+      .map((key) => {
+        const fieldName = String(key);
+        const f = this.fields[key] as SqlFieldDefinition;
         switch (f.type) {
           case "BOOLEAN":
             return `\n${fieldName} BOOLEAN${f.nullable ? " NULL" : ""}`;
@@ -60,10 +70,20 @@ export class SqliteTable<T> {
     return sql;
   }
 
+  /**
+   * Allow to migrate the table
+   */
+  generateUpgradeSql(): string {
+    return "";
+  }
+
   generateCreateIndexes() {
     return this.keys()
+      .filter((key) => this.fields[key]?.indexed)
       .map((fieldName) => {
-        return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${fieldName} ON ${this.name} (${fieldName});\n`;
+        return `CREATE INDEX IF NOT EXISTS idx_${this.name}_${String(
+          fieldName
+        )} ON ${this.name} (${String(fieldName)});\n`;
       })
       .join("");
   }
@@ -84,12 +104,14 @@ export class SqliteTable<T> {
   ) {
     const valuesOfRecord = fieldsToUpdate
       .map(
-        (fieldName) => `${fieldName} = ${this.getFieldValue(fieldName, record)}`
+        (fieldName) =>
+          `${String(fieldName)} = ${this.getFieldValue(fieldName, record)}`
       )
       .join(",");
     const conditionsOfRecord = whereFields
       .map(
-        (fieldName) => `${fieldName} = ${this.getFieldValue(fieldName, record)}`
+        (fieldName) =>
+          `${String(fieldName)} = ${this.getFieldValue(fieldName, record)}`
       )
       .join(",");
     await driver.sqlWrite(
@@ -194,6 +216,17 @@ export class SqliteTable<T> {
     )[0] as any).max;
   }
 
+  async countBy(fieldName: keyof T, fieldValue: any): Promise<number> {
+    return ((
+      await this.driver.sqlRead(
+        `SELECT COUNT(*) as max FROM ${this.name} WHERE ${String(
+          fieldName
+        )} = ?`,
+        [fieldValue]
+      )
+    )[0] as any).max;
+  }
+
   /**
    * Debugging function: allows to make a hot copy of an SQLite database to a new file, even if the source is in-memory.
    * @param {string} path The path where to write the copy.
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
new file mode 100644
index 0000000000000000000000000000000000000000..4c2439b4bb0704bba08b1badabd79b289091a576
--- /dev/null
+++ b/app/lib/dal/indexDAL/sqlite/SqliteTransactions.ts
@@ -0,0 +1,397 @@
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { SqliteTable } from "./SqliteTable";
+import {
+  SqlNotNullableFieldDefinition,
+  SqlNullableFieldDefinition,
+} from "./SqlFieldDefinition";
+import { DBTx } from "../../../db/DBTx";
+import { TxsDAO } from "../abstract/TxsDAO";
+import { SandBox } from "../../sqliteDAL/SandBox";
+import { TransactionDTO } from "../../../dto/TransactionDTO";
+import { Underscore } from "../../../common-libs/underscore";
+
+const constants = require("../../../constants");
+
+export class SqliteTransactions extends SqliteTable<DBTx> implements TxsDAO {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
+    super(
+      "txs",
+      {
+        hash: new SqlNotNullableFieldDefinition("VARCHAR", true, 70),
+        block_number: new SqlNullableFieldDefinition(
+          "INT",
+          true /* needed by getTxHistoryByPubkeyBetweenBlocks() */
+        ),
+        locktime: new SqlNullableFieldDefinition("INT", false),
+        version: new SqlNullableFieldDefinition("INT", false),
+        currency: new SqlNullableFieldDefinition("VARCHAR", false, 10),
+        comment: new SqlNullableFieldDefinition("TEXT", false),
+        blockstamp: new SqlNullableFieldDefinition(
+          "VARCHAR",
+          true /* needed by getWrittenOn() */,
+          100
+        ),
+        blockstampTime: new SqlNullableFieldDefinition(
+          "INT",
+          true /* needed by trimExpiredNonWrittenTxs() */
+        ),
+        time: new SqlNullableFieldDefinition(
+          "INT",
+          true /* needed by getTxHistoryByPubkeyBetweenTimes() */
+        ),
+        inputs: new SqlNullableFieldDefinition("JSON", false),
+        unlocks: new SqlNullableFieldDefinition("JSON", false),
+        outputs: new SqlNullableFieldDefinition("JSON", false),
+        issuer: new SqlNullableFieldDefinition(
+          "VARCHAR",
+          true,
+          50
+        ) /* computed column - needed by getTxHistoryXxx() */,
+        issuers: new SqlNullableFieldDefinition("JSON", false),
+        signatures: new SqlNullableFieldDefinition("JSON", false),
+        recipient: new SqlNullableFieldDefinition(
+          "VARCHAR",
+          true,
+          50
+        ) /* computed column - needed by getTxHistoryXxx() */,
+        recipients: new SqlNullableFieldDefinition("JSON", false),
+        written: new SqlNotNullableFieldDefinition(
+          "BOOLEAN",
+          true /* needed by getTxHistoryMempool() */
+        ),
+        removed: new SqlNotNullableFieldDefinition(
+          "BOOLEAN",
+          true /* needed by getSandboxTxs() */
+        ),
+        received: new SqlNullableFieldDefinition("BOOLEAN", false),
+        output_base: new SqlNullableFieldDefinition("INT", false),
+        output_amount: new SqlNullableFieldDefinition("INT", false),
+        written_on: new SqlNullableFieldDefinition("VARCHAR", false, 100),
+        writtenOn: new SqlNullableFieldDefinition("INT", false),
+      },
+      getSqliteDB
+    );
+    this.sandbox = new SandBox(
+      constants.SANDBOX_SIZE_TRANSACTIONS,
+      () => this.getSandboxTxs(),
+      (
+        compared: {
+          issuers: string[];
+          output_base: number;
+          output_amount: number;
+        },
+        reference: {
+          issuers: string[];
+          output_base: number;
+          output_amount: number;
+        }
+      ) => {
+        if (compared.output_base < reference.output_base) {
+          return -1;
+        } else if (compared.output_base > reference.output_base) {
+          return 1;
+        } else if (compared.output_amount > reference.output_amount) {
+          return -1;
+        } else if (compared.output_amount < reference.output_amount) {
+          return 1;
+        } else {
+          return 0;
+        }
+      }
+    );
+  }
+
+  /**
+   * TECHNICAL
+   */
+
+  @MonitorExecutionTime()
+  insert(record: DBTx): Promise<void> {
+    return this.insertInTable(this.driver, record);
+  }
+
+  @MonitorExecutionTime()
+  async insertBatch(records: DBTx[]): Promise<void> {
+    if (!records.length) return;
+    return this.insertBatchInTable(this.driver, records);
+  }
+
+  @MonitorExecutionTime()
+  async saveBatch(records: DBTx[]): Promise<void> {
+    if (!records.length) return;
+    await this.removeByHashBatch(Underscore.uniq(records.map((t) => t.hash)));
+    return this.insertBatchInTable(this.driver, records);
+  }
+
+  sandbox: SandBox<{
+    issuers: string[];
+    output_base: number;
+    output_amount: number;
+  }>;
+
+  async addLinked(
+    tx: TransactionDTO,
+    block_number: number,
+    time: number
+  ): Promise<DBTx> {
+    const exists = await this.existsByHash(tx.hash);
+    const theDBTx = DBTx.fromTransactionDTO(tx);
+    theDBTx.written = true;
+    theDBTx.block_number = block_number;
+    theDBTx.time = time;
+    if (!exists) {
+      await this.insert(theDBTx);
+    } else {
+      await this.update(
+        this.driver,
+        theDBTx,
+        ["block_number", "time", "received", "written", "removed", "hash"],
+        ["hash"]
+      );
+    }
+    return theDBTx;
+  }
+
+  async addPending(dbTx: DBTx): Promise<DBTx> {
+    const existing = (
+      await this.findEntities("SELECT * FROM txs WHERE hash = ?", [dbTx.hash])
+    )[0];
+    if (existing) {
+      await this.driver.sqlWrite("UPDATE txs SET written = ? WHERE hash = ?", [
+        false,
+        dbTx.hash,
+      ]);
+      return existing;
+    }
+    await this.insert(dbTx);
+    return dbTx;
+  }
+
+  cleanCache(): void {}
+
+  findRawWithOrder(
+    criterion: { pub?: string },
+    sort: (string | (string | boolean)[])[]
+  ): Promise<DBTx[]> {
+    throw Error(
+      "Should not be used method findRawWithOrder() on SqliteTransactions"
+    );
+  }
+
+  getAllPending(versionMin: number): Promise<DBTx[]> {
+    return this.findEntities("SELECT * FROM txs WHERE NOT written", []);
+  }
+
+  async getTxHistoryByPubkey(pubkey: string) {
+    return {
+      sent: await this.getLinkedWithIssuer(pubkey),
+      received: await this.getLinkedWithRecipient(pubkey),
+      sending: await this.getPendingWithIssuer(pubkey),
+      pending: await this.getPendingWithRecipient(pubkey),
+    };
+  }
+
+  async getTxHistoryByPubkeyBetweenBlocks(
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<{ sent: DBTx[]; received: DBTx[] }> {
+    return {
+      sent: await this.getLinkedWithIssuerByRange(
+        "block_number",
+        pubkey,
+        from,
+        to
+      ),
+      received: await this.getLinkedWithRecipientByRange(
+        "block_number",
+        pubkey,
+        from,
+        to
+      ),
+    };
+  }
+
+  async getTxHistoryByPubkeyBetweenTimes(
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<{ sent: DBTx[]; received: DBTx[] }> {
+    return {
+      sent: await this.getLinkedWithIssuerByRange("time", pubkey, from, to),
+      received: await this.getLinkedWithRecipientByRange(
+        "time",
+        pubkey,
+        from,
+        to
+      ),
+    };
+  }
+
+  async getTxHistoryMempool(
+    pubkey: string
+  ): Promise<{ sending: DBTx[]; pending: DBTx[] }> {
+    return {
+      sending: await this.getPendingWithIssuer(pubkey),
+      pending: await this.getPendingWithRecipient(pubkey),
+    };
+  }
+
+  getLinkedWithIssuer(pubkey: string): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
+        WHERE written 
+        AND (
+            issuer = ?
+            OR (issuer IS NULL AND issuers LIKE ?)
+          )`,
+      [pubkey, `%${pubkey}%`]
+    );
+  }
+
+  getLinkedWithIssuerByRange(
+    rangeFieldName: keyof DBTx,
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
+        WHERE written 
+        AND (
+          issuer = ?
+          OR (issuer IS NULL AND issuers LIKE ?)            
+        )
+        AND ${rangeFieldName} >= ? 
+        AND ${rangeFieldName} <= ?`,
+      [pubkey, `%${pubkey}%`, from, to]
+    );
+  }
+
+  getLinkedWithRecipient(pubkey: string): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
+        WHERE written 
+        AND (
+            recipient = ?
+            OR (recipient IS NULL AND issuer <> ? AND recipients LIKE ? )
+        )`,
+      [pubkey, pubkey, `%${pubkey}%`]
+    );
+  }
+
+  getLinkedWithRecipientByRange(
+    rangeColumnName: string,
+    pubkey: string,
+    from: number,
+    to: number
+  ): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
+        WHERE written 
+        AND (
+            recipient = ?
+            OR (recipient IS NULL AND issuer <> ? AND recipients LIKE ? )            
+        )
+        AND ${rangeColumnName} >= ? 
+        AND ${rangeColumnName} <= ?`,
+      [pubkey, pubkey, `%${pubkey}%`, from, to]
+    );
+  }
+
+  getPendingWithIssuer(pubkey: string): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
+        WHERE NOT written
+        AND (
+            issuer = ? 
+            OR (issuer IS NULL AND issuers LIKE ?)
+        )`,
+      [pubkey, `%${pubkey}%`]
+    );
+  }
+
+  getPendingWithRecipient(pubkey: string): Promise<DBTx[]> {
+    return this.findEntities(
+      `SELECT * FROM txs 
+        WHERE NOT written 
+        AND (
+            recipient = ?
+            OR (recipient IS NULL AND issuer <> ? AND recipients LIKE ?)
+        ) `,
+      [pubkey, pubkey, `%${pubkey}%`]
+    );
+  }
+
+  async existsByHash(hash: string): Promise<boolean> {
+    return (await this.countBy("hash", hash)) > 0;
+  }
+
+  async getTX(hash: string): Promise<DBTx> {
+    return (
+      await this.findEntities("SELECT * FROM txs WHERE hash = ?", [hash])
+    )[0];
+  }
+
+  getWrittenOn(blockstamp: string): Promise<DBTx[]> {
+    return this.findEntities("SELECT * FROM txs WHERE blockstamp = ?", [
+      blockstamp,
+    ]);
+  }
+
+  async removeAll(): Promise<void> {
+    await this.driver.sqlWrite("DELETE FROM txs", []);
+  }
+
+  removeBlock(blockstamp: string): Promise<void> {
+    throw Error(
+      "Should not be used method removeBlock() on SqliteTransactions"
+    );
+  }
+
+  async removeByHashBatch(hashArray: string[]): Promise<void> {
+    // Delete by slice of 100 items (because SQLite IN operator is limited)
+    await Promise.all(
+      Underscore.range(0, hashArray.length, 100)
+        .map(
+          (start) =>
+            `DELETE FROM txs WHERE hash IN ('${hashArray
+              .slice(start, start + 100)
+              .join("', '")}')`
+        )
+        .map((sql) => this.driver.sqlWrite(sql, []))
+    );
+  }
+
+  removeByHash(hash: string): Promise<void> {
+    return this.driver.sqlWrite("DELETE FROM txs WHERE hash = ?", [hash]);
+  }
+
+  trimExpiredNonWrittenTxs(limitTime: number): Promise<void> {
+    return this.driver.sqlWrite(
+      "DELETE FROM txs WHERE NOT written AND blockstampTime <= ?",
+      [limitTime]
+    );
+  }
+
+  /**************************
+   * SANDBOX STUFF
+   */
+
+  @MonitorExecutionTime()
+  async getSandboxTxs() {
+    return this.findEntities(
+      "SELECT * FROM txs WHERE NOT written AND NOT removed ORDER BY output_base DESC, output_amount DESC",
+      []
+    );
+  }
+
+  getSandboxRoom() {
+    return this.sandbox.getSandboxRoom();
+  }
+
+  setSandboxSize(maxSize: number) {
+    this.sandbox.maxSize = maxSize;
+  }
+}
diff --git a/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts b/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts
new file mode 100644
index 0000000000000000000000000000000000000000..a9f8fb03134194958ea298b49f64411ba686e6ed
--- /dev/null
+++ b/app/lib/dal/indexDAL/sqlite/SqliteWallet.ts
@@ -0,0 +1,65 @@
+import { SQLiteDriver } from "../../drivers/SQLiteDriver";
+import { MonitorExecutionTime } from "../../../debug/MonitorExecutionTime";
+import { SqliteTable } from "./SqliteTable";
+import { SqlNotNullableFieldDefinition } from "./SqlFieldDefinition";
+import { WalletDAO } from "../abstract/WalletDAO";
+import { DBWallet } from "../../../db/DBWallet";
+
+export class SqliteWallet extends SqliteTable<DBWallet> implements WalletDAO {
+  constructor(getSqliteDB: (dbName: string) => Promise<SQLiteDriver>) {
+    super(
+      "wallet",
+      {
+        conditions: new SqlNotNullableFieldDefinition("VARCHAR", true, 1000),
+        balance: new SqlNotNullableFieldDefinition("INT", true),
+      },
+      getSqliteDB
+    );
+  }
+
+  /**
+   * TECHNICAL
+   */
+
+  cleanCache(): void {}
+
+  /**
+   * INSERT
+   */
+
+  @MonitorExecutionTime()
+  async insert(record: DBWallet): Promise<void> {
+    await this.insertInTable(this.driver, record);
+  }
+
+  @MonitorExecutionTime()
+  async insertBatch(records: DBWallet[]): Promise<void> {
+    if (records.length) {
+      return this.insertBatchInTable(this.driver, records);
+    }
+  }
+
+  private async find(sql: string, params: any[]): Promise<DBWallet[]> {
+    return (await this.driver.sqlRead(sql, params)).map((r) => {
+      return {
+        conditions: r.conditions,
+        balance: r.balance,
+      };
+    });
+  }
+
+  async getWallet(conditions: string): Promise<DBWallet> {
+    return (
+      await this.find("SELECT * FROM wallet WHERE conditions = ?", [conditions])
+    )[0];
+  }
+
+  async saveWallet(wallet: DBWallet): Promise<DBWallet> {
+    await this.insert(wallet);
+    return wallet;
+  }
+
+  listAll(): Promise<DBWallet[]> {
+    return this.find("SELECT * FROM wallet", []);
+  }
+}
diff --git a/app/lib/dal/sqliteDAL/BlockDAL.ts b/app/lib/dal/sqliteDAL/BlockDAL.ts
index 3f82cfe5c5c0f99c5217f2da2c296fa42b84933f..988550beea4c5167d11309ca6612c647ca4d1eaa 100644
--- a/app/lib/dal/sqliteDAL/BlockDAL.ts
+++ b/app/lib/dal/sqliteDAL/BlockDAL.ts
@@ -77,7 +77,7 @@ export class BlockDAL extends AbstractSQLite<DBBlock> {
         "transactions",
       ],
       // Booleans
-      ["wrong", "legacy"],
+      ["legacy"],
       // BigIntegers
       ["monetaryMass"],
       // Transient
diff --git a/app/lib/dal/sqliteDAL/IdentityDAL.ts b/app/lib/dal/sqliteDAL/IdentityDAL.ts
index 96d602a0af9da5b84031716ff0f3d18bcb54938b..851b27c0d7b0c7eb6dca672d88e6c8bb1d3e28df 100644
--- a/app/lib/dal/sqliteDAL/IdentityDAL.ts
+++ b/app/lib/dal/sqliteDAL/IdentityDAL.ts
@@ -328,6 +328,10 @@ export class IdentityDAL extends AbstractSQLite<DBIdentity> {
     });
   }
 
+  findByPub(pub: string) {
+    return this.sqlFind({ pubkey: pub });
+  }
+
   async trimExpiredIdentities(medianTime: number) {
     await this.exec(
       "DELETE FROM " +
diff --git a/app/lib/dal/sqliteDAL/MetaDAL.ts b/app/lib/dal/sqliteDAL/MetaDAL.ts
index 5660bb53567c97a5eaf4fea2411affdf385fb506..76047463efe82c529d06d0b99c3527611deaa834 100644
--- a/app/lib/dal/sqliteDAL/MetaDAL.ts
+++ b/app/lib/dal/sqliteDAL/MetaDAL.ts
@@ -16,7 +16,10 @@ import { SQLiteDriver } from "../drivers/SQLiteDriver";
 import { ConfDTO } from "../../dto/ConfDTO";
 import { TransactionDTO } from "../../dto/TransactionDTO";
 import { IdentityDAL } from "./IdentityDAL";
+import { SqliteTransactions } from "../indexDAL/sqlite/SqliteTransactions";
+import { Directory } from "../../system/directory";
 
+const constants = require("../../constants");
 const logger = require("../../logger").NewLogger("metaDAL");
 
 export interface DBMeta {
@@ -27,7 +30,10 @@ export interface DBMeta {
 export class MetaDAL extends AbstractSQLite<DBMeta> {
   driverCopy: SQLiteDriver;
 
-  constructor(driver: SQLiteDriver) {
+  constructor(
+    driver: SQLiteDriver,
+    private getSqliteDB: (dbName: string) => Promise<SQLiteDriver>
+  ) {
     super(
       driver,
       "meta",
@@ -88,29 +94,6 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
       ");" +
       "CREATE INDEX IF NOT EXISTS idx_block_hash ON block (hash);" +
       "CREATE INDEX IF NOT EXISTS idx_block_fork ON block (fork);" +
-      // Same, but for Transactions
-      "CREATE TABLE IF NOT EXISTS txs (" +
-      "hash CHAR(64) NOT NULL," +
-      "block_number INTEGER," +
-      "locktime INTEGER NOT NULL," +
-      "version INTEGER NOT NULL," +
-      "currency VARCHAR(50) NOT NULL," +
-      "comment VARCHAR(255) NOT NULL," +
-      "time DATETIME," +
-      "inputs TEXT NOT NULL," +
-      "unlocks TEXT NOT NULL," +
-      "outputs TEXT NOT NULL," +
-      "issuers TEXT NOT NULL," +
-      "signatures TEXT NOT NULL," +
-      "recipients TEXT NOT NULL," +
-      "written BOOLEAN NOT NULL," +
-      "removed BOOLEAN NOT NULL," +
-      "PRIMARY KEY (hash)" +
-      ");" +
-      "CREATE INDEX IF NOT EXISTS idx_txs_issuers ON txs (issuers);" +
-      "CREATE INDEX IF NOT EXISTS idx_txs_written ON txs (written);" +
-      "CREATE INDEX IF NOT EXISTS idx_txs_removed ON txs (removed);" +
-      "CREATE INDEX IF NOT EXISTS idx_txs_hash ON txs (hash);" +
       "COMMIT;",
 
     // Test
@@ -118,15 +101,13 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
       "BEGIN;" +
       "CREATE VIEW IF NOT EXISTS identities_pending AS SELECT * FROM idty WHERE NOT written;" +
       "CREATE VIEW IF NOT EXISTS certifications_pending AS SELECT * FROM cert WHERE NOT written;" +
-      "CREATE VIEW IF NOT EXISTS transactions_pending AS SELECT * FROM txs WHERE NOT written;" +
-      "CREATE VIEW IF NOT EXISTS transactions_desc AS SELECT * FROM txs ORDER BY time DESC;" +
       "CREATE VIEW IF NOT EXISTS forks AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE fork ORDER BY number DESC;" +
       "CREATE VIEW IF NOT EXISTS blockchain AS SELECT number, hash, issuer, monetaryMass, dividend, UDTime, membersCount, medianTime, time, * FROM block WHERE NOT fork ORDER BY number DESC;" +
       "CREATE VIEW IF NOT EXISTS network AS select i.uid, (last_try - first_down) / 1000 as down_delay_in_sec, p.* from peer p LEFT JOIN idty i on i.pubkey = p.pubkey ORDER by down_delay_in_sec;" +
       "COMMIT;",
 
     // New `receveid` column
-    2: "BEGIN; ALTER TABLE txs ADD COLUMN received INTEGER NULL; COMMIT;",
+    2: async () => {},
 
     // Update wrong recipients field (was not filled in)
     3: async () => {},
@@ -140,12 +121,8 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
     6: "BEGIN; ALTER TABLE idty ADD COLUMN expired INTEGER NULL; COMMIT;",
     7: "BEGIN; ALTER TABLE cert ADD COLUMN expired INTEGER NULL; COMMIT;",
     8: "BEGIN; ALTER TABLE membership ADD COLUMN expired INTEGER NULL; COMMIT;",
-    9:
-      "BEGIN;" +
-      "ALTER TABLE txs ADD COLUMN output_base INTEGER NULL;" +
-      "ALTER TABLE txs ADD COLUMN output_amount INTEGER NULL;" +
-      "COMMIT;",
-    10: "BEGIN; ALTER TABLE txs ADD COLUMN blockstamp VARCHAR(200) NULL; COMMIT;",
+    9: async () => {},
+    10: async () => {},
     11:
       "BEGIN;" +
       "ALTER TABLE block ADD COLUMN issuersFrame INTEGER NULL;" +
@@ -153,14 +130,12 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
       "ALTER TABLE block ADD COLUMN issuersCount INTEGER NULL;" +
       "COMMIT;",
     12: async () => {
-      let blockDAL = new MetaDAL(this.driverCopy);
+      let blockDAL = new MetaDAL(this.driverCopy, this.getSqliteDB);
       await blockDAL.exec("ALTER TABLE block ADD COLUMN len INTEGER NULL;");
-      await blockDAL.exec("ALTER TABLE txs ADD COLUMN len INTEGER NULL;");
     },
-    13: "BEGIN; ALTER TABLE txs ADD COLUMN blockstampTime INTEGER NULL; COMMIT;",
+    13: async () => {},
     14:
       "BEGIN; " +
-      "CREATE VIEW IF NOT EXISTS sandbox_txs AS SELECT * FROM txs WHERE NOT written AND NOT removed ORDER BY output_base DESC, output_amount DESC;" +
       "CREATE VIEW IF NOT EXISTS sandbox_idty AS SELECT " +
       "I.*, " +
       "I.hash, " +
@@ -190,16 +165,8 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
     },
 
     16: async () => {},
-
-    17: async () => {
-      // This migration is now obsolete
-    },
-
-    18:
-      "BEGIN;" +
-      // Add a `massReeval` column
-      // 'ALTER TABLE b_index ADD COLUMN massReeval VARCHAR(100) NOT NULL DEFAULT \'0\';' +
-      "COMMIT;",
+    17: async () => {},
+    18: async () => {},
 
     19:
       "BEGIN;" +
@@ -207,67 +174,69 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
       "ALTER TABLE idty ADD COLUMN removed BOOLEAN NULL DEFAULT 0;" +
       "COMMIT;",
 
-    /**
-     * Feeds the table of wallets with balances
-     */
+    // Feeds the table of wallets with balances
     20: async () => {},
 
-    21: async (conf: ConfDTO) => {},
+    21: async () => {},
 
     // Replay the wallet table feeding, because of a potential bug
     22: () => {
       return this.migrations[20]();
     },
 
-    23: "BEGIN;" + "COMMIT;",
+    23: async () => {},
 
     /**
      * Feeds the m_index.chainable_on correctly
      */
-    24: async (conf: ConfDTO) => {},
+    24: async () => {},
 
-    /**
-     * Wrong transaction storage
-     */
-    25: async () => {
-      const txsDAL: any = new MetaDAL(this.driverCopy);
-      const wrongTXS = await txsDAL.query(
-        "SELECT * FROM txs WHERE outputs LIKE ? OR inputs LIKE ?",
-        ["%amount%", "%amount%"]
+    // Wrong transaction storage
+    25: async () => {},
+
+    // Drop old table 'txs' (replaced by a file 'txs.db')
+    26: "BEGIN;" + "DROP TABLE IF EXISTS txs;" + "COMMIT;",
+
+    // Add columns 'issuer' and 'recipient' in transaction table - see issue #1442
+    27: async () => {
+      const txsDriver = await this.getSqliteDB("txs.db");
+      const txsDAL = new MetaDAL(txsDriver, this.getSqliteDB);
+
+      // Drop unused indices
+      await txsDAL.exec(
+        "BEGIN;" +
+          "DROP INDEX IF EXISTS idx_txs_locktime;" +
+          "DROP INDEX IF EXISTS idx_txs_version;" +
+          "DROP INDEX IF EXISTS idx_txs_currency;" +
+          "DROP INDEX IF EXISTS idx_txs_comment;" +
+          "DROP INDEX IF EXISTS idx_txs_signatures;" +
+          "DROP INDEX IF EXISTS idx_txs_received;" +
+          "DROP INDEX IF EXISTS idx_txs_output_base;" +
+          "DROP INDEX IF EXISTS idx_txs_output_amount;" +
+          "COMMIT;"
       );
-      let i = 1;
-      for (const tx of wrongTXS) {
-        logger.info(
-          "Updating incorrect transaction %s/%s.",
-          i,
-          wrongTXS.length
+
+      // Add new columns 'issuer' and 'recipient'
+      try {
+        await txsDAL.exec(
+          "ALTER TABLE txs ADD COLUMN issuer VARCHAR(50) NULL;" +
+            "ALTER TABLE txs ADD COLUMN recipient VARCHAR(50) NULL;"
         );
-        i++;
-        const dto = TransactionDTO.fromJSONObject(tx);
-        dto.outputs = dto.outputs.map((o) => {
-          if (typeof o === "object") {
-            return TransactionDTO.outputObj2Str(o);
-          }
-          return o;
-        });
-        dto.inputs = dto.inputs.map((o) => {
-          if (typeof o === "object") {
-            return TransactionDTO.inputObj2Str(o);
-          }
-          return o;
-        });
+      } catch (err) {
+        // Silent: if column already exists
+      }
+
+      // Fill columns 'issuer' and 'recipient'
+      // SHOULD start the transaction after ALTER TABLE, to avoid leaving an unclosed transaction if it fails - closes #1448
+      try {
         await txsDAL.exec(
-          "UPDATE txs SET " +
-            "outputs = '" +
-            JSON.stringify(dto.outputs) +
-            "', " +
-            "inputs = '" +
-            JSON.stringify(dto.inputs) +
-            "' " +
-            "WHERE hash = '" +
-            tx.hash +
-            "'"
+          "BEGIN;" +
+            "UPDATE txs SET issuer = SUBSTR(issuers, 2, LENGTH(issuers) - 4) WHERE issuer IS NULL AND issuers NOT LIKE '%,%';" +
+            "UPDATE txs SET recipient = SUBSTR(recipients, 2, LENGTH(recipients) - 4) WHERE recipient IS NULL AND recipients NOT LIKE '%,%';" +
+            "COMMIT;"
         );
+      } catch (err) {
+        // Silent: ignore failure (e.g. if the columns were already filled by a previous run)
       }
     },
   };
@@ -299,17 +268,23 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
         await migration(conf);
       }
     } catch (e) {
-      logger.warn("An error occured during DB migration, continue.", e);
+      logger.warn("An error occurred during DB migration, continue.", e);
     }
   }
 
   async upgradeDatabase(conf: ConfDTO) {
     let version = await this.getVersion();
     while (this.migrations[version]) {
+      logger.trace(
+        `Upgrade database... (patch ${version}/${
+          constants.CURRENT_DB_VERSION - 1
+        })`
+      );
+
       await this.executeMigration(this.migrations[version], conf);
-      // Automated increment
-      await this.exec("UPDATE meta SET version = version + 1");
+      // Version increment
       version++;
+      await this.exec("UPDATE meta SET version = " + version);
     }
   }
 
@@ -319,9 +294,10 @@ export class MetaDAL extends AbstractSQLite<DBMeta> {
 
   async getVersion() {
     try {
-      const row = await this.getRow();
-      return row.version;
+      const { version } = await this.getRow();
+      return version;
     } catch (e) {
+      // Insert zero, as first version
       await this.exec("INSERT INTO " + this.table + " VALUES (1,0);");
       return 0;
     }
diff --git a/app/lib/db/DBBlock.ts b/app/lib/db/DBBlock.ts
index 7f8eb593bbf2d442eb432fcbf57cbd58ea35ad48..39e2a5b29b4faa7bef156ecb4d352c3f09f0c0e3 100644
--- a/app/lib/db/DBBlock.ts
+++ b/app/lib/db/DBBlock.ts
@@ -48,7 +48,6 @@ export class DBBlock {
   UDTime: number;
   writtenOn: number;
   written_on: string;
-  wrong = false;
 
   constructor() {}
 
diff --git a/app/lib/db/DBTx.ts b/app/lib/db/DBTx.ts
index 66650570cd75ce873d01d9fae24eee7a2f336670..ad32c2d8c5dace8d75949ef7da1ee6c4c77d88c5 100644
--- a/app/lib/db/DBTx.ts
+++ b/app/lib/db/DBTx.ts
@@ -22,6 +22,9 @@ export class DBTx {
   output_base: number;
   output_amount: number;
 
+  issuer: string | null; // Computed
+  recipient: string | null; // Computed
+
   static fromTransactionDTO(tx: TransactionDTO) {
     const dbTx = new DBTx();
     dbTx.hash = tx.hash;
@@ -41,18 +44,14 @@ export class DBTx {
     dbTx.removed = false;
     dbTx.output_base = tx.output_base;
     dbTx.output_amount = tx.output_amount;
-    return dbTx;
-  }
 
-  static setRecipients(txs: DBTx[]) {
-    // Each transaction must have a good "recipients" field for future searchs
-    txs.forEach((tx) => (tx.recipients = DBTx.outputs2recipients(tx)));
-  }
+    // Computed columns (unique issuer and/or recipient)
+    dbTx.issuer = dbTx.issuers.length === 1 ? dbTx.issuers[0] : null;
+    const recipients = !dbTx.issuer
+      ? dbTx.recipients
+      : dbTx.recipients.filter((r) => r !== dbTx.issuer);
+    dbTx.recipient = recipients.length === 1 ? recipients[0] : null;
 
-  static outputs2recipients(tx: DBTx) {
-    return tx.outputs.map(function (out) {
-      const recipent = out.match("SIG\\((.*)\\)");
-      return (recipent && recipent[1]) || "UNKNOWN";
-    });
+    return dbTx;
   }
 }
diff --git a/app/lib/dto/ConfDTO.ts b/app/lib/dto/ConfDTO.ts
index 83bc1240db58a1d556862becf1a0230c7133bda2..31552461184860f28a50dac149161254cf61af88 100644
--- a/app/lib/dto/ConfDTO.ts
+++ b/app/lib/dto/ConfDTO.ts
@@ -68,8 +68,8 @@ export interface CurrencyConfDTO {
 export interface KeypairConfDTO {
   pair: Keypair;
   oldPair: Keypair | null;
-  salt: string;
-  passwd: string;
+  salt?: string;
+  passwd?: string;
 }
 
 export interface NetworkConfDTO {
@@ -273,7 +273,6 @@ export class ConfDTO
   }
 
   static defaultConf() {
-    /*return new ConfDTO("", "", [], [], 0, 3600 * 1000, constants.PROOF_OF_WORK.DEFAULT.CPU, 1, constants.PROOF_OF_WORK.DEFAULT.PREFIX, 0, 0, constants.CONTRACT.DEFAULT.C, constants.CONTRACT.DEFAULT.DT, constants.CONTRACT.DEFAULT.DT_REEVAL, 0, constants.CONTRACT.DEFAULT.UD0, 0, 0, constants.CONTRACT.DEFAULT.STEPMAX, constants.CONTRACT.DEFAULT.SIGPERIOD, 0, constants.CONTRACT.DEFAULT.SIGVALIDITY, constants.CONTRACT.DEFAULT.MSVALIDITY, constants.CONTRACT.DEFAULT.SIGQTY, constants.CONTRACT.DEFAULT.SIGSTOCK, constants.CONTRACT.DEFAULT.X_PERCENT, constants.CONTRACT.DEFAULT.PERCENTROT, constants.CONTRACT.DEFAULT.POWDELAY, constants.CONTRACT.DEFAULT.AVGGENTIME, constants.CONTRACT.DEFAULT.MEDIANTIMEBLOCKS, false, 3000, false, constants.BRANCHES.DEFAULT_WINDOW_SIZE, constants.CONTRACT.DEFAULT.IDTYWINDOW, constants.CONTRACT.DEFAULT.MSWINDOW, constants.CONTRACT.DEFAULT.SIGWINDOW, 0, { pub:'', sec:'' }, null, "", "", 0, "", "", "", "", 0, "", "", null, false, "", true, true)*/
     return {
       currency: null,
       endpoints: [],
@@ -303,6 +302,10 @@ export class ConfDTO
       switchOnHeadAdvance: CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS,
       nonWoTPeersLimit: CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT,
       txsMempoolSize: constants.SANDBOX_SIZE_TRANSACTIONS,
+      storage: {
+        transactions: true,
+        wotwizard: false,
+      },
     };
   }
 
diff --git a/app/lib/dto/PeerDTO.ts b/app/lib/dto/PeerDTO.ts
index 8f5c14b4f711a282590d3fd404713b6f6aa5053b..f9de1607af0602e713007672fa681052da0f9d0d 100644
--- a/app/lib/dto/PeerDTO.ts
+++ b/app/lib/dto/PeerDTO.ts
@@ -113,6 +113,7 @@ export class PeerDTO implements Cloneable {
           ipv4: matchesBMA[4] || "",
           ipv6: matchesBMA[6] || "",
           port: parseInt(matchesBMA[8]) || 9101,
+          path: matchesBMA[10] || "",
         };
       } else if (matchesBMAS) {
         notFound = false;
@@ -271,7 +272,8 @@ export class PeerDTO implements Cloneable {
   getURL() {
     const bma = this.getBMA();
     let base = this.getHostPreferDNS();
-    if (bma.port) base += ":" + bma.port;
+    if (base && bma.port) base += ":" + bma.port;
+    if (base && bma.path) base += bma.path;
     return base;
   }
 
@@ -377,4 +379,8 @@ export class PeerDTO implements Cloneable {
     }
     return 0;
   }
+
+  static isBMA(endpoint: string) {
+    return (endpoint && !!endpoint.match(/^(BASIC_MERKLED_API|BMAS)/)) || false;
+  }
 }
diff --git a/app/lib/dto/TransactionDTO.ts b/app/lib/dto/TransactionDTO.ts
index 650d7cc3f9b57855a9eb8125b41e12f4af08ee70..8136ad165a25edbfc25cf5a750c531a4a763352b 100644
--- a/app/lib/dto/TransactionDTO.ts
+++ b/app/lib/dto/TransactionDTO.ts
@@ -15,6 +15,7 @@ import { hashf } from "../common";
 import { Cloneable } from "./Cloneable";
 import { verify } from "../../../neon/lib";
 import { TransactionDTOV10 } from "../../../neon/native";
+import { CommonConstants } from "../common-libs/constants";
 
 export interface BaseDTO {
   base: number;
@@ -75,8 +76,8 @@ export class TransactionDTO implements Cloneable {
     public currency: string,
     public locktime: number,
     public hash: string,
-    public blockstamp: string,
-    public blockstampTime: number,
+    public blockstamp: string, // Reference block of the TX
+    public blockstampTime: number, // Median time of the reference block
     public issuers: string[],
     public inputs: string[],
     public outputs: string[],
@@ -96,14 +97,14 @@ export class TransactionDTO implements Cloneable {
 
   get output_amount() {
     return this.outputs.reduce(
-      (maxBase, output) => Math.max(maxBase, parseInt(output.split(":")[0])),
+      (sum, output) => sum + parseInt(output.split(":")[0]),
       0
     );
   }
 
   get output_base() {
     return this.outputs.reduce(
-      (sum, output) => sum + parseInt(output.split(":")[1]),
+      (maxBase, output) => Math.max(maxBase, parseInt(output.split(":")[1])),
       0
     );
   }
@@ -127,8 +128,11 @@ export class TransactionDTO implements Cloneable {
   }
 
   getHash() {
-    const raw = TransactionDTO.toRAW(this);
-    return hashf(raw);
+    if (!this.hash) {
+      const raw = TransactionDTO.toRAW(this);
+      this.hash = hashf(raw).toUpperCase();
+    }
+    return this.hash;
   }
 
   getRawTxNoSig() {
@@ -162,40 +166,31 @@ export class TransactionDTO implements Cloneable {
   }
 
   outputsAsRecipients(): string[] {
-    return this.outputs.map((out) => {
-      const recipent = out.match("SIG\\((.*)\\)");
-      return (recipent && recipent[1]) || "UNKNOWN";
-    });
+    return this.outputs.reduce((res, output) => {
+      let match: any;
+      const recipients: string[] = [];
+      while (
+        output &&
+        (match = CommonConstants.TRANSACTION.OUTPUT_CONDITION_SIG_PUBKEY.exec(
+          output
+        )) !== null
+      ) {
+        const pub = match[1] as string;
+        if (!res.includes(pub) && !recipients.includes(pub)) {
+          recipients.push(pub);
+        }
+        output = output.substring(match.index + match[0].length);
+      }
+      if (recipients.length) {
+        return res.concat(recipients);
+      }
+      if (res.includes("UNKNOWN")) return res;
+      return res.concat("UNKNOWN");
+    }, <string[]>[]);
   }
 
   getRaw() {
-    let raw = "";
-    raw += "Version: " + this.version + "\n";
-    raw += "Type: Transaction\n";
-    raw += "Currency: " + this.currency + "\n";
-    raw += "Blockstamp: " + this.blockstamp + "\n";
-    raw += "Locktime: " + this.locktime + "\n";
-    raw += "Issuers:\n";
-    (this.issuers || []).forEach((issuer) => {
-      raw += issuer + "\n";
-    });
-    raw += "Inputs:\n";
-    this.inputs.forEach((input) => {
-      raw += input + "\n";
-    });
-    raw += "Unlocks:\n";
-    this.unlocks.forEach((unlock) => {
-      raw += unlock + "\n";
-    });
-    raw += "Outputs:\n";
-    this.outputs.forEach((output) => {
-      raw += output + "\n";
-    });
-    raw += "Comment: " + (this.comment || "") + "\n";
-    this.signatures.forEach((signature) => {
-      raw += signature + "\n";
-    });
-    return raw;
+    return TransactionDTO.toRAW(this);
   }
 
   getCompactVersion() {
@@ -232,7 +227,7 @@ export class TransactionDTO implements Cloneable {
   }
 
   computeAllHashes() {
-    this.hash = hashf(this.getRaw()).toUpperCase();
+    this.hash = this.getHash();
   }
 
   json() {
@@ -325,32 +320,32 @@ export class TransactionDTO implements Cloneable {
     };
   }
 
-  static toRAW(json: TransactionDTO, noSig = false) {
+  static toRAW(tx: TransactionDTO, noSig = false) {
     let raw = "";
-    raw += "Version: " + json.version + "\n";
+    raw += "Version: " + tx.version + "\n";
     raw += "Type: Transaction\n";
-    raw += "Currency: " + json.currency + "\n";
-    raw += "Blockstamp: " + json.blockstamp + "\n";
-    raw += "Locktime: " + json.locktime + "\n";
+    raw += "Currency: " + tx.currency + "\n";
+    raw += "Blockstamp: " + tx.blockstamp + "\n";
+    raw += "Locktime: " + tx.locktime + "\n";
     raw += "Issuers:\n";
-    (json.issuers || []).forEach((issuer) => {
+    (tx.issuers || []).forEach((issuer) => {
       raw += issuer + "\n";
     });
     raw += "Inputs:\n";
-    (json.inputs || []).forEach((input) => {
+    (tx.inputs || []).forEach((input) => {
       raw += input + "\n";
     });
     raw += "Unlocks:\n";
-    (json.unlocks || []).forEach((unlock) => {
+    (tx.unlocks || []).forEach((unlock) => {
       raw += unlock + "\n";
     });
     raw += "Outputs:\n";
-    (json.outputs || []).forEach((output) => {
+    (tx.outputs || []).forEach((output) => {
       raw += output + "\n";
     });
-    raw += "Comment: " + (json.comment || "") + "\n";
+    raw += "Comment: " + (tx.comment || "") + "\n";
     if (!noSig) {
-      (json.signatures || []).forEach((signature) => {
+      (tx.signatures || []).forEach((signature) => {
         raw += signature + "\n";
       });
     }
diff --git a/app/lib/indexer.ts b/app/lib/indexer.ts
index c87fe09f726928a8ab6a6b478eb8ae149894fc94..8a812c568dc65cceeb6d59beab5b59f89b94e90d 100644
--- a/app/lib/indexer.ts
+++ b/app/lib/indexer.ts
@@ -600,6 +600,10 @@ export class Indexer {
 
     const HEAD_1 = await head(1);
 
+    // CHECK Bindex is valid
+    if (HEAD_1 && HEAD_1.number !== HEAD.number - 1)
+      throw new Error("Invalid bindex: cannot found HEAD-1!");
+
     // BR_G04
     await Indexer.prepareIssuersCount(HEAD, range, HEAD_1);
 
@@ -2436,7 +2440,7 @@ export function reduceForDBTrimming<T extends { writtenOn: number }>(
 
 export function reduce<T>(records: T[]): T {
   return records.reduce((obj: T, record) => {
-    const keys = Object.keys(record) as (keyof T)[];
+    const keys = Underscore.keys<T>(record);
     for (const k of keys) {
       if (record[k] !== undefined && record[k] !== null) {
         obj[k] = record[k];
diff --git a/app/lib/streams/multicaster.ts b/app/lib/streams/multicaster.ts
index 5afd88cd1e68267981d9c26c5bd6bcc46e1f48da..2dd59e92d61cacab894a5592f2018bf5e0db8cb3 100644
--- a/app/lib/streams/multicaster.ts
+++ b/app/lib/streams/multicaster.ts
@@ -267,7 +267,7 @@ export class Multicaster extends stream.Transform {
     if (!peer.isReachable()) {
       return Promise.resolve();
     }
-    return new Promise((resolve, reject) => {
+    return new Promise<any>((resolve, reject) => {
       const postReq = request.post(
         {
           uri: protocol(peer.getPort()) + "://" + peer.getURL() + uri,
diff --git a/app/lib/system/directory.ts b/app/lib/system/directory.ts
index de4bae05d8ad0752b404f94eceff3cd72853a085..3e6b27c4f80d1da4d67ca7f921f1b2ca328f0b39 100644
--- a/app/lib/system/directory.ts
+++ b/app/lib/system/directory.ts
@@ -99,8 +99,8 @@ class QioFileSystem implements FileSystem {
       }
     } else {
       // Use NodeJS streams for faster writing
-      let wstream = fs.createWriteStream(file);
-      await new Promise(async (res, rej) => {
+      const wstream = fs.createWriteStream(file);
+      await new Promise<void>(async (res, rej) => {
         // When done, return
         wstream.on("close", (err: any) => {
           if (err) return rej(err);
diff --git a/app/lib/wizard.ts b/app/lib/wizard.ts
index 1c38b02e1204e80150adb079fb7314881c94f926..3ef480391c093233979a1a769837828f586c02d4 100644
--- a/app/lib/wizard.ts
+++ b/app/lib/wizard.ts
@@ -33,7 +33,7 @@ export class Wizard {
 }
 
 function doTasks(todos: string[], conf: ConfDTO) {
-  return new Promise((res, rej) => {
+  return new Promise<void>((res, rej) => {
     async.forEachSeries(
       todos,
       function (task: any, callback: any) {
diff --git a/app/modules/bma/index.ts b/app/modules/bma/index.ts
index 4dcd16745030c6d062c87369b906a139054752ee..e351c6b90b0c4a540e08362ffc072788827b3c52 100644
--- a/app/modules/bma/index.ts
+++ b/app/modules/bma/index.ts
@@ -21,6 +21,7 @@ import { BMALimitation } from "./lib/limiter";
 import { PeerDTO } from "../../lib/dto/PeerDTO";
 import { Underscore } from "../../lib/common-libs/underscore";
 import { bma } from "./lib/bma";
+import { Router } from "./lib/routing";
 
 const Q = require("q");
 const rp = require("request-promise");
@@ -89,7 +90,11 @@ export const BmaDependency = {
     },
 
     config: {
-      onLoading: async (conf: NetworkConfDTO, program: any, logger: any) => {
+      onLoading: async (
+        conf: Partial<NetworkConfDTO>,
+        program: any,
+        logger: any
+      ) => {
         // If the usage of BMA hasn't been defined yet
         if (conf.nobma === undefined) {
           // Do we have an existing BMA conf?
@@ -186,7 +191,7 @@ export const BmaDependency = {
         }
       },
 
-      beforeSave: async (conf: NetworkConfDTO, program: any) => {
+      beforeSave: async (conf: Partial<NetworkConfDTO>, program: any) => {
         if (!conf.ipv4) delete conf.ipv4;
         if (!conf.ipv6) delete conf.ipv6;
         if (!conf.remoteipv4) delete conf.remoteipv4;
@@ -228,9 +233,6 @@ export const BmaDependency = {
           server.addEndpointsDefinitions(() =>
             Promise.resolve(getEndpoint(conf))
           );
-          server.addWrongEndpointFilter((endpoints: string[]) =>
-            getWrongEndpoints(endpoints, server.conf.pair.pub)
-          );
         }
         return new BMAPI(server, conf, logger);
       },
@@ -243,10 +245,19 @@ export const BmaDependency = {
         interfaces: NetworkInterface[] | null = null,
         httpLogs = false,
         logger?: any
-      ) => bma(server, interfaces, httpLogs, logger),
+      ) => {
+        // Keep BMA optimizations from 1.8
+        server.useOptimizedQueries = true;
+
+        // Add GVA API support
+        server.gvaEnabled = true;
+
+        return new Router(server);
+      },
       dtos,
-      getMainEndpoint: (conf: NetworkConfDTO) =>
-        Promise.resolve(getEndpoint(conf)),
+      getMainEndpoint: (conf: NetworkConfDTO) => {
+        return BMAConstants.getMainEndpoint(conf);
+      },
     },
   },
 };
@@ -254,22 +265,26 @@ export const BmaDependency = {
 async function getWrongEndpoints(endpoints: string[], selfPubkey: string) {
   const wrongs: string[] = [];
   await Promise.all(
-    endpoints.map(async (theEndpoint: string) => {
-      let remote = PeerDTO.endpoint2host(theEndpoint);
-      try {
-        // We test only BMA APIs, because other may exist and we cannot judge against them
-        if (theEndpoint.startsWith("BASIC_MERKLED_API")) {
-          let answer = await rp("http://" + remote + "/network/peering", {
-            json: true,
-          });
+    endpoints
+      .filter(PeerDTO.isBMA) // We test only BMA APIs, because other may exist and we cannot judge against them
+      .map(async (ep: string) => {
+        const peer = PeerDTO.fromJSONObject({ endpoints: [ep] });
+        try {
+          const protocol =
+            ep.startsWith("BMAS") || peer.getPort() == 443 ? "https" : "http";
+          const answer = await rp(
+            protocol + "://" + peer.getURL() + "/network/peering",
+            {
+              json: true,
+            }
+          );
           if (!answer || answer.pubkey != selfPubkey) {
             throw Error("Not same pubkey as local instance");
           }
+        } catch (e) {
+          wrongs.push(ep);
         }
-      } catch (e) {
-        wrongs.push(theEndpoint);
-      }
-    })
+      })
   );
   return wrongs;
 }
diff --git a/app/modules/bma/lib/bma.ts b/app/modules/bma/lib/bma.ts
index 013509fd11e51437023fb99e5d5b9201bc0b8b4d..147b74d6bce9bf60bebe807c6e79acd07b3b3567 100644
--- a/app/modules/bma/lib/bma.ts
+++ b/app/modules/bma/lib/bma.ts
@@ -305,7 +305,7 @@ export const bma = function (
       );
       httpMethods.httpGET(
         "/tx/history/:pubkey/pending",
-        (req: any) => transactions.getPendingForPubkey(req),
+        (req: any) => transactions.getPendingByPubkey(req),
         BMALimitation.limitAsHighUsage()
       );
       httpMethods.httpGET(
diff --git a/app/modules/bma/lib/controllers/node.ts b/app/modules/bma/lib/controllers/node.ts
index a71870e7b99e518d9871816d8b734ae909de4b4d..5a4de1bdf1235697968ca951bf70eb69fefdf5cf 100644
--- a/app/modules/bma/lib/controllers/node.ts
+++ b/app/modules/bma/lib/controllers/node.ts
@@ -23,6 +23,10 @@ export class NodeBinding extends AbstractController {
         software: "duniter",
         version: this.server.version,
         forkWindowSize: this.server.conf.forksize,
+        storage: {
+          transactions: this.server.conf.storage?.transactions !== false, // true by default
+          wotwizard: this.server.conf.storage?.wotwizard === true, // false by default
+        },
       },
     };
   };
diff --git a/app/modules/bma/lib/controllers/transactions.ts b/app/modules/bma/lib/controllers/transactions.ts
index 907450992436f485728841b3a14dd952a1f994b1..47e8febc77bc2c7470a84517e0d259596cd5d55b 100644
--- a/app/modules/bma/lib/controllers/transactions.ts
+++ b/app/modules/bma/lib/controllers/transactions.ts
@@ -24,11 +24,14 @@ import {
   HttpTxPending,
 } from "../dtos";
 import { DBTx } from "../../../../lib/db/DBTx";
-import { Underscore } from "../../../../lib/common-libs/underscore";
 
 const http2raw = require("../http2raw");
 
 export class TransactionBinding extends AbstractController {
+  get medianTimeOffset(): number {
+    return (this.conf.avgGenTime * this.conf.medianTimeBlocks) / 2;
+  }
+
   async parseTransaction(req: any): Promise<HttpTransactionPending> {
     const res = await this.pushEntity(
       req,
@@ -86,50 +89,40 @@ export class TransactionBinding extends AbstractController {
 
   async getHistory(req: any): Promise<HttpTxHistory> {
     const pubkey = await ParametersService.getPubkeyP(req);
-    return this.getFilteredHistory(pubkey, (results: any) => results);
+    const history = await this.server.dal.getTxHistoryByPubkey(pubkey);
+    return this.toHttpTxHistory(pubkey, history);
   }
 
   async getHistoryBetweenBlocks(req: any): Promise<HttpTxHistory> {
     const pubkey = await ParametersService.getPubkeyP(req);
     const from = await ParametersService.getFromP(req);
     const to = await ParametersService.getToP(req);
-    return this.getFilteredHistory(pubkey, (res: any) => {
-      const histo = res.history;
-      histo.sent = Underscore.filter(histo.sent, function (tx: any) {
-        return tx && tx.block_number >= from && tx.block_number <= to;
-      });
-      histo.received = Underscore.filter(histo.received, function (tx: any) {
-        return tx && tx.block_number >= from && tx.block_number <= to;
-      });
-      Underscore.extend(histo, { sending: [], receiving: [] });
-      return res;
-    });
+
+    const history = await this.server.dal.getTxHistoryByPubkeyBetweenBlocks(
+      pubkey,
+      +from,
+      +to
+    );
+    return this.toHttpTxHistory(pubkey, history);
   }
 
   async getHistoryBetweenTimes(req: any): Promise<HttpTxHistory> {
     const pubkey = await ParametersService.getPubkeyP(req);
     const from = await ParametersService.getFromP(req);
     const to = await ParametersService.getToP(req);
-    return this.getFilteredHistory(pubkey, (res: any) => {
-      const histo = res.history;
-      histo.sent = Underscore.filter(histo.sent, function (tx: any) {
-        return tx && tx.time >= from && tx.time <= to;
-      });
-      histo.received = Underscore.filter(histo.received, function (tx: any) {
-        return tx && tx.time >= from && tx.time <= to;
-      });
-      Underscore.extend(histo, { sending: [], receiving: [] });
-      return res;
-    });
+    const medianTimeOffset = this.medianTimeOffset || 0; // Need to convert time into medianTime, because GVA module use median_time
+    const history = await this.server.dal.getTxHistoryByPubkeyBetweenTimes(
+      pubkey,
+      +from - medianTimeOffset,
+      +to - medianTimeOffset
+    );
+    return this.toHttpTxHistory(pubkey, history);
   }
 
-  async getPendingForPubkey(req: any): Promise<HttpTxHistory> {
+  async getPendingByPubkey(req: any): Promise<HttpTxHistory> {
     const pubkey = await ParametersService.getPubkeyP(req);
-    return this.getFilteredHistory(pubkey, function (res: any) {
-      const histo = res.history;
-      Underscore.extend(histo, { sent: [], received: [] });
-      return res;
-    });
+    const history = await this.server.dal.getTxHistoryMempool(pubkey);
+    return this.toHttpTxHistory(pubkey, history);
   }
 
   async getPending(): Promise<HttpTxPending> {
@@ -140,6 +133,7 @@ export class TransactionBinding extends AbstractController {
         const tx = TransactionDTO.fromJSONObject(t);
         return {
           version: tx.version,
+          currency: tx.currency,
           issuers: tx.issuers,
           inputs: tx.inputs,
           unlocks: tx.unlocks,
@@ -156,22 +150,27 @@ export class TransactionBinding extends AbstractController {
     };
   }
 
-  private async getFilteredHistory(
+  private async toHttpTxHistory(
     pubkey: string,
-    filter: any
+    dbTxHistory: {
+      sent?: DBTx[];
+      received?: DBTx[];
+      receiving?: DBTx[];
+      sending?: DBTx[];
+      pending?: DBTx[];
+    }
   ): Promise<HttpTxHistory> {
-    let history = await this.server.dal.getTransactionsHistory(pubkey);
-    let result = {
+    return {
       currency: this.conf.currency,
       pubkey: pubkey,
       history: {
-        sending: history.sending.map(dbtx2HttpTxOfHistory),
-        received: history.received.map(dbtx2HttpTxOfHistory),
-        sent: history.sent.map(dbtx2HttpTxOfHistory),
-        pending: history.pending.map(dbtx2HttpTxOfHistory),
+        sending: dbTxHistory.sending?.map(dbtx2HttpTxOfHistory) || [],
+        received: dbTxHistory.received?.map(dbtx2HttpTxOfHistory) || [],
+        receiving: dbTxHistory.receiving?.map(dbtx2HttpTxOfHistory) || [],
+        sent: dbTxHistory.sent?.map(dbtx2HttpTxOfHistory) || [],
+        pending: dbTxHistory.pending?.map(dbtx2HttpTxOfHistory) || [],
       },
     };
-    return filter(result);
   }
 }
 
diff --git a/app/modules/bma/lib/controllers/wot.ts b/app/modules/bma/lib/controllers/wot.ts
index 61e869b06e7ae01296a122ab523f990b6cc943dc..73422c518b6fe41820b115de7237280ecae6a613 100644
--- a/app/modules/bma/lib/controllers/wot.ts
+++ b/app/modules/bma/lib/controllers/wot.ts
@@ -132,9 +132,16 @@ export class WOTBinding extends AbstractController {
 
   async certifiersOf(req: any): Promise<HttpCertifications> {
     const search = await ParametersService.getSearchP(req);
-    const idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(
-      search
-    )) as FullIindexEntry;
+    let idty: FullIindexEntry;
+    if (ParametersService.getIsPubkey(req)) {
+      idty = (await this.server.dal.getWrittenIdtyByPubkeyForHashingAndIsMember(
+        search
+      )) as FullIindexEntry;
+    } else {
+      idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(
+        search
+      )) as FullIindexEntry;
+    }
     const certs = await this.server.dal.certsToTarget(
       idty.pub,
       IdentityDTO.getTargetHash(idty)
@@ -180,7 +187,15 @@ export class WOTBinding extends AbstractController {
 
   async requirements(req: any): Promise<HttpRequirements> {
     const search = await ParametersService.getSearchP(req);
-    const identities: any = await this.IdentityService.searchIdentities(search);
+    let identities: any = [];
+    if (ParametersService.getIsPubkey(req)) {
+      if (!BMAConstants.PUBLIC_KEY.test(search)) {
+        throw BMAConstants.ERRORS.NO_IDTY_MATCHING_PUB_OR_UID;
+      }
+      identities = await this.IdentityService.searchIdentitiesByPubkey(search);
+    } else {
+      identities = await this.IdentityService.searchIdentities(search);
+    }
     const all: HttpIdentityRequirement[] = await this.BlockchainService.requirementsOfIdentities(
       identities
     );
@@ -229,9 +244,16 @@ export class WOTBinding extends AbstractController {
 
   async certifiedBy(req: any): Promise<HttpCertifications> {
     const search = await ParametersService.getSearchP(req);
-    const idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(
-      search
-    )) as FullIindexEntry;
+    let idty: FullIindexEntry;
+    if (ParametersService.getIsPubkey(req)) {
+      idty = (await this.server.dal.getWrittenIdtyByPubkeyForHashingAndIsMember(
+        search
+      )) as FullIindexEntry;
+    } else {
+      idty = (await this.server.dal.getWrittenIdtyByPubkeyOrUIdForHashingAndIsMember(
+        search
+      )) as FullIindexEntry;
+    }
     const certs = await this.server.dal.certsFrom(idty.pub);
     const theCerts: HttpCertification[] = [];
     for (const cert of certs) {
diff --git a/app/modules/bma/lib/dtos.ts b/app/modules/bma/lib/dtos.ts
index e236aa61186f3ede696ecd55c93cfc47f7d863a5..1c0f08484539949df4129ead1abeb7b12bf0674d 100644
--- a/app/modules/bma/lib/dtos.ts
+++ b/app/modules/bma/lib/dtos.ts
@@ -21,6 +21,10 @@ export const Summary = {
     software: String,
     version: String,
     forkWindowSize: Number,
+    storage: {
+      transactions: Boolean,
+      wotwizard: Boolean,
+    },
   },
 };
 
@@ -29,6 +33,10 @@ export interface HttpSummary {
     software: string;
     version: string;
     forkWindowSize: number;
+    storage: {
+      transactions: boolean;
+      wotwizard: boolean;
+    };
   };
 }
 
@@ -197,6 +205,7 @@ export interface HttpTransactionOfBlock {
   unlocks: string[];
   signatures: string[];
   comment: string;
+  time?: number;
 }
 
 export const Block = {
@@ -307,6 +316,7 @@ export function block2HttpBlock(blockDTO: BlockDTO): HttpBlock {
           hash: tx.hash,
           blockstamp: tx.blockstamp,
           blockstampTime: tx.blockstampTime,
+          time: tx.time || undefined,
         };
       }
     ),
@@ -781,11 +791,12 @@ export interface HttpTransaction {
   signatures: string[];
   raw: string;
   written_block: number | null;
-  writtenTime: number;
+  writtenTime: number | null;
   hash: string;
 }
 
 export interface HttpTransactionPending {
+  currency: string;
   version: number;
   issuers: string[];
   inputs: string[];
@@ -796,6 +807,8 @@ export interface HttpTransactionPending {
   signatures: string[];
   hash: string;
   receivedTime: number;
+  blockstamp?: string;
+  blockstampTime?: number;
 }
 
 export const Source = {
@@ -880,6 +893,7 @@ export interface HttpTxHistory {
     sent: HttpTxOfHistory[];
     received: HttpTxOfHistory[];
     sending: HttpTxOfHistory[];
+    receiving: HttpTxOfHistory[];
     pending: HttpTxOfHistory[];
   };
 }
diff --git a/app/modules/bma/lib/parameters.ts b/app/modules/bma/lib/parameters.ts
index 49a437f26c53ad1e00efcd9146f2bdcea021c730..c5463ecd963be1655785292a448d83d6f42179d0 100644
--- a/app/modules/bma/lib/parameters.ts
+++ b/app/modules/bma/lib/parameters.ts
@@ -96,6 +96,16 @@ export class ParametersService {
     callback(null, matches[0]);
   };
 
+  static getIsPubkey(req: any): boolean {
+    const value = req.query.pubkey;
+    return (
+      value !== null &&
+      value !== undefined &&
+      value !== "false" &&
+      value !== false
+    );
+  }
+
   static getPubkeyP(req: any) {
     return Q.nbind(ParametersService.getPubkey, this)(req);
   }
diff --git a/app/modules/bma/lib/upnp.ts b/app/modules/bma/lib/upnp.ts
index 3c5eecb980b80cf0018e5753dcfd9f73220ebcae..50910d13bbe1e7437709746637dfa0e1fb6da00a 100644
--- a/app/modules/bma/lib/upnp.ts
+++ b/app/modules/bma/lib/upnp.ts
@@ -35,7 +35,7 @@ export const Upnp = async function (
       await Q.nbind(client.externalIp, client)();
     } catch (err) {
       if (err && err.message == "timeout") {
-        throw 'No UPnP gateway found: your node won\'t be reachable from the Internet. Use --noupnp option to avoid this message.';
+        throw "No UPnP gateway found: your node won't be reachable from the Internet. Use --noupnp option to avoid this message.";
       }
       throw err;
     } finally {
diff --git a/app/modules/config.ts b/app/modules/config.ts
index 27925692eee116e30a076bcfea985575e7299bba..6e526a56d133417226b871443f6f64295ca51814 100644
--- a/app/modules/config.ts
+++ b/app/modules/config.ts
@@ -21,14 +21,28 @@ import { ProgramOptions } from "../lib/common-libs/programOptions";
 
 module.exports = {
   duniter: {
-    cliOptions: [],
-
+    cliOptions: [],
     config: {
       onLoading: async (conf: ConfDTO, program: ProgramOptions) => {
         conf.msPeriod = conf.msWindow;
         conf.sigReplay = conf.msPeriod;
         conf.switchOnHeadAdvance =
           CommonConstants.SWITCH_ON_BRANCH_AHEAD_BY_X_BLOCKS;
+
+        // Garder la gestion du stockage des transactions de 1.8
+        // car c'est une fonctionnalité importante pour les optimisations BMA
+        conf.storage = conf.storage || {
+          transactions: false,
+          wotwizard: false,
+        };
+
+        // Conserver la logique de configuration du stockage
+        if (program.storeTxs || (program.storeTxs === undefined && !conf.nobma)) {
+          conf.storage.transactions = true;
+        }
+        if (program.storeWw) {
+          conf.storage.wotwizard = true;
+        }
       },
       beforeSave: async (conf: ConfDTO) => {
         conf.msPeriod = conf.msWindow;
@@ -68,10 +82,9 @@ module.exports = {
           const aggregates = Underscore.uniq(
             lines
               .map((l) => l.match(/: (\[\w+\](\[\w+\])*)/))
-              .filter((l) => l)
-              .map((l: string[]) => l[1])
+              .filter((l) => !!l)
+              .map((l) => l && ((l[1] as unknown) as string))
           );
-          console.log(aggregates);
           const results = aggregates.map((a: string) => {
             return {
               name: a,
diff --git a/app/modules/crawler/index.ts b/app/modules/crawler/index.ts
index 839784542caa0cde7d24d06d85aa64ab0364fe22..22ea9b995302e14201c73be318a0e795befd25fe 100644
--- a/app/modules/crawler/index.ts
+++ b/app/modules/crawler/index.ts
@@ -309,7 +309,11 @@ export const CrawlerDependency = {
                 ? [
                     {
                       endpoints: [
-                        ["BASIC_MERKLED_API", fromHost, fromPort].join(" "),
+                        [
+                          fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API",
+                          fromHost,
+                          fromPort,
+                        ].join(" "),
                       ],
                     },
                   ]
@@ -358,7 +362,11 @@ export const CrawlerDependency = {
           const { host, port } = extractHostPort(from);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [["BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             const fromHost = peer.getHostPreferDNS();
             const fromPort = peer.getPort();
@@ -405,7 +413,11 @@ export const CrawlerDependency = {
           const { host, port } = extractHostPort(from);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [["BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             const fromHost = peer.getHostPreferDNS();
             const fromPort = peer.getPort();
@@ -460,7 +472,11 @@ export const CrawlerDependency = {
           const { host: toHost, port: toPort } = extractHostPort(target);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [["BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             logger.info("Looking at %s...", source);
             try {
@@ -485,7 +501,7 @@ export const CrawlerDependency = {
         },
       },
       {
-        name: "pull <from> [<number>]",
+        name: "pull <from> [<start>] [<end>]",
         desc: "Pull blocks from <from> source up to block <number>",
         onDatabaseExecute: async (
           server: Server,
@@ -494,7 +510,11 @@ export const CrawlerDependency = {
           params: any
         ) => {
           const source: string = params[0];
-          const to = parseInt(params[1]);
+          const to = parseInt(params[2] || params[1]);
+          let from: null | number = null;
+          if (params[2]) {
+            from = parseInt(params[1]);
+          }
           if (
             !source ||
             !(source.match(HOST_PATTERN) || source.match(FILE_PATTERN))
@@ -505,12 +525,17 @@ export const CrawlerDependency = {
           const { host, port } = extractHostPort(source);
           try {
             const peer = PeerDTO.fromJSONObject({
-              endpoints: [["BASIC_MERKLED_API", host, port].join(" ")],
+              endpoints: [
+                [port == "443" ? "BMAS" : "BASIC_MERKLED_API", host, port].join(
+                  " "
+                ),
+              ],
             });
             logger.info("Looking at %s...", source);
             try {
               const fromHost = await connect(peer);
               let current: DBBlock | null = await server.dal.getCurrentBlockOrNull();
+              if (from) {
+                current = { number: from - 1 } as any;
+              }
               // Loop until an error occurs
               while (current && (isNaN(to) || current.number < to)) {
                 current = await fromHost.getBlock(current.number + 1);
@@ -746,7 +771,11 @@ export const CrawlerDependency = {
                 ? [
                     {
                       endpoints: [
-                        ["BASIC_MERKLED_API", fromHost, fromPort].join(" "),
+                        [
+                          fromPort == "443" ? "BMAS" : "BASIC_MERKLED_API",
+                          fromHost,
+                          fromPort,
+                        ].join(" "),
                       ],
                     },
                   ]
diff --git a/app/modules/crawler/lib/contacter.ts b/app/modules/crawler/lib/contacter.ts
index 8bac0518997ec3f305759f556f65ac9b222ba469..59268a98764b351fae26b15fc429f48a10dbb624 100644
--- a/app/modules/crawler/lib/contacter.ts
+++ b/app/modules/crawler/lib/contacter.ts
@@ -19,9 +19,9 @@ const sanitize = require("../../../modules/bma/lib/sanitize");
 const dtos = require("../../../modules/bma").BmaDependency.duniter.methods.dtos;
 
 export class Contacter {
-  path: string = "";
   options: { timeout: number };
   fullyQualifiedHost: string;
+  path: string = "";
 
   constructor(
     public readonly host: string,
diff --git a/app/modules/crawler/lib/req2fwd.ts b/app/modules/crawler/lib/req2fwd.ts
index 14bee1a634139c3ea270bba8b9229d6c3297851b..befe13ec8f22d8346d82bcb1d62604613431d8e3 100644
--- a/app/modules/crawler/lib/req2fwd.ts
+++ b/app/modules/crawler/lib/req2fwd.ts
@@ -63,7 +63,7 @@ export const req2fwd = async (
       for (const received of idty.pendingCerts) {
         const cid = [received.from, iid].join("-");
         if (!certs[cid]) {
-          await new Promise((res) => setTimeout(res, 300));
+          await new Promise<void>((res) => setTimeout(res, 300));
           certs[cid] = received;
           const rawCert = rawer.getOfficialCertification({
             currency: "g1",
diff --git a/app/modules/crawler/lib/sync.ts b/app/modules/crawler/lib/sync.ts
index 907cce5ceeb32274642d5e8b4377316c760967ce..94bb108db15a31563e22aeb966d474c4855ce013 100644
--- a/app/modules/crawler/lib/sync.ts
+++ b/app/modules/crawler/lib/sync.ts
@@ -183,6 +183,13 @@ export class Synchroniser extends stream.Duplex {
       // We use cautious mode if it is asked, or not particulary asked but blockchain has been started
       const cautious = askedCautious === true || localNumber >= 0;
 
+      // Disable check constraints
+      if (!cautious) await this.server.dal.disableCheckConstraints();
+
+      // Remove existing TX, because we will all download it again
+      // This is required to be able to insert, instead of save
+      if (fullSync) await this.dal.removeAllTxs();
+
       const milestonesStream = new ValidatorStream(
         localNumber,
         to,
@@ -211,7 +218,7 @@ export class Synchroniser extends stream.Duplex {
         this.watcher
       );
 
-      await new Promise((res, rej) => {
+      await new Promise<void>((res, rej) => {
         milestonesStream
           .pipe(download)
           .pipe(localIndexer)
@@ -225,7 +232,7 @@ export class Synchroniser extends stream.Duplex {
       this.watcher.storagePercent(100.0);
       this.watcher.appliedPercent(100.0);
 
-      this.server.dal.blockDAL.cleanCache();
+      await this.server.dal.cleanCaches();
 
       if (!cliprogram.nosbx) {
         //=======
@@ -253,6 +260,9 @@ export class Synchroniser extends stream.Duplex {
         );
       this.watcher.end();
       throw err;
+    } finally {
+      // Make sure to enable check constraints, even if failed
+      await this.server.dal.enableCheckConstraints();
     }
   }
 }
diff --git a/app/modules/crawler/lib/sync/P2PSyncDownloader.ts b/app/modules/crawler/lib/sync/P2PSyncDownloader.ts
index a677cdb4b1dafeccfaa6cb064a614b5ce4fe45dc..ad5bda8425f45474be5e140dee0c8a0ac4734577 100644
--- a/app/modules/crawler/lib/sync/P2PSyncDownloader.ts
+++ b/app/modules/crawler/lib/sync/P2PSyncDownloader.ts
@@ -136,7 +136,11 @@ export class P2PSyncDownloader extends ASyncDownloader
    * @param count The number of blocks to download.
    * @param chunkIndex The # of the chunk in local algorithm (logging purposes only)
    */
-  private async p2pDownload(from: number, count: number, chunkIndex: number) {
+  private async p2pDownload(
+    from: number,
+    count: number,
+    chunkIndex: number
+  ): Promise<BlockDTO[]> {
     // if this chunk has already been downloaded before, we exclude its supplier node from the download list as it won't give correct answer now
     const lastSupplier = this.downloads[chunkIndex];
     if (lastSupplier) {
@@ -148,14 +152,14 @@ export class P2PSyncDownloader extends ASyncDownloader
     const candidates = await this.getP2Pcandidates(chunkIndex);
     // Book the nodes
     this.watcher.gettingChunk(chunkIndex, candidates);
-    return await this.raceOrCancelIfTimeout(
+    return await this.raceOrCancelIfTimeout<BlockDTO[]>(
       this.MAX_DELAY_PER_DOWNLOAD,
       candidates.map(async (node) => {
         try {
           this.handler[chunkIndex] = node;
           this.nbDownloading++;
           this.watcher.writeStatus(
-            "Getting chunck #" +
+            "Getting chunk #" +
               chunkIndex +
               "/" +
               (this.numberOfChunksToDownload - 1) +
@@ -166,10 +170,10 @@ export class P2PSyncDownloader extends ASyncDownloader
               " on peer " +
               node.hostName
           );
-          let blocks = await node.downloadBlocks(count, from);
+          let blocks = (await node.downloadBlocks(count, from)) || [];
           this.watcher.gotChunk(chunkIndex, node);
           this.watcher.writeStatus(
-            "GOT chunck #" +
+            "GOT chunk #" +
               chunkIndex +
               "/" +
               (this.numberOfChunksToDownload - 1) +
@@ -226,7 +230,7 @@ export class P2PSyncDownloader extends ASyncDownloader
           throw Error(DataErrors[DataErrors.NO_NODE_FOUND_TO_DOWNLOAD_CHUNK]);
         }
       }
-      await new Promise((res) => setTimeout(res, 1000)); // Wait 1s before retrying
+      await new Promise<void>((res) => setTimeout(res, 1000)); // Wait 1s before retrying
       return this.downloadChunk(index);
     }
   }
@@ -237,11 +241,11 @@ export class P2PSyncDownloader extends ASyncDownloader
    * @param races
    * @returns {Promise}
    */
-  private raceOrCancelIfTimeout(timeout: number, races: any[]) {
+  private raceOrCancelIfTimeout<T = any>(timeout: number, races: Promise<T>[]) {
     return Promise.race(
       [
         // Process the race, but cancel it if we don't get an anwser quickly enough
-        new Promise((resolve, reject) => {
+        new Promise<T>((resolve, reject) => {
           setTimeout(() => {
             reject(this.TOO_LONG_TIME_DOWNLOAD);
           }, timeout);
diff --git a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts
index e9a759fc33eccab8435289f0e27aa26a7b2d890f..427d126b0f410d7fcfb85eea1deb295f45e5a509 100644
--- a/app/modules/crawler/lib/sync/RemoteSynchronizer.ts
+++ b/app/modules/crawler/lib/sync/RemoteSynchronizer.ts
@@ -150,29 +150,22 @@ export class RemoteSynchronizer extends AbstractSynchronizer {
       const host = access.host;
       const port = access.port;
       const path = access.path;
-      logger.info(`Connecting to address ${host} :${port}...`);
+      logger.info(`Connecting to address ${host}:${port}${path || ""}...`);
 
       // If we know this is a WS2P connection, don't try BMA
       if (access.isWS2P !== true) {
         try {
+          endpoint =
+            [port == 443 ? "BMAS" : "BASIC_MERKLED_API", host, port].join(" ") +
+            (path ? " " + path : "");
           const contacter = await connect(
             PeerDTO.fromJSONObject({
-              endpoints: [
-                `BASIC_MERKLED_API ${host} ${port}${
-                  (path && " " + path) || ""
-                }`,
-              ],
+              endpoints: [endpoint],
             }),
             3000
           );
           peering = await contacter.getPeer();
           api = new BMARemoteContacter(contacter);
-          endpoint =
-            "BASIC_MERKLED_API " +
-            host +
-            " " +
-            port +
-            ((path && " " + path) || "");
         } catch (e) {}
       }
 
@@ -181,7 +174,7 @@ export class RemoteSynchronizer extends AbstractSynchronizer {
         const pair = new Key(keypair.pub, keypair.sec);
         const connection = WS2PConnection.newConnectionToAddress(
           1,
-          `ws://${host}:${port}${(path && " " + path) || ""}`,
+          `ws://${host}:${port}${path || ""}`,
           new (class SyncMessageHandler implements WS2PMessageHandler {
             async answerToRequest(
               json: any,
diff --git a/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts b/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts
index 6b6403bbcc4638d894ee365ccd3b5709015ac64e..def1fbfdae61fb6e7e24b7509ca60229819b8527 100644
--- a/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts
+++ b/app/modules/crawler/lib/sync/p2p/p2p-candidate.ts
@@ -83,11 +83,10 @@ export class P2pCandidate {
     return (this.api && this.api.hostName) || "NO_API";
   }
 
-  async downloadBlocks(count: number, from: number) {
+  async downloadBlocks(count: number, from: number): Promise<BlockDTO[]> {
     const start = Date.now();
-    let error: Error | undefined;
     this.reserved = false;
-    this.dlPromise = querablep(
+    const promise = querablep(
       (async () => {
         // We try to download the blocks
         let blocks: BlockDTO[] = [];
@@ -110,6 +109,12 @@ export class P2pCandidate {
             break;
           }
         }
+
+        // Additional verification from 1.8: the peer must return exactly the requested number of blocks
+        if (!blocks || blocks.length != count) {
+          throw new Error("Wrong number of blocks from " + this.hostName);
+        }
+
         this.responseTimes.push(Date.now() - start);
         // Only keep a flow of 5 ttas for the node
         if (this.responseTimes.length > 5) this.responseTimes.shift();
@@ -117,7 +122,8 @@ export class P2pCandidate {
         return blocks;
       })()
     );
-    return this.dlPromise;
+    this.dlPromise = promise;
+    return promise;
   }
 
   private getRemoteAPIs() {
@@ -130,6 +136,7 @@ export class P2pCandidate {
         isBMA: true,
         port: bmaAPI.port,
         host: bmaHost,
+        path: bmaAPI.path,
       });
     }
     if (ws2pAPI) {
diff --git a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts
index 9824b490efbe0c0cd8031cfd930a53d31b48c324..aec89a11d9e81cf4ab697948219c666f5fd7c58a 100644
--- a/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts
+++ b/app/modules/crawler/lib/sync/v2/GlobalIndexStream.ts
@@ -38,7 +38,6 @@ let sync_iindex: any[] = [];
 let sync_mindex: any[] = [];
 let sync_cindex: any[] = [];
 let sync_nextExpiring = 0;
-let sync_bindexSize = 0;
 let txCount = 0;
 let logger = NewLogger();
 
@@ -89,8 +88,6 @@ export class GlobalIndexStream extends Duplex {
   private numberOfChunksToDownload: number;
   private memToCopyDone = false;
 
-  private mapInjection: { [k: string]: any } = {};
-
   constructor(
     private conf: ConfDTO,
     private dal: FileDAL,
@@ -125,12 +122,6 @@ export class GlobalIndexStream extends Duplex {
     })();
   }
 
-  private async injectLoki<T, K extends keyof T>(dal: T, f: K, obj: T[K]) {
-    // this.mapInjection[f] = dal[f]
-    // dal[f] = obj
-    // await (obj as any).triggerInit()
-  }
-
   readChunk(i: number) {}
 
   _read(size: number) {
@@ -212,9 +203,10 @@ export class GlobalIndexStream extends Duplex {
         );
       }
 
-      const bindexSize = requiredBindexSizeForTail(block, this.conf);
+      const requiredBindexSize = requiredBindexSizeForTail(block, this.conf);
       if (
-        (block.number <= this.to - bindexSize - 1 || cliprogram.noSources) &&
+        (block.number <= this.to - requiredBindexSize - 1 ||
+          cliprogram.noSources) &&
         !this.cautious
       ) {
         // If we require nosources option, this blockchain can't be valid so we don't make checks
@@ -399,27 +391,21 @@ export class GlobalIndexStream extends Duplex {
         }
 
         // Trim the bindex
-        sync_bindexSize =
-          this.conf.forksize +
-          [
-            block.issuersCount,
-            block.issuersFrame,
-            this.conf.medianTimeBlocks,
-            this.conf.dtDiffEval,
-            dataArray.length,
-          ].reduce((max, value) => {
-            return Math.max(max, value);
-          }, 0);
-
-        if (sync_bindexSize && sync_bindex.length >= 2 * sync_bindexSize) {
+        let maxBindexSize = Math.max(requiredBindexSize, dataArray.length) * 2;
+
+        if (sync_bindex.length >= maxBindexSize) {
           // We trim it, not necessary to store it all (we already store the full blocks)
-          sync_bindex.splice(0, sync_bindexSize);
-          // TODO GINDEX
-          await this.doTrimming();
+          sync_bindex.splice(0, sync_bindex.length - requiredBindexSize);
+          await this.trimIndexes();
         }
       } else if (block.number <= this.to) {
+        // Trim bindex to the minimal required size
+        if (sync_bindex.length > requiredBindexSize) {
+          sync_bindex.splice(0, sync_bindex.length - requiredBindexSize);
+        }
+
         const dto = BlockDTO.fromJSONObject(block);
-        await this.finalizeSync(block, dto);
+        await this.finalizeSync(block, dto, dto.number === this.to);
       }
 
       gindex.push(gData);
@@ -429,17 +415,45 @@ export class GlobalIndexStream extends Duplex {
 
   @MonitorExecutionTime()
   private async beforeBlocks(blocks: BlockDTO[]) {
+    let newTxCount = 0;
     await this.dal.blockDAL.insertBatch(
       blocks.map((b) => {
-        txCount += b.transactions.length;
+        newTxCount += b.transactions.length;
         const block = DBBlock.fromBlockDTO(b);
         block.fork = false;
         return block;
       })
     );
 
+    // Apply the blocks via Rust (from HEAD)
     this.dal.rustServer.applyChunkOfBlocks(blocks);
 
+    // Handle transaction storage (from 1.8)
+    if (newTxCount && this.conf.storage?.transactions !== false) {
+      if (this.cautious || this.localNumber >= 0) {
+        await Promise.all(
+          blocks.map((block) =>
+            this.dal.saveTxsInFiles(
+              block.transactions,
+              block.number,
+              block.medianTime
+            )
+          )
+        );
+      } else {
+        await Promise.all(
+          blocks.map((block) =>
+            this.dal.insertTxsInFiles(
+              block.transactions,
+              block.number,
+              block.medianTime
+            )
+          )
+        );
+      }
+      txCount += newTxCount;
+    }
+
     logger.debug("Total tx count: %s", txCount);
   }
 
@@ -514,13 +528,13 @@ export class GlobalIndexStream extends Duplex {
   }
 
   @MonitorExecutionTime()
-  private async doTrimming() {
+  private async trimIndexes() {
     // Process triming & archiving continuously to avoid super long ending of sync
     await this.dal.trimIndexes(sync_bindex[0].number);
   }
 
   @MonitorExecutionTime()
-  private async finalizeSync(block: BlockDTO, dto: BlockDTO) {
+  private async finalizeSync(block: BlockDTO, dto: BlockDTO, trim: boolean) {
     // Save the INDEX
     await this.dal.bindexDAL.insertBatch(sync_bindex);
     await this.dal.flushIndexes({
@@ -591,7 +605,8 @@ export class GlobalIndexStream extends Duplex {
       HEAD,
       this.conf,
       this.dal,
-      NewLogger()
+      NewLogger(),
+      trim
     );
 
     // Clean temporary variables
@@ -599,7 +614,6 @@ export class GlobalIndexStream extends Duplex {
     sync_iindex = [];
     sync_mindex = [];
     sync_cindex = [];
-    sync_bindexSize = 0;
     sync_expires = [];
     sync_nextExpiring = 0;
   }
diff --git a/app/modules/crawler/lib/sync/v2/ValidatorStream.ts b/app/modules/crawler/lib/sync/v2/ValidatorStream.ts
index 9f2802eb55758c66ab9b701df1e3ab7a867924f4..26322bac6a8336283246f5e70ae39f3caf54314e 100644
--- a/app/modules/crawler/lib/sync/v2/ValidatorStream.ts
+++ b/app/modules/crawler/lib/sync/v2/ValidatorStream.ts
@@ -74,7 +74,7 @@ export class ValidatorStream extends Readable {
               }
             } catch (e) {
               failures++;
-              await new Promise((res) => setTimeout(res, 3000));
+              await new Promise<void>((res) => setTimeout(res, 3000));
               if (failures >= 15) {
                 NewLogger().error(
                   "Could not get a validation from remote blockchain after %s trials. Stopping sync.",
diff --git a/app/modules/daemon.ts b/app/modules/daemon.ts
index ffd19949dcd525a81ec7fc9ee3a9fbee3dae5a6a..adacef0ab3468f90fbd07e638eba2b0801e727b0 100644
--- a/app/modules/daemon.ts
+++ b/app/modules/daemon.ts
@@ -14,6 +14,10 @@
 import { ConfDTO } from "../lib/dto/ConfDTO";
 import { Server } from "../../server";
 import { ExitCodes } from "../lib/common-libs/exit-codes";
+import { Directory } from "../lib/system/directory";
+import { RealFS } from "../lib/system/directory";
+import { Tail } from "tail";
+import { constants } from "../lib/common-libs/constants";
 
 module.exports = {
   duniter: {
@@ -65,6 +69,24 @@ module.exports = {
           }
         },
       },
+      {
+        name: "logs",
+        desc: "Follow duniter logs.",
+        logs: false,
+        onConfiguredExecute: async (
+          server: Server,
+          conf: ConfDTO,
+          program: any,
+          params: any
+        ) => {
+          printTailAndWatchFile(
+            Directory.INSTANCE_HOMELOG_FILE,
+            constants.NB_INITIAL_LINES_TO_SHOW
+          );
+          // Never ending command
+          return new Promise<void>((res) => null);
+        },
+      },
       {
         name: "direct_start",
         desc: "Start Duniter node with direct output, non-daemonized.",
@@ -93,7 +115,7 @@ module.exports = {
 
           logger.info(">> Server ready!");
 
-          return new Promise(() => null); // Never ending
+          return new Promise<void>(() => null); // Never ending
         },
       },
     ],
@@ -105,3 +127,55 @@ function ServerService(server: Server) {
   server.stopService = () => Promise.resolve();
   return server;
 }
+
+function startDaemon(daemon: any) {
+  return new Promise<void>((resolve, reject) =>
+    daemon.start((err: any) => {
+      if (err) return reject(err);
+      resolve();
+    })
+  );
+}
+
+function stopDaemon(daemon: any) {
+  return new Promise<void>((resolve, reject) =>
+    daemon.stop((err: any) => {
+      err && console.error(err);
+      if (err) return reject(err);
+      resolve();
+    })
+  );
+}
+
+async function printTailAndWatchFile(file: any, tailSize: number) {
+  const fs = RealFS();
+  if (await fs.fsExists(file)) {
+    const content = await fs.fsReadFile(file);
+    const lines = content.split("\n");
+    const from = Math.max(0, lines.length - tailSize);
+    const lastLines = lines.slice(from).join("\n");
+    console.log(lastLines);
+  }
+  watchFile(file);
+}
+
+function watchFile(file: any) {
+  const tail = new Tail(file);
+
+  // Specific errors handling
+  process.on("uncaughtException", (err: any) => {
+    if (err.code === "ENOENT") {
+      console.error("EXCEPTION: ", err.message);
+      setTimeout(() => watchFile(file), 1000); // Wait a second
+    }
+  });
+
+  // On new line
+  tail.on("line", function (data: any) {
+    console.log(data);
+  });
+
+  tail.on("error", function (error: any) {
+    console.error("ERROR: ", error);
+  });
+}
diff --git a/app/modules/dump.ts b/app/modules/dump.ts
index d89ebb7475c5bb579d1ef7b587f26e85ed959653..ea01d58832e61980f7a91738a6f30c1fe0d3ed6f 100644
--- a/app/modules/dump.ts
+++ b/app/modules/dump.ts
@@ -41,6 +41,7 @@ import {
 } from "../lib/dto/CertificationDTO";
 import { MembershipDTO } from "../lib/dto/MembershipDTO";
 import { RevocationDTO, ShortRevocation } from "../lib/dto/RevocationDTO";
+import { NewLogger } from "../lib/logger";
 
 const Table = require("cli-table");
 
@@ -156,6 +157,7 @@ module.exports = {
                 break;
 
               case "wotwizard":
+                NewLogger().unmute();
                 await dumpWotWizard(server);
                 break;
 
diff --git a/app/modules/dump/blocks/dump.blocks.ts b/app/modules/dump/blocks/dump.blocks.ts
index f0967048486f6adacec248205fe03dc2a10d3b31..19229811d2ccabfaee877fc124534631dce229c5 100644
--- a/app/modules/dump/blocks/dump.blocks.ts
+++ b/app/modules/dump/blocks/dump.blocks.ts
@@ -28,5 +28,9 @@ export async function dumpBlocks(
 }
 
 export function dumpBlockIfDefined(b: DBBlock | undefined | null) {
+  console.log("-------- BLOCK --------");
+  console.log("Number: " + b?.number);
+  console.log("Hash: " + b?.hash);
+  console.log("");
   console.log(BlockDTO.fromJSONObject(b).getRawSigned());
 }
diff --git a/app/modules/dump/wotwizard/wotwizard.dump.ts b/app/modules/dump/wotwizard/wotwizard.dump.ts
index 35ce2cf4fdef6c38767df176c9eaee8fe315750b..f6df40e4366b33e2006205be99ba49167a3b7b12 100644
--- a/app/modules/dump/wotwizard/wotwizard.dump.ts
+++ b/app/modules/dump/wotwizard/wotwizard.dump.ts
@@ -53,6 +53,6 @@ export async function dumpWotWizard(server: Server) {
       Directory.GET_FILE_PATH(WotWizardConstants.DB_NAME_0),
       Directory.GET_FILE_PATH(WotWizardConstants.DB_NAME)
     );
-    fs.writeFileSync(updatingFile, Date.now());
+    fs.writeFileSync(updatingFile, String(Date.now()));
   }
 }
diff --git a/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts b/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
index 86025ac294165a0044643e20b188c5a362a365b7..e548b76a0327af6cffaea97f108704fe2df215f2 100644
--- a/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
+++ b/app/modules/dump/wotwizard/wotwizard.legacy.blocks.ts
@@ -26,8 +26,12 @@ export async function addLegacyBlocks(server: Server, wwDAL: WotWizardDAL) {
       return f;
     });
     legacies.forEach((l) => blocksSaved.push(l));
-    if (i % 25000 === 0) {
-      logger.debug("Saving 25 blocks... (%s yet stored)", i);
+    if (i % CommonConstants.BLOCKS_IN_MEMORY_MAX === 0) {
+      logger.debug(
+        "Saving %s blocks... (%s yet stored)",
+        CommonConstants.BLOCKS_IN_MEMORY_MAX,
+        i
+      );
       await wwDAL.blockDao.insertBatch(blocksSaved);
       blocksSaved = [];
     }
diff --git a/app/modules/keypair/index.ts b/app/modules/keypair/index.ts
index da7f21c9c5e71be98c247ba451c83db625b02cc4..248c3a4772d22f45c675b4b76e526a50fc481647 100644
--- a/app/modules/keypair/index.ts
+++ b/app/modules/keypair/index.ts
@@ -135,7 +135,7 @@ export const KeypairDependency = {
       },
 
       beforeSave: async (
-        conf: KeypairConfDTO,
+        conf: Partial<KeypairConfDTO>,
         program: any,
         logger: any,
         confDAL: any
@@ -150,9 +150,12 @@ export const KeypairDependency = {
         delete conf.oldPair;
 
         // We save the key in a separate file
-        const keyring =
-          'pub: "' + conf.pair.pub + '"\n' + 'sec: "' + conf.pair.sec + '"';
-        await confDAL.coreFS.write("keyring.yml", keyring, false, true);
+        if (conf.pair?.pub && conf.pair?.sec) {
+          const keyring = 'pub: "' + conf.pair.pub + '"\n' + 'sec: "' + conf.pair.sec + '"';
+          await confDAL.coreFS.write("keyring.yml", keyring, false, true);
+        } else {
+          logger.warn("No valid keypair to save");
+        }
 
         // We never want to store salt, password or keypair in the conf.json file
         delete conf.salt;
diff --git a/app/modules/keypair/lib/scrypt.ts b/app/modules/keypair/lib/scrypt.ts
index 3135ecad6609695a3a0c43684155613337bbae2e..8e35f8d3200224dd8edeb98330ce352d54effba0 100644
--- a/app/modules/keypair/lib/scrypt.ts
+++ b/app/modules/keypair/lib/scrypt.ts
@@ -32,24 +32,25 @@ export const Scrypt = async (
   r = 16,
   p = 1
 ) => {
-  const res: { pub: string; sec: string } = await new Promise(
-    (resolve, reject) => {
-      crypto.scrypt(
-        key,
-        salt,
-        SEED_LENGTH,
-        { N, r, p },
-        (err: any, seed: Buffer) => {
-          if (err) return reject(err);
-          const pair = KeyPairBuilder.fromSeed(seed);
-          resolve({
-            pub: pair.getPublicKey(),
-            sec: seedToSecretKey(seed),
-          });
-        }
-      );
-    }
-  );
+  const res: { pub: string; sec: string } = await new Promise<{
+    pub: string;
+    sec: string;
+  }>((resolve, reject) => {
+    crypto.scrypt(
+      key,
+      salt,
+      SEED_LENGTH,
+      { N, r, p },
+      (err: any, seed: Buffer) => {
+        if (err) return reject(err);
+        const pair = KeyPairBuilder.fromSeed(seed);
+        resolve({
+          pub: pair.getPublicKey(),
+          sec: seedToSecretKey(seed),
+        });
+      }
+    );
+  });
 
   return res;
 };
diff --git a/app/modules/plugin.ts b/app/modules/plugin.ts
index 06c60ff584a88e0ec9faf5dbb398fa2c16e12bae..83d2793393673321f3bacd691ede629950dd54c3 100644
--- a/app/modules/plugin.ts
+++ b/app/modules/plugin.ts
@@ -14,7 +14,7 @@
 import { ConfDTO } from "../lib/dto/ConfDTO";
 import { Server } from "../../server";
 
-"use strict";
+("use strict");
 
 const fs = require("fs");
 const path = require("path");
@@ -86,7 +86,7 @@ function npmInstall(
   npm: string | null = null,
   cwd: string | null = null
 ) {
-  return new Promise((res, rej) => {
+  return new Promise<void>((res, rej) => {
     const node = getNode();
     npm = npm || getNPM();
     cwd = cwd || getCWD();
@@ -117,7 +117,7 @@ function npmRemove(
   npm: string | null = null,
   cwd: string | null = null
 ) {
-  return new Promise((res, rej) => {
+  return new Promise<void>((res, rej) => {
     const node = getNode();
     npm = npm || getNPM();
     cwd = cwd || getCWD();
@@ -167,7 +167,7 @@ async function checkNPMAccess() {
 }
 
 async function getNPMAccess() {
-  const hasAccessToPackageJSON = await new Promise((res) => {
+  const hasAccessToPackageJSON = await new Promise<boolean>((res) => {
     fs.access(
       path.join(__dirname, "/../../package.json"),
       fs.constants.R_OK | fs.constants.W_OK,
@@ -176,7 +176,7 @@ async function getNPMAccess() {
       }
     );
   });
-  const hasAccessToNodeModules = await new Promise((res) => {
+  const hasAccessToNodeModules = await new Promise<boolean>((res) => {
     fs.access(
       path.join(__dirname, "/../../node_modules"),
       fs.constants.R_OK | fs.constants.W_OK,
diff --git a/app/modules/prover/index.ts b/app/modules/prover/index.ts
index 8bdca8b55ece27112c7955ef0394ec891cc28b54..05cf362dc1762b4628cc9cf61f2d447fadd5a596 100644
--- a/app/modules/prover/index.ts
+++ b/app/modules/prover/index.ts
@@ -50,7 +50,7 @@ export const ProverDependency = {
         conf.powSecurityRetryDelay = ProverConstants.POW_SECURITY_RETRY_DELAY;
         conf.powMaxHandicap = ProverConstants.POW_MAXIMUM_ACCEPTABLE_HANDICAP;
       },
-      beforeSave: async (conf: ConfDTO) => {
+      beforeSave: async (conf: Partial<ConfDTO>) => {
         delete conf.powSecurityRetryDelay;
         delete conf.powMaxHandicap;
       },
@@ -244,7 +244,7 @@ function generateAndSend(
   getGenerationMethod: any
 ) {
   const logger = server.logger;
-  return new Promise((resolve, reject) => {
+  return new Promise<any>((resolve, reject) => {
     if (!program.submitLocal) {
       if (!program.submitHost) {
         throw "Option --submit-host is required.";
@@ -351,7 +351,13 @@ function proveAndSend(
               next();
             } else {
               const peer = PeerDTO.fromJSONObject({
-                endpoints: [["BASIC_MERKLED_API", host, port].join(" ")],
+                endpoints: [
+                  [
+                    port == "443" ? "BMAS" : "BASIC_MERKLED_API",
+                    host,
+                    port,
+                  ].join(" "),
+                ],
               });
               program.show && console.log(proven.getRawSigned());
               logger.info("Posted block " + proven.getRawSigned());
diff --git a/app/modules/prover/lib/permanentProver.ts b/app/modules/prover/lib/permanentProver.ts
index bf52ee7d7cc2d7a77565b9c49aa3912497c22f2e..aaf574c96e505323a83023335c6dd7371f938b9c 100644
--- a/app/modules/prover/lib/permanentProver.ts
+++ b/app/modules/prover/lib/permanentProver.ts
@@ -20,6 +20,7 @@ import { parsers } from "../../../lib/common-libs/parsers/index";
 
 import { Server } from "../../../../server";
 import { Querable, querablep } from "../../../lib/common-libs/querable";
+import { BlockDTO } from "../../../lib/dto/BlockDTO";
 
 export class PermanentProver {
   logger: any;
@@ -29,12 +30,11 @@ export class PermanentProver {
   loops: number;
 
   private permanencePromise: Querable<void> | null = null;
-
-  private blockchainChangedResolver: any = null;
-  private promiseOfWaitingBetween2BlocksOfOurs: any = null;
-  private lastComputedBlock: any = null;
-  private resolveContinuePromise: any = null;
-  private continuePromise: any = null;
+  private blockchainChangedResolver: ((value: void) => void) | null = null;
+  private promiseOfWaitingBetween2BlocksOfOurs: Promise<void> | null = null;
+  private lastComputedBlock: BlockDTO | null = null;
+  private resolveContinuePromise: ((value: boolean) => void) | null = null;
+  private continuePromise: Promise<boolean> | null = null;
 
   constructor(private server: Server) {
     this.logger = server.logger;
@@ -44,7 +44,7 @@ export class PermanentProver {
 
     // Promises triggering the prooving lopp
     this.resolveContinuePromise = null;
-    this.continuePromise = new Promise(
+    this.continuePromise = new Promise<boolean>(
       (resolve) => (this.resolveContinuePromise = resolve)
     );
 
@@ -55,13 +55,13 @@ export class PermanentProver {
     if (!this.permanencePromise || this.permanencePromise.isFulfilled()) {
       this.startPermanence();
     }
-    this.resolveContinuePromise(true);
+    this.resolveContinuePromise && this.resolveContinuePromise(true);
   }
 
   async startPermanence() {
-    let permanenceResolve = () => {};
+    let permanenceResolve: (value: void) => void = () => {};
     this.permanencePromise = querablep(
-      new Promise((res) => {
+      new Promise<void>((res) => {
         permanenceResolve = res;
       })
     );
@@ -99,8 +99,8 @@ export class PermanentProver {
           this.checkTrialIsNotTooHigh(trial, current, selfPubkey);
           const lastIssuedByUs = current.issuer == selfPubkey;
           if (lastIssuedByUs && !this.promiseOfWaitingBetween2BlocksOfOurs) {
-            this.promiseOfWaitingBetween2BlocksOfOurs = new Promise((resolve) =>
-              setTimeout(resolve, theConf.powDelay)
+            this.promiseOfWaitingBetween2BlocksOfOurs = new Promise<void>(
+              (resolve) => setTimeout(resolve, theConf.powDelay)
             );
             this.logger.warn(
               "Waiting " +
@@ -126,15 +126,15 @@ export class PermanentProver {
             let cancelAlreadyTriggered = false;
 
             // The canceller
-            (async () => {
+            setTimeout(async () => {
               // If the blockchain changes
-              await new Promise(
+              await new Promise<void>(
                 (resolve) => (this.blockchainChangedResolver = resolve)
               );
               cancelAlreadyTriggered = true;
               // Then cancel the generation
               await this.prover.cancel();
-            })();
+            });
 
             let unsignedBlock = null,
               trial2 = 0;
@@ -165,11 +165,13 @@ export class PermanentProver {
                 );
               }
               try {
-                const obj = parsers.parseBlock.syncWrite(
-                  dos2unix(this.lastComputedBlock.getRawSigned())
-                );
+                const obj =
+                  this.lastComputedBlock &&
+                  parsers.parseBlock.syncWrite(
+                    dos2unix(this.lastComputedBlock.getRawSigned())
+                  );
                 await this.server.writeBlock(obj);
-                await new Promise((res) => {
+                await new Promise<void>((res) => {
                   this.server.once("bcEvent", () => res());
                 });
               } catch (err) {
@@ -201,7 +203,7 @@ export class PermanentProver {
           await Promise.race(
             waitingRaces.concat([
               // The blockchain has changed! We or someone else found a proof, we must make a gnu one
-              new Promise(
+              new Promise<void>(
                 (resolve) =>
                   (this.blockchainChangedResolver = () => {
                     this.logger.warn("Blockchain changed!");
@@ -210,7 +212,7 @@ export class PermanentProver {
               ),
 
               // Security: if nothing happens for a while, trigger the whole process again
-              new Promise((resolve) =>
+              new Promise<void>((resolve) =>
                 setTimeout(() => {
                   if (!raceDone) {
                     this.logger.warn(
@@ -251,10 +253,10 @@ export class PermanentProver {
     }
   }
 
-  async stopEveryting() {
+  async stopEverything() {
     // First: avoid continuing the main loop
-    this.resolveContinuePromise(true);
-    this.continuePromise = new Promise(
+    this.resolveContinuePromise && this.resolveContinuePromise(true);
+    this.continuePromise = new Promise<boolean>(
       (resolve) => (this.resolveContinuePromise = resolve)
     );
     // Second: stop any started proof
diff --git a/app/modules/prover/lib/proof.ts b/app/modules/prover/lib/proof.ts
index f24d30571026488f51a3e663b20fdb2c9a5546c7..5c242b58e5631122ac9c2375588487a652ae0e73 100644
--- a/app/modules/prover/lib/proof.ts
+++ b/app/modules/prover/lib/proof.ts
@@ -53,7 +53,7 @@ export function createPowWorker() {
     process.exit(ExitCodes.OK);
   });
 
-  process.on("message", async (message) => {
+  process.on("message", async (message: any) => {
     switch (message.command) {
       case "newPoW":
         (async () => {
@@ -383,7 +383,7 @@ export function createPowWorker() {
   }
 
   function pSend(stuff: any) {
-    return new Promise(function (resolve, reject) {
+    return new Promise<void>(function (resolve, reject) {
       if (process.send) {
         process.send(stuff, function (error: any) {
           !error && resolve();
diff --git a/app/modules/prover/lib/prover.ts b/app/modules/prover/lib/prover.ts
index c1adcbdea0927d6ed8d735e9f892ba128056731b..dbdce332842833595811977ebf7c51760e820f13 100644
--- a/app/modules/prover/lib/prover.ts
+++ b/app/modules/prover/lib/prover.ts
@@ -45,6 +45,6 @@ export class Prover extends stream.Transform {
   }
 
   async stopService() {
-    await this.permaProver.stopEveryting();
+    await this.permaProver.stopEverything();
   }
 }
diff --git a/app/modules/upnp-provider.ts b/app/modules/upnp-provider.ts
index 776453de15c99ea4e34140a4926da2146ec1c0a8..6783bb93703563f7522b13707df6f5b81f1403c8 100644
--- a/app/modules/upnp-provider.ts
+++ b/app/modules/upnp-provider.ts
@@ -43,7 +43,7 @@ export class UpnpProvider {
 
   async checkUPnPisAvailable() {
     try {
-      await new Promise((resolve, reject) => {
+      await new Promise<void>((resolve, reject) => {
         this.client.externalIp((err: any, res: any) => {
           if (err || !res) {
             reject();
@@ -179,7 +179,7 @@ export class UpnpProvider {
   }
 
   static async getUPnPMappings(client: any): Promise<any> {
-    return new Promise((resolve, reject) => {
+    return new Promise<any>((resolve, reject) => {
       client.getMappings((err: any, res: any) => {
         if (err) {
           reject(err);
@@ -200,7 +200,7 @@ export class UpnpProvider {
     let keys = Underscore.keys(netInterfaces);
     let res = [];
     for (const name of keys) {
-      let addresses = netInterfaces[name];
+      let addresses = netInterfaces[name] || [];
       for (const addr of addresses) {
         if (!family || addr.family == family) {
           res.push({
diff --git a/app/modules/ws2p/lib/WS2PCluster.ts b/app/modules/ws2p/lib/WS2PCluster.ts
index 080150134257750cec254a0684776dca06c92cf0..406d6eaaee8b3bac547a0d5ae641e2acd1ffd085 100644
--- a/app/modules/ws2p/lib/WS2PCluster.ts
+++ b/app/modules/ws2p/lib/WS2PCluster.ts
@@ -11,7 +11,6 @@
 // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 // GNU Affero General Public License for more details.
 
-import { DEFAULT_ENCODING } from "crypto";
 import { WS2PServer } from "./WS2PServer";
 import { Server } from "../../../../server";
 import { WS2PClient } from "./WS2PClient";
@@ -38,7 +37,6 @@ import { TransactionDTO } from "../../../lib/dto/TransactionDTO";
 
 const es = require("event-stream");
 const nuuid = require("node-uuid");
-const logger = NewLogger();
 
 export interface WS2PHead {
   message: string;
@@ -576,6 +574,7 @@ export class WS2PCluster {
     const canReachClearEndpoint = ProxiesConf.canReachClearEndpoint(
       this.server.conf.proxiesConf
     );
+    const isMember = await this.server.dal.isMember(this.server.conf.pair.pub);
     peers.sort((a, b) => {
       // Top priority at our own nodes
       if (
@@ -595,12 +594,14 @@ export class WS2PCluster {
       const aNumberOfFreeRooms = this.numberOfFreeRooms(
         a,
         canReachTorEndpoint,
-        canReachClearEndpoint
+        canReachClearEndpoint,
+        isMember
       );
       const bNumberOfFreeRooms = this.numberOfFreeRooms(
         b,
         canReachTorEndpoint,
-        canReachClearEndpoint
+        canReachClearEndpoint,
+        isMember
       );
 
       if (canReachTorEndpoint) {
@@ -744,7 +745,8 @@ export class WS2PCluster {
   private numberOfFreeRooms(
     p: PeerDTO,
     canReachTorEndpoint: boolean,
-    canReachClearEndpoint: boolean
+    canReachClearEndpoint: boolean,
+    isMember: boolean
   ) {
     const api = p.getOnceWS2PEndpoint(
       canReachTorEndpoint,
@@ -768,9 +770,7 @@ export class WS2PCluster {
               freeMemberRoom,
               freeMirorRoom,
             ]: string[] = messageV2.split(":");
-            return this.server.dal.isMember(this.server.conf.pair.pub)
-              ? freeMemberRoom
-              : freeMirorRoom;
+            return isMember ? freeMemberRoom : freeMirorRoom;
           }
         }
       }
diff --git a/app/modules/ws2p/lib/WS2PConnection.ts b/app/modules/ws2p/lib/WS2PConnection.ts
index e31bf49cea195446246da6cbba8d06c58d2f4a69..c960ebca48457bb87f6d2ffaf84ebf4a74a00421 100644
--- a/app/modules/ws2p/lib/WS2PConnection.ts
+++ b/app/modules/ws2p/lib/WS2PConnection.ts
@@ -528,7 +528,7 @@ export class WS2PConnection {
         try {
           await Promise.race([
             connectionTimeout,
-            new Promise((resolve, reject) => {
+            new Promise<void>((resolve, reject) => {
               (async () => {
                 await this.onWsOpened;
                 try {
@@ -737,7 +737,7 @@ export class WS2PConnection {
   async request(body: WS2PRequest) {
     await this.connectAsInitiator();
     const uuid = nuuid.v4();
-    return new Promise((resolve, reject) => {
+    return new Promise<void>((resolve, reject) => {
       this.nbRequestsCount++;
       this.ws.send(
         JSON.stringify({
@@ -761,7 +761,7 @@ export class WS2PConnection {
               extras,
               Promise.race([
                 // The answer
-                new Promise((res, rej) => {
+                new Promise<void>((res, rej) => {
                   extras.resolve = res;
                   extras.reject = () => {
                     this.errorDetected(WS2P_ERR.REQUEST_FAILED);
@@ -833,7 +833,7 @@ export class WS2PConnection {
 
   async pushData(type: WS2P_PUSH, key: string, data: any) {
     await this.connectAsInitiator();
-    return new Promise((resolve, reject) => {
+    return new Promise<void>((resolve, reject) => {
       this.nbPushsToRemoteCount++;
       try {
         this.ws.send(
diff --git a/app/modules/ws2p/lib/WS2PServer.ts b/app/modules/ws2p/lib/WS2PServer.ts
index 5ebf238f6fc2b04121987615b0891089b9dafea7..fa66c2462bda81fc586ba6e7eff58d8983d3ef38 100644
--- a/app/modules/ws2p/lib/WS2PServer.ts
+++ b/app/modules/ws2p/lib/WS2PServer.ts
@@ -288,7 +288,7 @@ export class WS2PServer extends events.EventEmitter {
 
   async close() {
     await Promise.all(this.connections.map((c) => c.close()));
-    return new Promise((res, rej) => {
+    return new Promise<void>((res, rej) => {
       this.wss.close((err: any) => {
         if (err) return rej(err);
         res();
diff --git a/app/service/IdentityService.ts b/app/service/IdentityService.ts
index 676b8602a57b183a8582444c9be37e77ca39cd12..9501db0bbe916ccad1eb83914fbd0a17b921d850 100644
--- a/app/service/IdentityService.ts
+++ b/app/service/IdentityService.ts
@@ -30,7 +30,7 @@ import { MindexEntry } from "../lib/indexer";
 import { DataErrors } from "../lib/common-libs/errors";
 import { Tristamp } from "../lib/common/Tristamp";
 
-"use strict";
+("use strict");
 const constants = require("../lib/constants");
 
 const BY_ABSORPTION = true;
@@ -54,6 +54,10 @@ export class IdentityService extends FIFOService {
     return this.dal.searchJustIdentities(search);
   }
 
+  searchIdentitiesByPubkey(pubkey: string) {
+    return this.dal.searchJustIdentitiesByPubkey(pubkey);
+  }
+
   async findMember(search: string) {
     let idty = null;
     if (search.match(constants.PUBLIC_KEY)) {
diff --git a/app/service/PeeringService.ts b/app/service/PeeringService.ts
index 6fd0a104effdeeaaed010a21e95a6a303a10a5f2..daec3e5899f338a9203fc0650cd3c980816870bb 100755
--- a/app/service/PeeringService.ts
+++ b/app/service/PeeringService.ts
@@ -76,10 +76,9 @@ export class PeeringService {
   async mirrorBMAEndpoints() {
     const localPeer = await this.peer();
     const localEndpoints = await this.server.getEndpoints();
-    return this.getOtherEndpoints(
-      localPeer.endpoints,
-      localEndpoints
-    ).filter((ep) => ep.match(/^BASIC_MERKLED_API/));
+    return this.getOtherEndpoints(localPeer.endpoints, localEndpoints).filter(
+      PeerDTO.isBMA
+    );
   }
 
   checkPeerSignature(p: PeerDTO) {
@@ -363,9 +362,8 @@ export class PeeringService {
       const ws2pAccess = PeerDTO.fromJSONObject(p2).getFirstNonTorWS2P();
       if (ws2pAccess) {
         logger.info(
-          `WS2P access: ${ws2pAccess.host} :${ws2pAccess.port}${
-            (ws2pAccess.path && " " + ws2pAccess.path) || ""
-          }`
+          `WS2P access: ${ws2pAccess.host}:${ws2pAccess.port}` +
+            (ws2pAccess.path ? " " + ws2pAccess.path : "")
         );
       }
       logger.debug(
diff --git a/doc/HTTP_API.md b/doc/HTTP_API.md
index 3a5bace82210065faff189686f6cd51b56108b80..bf88629d92d2679adf1165954e9e63d71083f4da 100644
--- a/doc/HTTP_API.md
+++ b/doc/HTTP_API.md
@@ -2,63 +2,65 @@
 
 ## Contents
 
-* [Contents](#contents)
-* [Overview](#overview)
-* [Merkle URLs](#merkle-urls)
-* [API](#api)
-  * [node/](#node)
-      * [summary](#nodesummary)
-      * [sandboxes](#nodesandboxes)
-  * [wot/](#wot)
-      * [add](#wotadd)
-      * [certify](#wotcertify)
-      * [revoke](#wotrevoke)
-      * [lookup/[search]](#wotlookupsearch)
-      * [members](#wotmembers)
-      * [requirements/[PUBKEY]](#wotrequirementspubkey)
-      * [requirements-of-pending/[minsig]](#wotrequirements-of-pendingminsig)
-      * [certifiers-of/[search]](#wotcertifiers-ofsearch)
-      * [certified-by/[search]](#wotcertified-bysearch)
-      * [identity-of/[search]](#wotidentity-ofsearch)
-  * [blockchain/](#blockchain)
-      * [parameters](#blockchainparameters)
-      * [membership](#blockchainmembership)
-      * [memberships/[search]](#blockchainmembershipssearch)
-      * [block](#blockchainblock)
-      * [block/[number]](#blockchainblocknumber)
-      * [blocks/[count]/[from]](#blockchainblockscountfrom)
-      * [current](#blockchaincurrent)
-      * [hardship/[PUBKEY]](#blockchainhardshippubkey)
-      * [difficulties](#blockchaindifficulties)
-      * [with/](#blockchainwith)
-          * [newcomers](#blockchainwithnewcomers)
-          * [certs](#blockchainwithcerts)
-          * [joiners](#blockchainwithjoiners)
-          * [actives](#blockchainwithactives)
-          * [leavers](#blockchainwithleavers)
-          * [revoked](#blockchainwithrevoked)
-          * [excluded](#blockchainwithexcluded)
-          * [ud](#blockchainwithud)
-          * [tx](#blockchainwithtx)
-      * [branches](#blockchainbranches)
-  * [network/](#network)
-      * [peers](#networkpeers)
-      * [peering](#networkpeering)
-      * [peering/peers (GET)](#networkpeeringpeers-get)
-      * [peering/peers (POST)](#networkpeeringpeers-post)
-      * [ws2p/heads (GET)](#networkws2pheads-get)
-  * [tx/](#tx)
-      * [process](#txprocess)
-      * [sources/[pubkey]](#txsourcespubkey)
-      * [history/[pubkey]](#txhistorypubkey)
-      * [history/[pubkey]/pending](#txhistorypubkeypending)
-      * [history/[pubkey]/blocks/[from]/[to]](#txhistorypubkeyblocksfromto)
-      * [history/[pubkey]/times/[from]/[to]](#txhistorypubkeytimesfromto)
-  * [ud/](#ud)
-      * [history/[pubkey]](#udhistorypubkey)
-  * [ws/](#ws)
-      * [block](#wsblock)
-      * [peer](#wspeer)
+- [Duniter HTTP API](#duniter-http-api)
+  - [Contents](#contents)
+  - [Overview](#overview)
+  - [Merkle URLs](#merkle-urls)
+    - [Duniter Merkle trees leaves](#duniter-merkle-trees-leaves)
+      - [Unicity](#unicity)
+  - [API](#api)
+    - [node/\*](#node)
+      - [`node/summary`](#nodesummary)
+      - [`node/sandboxes`](#nodesandboxes)
+    - [wot/\*](#wot)
+      - [`wot/add`](#wotadd)
+      - [`wot/certify`](#wotcertify)
+      - [`wot/revoke`](#wotrevoke)
+      - [`wot/lookup/[search]`](#wotlookupsearch)
+      - [`wot/members`](#wotmembers)
+      - [`wot/requirements/[pubkey]`](#wotrequirementspubkey)
+      - [`wot/requirements-of-pending/[minsig]`](#wotrequirements-of-pendingminsig)
+      - [`wot/certifiers-of/[search]`](#wotcertifiers-ofsearch)
+      - [`wot/certified-by/[search]`](#wotcertified-bysearch)
+      - [`wot/identity-of/[search]`](#wotidentity-ofsearch)
+    - [blockchain/\*](#blockchain)
+      - [`blockchain/parameters`](#blockchainparameters)
+      - [`blockchain/membership`](#blockchainmembership)
+      - [`blockchain/memberships/[search]`](#blockchainmembershipssearch)
+      - [`blockchain/block`](#blockchainblock)
+      - [`blockchain/block/[NUMBER]`](#blockchainblocknumber)
+      - [`blockchain/blocks/[COUNT]/[FROM]`](#blockchainblockscountfrom)
+      - [`blockchain/current`](#blockchaincurrent)
+      - [`blockchain/hardship/[PUBKEY]`](#blockchainhardshippubkey)
+      - [`blockchain/difficulties`](#blockchaindifficulties)
+      - [`blockchain/with/newcomers`](#blockchainwithnewcomers)
+      - [`blockchain/with/certs`](#blockchainwithcerts)
+      - [`blockchain/with/joiners`](#blockchainwithjoiners)
+      - [`blockchain/with/actives`](#blockchainwithactives)
+      - [`blockchain/with/leavers`](#blockchainwithleavers)
+      - [`blockchain/with/revoked`](#blockchainwithrevoked)
+      - [`blockchain/with/excluded`](#blockchainwithexcluded)
+      - [`blockchain/with/ud`](#blockchainwithud)
+      - [`blockchain/with/tx`](#blockchainwithtx)
+      - [`blockchain/branches`](#blockchainbranches)
+    - [network/\*](#network)
+      - [`network/peers`](#networkpeers)
+      - [`network/peering`](#networkpeering)
+      - [`network/peering/peers (GET)`](#networkpeeringpeers-get)
+      - [`network/peering/peers (POST)`](#networkpeeringpeers-post)
+      - [`network/ws2p/heads (GET)`](#networkws2pheads-get)
+    - [tx/\*](#tx)
+      - [`tx/process`](#txprocess)
+      - [`tx/sources/[pubkey]`](#txsourcespubkey)
+      - [`tx/history/[pubkey]`](#txhistorypubkey)
+      - [`tx/history/[pubkey]/pending`](#txhistorypubkeypending)
+      - [`tx/history/[pubkey]/blocks/[from]/[to]`](#txhistorypubkeyblocksfromto)
+      - [`tx/history/[pubkey]/times/[from]/[to]`](#txhistorypubkeytimesfromto)
+    - [ud/\*](#ud)
+      - [`ud/history/[pubkey]`](#udhistorypubkey)
+    - [ws/\*](#ws)
+      - [`ws/block`](#wsblock)
+      - [`ws/peer`](#wspeer)
 
 ## Overview
 
@@ -244,8 +246,12 @@ Technical informations about the node.
 {
   "duniter": {
     "software": "duniter",
-    "version": "0.10.3",
-    "forkWindowSize": 10
+    "version": "1.9.1",
+    "forkWindowSize": 100,
+    "storage": {
+      "transaction": true,
+      "wotwizard": false
+    }
   }
 }
 ```
diff --git a/gui/index.html b/gui/index.html
index 8ccfda3b5ff7533a9dfe110dd7c9aa94581c2253..0b2f15bec07e84f5afc3dded4c3e596602392f6d 100644
--- a/gui/index.html
+++ b/gui/index.html
@@ -3,7 +3,7 @@
 <head>
   <meta charset="utf-8">
   <meta http-equiv="X-UA-Compatible" content="IE=edge">
-  <title>Duniter 1.8.1</title>
+  <title>Duniter 1.8.7</title>
   <style>
     html {
       font-family: "Courier New", Courier, monospace;
diff --git a/neon/native/Cargo.toml b/neon/native/Cargo.toml
index bc5fe8859f2122108eb590e604fec48144ddd788..75833dc311a9a430c98f61c0d587db76e264880e 100644
--- a/neon/native/Cargo.toml
+++ b/neon/native/Cargo.toml
@@ -12,13 +12,14 @@ name = "duniteroxyde"
 crate-type = ["cdylib"]
 
 [build-dependencies]
-neon-build = "0.4.0"
+neon-build = "0.10.0"
 
 [dependencies]
 bincode = "1.2.1"
 bs58 = "0.3.0"
-duniter-core = { git = "https://git.duniter.org/nodes/rust/duniter-core", features = ["bc-writer"] }
-duniter-server = { path = "../../rust-libs/duniter-server" }
+duniter-core = "1.9.1"
+duniter-gva = "1.9.0"
+duniter-bma = "1.9.1"
 flate2 = { version = "1.0", features = ["zlib-ng-compat"], default-features = false }
 flexi_logger = { version = "=0.16.0", default-features = false, features = ["compress"] }
 flume = "0.10.0"
diff --git a/neon/native/artifacts.json b/neon/native/artifacts.json
new file mode 100644
index 0000000000000000000000000000000000000000..1bf1b5b5b89381e350b072267d8252190ef3eb54
--- /dev/null
+++ b/neon/native/artifacts.json
@@ -0,0 +1 @@
+{"active":"debug","targets":{"release":{"rustc":"","nodeVersion":null,"env":{"npm_config_target":null,"npm_config_arch":null,"npm_config_target_arch":null,"npm_config_disturl":null,"npm_config_runtime":null,"npm_config_build_from_source":null,"npm_config_devdir":null}},"debug":{"rustc":"rustc 1.63.0 (4b91a6ea7 2022-08-08)","nodeVersion":"v16.16.0","env":{"npm_config_target":null,"npm_config_arch":null,"npm_config_target_arch":null,"npm_config_disturl":null,"npm_config_runtime":null,"npm_config_build_from_source":null,"npm_config_devdir":null,"npm_config_node_engine":null,"npm_config_nodedir":null,"npm_config_node_gyp":"/Users/cgeek/.nvm/versions/node/v16.16.0/lib/node_modules/npm/node_modules/node-gyp/bin/node-gyp.js","npm_config_platform":null}}}}
\ No newline at end of file
diff --git a/neon/native/src/wot.rs b/neon/native/src/wot.rs
index 112d842e62c9d0d3512424b4b2d85509fb79208c..804022395df2cf4ca586f2f759ba676acffdb870 100644
--- a/neon/native/src/wot.rs
+++ b/neon/native/src/wot.rs
@@ -327,8 +327,8 @@ declare_types! {
 
             match distance_res {
                 Ok(distance_data) => Ok(cx.boolean(distance_data.outdistanced).upcast()),
-                Err(e) => match e {
-                    DistanceError::NodeDontExist(wot_id) => cx.throw_error(format!("node '{}' not exist.", wot_id.0)),
+                Err(DistanceError::NodeDontExist(wot_id)) => {
+                    cx.throw_error(format!("node '{}' not exist.", wot_id.0))
                 }
             }
         }
@@ -345,8 +345,8 @@ declare_types! {
 
             match distance_res {
                 Ok(distance_data) => distance_response_to_js_object(cx, distance_data),
-                Err(e) => match e {
-                    DistanceError::NodeDontExist(wot_id) => cx.throw_error(format!("node '{}' not exist.", wot_id.0)),
+                Err(DistanceError::NodeDontExist(wot_id)) => {
+                    cx.throw_error(format!("node '{}' not exist.", wot_id.0))
                 }
             }
         }
diff --git a/package-lock.json b/package-lock.json
index cfbf5a13970bda8fe7397e73a0c1b3e21a843705..fd34077287e940549d28ad72a1094e099cbbc7d7 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,6 +1,6 @@
 {
   "name": "duniter",
-  "version": "1.9.0-dev",
+  "version": "1.9.1",
   "lockfileVersion": 1,
   "requires": true,
   "dependencies": {
@@ -403,21 +403,6 @@
       "integrity": "sha512-hHUXGagefjN2iRrID63xckIvotOXOojhQKWIPUZ4mNUZ9nLZW+7FMNoE1lOkEhNWYsx/7ysGIuJYCiMAA9FnrA==",
       "dev": true
     },
-    "ansi-escape-sequences": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/ansi-escape-sequences/-/ansi-escape-sequences-4.1.0.tgz",
-      "integrity": "sha512-dzW9kHxH011uBsidTXd14JXgzye/YLb2LzeKZ4bsgl/Knwx8AtbSFkkGxagdNOoh0DlqHCmfiEjWKBaqjOanVw==",
-      "requires": {
-        "array-back": "^3.0.1"
-      },
-      "dependencies": {
-        "array-back": {
-          "version": "3.1.0",
-          "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz",
-          "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q=="
-        }
-      }
-    },
     "ansi-escapes": {
       "version": "4.3.1",
       "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.3.1.tgz",
@@ -470,19 +455,6 @@
             "lodash": "^4.17.14"
           }
         },
-        "glob": {
-          "version": "7.1.6",
-          "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
-          "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
-          "requires": {
-            "fs.realpath": "^1.0.0",
-            "inflight": "^1.0.4",
-            "inherits": "2",
-            "minimatch": "^3.0.4",
-            "once": "^1.3.0",
-            "path-is-absolute": "^1.0.0"
-          }
-        },
         "readable-stream": {
           "version": "3.6.0",
           "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.0.tgz",
@@ -510,21 +482,6 @@
         "lodash.union": "^4.6.0",
         "normalize-path": "^3.0.0",
         "readable-stream": "^2.0.0"
-      },
-      "dependencies": {
-        "glob": {
-          "version": "7.1.6",
-          "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
-          "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
-          "requires": {
-            "fs.realpath": "^1.0.0",
-            "inflight": "^1.0.4",
-            "inherits": "2",
-            "minimatch": "^3.0.4",
-            "once": "^1.3.0",
-            "path-is-absolute": "^1.0.0"
-          }
-        }
       }
     },
     "are-we-there-yet": {
@@ -551,12 +508,9 @@
       }
     },
     "array-back": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/array-back/-/array-back-2.0.0.tgz",
-      "integrity": "sha512-eJv4pLLufP3g5kcZry0j6WXpIbzYw9GUB4mVJZno9wfwiBxbizTnHCw3VJb07cBihbFX48Y7oSrW9y+gt4glyw==",
-      "requires": {
-        "typical": "^2.6.1"
-      }
+      "version": "3.1.0",
+      "resolved": "https://registry.npmjs.org/array-back/-/array-back-3.1.0.tgz",
+      "integrity": "sha512-TkuxA4UCOvxuDK6NZYXCalszEzj+TLszyASooky+i742l9TqsOdYCMJJupxRic61hwquNtppB3hgcuq9SVSH1Q=="
     },
     "array-flatten": {
       "version": "1.1.1",
@@ -950,32 +904,78 @@
       }
     },
     "command-line-args": {
-      "version": "4.0.7",
-      "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-4.0.7.tgz",
-      "integrity": "sha512-aUdPvQRAyBvQd2n7jXcsMDz68ckBJELXNzBybCHOibUWEg0mWTnaYCSRU8h9R+aNRSvDihJtssSRCiDRpLaezA==",
+      "version": "5.2.1",
+      "resolved": "https://registry.npmjs.org/command-line-args/-/command-line-args-5.2.1.tgz",
+      "integrity": "sha512-H4UfQhZyakIjC74I9d34fGYDwk3XpSr17QhEd0Q3I9Xq1CETHo4Hcuo87WyWHpAF1aSLjLRf5lD9ZGX2qStUvg==",
       "requires": {
-        "array-back": "^2.0.0",
-        "find-replace": "^1.0.3",
-        "typical": "^2.6.1"
+        "array-back": "^3.1.0",
+        "find-replace": "^3.0.0",
+        "lodash.camelcase": "^4.3.0",
+        "typical": "^4.0.0"
       }
     },
     "command-line-commands": {
-      "version": "2.0.1",
-      "resolved": "https://registry.npmjs.org/command-line-commands/-/command-line-commands-2.0.1.tgz",
-      "integrity": "sha512-m8c2p1DrNd2ruIAggxd/y6DgygQayf6r8RHwchhXryaLF8I6koYjoYroVP+emeROE9DXN5b9sP1Gh+WtvTTdtQ==",
+      "version": "3.0.2",
+      "resolved": "https://registry.npmjs.org/command-line-commands/-/command-line-commands-3.0.2.tgz",
+      "integrity": "sha512-ac6PdCtdR6q7S3HN+JiVLIWGHY30PRYIEl2qPo+FuEuzwAUk0UYyimrngrg7FvF/mCr4Jgoqv5ZnHZgads50rw==",
       "requires": {
-        "array-back": "^2.0.0"
+        "array-back": "^4.0.1"
+      },
+      "dependencies": {
+        "array-back": {
+          "version": "4.0.2",
+          "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz",
+          "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg=="
+        }
       }
     },
     "command-line-usage": {
-      "version": "4.1.0",
-      "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-4.1.0.tgz",
-      "integrity": "sha512-MxS8Ad995KpdAC0Jopo/ovGIroV/m0KHwzKfXxKag6FHOkGsH8/lv5yjgablcRxCJJC0oJeUMuO/gmaq+Wq46g==",
-      "requires": {
-        "ansi-escape-sequences": "^4.0.0",
-        "array-back": "^2.0.0",
-        "table-layout": "^0.4.2",
-        "typical": "^2.6.1"
+      "version": "6.1.3",
+      "resolved": "https://registry.npmjs.org/command-line-usage/-/command-line-usage-6.1.3.tgz",
+      "integrity": "sha512-sH5ZSPr+7UStsloltmDh7Ce5fb8XPlHyoPzTpyyMuYCtervL65+ubVZ6Q61cFtFl62UyJlc8/JwERRbAFPUqgw==",
+      "requires": {
+        "array-back": "^4.0.2",
+        "chalk": "^2.4.2",
+        "table-layout": "^1.0.2",
+        "typical": "^5.2.0"
+      },
+      "dependencies": {
+        "ansi-styles": {
+          "version": "3.2.1",
+          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
+          "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+          "requires": {
+            "color-convert": "^1.9.0"
+          }
+        },
+        "array-back": {
+          "version": "4.0.2",
+          "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz",
+          "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg=="
+        },
+        "chalk": {
+          "version": "2.4.2",
+          "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz",
+          "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==",
+          "requires": {
+            "ansi-styles": "^3.2.1",
+            "escape-string-regexp": "^1.0.5",
+            "supports-color": "^5.3.0"
+          }
+        },
+        "supports-color": {
+          "version": "5.5.0",
+          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz",
+          "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==",
+          "requires": {
+            "has-flag": "^3.0.0"
+          }
+        },
+        "typical": {
+          "version": "5.2.0",
+          "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz",
+          "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg=="
+        }
       }
     },
     "commander": {
@@ -1193,11 +1193,6 @@
         }
       }
     },
-    "decamelize": {
-      "version": "1.2.0",
-      "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz",
-      "integrity": "sha1-9lNNFRSCabIDUue+4m9QH5oZEpA="
-    },
     "dedent": {
       "version": "0.7.0",
       "resolved": "https://registry.npmjs.org/dedent/-/dedent-0.7.0.tgz",
@@ -2040,28 +2035,18 @@
       }
     },
     "find-replace": {
-      "version": "1.0.3",
-      "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-1.0.3.tgz",
-      "integrity": "sha1-uI5zZNLZyVlVnziMZmcNYTBEH6A=",
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/find-replace/-/find-replace-3.0.0.tgz",
+      "integrity": "sha512-6Tb2myMioCAgv5kfvP5/PkZZ/ntTpVK39fHY7WkWBgvbeE+VHd/tZuZ4mrC+bxh4cfOZeYKVPaJIZtZXV7GNCQ==",
       "requires": {
-        "array-back": "^1.0.4",
-        "test-value": "^2.1.0"
-      },
-      "dependencies": {
-        "array-back": {
-          "version": "1.0.4",
-          "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz",
-          "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=",
-          "requires": {
-            "typical": "^2.6.0"
-          }
-        }
+        "array-back": "^3.0.1"
       }
     },
     "find-up": {
       "version": "4.1.0",
       "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz",
       "integrity": "sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==",
+      "dev": true,
       "requires": {
         "locate-path": "^5.0.0",
         "path-exists": "^4.0.0"
@@ -2078,20 +2063,6 @@
         "write": "1.0.3"
       },
       "dependencies": {
-        "glob": {
-          "version": "7.1.6",
-          "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.6.tgz",
-          "integrity": "sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA==",
-          "dev": true,
-          "requires": {
-            "fs.realpath": "^1.0.0",
-            "inflight": "^1.0.4",
-            "inherits": "2",
-            "minimatch": "^3.0.4",
-            "once": "^1.3.0",
-            "path-is-absolute": "^1.0.0"
-          }
-        },
         "rimraf": {
           "version": "2.6.3",
           "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-2.6.3.tgz",
@@ -2213,11 +2184,6 @@
         }
       }
     },
-    "get-caller-file": {
-      "version": "2.0.5",
-      "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz",
-      "integrity": "sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="
-    },
     "get-own-enumerable-property-symbols": {
       "version": "3.0.2",
       "resolved": "https://registry.npmjs.org/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz",
@@ -2249,14 +2215,14 @@
       }
     },
     "glob": {
-      "version": "7.1.2",
-      "resolved": "https://registry.npmjs.org/glob/-/glob-7.1.2.tgz",
-      "integrity": "sha512-MJTUg1kjuLeQCJ+ccE4Vpa6kKVXkPYJ2mOCQyUuKLcLQsdrMCpBPUi8qVE6+YuaJkozeA9NusTAw3hLr8Xe5EQ==",
+      "version": "7.2.3",
+      "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz",
+      "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==",
       "requires": {
         "fs.realpath": "^1.0.0",
         "inflight": "^1.0.4",
         "inherits": "2",
-        "minimatch": "^3.0.4",
+        "minimatch": "^3.1.1",
         "once": "^1.3.0",
         "path-is-absolute": "^1.0.0"
       }
@@ -2304,65 +2270,33 @@
       "dev": true
     },
     "handlebars": {
-      "version": "4.7.4",
-      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.4.tgz",
-      "integrity": "sha512-Is8+SzHv8K9STNadlBVpVhxXrSXxVgTyIvhdg2Qjak1SfSZ7iEozLHdwiX1jJ9lLFkcFJxqGK5s/cI7ZX+qGkQ==",
+      "version": "4.7.7",
+      "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.7.7.tgz",
+      "integrity": "sha512-aAcXm5OAfE/8IXkcZvCepKU3VzW1/39Fb5ZuqMtgI/hT8X2YgoMvBY5dLhq/cpOvw7Lk1nK/UF71aLG/ZnVYRA==",
       "requires": {
+        "minimist": "^1.2.5",
         "neo-async": "^2.6.0",
         "source-map": "^0.6.1",
         "uglify-js": "^3.1.4",
-        "yargs": "^15.3.1"
+        "wordwrap": "^1.0.0"
       },
       "dependencies": {
-        "ansi-regex": {
-          "version": "5.0.0",
-          "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
-          "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg=="
-        },
-        "cliui": {
-          "version": "6.0.0",
-          "resolved": "https://registry.npmjs.org/cliui/-/cliui-6.0.0.tgz",
-          "integrity": "sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==",
-          "requires": {
-            "string-width": "^4.2.0",
-            "strip-ansi": "^6.0.0",
-            "wrap-ansi": "^6.2.0"
-          }
-        },
         "commander": {
           "version": "2.20.3",
           "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz",
           "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==",
           "optional": true
         },
-        "is-fullwidth-code-point": {
-          "version": "3.0.0",
-          "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
-          "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
+        "minimist": {
+          "version": "1.2.6",
+          "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.6.tgz",
+          "integrity": "sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q=="
         },
         "source-map": {
           "version": "0.6.1",
           "resolved": "https://registry.npmjs.org/source-map/-/source-map-0.6.1.tgz",
           "integrity": "sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g=="
         },
-        "string-width": {
-          "version": "4.2.0",
-          "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
-          "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==",
-          "requires": {
-            "emoji-regex": "^8.0.0",
-            "is-fullwidth-code-point": "^3.0.0",
-            "strip-ansi": "^6.0.0"
-          }
-        },
-        "strip-ansi": {
-          "version": "6.0.0",
-          "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
-          "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
-          "requires": {
-            "ansi-regex": "^5.0.0"
-          }
-        },
         "uglify-js": {
           "version": "3.8.1",
           "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.8.1.tgz",
@@ -2373,23 +2307,10 @@
             "source-map": "~0.6.1"
           }
         },
-        "yargs": {
-          "version": "15.3.1",
-          "resolved": "https://registry.npmjs.org/yargs/-/yargs-15.3.1.tgz",
-          "integrity": "sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==",
-          "requires": {
-            "cliui": "^6.0.0",
-            "decamelize": "^1.2.0",
-            "find-up": "^4.1.0",
-            "get-caller-file": "^2.0.1",
-            "require-directory": "^2.1.1",
-            "require-main-filename": "^2.0.0",
-            "set-blocking": "^2.0.0",
-            "string-width": "^4.2.0",
-            "which-module": "^2.0.0",
-            "y18n": "^4.0.0",
-            "yargs-parser": "^18.1.1"
-          }
+        "wordwrap": {
+          "version": "1.0.0",
+          "resolved": "https://registry.npmjs.org/wordwrap/-/wordwrap-1.0.0.tgz",
+          "integrity": "sha512-gvVzJFlPycKc5dZN4yPkP8w7Dc37BtP1yczEneOb4uq34pXZcvrtRTmWV8W+Ume+XCxKgbjM+nevkyFPMybd4Q=="
         }
       }
     },
@@ -2419,8 +2340,7 @@
     "has-flag": {
       "version": "3.0.0",
       "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz",
-      "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0=",
-      "dev": true
+      "integrity": "sha1-tdRU3CGZriJWmfNGfloH87lVuv0="
     },
     "has-unicode": {
       "version": "2.0.1",
@@ -3132,14 +3052,15 @@
       "version": "5.0.0",
       "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz",
       "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==",
+      "dev": true,
       "requires": {
         "p-locate": "^4.1.0"
       }
     },
     "lodash": {
-      "version": "4.17.15",
-      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.15.tgz",
-      "integrity": "sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A=="
+      "version": "4.17.21",
+      "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
+      "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
     },
     "lodash._baseassign": {
       "version": "3.2.0",
@@ -3175,6 +3096,11 @@
       "integrity": "sha1-UgOte6Ql+uhCRg5pbbnPPmqsBXw=",
       "dev": true
     },
+    "lodash.camelcase": {
+      "version": "4.3.0",
+      "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz",
+      "integrity": "sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA=="
+    },
     "lodash.create": {
       "version": "3.1.1",
       "resolved": "https://registry.npmjs.org/lodash.create/-/lodash.create-3.1.1.tgz",
@@ -3229,11 +3155,6 @@
         "lodash.isarray": "^3.0.0"
       }
     },
-    "lodash.padend": {
-      "version": "4.6.1",
-      "resolved": "https://registry.npmjs.org/lodash.padend/-/lodash.padend-4.6.1.tgz",
-      "integrity": "sha1-U8y6BH0G4VjTEfRdpiX05J5vFm4="
-    },
     "lodash.union": {
       "version": "4.6.0",
       "resolved": "https://registry.npmjs.org/lodash.union/-/lodash.union-4.6.0.tgz",
@@ -3366,6 +3287,11 @@
       "integrity": "sha512-s8UhlNe7vPKomQhC1qFelMokr/Sc3AgNbso3n74mVPA5LTZwkB9NlXf4XPamLxJE8h0gh73rM94xvwRT2CVInw==",
       "dev": true
     },
+    "make-promises-safe": {
+      "version": "5.1.0",
+      "resolved": "https://registry.npmjs.org/make-promises-safe/-/make-promises-safe-5.1.0.tgz",
+      "integrity": "sha512-AfdZ49rtyhQR/6cqVKGoH7y4ql7XkS5HJI1lZm0/5N6CQosy1eYbBJ/qbhkKHzo17UH7M918Bysf6XB9f3kS1g=="
+    },
     "map-stream": {
       "version": "0.0.7",
       "resolved": "https://registry.npmjs.org/map-stream/-/map-stream-0.0.7.tgz",
@@ -3472,9 +3398,9 @@
       "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg=="
     },
     "minimatch": {
-      "version": "3.0.4",
-      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.0.4.tgz",
-      "integrity": "sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA==",
+      "version": "3.1.2",
+      "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz",
+      "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==",
       "requires": {
         "brace-expansion": "^1.1.7"
       }
@@ -3961,177 +3887,135 @@
       "integrity": "sha512-iyam8fBuCUpWeKPGpaNMetEocMt364qkCsfL9JuhjXX6dRnguRVOfk2GZaDpPjcOKiiXCPINZC1GczQ7iTq3Zw=="
     },
     "neon-cli": {
-      "version": "0.4.0",
-      "resolved": "https://registry.npmjs.org/neon-cli/-/neon-cli-0.4.0.tgz",
-      "integrity": "sha512-66HhHb8rk+zHSG64CI6jhyOQqpibBAald8ObdQPCjXcCjzSEVnkQHutUE8dyNlHRNT7xLfrZGkDbtwrYh2p+6w==",
-      "requires": {
-        "chalk": "~2.1.0",
-        "command-line-args": "^4.0.2",
-        "command-line-commands": "^2.0.0",
-        "command-line-usage": "^4.0.0",
+      "version": "0.10.1",
+      "resolved": "https://registry.npmjs.org/neon-cli/-/neon-cli-0.10.1.tgz",
+      "integrity": "sha512-kOd9ELaYETe1J1nBEOYD7koAZVj6xR9TGwOPccAsWmwL5amkaXXXwXHCUHkBAWujlgSZY5f2pT+pFGkzoHExYQ==",
+      "requires": {
+        "chalk": "^4.1.0",
+        "command-line-args": "^5.1.1",
+        "command-line-commands": "^3.0.1",
+        "command-line-usage": "^6.1.0",
         "git-config": "0.0.7",
-        "handlebars": "^4.1.0",
-        "inquirer": "^3.0.6",
-        "mkdirp": "^0.5.1",
-        "quickly-copy-file": "^1.0.0",
-        "rimraf": "^2.6.1",
-        "rsvp": "^4.6.1",
-        "semver": "^5.1.0",
-        "toml": "^2.3.0",
-        "ts-typed-json": "^0.2.2",
-        "validate-npm-package-license": "^3.0.1",
+        "handlebars": "^4.7.6",
+        "inquirer": "^7.3.3",
+        "make-promises-safe": "^5.1.0",
+        "rimraf": "^3.0.2",
+        "semver": "^7.3.2",
+        "toml": "^3.0.0",
+        "ts-typed-json": "^0.3.2",
+        "validate-npm-package-license": "^3.0.4",
         "validate-npm-package-name": "^3.0.0"
       },
       "dependencies": {
-        "ansi-escapes": {
-          "version": "3.2.0",
-          "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz",
-          "integrity": "sha512-cBhpre4ma+U0T1oM5fXg7Dy1Jw7zzwv7lt/GoCpr+hDQJoYnKVPLL4dCvSEFMmQurOQvSrwT7SL/DAlhBI97RQ=="
-        },
         "ansi-regex": {
-          "version": "3.0.0",
-          "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-3.0.0.tgz",
-          "integrity": "sha1-7QMXwyIGT3lGbAKWa922Bas32Zg="
+          "version": "5.0.1",
+          "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz",
+          "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="
         },
         "ansi-styles": {
-          "version": "3.2.1",
-          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz",
-          "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==",
+          "version": "4.3.0",
+          "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz",
+          "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==",
           "requires": {
-            "color-convert": "^1.9.0"
+            "color-convert": "^2.0.1"
           }
         },
         "chalk": {
-          "version": "2.1.0",
-          "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.1.0.tgz",
-          "integrity": "sha512-LUHGS/dge4ujbXMJrnihYMcL4AoOweGnw9Tp3kQuqy1Kx5c1qKjqvMJZ6nVJPMWJtKCTN72ZogH3oeSO9g9rXQ==",
+          "version": "4.1.2",
+          "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz",
+          "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==",
           "requires": {
-            "ansi-styles": "^3.1.0",
-            "escape-string-regexp": "^1.0.5",
-            "supports-color": "^4.0.0"
+            "ansi-styles": "^4.1.0",
+            "supports-color": "^7.1.0"
           }
         },
-        "chardet": {
-          "version": "0.4.2",
-          "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.4.2.tgz",
-          "integrity": "sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I="
-        },
-        "cli-cursor": {
-          "version": "2.1.0",
-          "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-2.1.0.tgz",
-          "integrity": "sha1-s12sN2R5+sw+lHR9QdDQ9SOP/LU=",
-          "requires": {
-            "restore-cursor": "^2.0.0"
-          }
+        "cli-width": {
+          "version": "3.0.0",
+          "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-3.0.0.tgz",
+          "integrity": "sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw=="
         },
-        "external-editor": {
-          "version": "2.2.0",
-          "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-2.2.0.tgz",
-          "integrity": "sha512-bSn6gvGxKt+b7+6TKEv1ZycHleA7aHhRHyAqJyp5pbUFuYYNIzpZnQDk7AsYckyWdEnTeAnay0aCy2aV6iTk9A==",
+        "color-convert": {
+          "version": "2.0.1",
+          "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
+          "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
           "requires": {
-            "chardet": "^0.4.0",
-            "iconv-lite": "^0.4.17",
-            "tmp": "^0.0.33"
+            "color-name": "~1.1.4"
           }
         },
-        "figures": {
-          "version": "2.0.0",
-          "resolved": "https://registry.npmjs.org/figures/-/figures-2.0.0.tgz",
-          "integrity": "sha1-OrGi0qYsi/tDGgyUy3l6L84nyWI=",
-          "requires": {
-            "escape-string-regexp": "^1.0.5"
-          }
+        "color-name": {
+          "version": "1.1.4",
+          "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
+          "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
         },
         "has-flag": {
-          "version": "2.0.0",
-          "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-2.0.0.tgz",
-          "integrity": "sha1-6CB68cx7MNRGzHC3NLXovhj4jVE="
+          "version": "4.0.0",
+          "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz",
+          "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="
         },
         "inquirer": {
-          "version": "3.3.0",
-          "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-3.3.0.tgz",
-          "integrity": "sha512-h+xtnyk4EwKvFWHrUYsWErEVR+igKtLdchu+o0Z1RL7VU/jVMFbYir2bp6bAj8efFNxWqHX0dIss6fJQ+/+qeQ==",
-          "requires": {
-            "ansi-escapes": "^3.0.0",
-            "chalk": "^2.0.0",
-            "cli-cursor": "^2.1.0",
-            "cli-width": "^2.0.0",
-            "external-editor": "^2.0.4",
-            "figures": "^2.0.0",
-            "lodash": "^4.3.0",
-            "mute-stream": "0.0.7",
-            "run-async": "^2.2.0",
-            "rx-lite": "^4.0.8",
-            "rx-lite-aggregates": "^4.0.8",
-            "string-width": "^2.1.0",
-            "strip-ansi": "^4.0.0",
+          "version": "7.3.3",
+          "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.3.3.tgz",
+          "integrity": "sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA==",
+          "requires": {
+            "ansi-escapes": "^4.2.1",
+            "chalk": "^4.1.0",
+            "cli-cursor": "^3.1.0",
+            "cli-width": "^3.0.0",
+            "external-editor": "^3.0.3",
+            "figures": "^3.0.0",
+            "lodash": "^4.17.19",
+            "mute-stream": "0.0.8",
+            "run-async": "^2.4.0",
+            "rxjs": "^6.6.0",
+            "string-width": "^4.1.0",
+            "strip-ansi": "^6.0.0",
             "through": "^2.3.6"
           }
         },
-        "is-fullwidth-code-point": {
-          "version": "2.0.0",
-          "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz",
-          "integrity": "sha1-o7MKXE8ZkYMWeqq5O+764937ZU8="
-        },
-        "mimic-fn": {
-          "version": "1.2.0",
-          "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-1.2.0.tgz",
-          "integrity": "sha512-jf84uxzwiuiIVKiOLpfYk7N46TSy8ubTonmneY9vrpHNAnp0QBt2BxWV9dO3/j+BoVAb+a5G6YDPW3M5HOdMWQ=="
-        },
-        "mute-stream": {
-          "version": "0.0.7",
-          "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.7.tgz",
-          "integrity": "sha1-MHXOk7whuPq0PhvE2n6BFe0ee6s="
-        },
-        "onetime": {
-          "version": "2.0.1",
-          "resolved": "https://registry.npmjs.org/onetime/-/onetime-2.0.1.tgz",
-          "integrity": "sha1-BnQoIw/WdEOyeUsiu6UotoZ5YtQ=",
+        "lru-cache": {
+          "version": "6.0.0",
+          "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz",
+          "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==",
           "requires": {
-            "mimic-fn": "^1.0.0"
+            "yallist": "^4.0.0"
           }
         },
-        "restore-cursor": {
-          "version": "2.0.0",
-          "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-2.0.0.tgz",
-          "integrity": "sha1-n37ih/gv0ybU/RYpI9YhKe7g368=",
+        "rimraf": {
+          "version": "3.0.2",
+          "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz",
+          "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==",
           "requires": {
-            "onetime": "^2.0.0",
-            "signal-exit": "^3.0.2"
+            "glob": "^7.1.3"
           }
         },
-        "string-width": {
-          "version": "2.1.1",
-          "resolved": "https://registry.npmjs.org/string-width/-/string-width-2.1.1.tgz",
-          "integrity": "sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==",
+        "semver": {
+          "version": "7.3.7",
+          "resolved": "https://registry.npmjs.org/semver/-/semver-7.3.7.tgz",
+          "integrity": "sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g==",
           "requires": {
-            "is-fullwidth-code-point": "^2.0.0",
-            "strip-ansi": "^4.0.0"
+            "lru-cache": "^6.0.0"
           }
         },
         "strip-ansi": {
-          "version": "4.0.0",
-          "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-4.0.0.tgz",
-          "integrity": "sha1-qEeQIusaw2iocTibY1JixQXuNo8=",
+          "version": "6.0.1",
+          "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz",
+          "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==",
           "requires": {
-            "ansi-regex": "^3.0.0"
+            "ansi-regex": "^5.0.1"
           }
         },
         "supports-color": {
-          "version": "4.5.0",
-          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-4.5.0.tgz",
-          "integrity": "sha1-vnoN5ITexcXN34s9WRJQRJEvY1s=",
+          "version": "7.2.0",
+          "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz",
+          "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==",
           "requires": {
-            "has-flag": "^2.0.0"
+            "has-flag": "^4.0.0"
           }
         },
-        "tmp": {
-          "version": "0.0.33",
-          "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz",
-          "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==",
-          "requires": {
-            "os-tmpdir": "~1.0.2"
-          }
+        "yallist": {
+          "version": "4.0.0",
+          "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz",
+          "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A=="
         }
       }
     },
@@ -5393,6 +5277,7 @@
       "version": "2.2.2",
       "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.2.2.tgz",
       "integrity": "sha512-WGR+xHecKTr7EbUEhyLSh5Dube9JtdiG78ufaeLxTgpudf/20KqyMioIUZJAezlTIi6evxuoUs9YXc11cU+yzQ==",
+      "dev": true,
       "requires": {
         "p-try": "^2.0.0"
       }
@@ -5401,6 +5286,7 @@
       "version": "4.1.0",
       "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz",
       "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==",
+      "dev": true,
       "requires": {
         "p-limit": "^2.2.0"
       }
@@ -5417,7 +5303,8 @@
     "p-try": {
       "version": "2.2.0",
       "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz",
-      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ=="
+      "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==",
+      "dev": true
     },
     "pad": {
       "version": "3.2.0",
@@ -5457,7 +5344,8 @@
     "path-exists": {
       "version": "4.0.0",
       "resolved": "https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz",
-      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="
+      "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==",
+      "dev": true
     },
     "path-is-absolute": {
       "version": "1.0.1",
@@ -5618,14 +5506,6 @@
       "resolved": "https://registry.npmjs.org/querablep/-/querablep-0.1.0.tgz",
       "integrity": "sha1-ss0rPnX81F1d163kwYEatUeEmoQ="
     },
-    "quickly-copy-file": {
-      "version": "1.0.0",
-      "resolved": "https://registry.npmjs.org/quickly-copy-file/-/quickly-copy-file-1.0.0.tgz",
-      "integrity": "sha1-n4/wZiMFEO50IrASFHKwk6hpCFk=",
-      "requires": {
-        "mkdirp": "~0.5.0"
-      }
-    },
     "ramda": {
       "version": "0.27.0",
       "resolved": "https://registry.npmjs.org/ramda/-/ramda-0.27.0.tgz",
@@ -5689,9 +5569,9 @@
       }
     },
     "reduce-flatten": {
-      "version": "1.0.1",
-      "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-1.0.1.tgz",
-      "integrity": "sha1-JYx479FT3fk8tWEjf2EYTzaW4yc="
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/reduce-flatten/-/reduce-flatten-2.0.0.tgz",
+      "integrity": "sha512-EJ4UNY/U1t2P/2k6oqotuX2Cc3T6nxJwsM0N0asT7dhrtH1ltUxDn4NalSYmPE2rCkVpcf/X6R0wDwcFpzhd4w=="
     },
     "regenerator-runtime": {
       "version": "0.13.5",
@@ -5776,16 +5656,6 @@
         "lodash": "^4.17.15"
       }
     },
-    "require-directory": {
-      "version": "2.1.1",
-      "resolved": "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz",
-      "integrity": "sha1-jGStX9MNqxyXbiNE/+f3kqam30I="
-    },
-    "require-main-filename": {
-      "version": "2.0.0",
-      "resolved": "https://registry.npmjs.org/require-main-filename/-/require-main-filename-2.0.0.tgz",
-      "integrity": "sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg=="
-    },
     "resolve": {
       "version": "1.17.0",
       "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.17.0.tgz",
@@ -5817,33 +5687,15 @@
         "glob": "^7.0.5"
       }
     },
-    "rsvp": {
-      "version": "4.8.5",
-      "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-4.8.5.tgz",
-      "integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA=="
-    },
     "run-async": {
       "version": "2.4.1",
       "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.4.1.tgz",
       "integrity": "sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ=="
     },
-    "rx-lite": {
-      "version": "4.0.8",
-      "resolved": "https://registry.npmjs.org/rx-lite/-/rx-lite-4.0.8.tgz",
-      "integrity": "sha1-Cx4Rr4vESDbwSmQH6S2kJGe3lEQ="
-    },
-    "rx-lite-aggregates": {
-      "version": "4.0.8",
-      "resolved": "https://registry.npmjs.org/rx-lite-aggregates/-/rx-lite-aggregates-4.0.8.tgz",
-      "integrity": "sha1-dTuHqJoRyVRnxKwWJsTvxOBcZ74=",
-      "requires": {
-        "rx-lite": "*"
-      }
-    },
     "rxjs": {
-      "version": "6.5.5",
-      "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.5.5.tgz",
-      "integrity": "sha512-WfQI+1gohdf0Dai/Bbmk5L5ItH5tYqm3ki2c5GdWhKjalzjg93N3avFjVStyZZz+A2Em+ZxKH5bNghw9UeylGQ==",
+      "version": "6.6.7",
+      "resolved": "https://registry.npmjs.org/rxjs/-/rxjs-6.6.7.tgz",
+      "integrity": "sha512-hTdwr+7yYNIT5n4AMYp85KA6yw2Va0FLa3Rguvbpa4W3I5xynaBZo41cM3XM+4Q6fRMj3sBYIR1VAmZMXYJvRQ==",
       "requires": {
         "tslib": "^1.9.0"
       }
@@ -6439,15 +6291,26 @@
       }
     },
     "table-layout": {
-      "version": "0.4.5",
-      "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-0.4.5.tgz",
-      "integrity": "sha512-zTvf0mcggrGeTe/2jJ6ECkJHAQPIYEwDoqsiqBjI24mvRmQbInK5jq33fyypaCBxX08hMkfmdOqj6haT33EqWw==",
+      "version": "1.0.2",
+      "resolved": "https://registry.npmjs.org/table-layout/-/table-layout-1.0.2.tgz",
+      "integrity": "sha512-qd/R7n5rQTRFi+Zf2sk5XVVd9UQl6ZkduPFC3S7WEGJAmetDTjY3qPN50eSKzwuzEyQKy5TN2TiZdkIjos2L6A==",
       "requires": {
-        "array-back": "^2.0.0",
+        "array-back": "^4.0.1",
         "deep-extend": "~0.6.0",
-        "lodash.padend": "^4.6.1",
-        "typical": "^2.6.1",
-        "wordwrapjs": "^3.0.0"
+        "typical": "^5.2.0",
+        "wordwrapjs": "^4.0.0"
+      },
+      "dependencies": {
+        "array-back": {
+          "version": "4.0.2",
+          "resolved": "https://registry.npmjs.org/array-back/-/array-back-4.0.2.tgz",
+          "integrity": "sha512-NbdMezxqf94cnNfWLL7V/im0Ub+Anbb0IoZhvzie8+4HJ4nMQuzHuy49FkGYCJK2yAloZ3meiB6AVMClbrI1vg=="
+        },
+        "typical": {
+          "version": "5.2.0",
+          "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz",
+          "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg=="
+        }
       }
     },
     "tail": {
@@ -6500,25 +6363,6 @@
         }
       }
     },
-    "test-value": {
-      "version": "2.1.0",
-      "resolved": "https://registry.npmjs.org/test-value/-/test-value-2.1.0.tgz",
-      "integrity": "sha1-Edpv9nDzRxpztiXKTz/c97t0gpE=",
-      "requires": {
-        "array-back": "^1.0.3",
-        "typical": "^2.6.0"
-      },
-      "dependencies": {
-        "array-back": {
-          "version": "1.0.4",
-          "resolved": "https://registry.npmjs.org/array-back/-/array-back-1.0.4.tgz",
-          "integrity": "sha1-ZEun8JX3/898Q7Xw3DnTwfA8Bjs=",
-          "requires": {
-            "typical": "^2.6.0"
-          }
-        }
-      }
-    },
     "text-table": {
       "version": "0.2.0",
       "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz",
@@ -6560,9 +6404,9 @@
       "integrity": "sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw=="
     },
     "toml": {
-      "version": "2.3.6",
-      "resolved": "https://registry.npmjs.org/toml/-/toml-2.3.6.tgz",
-      "integrity": "sha512-gVweAectJU3ebq//Ferr2JUY4WKSDe5N+z0FvjDncLGyHmIDoxgY/2Ie4qfEIDm4IS7OA6Rmdm7pdEEdMcV/xQ=="
+      "version": "3.0.0",
+      "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz",
+      "integrity": "sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w=="
     },
     "tough-cookie": {
       "version": "2.5.0",
@@ -6611,19 +6455,9 @@
       }
     },
     "ts-typed-json": {
-      "version": "0.2.2",
-      "resolved": "https://registry.npmjs.org/ts-typed-json/-/ts-typed-json-0.2.2.tgz",
-      "integrity": "sha1-UxhL7ok+RZkbc8jEY6OLWeJ81H4=",
-      "requires": {
-        "rsvp": "^3.5.0"
-      },
-      "dependencies": {
-        "rsvp": {
-          "version": "3.6.2",
-          "resolved": "https://registry.npmjs.org/rsvp/-/rsvp-3.6.2.tgz",
-          "integrity": "sha512-OfWGQTb9vnwRjwtA2QwpG2ICclHC3pgXZO5xt8H2EfgDquO0qVdSb5T88L4qJVAEugbS56pAuV4XZM58UX8ulw=="
-        }
-      }
+      "version": "0.3.2",
+      "resolved": "https://registry.npmjs.org/ts-typed-json/-/ts-typed-json-0.3.2.tgz",
+      "integrity": "sha512-Tdu3BWzaer7R5RvBIJcg9r8HrTZgpJmsX+1meXMJzYypbkj8NK2oJN0yvm4Dp/Iv6tzFa/L5jKRmEVTga6K3nA=="
     },
     "tslib": {
       "version": "1.11.2",
@@ -6724,9 +6558,9 @@
       "dev": true
     },
     "typical": {
-      "version": "2.6.1",
-      "resolved": "https://registry.npmjs.org/typical/-/typical-2.6.1.tgz",
-      "integrity": "sha1-XAgOXWYcu+OCWdLnCjxyU+hziB0="
+      "version": "4.0.0",
+      "resolved": "https://registry.npmjs.org/typical/-/typical-4.0.0.tgz",
+      "integrity": "sha512-VAH4IvQ7BDFYglMd7BPRDfLgxZZX4O4TFcRDA6EN5X7erNJJq+McIEp8np9aVtxrCJ6qx4GTYVfOWNjcqwZgRw=="
     },
     "ultron": {
       "version": "1.0.2",
@@ -6910,18 +6744,26 @@
       "integrity": "sha1-o9XabNXAvAAI03I0u68b7WMFkQc="
     },
     "wordwrapjs": {
-      "version": "3.0.0",
-      "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-3.0.0.tgz",
-      "integrity": "sha512-mO8XtqyPvykVCsrwj5MlOVWvSnCdT+C+QVbm6blradR7JExAhbkZ7hZ9A+9NUtwzSqrlUo9a67ws0EiILrvRpw==",
+      "version": "4.0.1",
+      "resolved": "https://registry.npmjs.org/wordwrapjs/-/wordwrapjs-4.0.1.tgz",
+      "integrity": "sha512-kKlNACbvHrkpIw6oPeYDSmdCTu2hdMHoyXLTcUKala++lx5Y+wjJ/e474Jqv5abnVmwxw08DiTuHmw69lJGksA==",
       "requires": {
-        "reduce-flatten": "^1.0.1",
-        "typical": "^2.6.1"
+        "reduce-flatten": "^2.0.0",
+        "typical": "^5.2.0"
+      },
+      "dependencies": {
+        "typical": {
+          "version": "5.2.0",
+          "resolved": "https://registry.npmjs.org/typical/-/typical-5.2.0.tgz",
+          "integrity": "sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg=="
+        }
       }
     },
     "wrap-ansi": {
       "version": "6.2.0",
       "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-6.2.0.tgz",
       "integrity": "sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==",
+      "dev": true,
       "requires": {
         "ansi-styles": "^4.0.0",
         "string-width": "^4.1.0",
@@ -6931,12 +6773,14 @@
         "ansi-regex": {
           "version": "5.0.0",
           "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz",
-          "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg=="
+          "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==",
+          "dev": true
         },
         "ansi-styles": {
           "version": "4.2.1",
           "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.2.1.tgz",
           "integrity": "sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA==",
+          "dev": true,
           "requires": {
             "@types/color-name": "^1.1.1",
             "color-convert": "^2.0.1"
@@ -6946,6 +6790,7 @@
           "version": "2.0.1",
           "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz",
           "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==",
+          "dev": true,
           "requires": {
             "color-name": "~1.1.4"
           }
@@ -6953,17 +6798,20 @@
         "color-name": {
           "version": "1.1.4",
           "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz",
-          "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="
+          "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==",
+          "dev": true
         },
         "is-fullwidth-code-point": {
           "version": "3.0.0",
           "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz",
-          "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="
+          "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==",
+          "dev": true
         },
         "string-width": {
           "version": "4.2.0",
           "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz",
           "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==",
+          "dev": true,
           "requires": {
             "emoji-regex": "^8.0.0",
             "is-fullwidth-code-point": "^3.0.0",
@@ -6974,6 +6822,7 @@
           "version": "6.0.0",
           "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz",
           "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==",
+          "dev": true,
           "requires": {
             "ansi-regex": "^5.0.0"
           }
@@ -7022,11 +6871,6 @@
       "resolved": "https://registry.npmjs.org/xtend/-/xtend-4.0.2.tgz",
       "integrity": "sha512-LKYU1iAXJXUgAXn9URjiu+MWhyUXHsvfp7mcuYm9dSUKK0/CjtrUwFAxD82/mCWbtLsGjFIad0wIsod4zrTAEQ=="
     },
-    "y18n": {
-      "version": "4.0.0",
-      "resolved": "https://registry.npmjs.org/y18n/-/y18n-4.0.0.tgz",
-      "integrity": "sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w=="
-    },
     "yallist": {
       "version": "2.1.2",
       "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz",
@@ -7041,22 +6885,6 @@
         "@babel/runtime": "^7.9.2"
       }
     },
-    "yargs-parser": {
-      "version": "18.1.2",
-      "resolved": "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.2.tgz",
-      "integrity": "sha512-hlIPNR3IzC1YuL1c2UwwDKpXlNFBqD1Fswwh1khz5+d8Cq/8yc/Mn0i+rQXduu8hcrFKvO7Eryk+09NecTQAAQ==",
-      "requires": {
-        "camelcase": "^5.0.0",
-        "decamelize": "^1.2.0"
-      },
-      "dependencies": {
-        "camelcase": {
-          "version": "5.3.1",
-          "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz",
-          "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg=="
-        }
-      }
-    },
     "yauzl": {
       "version": "2.10.0",
       "resolved": "https://registry.npmjs.org/yauzl/-/yauzl-2.10.0.tgz",
diff --git a/package.json b/package.json
index 273fcd06decc1521be480c2e47b41bced431394f..d3142fe62b05818be9eff843d4b8aaf7f12f729f 100644
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
 {
   "name": "duniter",
-  "version": "1.9.0-dev",
+  "version": "1.9.1",
   "engines": {
     "node": ">= 10.19",
     "npm": ">= 6.13",
@@ -13,7 +13,7 @@
   "node-main": "./bin/duniter_js",
   "window": {
     "icon": "duniter.png",
-    "title": "v1.8.1",
+    "title": "v1.9.1",
     "width": 800,
     "height": 800,
     "min_width": 750,
@@ -91,7 +91,7 @@
     "morgan": "1.10.0",
     "multimeter": "0.1.1",
     "nat-upnp": "^1.1.1",
-    "neon-cli": "^0.4.0",
+    "neon-cli": "0.10.1",
     "node-pre-gyp": "0.14.0",
     "node-uuid": "1.4.8",
     "optimist": "0.6.1",
diff --git a/release/arch/arm/build-arm.sh b/release/arch/arm/build-arm.sh
index 5e43073faf74294d5acc45fb2f0e88060e8f45f8..74705540415c8a3a7f62a48d845963b521689b89 100755
--- a/release/arch/arm/build-arm.sh
+++ b/release/arch/arm/build-arm.sh
@@ -60,7 +60,7 @@ echo "DebianVer: $DUNITER_DEB_VER"
 
 if [ ! -f "$DOWNLOADS/node-${NVER}-linux-${ARCH}.tar.gz" ]; then
   # Download Node.js and package it with the sources
-  wget http://nodejs.org/dist/${NVER}/node-${NVER}-linux-${ARCH}.tar.gz
+  wget https://nodejs.org/dist/${NVER}/node-${NVER}-linux-${ARCH}.tar.gz
   tar xzf node-${NVER}-linux-${ARCH}.tar.gz
 fi
 
diff --git a/release/arch/linux/build-lin.sh b/release/arch/linux/build-lin.sh
index df26212669926e7428d12165934b0bd092866c72..434ebd6513fb2cfeaf4193277e1fb94375fbaf39 100755
--- a/release/arch/linux/build-lin.sh
+++ b/release/arch/linux/build-lin.sh
@@ -109,7 +109,15 @@ rm -rf "${BIN}/"*.{deb,tar.gz}{,.desc} # Clean up
 # ------------
 
 cd "${DOWNLOADS}"
-cp -r ~/.nvm/versions/node/${NVER}/ node-${NVER}-linux-x64
+# Use nvm if available, otherwise download Node.js from nodejs.org
+if [[ -d ~/.nvm/versions/node/${NVER} ]]; then
+    cp -r ~/.nvm/versions/node/${NVER}/ node-${NVER}-linux-x64
+else
+    echo "nvm installation not found, downloading Node.js directly..."
+    curl -O https://nodejs.org/dist/${NVER}/node-${NVER}-linux-x64.tar.gz || exit 1
+    tar xzf node-${NVER}-linux-x64.tar.gz || exit 1
+    rm node-${NVER}-linux-x64.tar.gz
+fi
 
 # -----------
 # Releases
diff --git a/release/arch/windows/duniter.iss b/release/arch/windows/duniter.iss
index c22f1dc04a8cb4bce65cc9ade3007ab8000fc084..4783dc49a79cb139f81c3fdc4ee055452f2e20f0 100644
--- a/release/arch/windows/duniter.iss
+++ b/release/arch/windows/duniter.iss
@@ -15,7 +15,7 @@
 #error "Unable to find MyAppExe"
 #endif
 
-#define MyAppVerStr "v1.8.1"
+#define MyAppVerStr "v1.9.1"
 
 [Setup]
 AppName={#MyAppName}
diff --git a/release/extra/debian/package/DEBIAN/control b/release/extra/debian/package/DEBIAN/control
index 6f06c2462fa20e2f30d7e6c63b6fefa9022e06e2..3521c24051e1f7734cfe508210ee8ebdd33acd67 100644
--- a/release/extra/debian/package/DEBIAN/control
+++ b/release/extra/debian/package/DEBIAN/control
@@ -1,5 +1,5 @@
 Package: duniter
-Version: 1.9.0-dev
+Version: 1.9.1
 Depends: unzip
 Section: misc
 Priority: optional
diff --git a/server.ts b/server.ts
index 280773d94ff7b7117c21446d1c9f48d995d48c6a..bf1566d70d3bb8301dd1ac21dda202f07121eae9 100644
--- a/server.ts
+++ b/server.ts
@@ -86,6 +86,8 @@ export class Server extends stream.Duplex implements HookableServer {
   keyPair:any
   sign:any
   blockchain:any
+  sqliteDBs: {[path: string]: SQLiteDriver} = {};
+  levelDBs: {[path: string]: LevelUp} = {};
 
   MerkleService:(req:any, merkle:any, valueCoroutine:any) => any
   IdentityService:IdentityService
@@ -159,14 +161,32 @@ export class Server extends stream.Duplex implements HookableServer {
   async plugFileSystem() {
     logger.debug('Plugging file system...');
     const params = await this.paramsP
-    this.dal = new FileDAL(params, async (dbName: string): Promise<SQLiteDriver> => {
-      return Directory.getHomeDB(this.memoryOnly, dbName, params.home)
-    }, async (dbName: string): Promise<LevelUp> => {
-      return Directory.getHomeLevelDB(this.memoryOnly, dbName, params.home)
-    }, )
+    this.dal = new FileDAL(params,
+        (dbName: string) => this.getSQLiteDB(dbName, params.home),
+        (dbName: string) => this.getLevelDB(dbName, params.home))
     await this.onPluggedFSHook()
   }
 
+  async getSQLiteDB(dbName: string, home: string) {
+    // Check in cache (useful to avoid migration task to create a new driver on the same DB file)
+    let driver: SQLiteDriver = this.sqliteDBs[dbName];
+    if (!driver || driver.isClosed()) {
+      driver = await Directory.getHomeDB(this.memoryOnly, dbName, home);
+      if (!this.memoryOnly) this.sqliteDBs[dbName] = driver;
+    }
+    return driver;
+  }
+
+  async getLevelDB(dbName: string, home: string) {
+    // Check in cache (useful to avoid migration task to create a new driver on the same DB file)
+    let driver: LevelUp = this.levelDBs[dbName];
+    if (!driver || driver.isClosed()) {
+      driver = await Directory.getHomeLevelDB(this.memoryOnly, dbName, home);
+      if (!this.memoryOnly) this.levelDBs[dbName] = driver;
+    }
+    return driver;
+  }
+
   async unplugFileSystem() {
     logger.debug('Unplugging file system...');
     await this.dal.close()
@@ -523,6 +543,12 @@ export class Server extends stream.Duplex implements HookableServer {
     if (this.dal) {
       await this.dal.close()
     }
+    await Promise.all(Object.values(this.sqliteDBs)
+        .filter(db => db && !db.isClosed())
+        .map(db => db.closeConnection()));
+    await Promise.all(Object.values(this.levelDBs)
+        .filter(db => db && !db.isClosed())
+        .map(db => db.close()));
   }
 
   revert() {
diff --git a/test/dal/basic-dal-tests.ts b/test/dal/basic-dal-tests.ts
index a3e39fffca82e792eeebec22d562066c874812d9..f80672cd34c77077f2dab2ef409b53f494299517 100644
--- a/test/dal/basic-dal-tests.ts
+++ b/test/dal/basic-dal-tests.ts
@@ -110,18 +110,29 @@ var mocks = {
   }
 };
 
-let fileDAL:FileDAL
+let conf: ConfDTO;
+let fileDAL:FileDAL;
 
 describe("DAL", function(){
 
   before(async () => {
+    conf = ConfDTO.complete({
+      currency: "meta_brouzouf",
+      storage: {
+        transactions: true
+      }
+    });
     let params = await Directory.getHomeParams(true, 'db0');
     fileDAL = new FileDAL(params, async (name: string) => Directory.getHomeDB(true, name), async (name: string) => Directory.getHomeLevelDB(true, name));
-    await fileDAL.init({} as any);
-    return fileDAL.saveConf({ currency: "meta_brouzouf" } as any);
+    await fileDAL.init(conf);
+    return fileDAL.saveConf(conf);
+  })
+
+  after(() => {
+    return fileDAL.close();
   })
 
-  it('should have DB version 21', async () => {
+  it('should have last DB version', async () => {
     let version = await fileDAL.getDBVersion();
     should.exist(version);
     version.should.equal(constants.CURRENT_DB_VERSION);
@@ -159,35 +170,40 @@ describe("DAL", function(){
   });
 
   it('should be able to save a Block', async () => {
-    await fileDAL.saveBlock(DBBlock.fromBlockDTO(BlockDTO.fromJSONObject(mocks.block0)), ConfDTO.mock());
-    let block = (await fileDAL.getFullBlockOf(0)) as DBBlock
-    block.should.have.property('hash').equal(mocks.block0.hash);
-    block.should.have.property('signature').equal(mocks.block0.signature);
-    block.should.have.property('version').equal(mocks.block0.version);
-    block.should.have.property('currency').equal(mocks.block0.currency);
-    block.should.have.property('issuer').equal(mocks.block0.issuer);
-    block.should.have.property('parameters').equal(mocks.block0.parameters);
-    block.should.have.property('previousHash').equal(mocks.block0.previousHash);
-    block.should.have.property('previousIssuer').equal(mocks.block0.previousIssuer);
-    block.should.have.property('membersCount').equal(mocks.block0.membersCount);
-    block.should.have.property('monetaryMass').equal(mocks.block0.monetaryMass);
-    block.should.have.property('medianTime').equal(mocks.block0.medianTime);
-    block.should.have.property('dividend').equal(mocks.block0.dividend);
-    block.should.have.property('unitbase').equal(mocks.block0.unitbase);
-    block.should.have.property('time').equal(mocks.block0.time);
-    block.should.have.property('powMin').equal(mocks.block0.powMin);
-    block.should.have.property('number').equal(mocks.block0.number);
-    block.should.have.property('nonce').equal(mocks.block0.nonce);
+    const block = DBBlock.fromBlockDTO(BlockDTO.fromJSONObject({
+      ...mocks.block0,
+      fork: false
+    }));
+    
+    await fileDAL.saveBlock(block, conf);
+    
+    let savedBlock = (await fileDAL.getFullBlockOf(0)) as DBBlock;
+    savedBlock.should.have.property('hash').equal(mocks.block0.hash);
+    savedBlock.should.have.property('signature').equal(mocks.block0.signature);
+    savedBlock.should.have.property('version').equal(mocks.block0.version);
+    savedBlock.should.have.property('currency').equal(mocks.block0.currency);
+    savedBlock.should.have.property('issuer').equal(mocks.block0.issuer);
+    savedBlock.should.have.property('parameters').equal(mocks.block0.parameters);
+    savedBlock.should.have.property('previousHash').equal(mocks.block0.previousHash);
+    savedBlock.should.have.property('previousIssuer').equal(mocks.block0.previousIssuer);
+    savedBlock.should.have.property('membersCount').equal(mocks.block0.membersCount);
+    savedBlock.should.have.property('monetaryMass').equal(mocks.block0.monetaryMass);
+    savedBlock.should.have.property('medianTime').equal(mocks.block0.medianTime);
+    savedBlock.should.have.property('dividend').equal(mocks.block0.dividend);
+    savedBlock.should.have.property('unitbase').equal(mocks.block0.unitbase);
+    savedBlock.should.have.property('time').equal(mocks.block0.time);
+    savedBlock.should.have.property('powMin').equal(mocks.block0.powMin);
+    savedBlock.should.have.property('number').equal(mocks.block0.number);
+    savedBlock.should.have.property('nonce').equal(mocks.block0.nonce);
 
-    //assert.deepEqual(block, mocks.block0);
-    assert.deepEqual(block.identities, mocks.block0.identities);
-    assert.deepEqual(block.certifications, mocks.block0.certifications);
-    assert.deepEqual(block.actives, mocks.block0.actives);
-    assert.deepEqual(block.revoked, mocks.block0.revoked);
-    assert.deepEqual(block.excluded, mocks.block0.excluded);
-    assert.deepEqual(block.leavers, mocks.block0.leavers);
-    assert.deepEqual(block.actives, mocks.block0.actives);
-    assert.deepEqual(block.joiners, mocks.block0.joiners);
-    assert.deepEqual(block.transactions, mocks.block0.transactions);
+    assert.deepEqual(savedBlock.identities, mocks.block0.identities);
+    assert.deepEqual(savedBlock.certifications, mocks.block0.certifications);
+    assert.deepEqual(savedBlock.actives, mocks.block0.actives);
+    assert.deepEqual(savedBlock.revoked, mocks.block0.revoked);
+    assert.deepEqual(savedBlock.excluded, mocks.block0.excluded);
+    assert.deepEqual(savedBlock.leavers, mocks.block0.leavers);
+    assert.deepEqual(savedBlock.actives, mocks.block0.actives);
+    assert.deepEqual(savedBlock.joiners, mocks.block0.joiners);
+    assert.deepEqual(savedBlock.transactions, mocks.block0.transactions);
   });
 });
diff --git a/test/dal/sources-dal.ts b/test/dal/sources-dal.ts
index 7faac7713596cdc8dad080389a69716049598b96..ed4d9ab836ecd8cafe6ef62b85ac0f86ca3e575c 100644
--- a/test/dal/sources-dal.ts
+++ b/test/dal/sources-dal.ts
@@ -18,30 +18,70 @@ const should = require('should');
 
 let dal:FileDAL
 
-describe("Source DAL", function(){
+describe("Source DAL", function() {
+  const pubkeyA = 'BYfWYFrsyjpvpFysgu19rGK3VHBkz4MqmQbNyEuVU64g';
+  const pubkeyB = 'DSz4rgncXCytsUMW2JU2yhLquZECD2XpEkpP9gG5HyAx';
 
   before(async () => {
     dal = new FileDAL(await Directory.getHomeParams(true, 'db0'), async (name: string) => Directory.getHomeDB(true, name), async (name: string) => Directory.getHomeLevelDB(true, name))
     await dal.init({} as any)
   })
 
-  it('should be able to feed the sindex with unordered rows', async () => {
+  it('should be able to fill the sindex with unordered rows', async () => {
     await dal.sindexDAL.insertBatch([
-      { op: 'UPDATE', tx: null, identifier: 'SOURCE_1', pos: 4, written_on: '139-H', writtenOn: 139, written_time: 4500, consumed: true,  conditions: 'SIG(ABC)' },
-      { op: 'CREATE', tx: null, identifier: 'SOURCE_1', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'SIG(ABC)' },
-      { op: 'CREATE', tx: null, identifier: 'SOURCE_2', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'SIG(ABC)' },
-      { op: 'CREATE', tx: null, identifier: 'SOURCE_3', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'SIG(DEF)' }
+      { op: 'UPDATE', tx: null, identifier: 'SOURCE_1', pos: 4, written_on: '139-H', writtenOn: 139, written_time: 4500, consumed: true,  conditions: `SIG(${pubkeyA})` },
+      { op: 'CREATE', tx: null, identifier: 'SOURCE_1', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: `SIG(${pubkeyA})` },
+      { op: 'CREATE', tx: null, identifier: 'SOURCE_2', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: `SIG(${pubkeyA})` },
+      { op: 'CREATE', tx: null, identifier: 'SOURCE_3', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: `SIG(${pubkeyB})` }
     ] as any);
     (await dal.sindexDAL.findByIdentifier('SOURCE_1')).should.have.length(2);
     (await dal.sindexDAL.findByPos(4)).should.have.length(4);
     // Source availability
-    const sourcesOfDEF = await dal.sindexDAL.getAvailableForPubkey('DEF');
-    sourcesOfDEF.should.have.length(1);
-    const sourcesOfABC = await dal.sindexDAL.getAvailableForPubkey('ABC');
-    sourcesOfABC.should.have.length(1);
+    const sourcesOfA = await dal.sindexDAL.getAvailableForPubkey(pubkeyA);
+    sourcesOfA.should.have.length(1);
+    const sourcesOfB = await dal.sindexDAL.getAvailableForPubkey(pubkeyB);
+    sourcesOfB.should.have.length(1);
     const source1 = await dal.sindexDAL.getTxSource('SOURCE_1', 4) as any
     source1.should.have.property('consumed').equal(true);
     const source2 = await dal.sindexDAL.getTxSource('SOURCE_2', 4) as any
     source2.should.have.property('consumed').equal(false);
+
+    // Check sources not available after block deletion
+    await dal.sindexDAL.removeBlock('126-H');
+    (await dal.sindexDAL.findByIdentifier('SOURCE_1')).should.have.length(1);
+    should(await dal.sindexDAL.getTxSource('SOURCE_2', 4) as any).be.null();
+    should(await dal.sindexDAL.getTxSource('SOURCE_3', 4) as any).be.null();
+    (await dal.sindexDAL.findByPos(4)).should.have.length(1);
+    await dal.sindexDAL.removeBlock('139-H');
+    (await dal.sindexDAL.findByIdentifier('SOURCE_1')).should.have.length(0);
+    (await dal.sindexDAL.findByPos(4)).should.have.length(0);
+    (await dal.sindexDAL.getAvailableForPubkey(pubkeyA)).should.have.length(0);
+    (await dal.sindexDAL.getAvailableForPubkey(pubkeyB)).should.have.length(0);
+    should(await dal.sindexDAL.getTxSource('SOURCE_1', 4) as any).be.null();
+    should(await dal.sindexDAL.getTxSource('SOURCE_2', 4) as any).be.null();
+    should(await dal.sindexDAL.getTxSource('SOURCE_3', 4) as any).be.null();
+  })
+
+  it('should be able to read sindex by pubkey', async () => {
+    // Test insertion, using complex condition
+    await dal.sindexDAL.insertBatch([
+      { op: 'CREATE', tx: null, identifier: 'SOURCE_4', pos: 4, written_on: '139-H', writtenOn: 139, written_time: 2000, consumed: false, conditions: `SIG(${pubkeyA})` },
+      { op: 'CREATE', tx: null, identifier: 'SOURCE_5', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: `(SIG(${pubkeyA}) && SIG(${pubkeyB}))` },
+      { op: 'CREATE', tx: null, identifier: 'SOURCE_6', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: `(XHX(3EB4702F2AC2FD3FA4FDC46A4FC05AE8CDEE1A85F2AC2FD3FA4FDC46A4FC01CA) || SIG(${pubkeyB}))` }
+    ] as any);
+
+    // Check sources availability by pubkey
+    let sourcesOfA = await dal.sindexDAL.getAvailableForPubkey(pubkeyA);
+    sourcesOfA.should.have.length(2);
+    let sourcesOfB = await dal.sindexDAL.getAvailableForPubkey(pubkeyB);
+    sourcesOfB.should.have.length(2);
+
+    // Check sources not available after block deletion
+    await dal.sindexDAL.removeBlock('126-H');
+    await dal.sindexDAL.removeBlock('139-H');
+    sourcesOfA = await dal.sindexDAL.getAvailableForPubkey(pubkeyA);
+    sourcesOfA.should.have.length(0);
+    sourcesOfB = await dal.sindexDAL.getAvailableForPubkey(pubkeyB);
+    sourcesOfB.should.have.length(0);
   })
 })
diff --git a/test/dal/triming-dal.ts b/test/dal/triming-dal.ts
index 65315a330db800ec4359817695697551af13aea6..3bf316edb2f2a4de7cfe9c528dbe1d0eebcbcf9e 100644
--- a/test/dal/triming-dal.ts
+++ b/test/dal/triming-dal.ts
@@ -15,6 +15,8 @@ import {FileDAL} from "../../app/lib/dal/fileDAL"
 import {Directory} from "../../app/lib/system/directory"
 import {Indexer} from "../../app/lib/indexer"
 import {simpleNodeWith2Users} from "../integration/tools/toolbox"
+import {LevelDBSindex} from "../../app/lib/dal/indexDAL/leveldb/LevelDBSindex";
+import {requiredBindexSizeForTail} from "../../app/lib/blockchain/DuniterBlockchain";
 
 const should = require('should');
 
@@ -119,34 +121,79 @@ describe("Triming", function(){
     await dal.sindexDAL.insertBatch([
       { op: 'CREATE', identifier: 'SOURCE_1', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'COND(SOURCE_1)'},
       { op: 'UPDATE', identifier: 'SOURCE_1', pos: 4, written_on: '139-H', writtenOn: 139, written_time: 4500, consumed: true, conditions: 'COND(SOURCE_1)'},
-      { op: 'CREATE', identifier: 'SOURCE_2', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'COND(SOURCE_2)'},
-      { op: 'CREATE', identifier: 'SOURCE_3', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2000, consumed: false, conditions: 'COND(SOURCE_3)'}
+      { op: 'CREATE', identifier: 'SOURCE_2', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2500, consumed: false, conditions: 'COND(SOURCE_2)'},
+      { op: 'CREATE', identifier: 'SOURCE_3', pos: 4, written_on: '126-H', writtenOn: 126, written_time: 2500, consumed: false, conditions: 'SIG(PUB_1)'}
     ] as any);
     (await dal.sindexDAL.findByIdentifier('SOURCE_1')).should.have.length(2);
+    (await dal.sindexDAL.getAvailableForConditions('COND(SOURCE_2)')).should.have.length(1);
+    (await dal.sindexDAL.getAvailableForPubkey('PUB_1')).should.have.length(1);
     (await dal.sindexDAL.findByPos(4)).should.have.length(4);
   })
 
   it('should be able to trim the sindex', async () => {
-    // Triming
+    const sindexDAL = dal.sindexDAL as LevelDBSindex;
+
+    // First triming before b#140 => should keep some sources
     await dal.trimIndexes(140);
     (await dal.sindexDAL.findByIdentifier('SOURCE_1')).should.have.length(0);
+    (await dal.sindexDAL.getAvailableForConditions('COND(SOURCE_1)')).should.have.length(0);
+    (await dal.sindexDAL.getAvailableForConditions('COND(SOURCE_2)')).should.have.length(1);
+    (await dal.sindexDAL.getAvailableForPubkey('PUB_1')).should.have.length(1);
     (await dal.sindexDAL.findByPos(4)).should.have.length(2);
+
+    // Check internal index
+    should.not.exist(await sindexDAL.getIndexForConditions().getOrNull("COND(SOURCE_1)")); // The only source at this condition was trimmed
+    should.not.exist(await sindexDAL.getIndexForConsumed().getOrNull("0000000139")); // The only consumption at this block was trimmed
+    let entriesAt0000000126 = await sindexDAL.getIndexForTrimming().getOrNull("0000000126");
+
+    // https://git.duniter.org/nodes/typescript/duniter/-/issues/1447
+    entriesAt0000000126.should.not.containEql('SOURCE_1-0000000004'); // This CREATE entry should have been trimmed
+
+    let entriesAt0000000139 = await sindexDAL.getIndexForTrimming().getOrNull("0000000139");
+    should.not.exist(entriesAt0000000139);
+
+    // Now we consume all sources
+    await dal.sindexDAL.insertBatch([
+      { op: 'UPDATE', identifier: 'SOURCE_2', pos: 4, written_on: '140-H', writtenOn: 140, written_time: 5000, consumed: true, conditions: 'COND(SOURCE_2)'},
+      { op: 'UPDATE', identifier: 'SOURCE_3', pos: 4, written_on: '140-H', writtenOn: 140, written_time: 5000, consumed: true, conditions: 'SIG(PUB_1)'}
+    ] as any);
+
+    // Second triming => should remove all sources
+    await dal.trimIndexes(141);
+    (await dal.sindexDAL.getAvailableForConditions('COND(SOURCE_2)')).should.have.length(0);
+    (await dal.sindexDAL.getAvailableForPubkey('PUB_1')).should.have.length(0);
+    (await dal.sindexDAL.findByPos(4)).should.have.length(0);
+
+    // All sub index should be empty
+    for (let index of sindexDAL.getInternalIndexes()) {
+      const res = await index.findAllKeys();
+      // https://git.duniter.org/nodes/typescript/duniter/-/issues/1447
+      res.should.have.length(0, `index ${index['name']} should have been trimmed`);
+    }
   })
 
   it('should be able to trim the bindex', async () => {
     // Triming
-    const server = (await simpleNodeWith2Users({
+    const conf = {
       forksize: 9,
       sigQty: 1,
       dtDiffEval: 2,
       medianTimeBlocks: 3
-    })).s1;
+    };
+    const server = (await simpleNodeWith2Users(conf)).s1;
+
     // const s1 = server.s1;
-    for (let i = 0; i < 13; i++) {
+    const b0 = await server.commit()
+
+    const requiredBindexSize = requiredBindexSizeForTail(b0, conf);
+    should(requiredBindexSize).equal(12);
+
+    for (let i = 1; i <= requiredBindexSize; i++) {
       await server.commit();
     }
     (await server.dal.bindexDAL.head(1)).should.have.property('number').equal(12);
     (await server.dal.bindexDAL.head(13)).should.have.property('number').equal(0);
+
     await server.commit();
     should.not.exists(await server.dal.bindexDAL.head(14)); // Trimed
 
diff --git a/test/fast/modules/bma/bma-module-test.ts b/test/fast/modules/bma/bma-module-test.ts
index 58c20ea81a5824f6a2f6cd3bb8763b962636e08f..cbec2c94aa9dfd871868c9b897b836ac11a5ae9c 100644
--- a/test/fast/modules/bma/bma-module-test.ts
+++ b/test/fast/modules/bma/bma-module-test.ts
@@ -51,6 +51,8 @@ describe('Module usage', () => {
     });
     should.exist(json);
     json.should.have.property('duniter').property('software').equal('duniter');
+    json.should.have.property('duniter').property('storage').property('transactions').equal(true);
+    json.should.have.property('duniter').property('storage').property('wotwizard').equal(false);
   })
 
   it('remoteipv4 should NOT be filled if remote Host is declared', async () => {
diff --git a/test/fast/modules/ws2p/single_write.ts b/test/fast/modules/ws2p/single_write.ts
index 31d8d09052bd50d9aaf62fb0c893facdff97184b..667aedcc58d36284c0e9e7f3ca3315e0362eb81b 100644
--- a/test/fast/modules/ws2p/single_write.ts
+++ b/test/fast/modules/ws2p/single_write.ts
@@ -26,7 +26,7 @@ describe('WS2P Single Write limiter', () => {
     const source = new Readable()
     const protection = new WS2PSingleWriteStream(PROTECTION_DURATION)
     let nbDocs = 0
-    await new Promise(res => {
+    await new Promise<void>(res => {
       source
         .pipe(protection)
         .pipe(es.mapSync(() => {
@@ -56,4 +56,4 @@ class Readable extends stream.Readable {
 
   async _read() {
   }
-}
\ No newline at end of file
+}
diff --git a/test/fast/prover/prover-pow-1-cluster.ts b/test/fast/prover/prover-pow-1-cluster.ts
index ea00618c0ebb3e204810328e60656546d16fef45..3560bf861fdc67ab457942b0533a3c39998d4f15 100644
--- a/test/fast/prover/prover-pow-1-cluster.ts
+++ b/test/fast/prover/prover-pow-1-cluster.ts
@@ -111,7 +111,7 @@ describe('PoW Cluster', () => {
         }
       }
     })
-    await new Promise(res => {
+    await new Promise<void>(res => {
       master.onInfoMessage = () => res()
     })
     await master.cancelWork()
diff --git a/test/integration/fork-resolution/block-with-expired-revert.ts b/test/integration/fork-resolution/block-with-expired-revert.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1e729c7a42e7935d83fe79c8918ed67d6c8ed134
--- /dev/null
+++ b/test/integration/fork-resolution/block-with-expired-revert.ts
@@ -0,0 +1,104 @@
+// Source file from duniter: Crypto-currency software to manage libre currency such as Ğ1
+// Copyright (C) 2018  Cedric Moreau <cem.moreau@gmail.com>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+
+import {
+  assertDeepEqual,
+  assertEqual,
+  assertFalse, assertNull,
+  assertTrue,
+  writeBasicTestWithConfAnd2Users
+} from "../tools/test-framework"
+import {CommonConstants} from "../../../app/lib/common-libs/constants"
+import {Server} from "../../../server";
+
+const es = require('event-stream');
+
+const currentVersion = CommonConstants.BLOCK_GENESIS_VERSION
+
+describe('Block revert with an identity expiry in it', () => writeBasicTestWithConfAnd2Users({
+  sigQty: 2,
+  sigReplay: 0,
+  sigPeriod: 0,
+  sigValidity: 10,
+  msValidity: 5,
+  dtDiffEval: 1,
+  forksize: 0,
+}, (test) => {
+
+  const now = 1500000000
+
+  test('(t = 0) should init with a 3 members WoT with bidirectionnal certs', async (s1, cat, tac, toc) => {
+    CommonConstants.BLOCK_GENESIS_VERSION = 11
+    await cat.createIdentity()
+    await tac.createIdentity()
+    await toc.createIdentity()
+    await cat.cert(tac)
+    await cat.cert(toc)
+    await tac.cert(cat)
+    await tac.cert(toc)
+    await toc.cert(cat)
+    await toc.cert(tac)
+    await cat.join()
+    await tac.join()
+    await toc.join()
+    const b0 = await s1.commit({ time: now })
+    assertEqual(b0.certifications.length, 6)
+    const b1 = await s1.commit({ time: now })
+    assertEqual(b1.membersCount, 3)
+  })
+
+  test('(t = 3) cat & tac renew their membership, but NOT toc', async (s1, cat, tac, toc) => {
+    await s1.commit({ time: now + 3 })
+    await s1.commit({ time: now + 3 })
+    // cat and tac renew their membership to stay in the WoT
+    await tac.join()
+    await cat.join()
+    const b1 = await s1.commit({ time: now + 3 })
+    assertEqual(b1.actives.length, 2)
+    // The index expects toc to expire at time = 1500000005
+    assertDeepEqual(await s1.getMindexExpiresOnIndexer().getOrNull('1500000005'),
+      ['DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo'])
+  })
+
+  test('(t = 6) toc membership expires', async (s1, cat, tac, toc) => {
+    await s1.commit({ time: now + 6 })
+    const b = await s1.commit({ time: now + 6 })
+    const mindexChanges = await s1.dal.mindexDAL.getWrittenOn([b.number, b.hash].join('-'))
+    assertEqual(mindexChanges.length, 1)
+    assertEqual(mindexChanges[0].pub, toc.pub)
+    assertEqual(mindexChanges[0].expired_on as number, 1500000006)
+    assertEqual(b.excluded.length, 0) // Not excluded right now, but at next block
+    // The index no longer expects anyone to expire at 1500000005
+    assertDeepEqual(await s1.getMindexExpiresOnIndexer().getOrNull('1500000005'), null)
+  })
+
+  test('block t = 6 reverted successfully', async (s1) => {
+    await s1.revert()
+    const b = await s1.dal.getBlockCurrent()
+    assertEqual(b.number, 5)
+    assertDeepEqual(await s1.getMindexExpiresOnIndexer().getOrNull('1500000005'),
+      ['DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo'])
+  })
+
+  test('resolution should put back block t = 6 successfully', async (s1) => {
+    const err = await s1.resolveForError()
+    assertNull(err)
+    const b = await s1.dal.getBlockCurrent()
+    assertEqual(b.number, 6)
+  })
+
+  after(() => {
+    CommonConstants.BLOCK_GENESIS_VERSION = currentVersion
+  })
+}))
+
diff --git a/test/integration/fork-resolution/block-with-transaction-revert.ts b/test/integration/fork-resolution/block-with-transaction-revert.ts
index 021dc53cbeb8b9583ccfb13e0c62d32cae9d3952..8743746dc777ac4f4293d60d01cc335edd61c87e 100644
--- a/test/integration/fork-resolution/block-with-transaction-revert.ts
+++ b/test/integration/fork-resolution/block-with-transaction-revert.ts
@@ -17,6 +17,7 @@ import {DBBlock} from "../../../app/lib/db/DBBlock"
 import {CommonConstants} from "../../../app/lib/common-libs/constants"
 import {TestUser} from "../tools/TestUser"
 import {TestingServer} from "../tools/toolbox"
+import {LevelDBSindex} from "../../../app/lib/dal/indexDAL/leveldb/LevelDBSindex";
 
 describe.skip('Block revert with transaction sources', () => writeBasicTestWithConfAnd2Users({
   dt: 10,
@@ -46,7 +47,7 @@ describe.skip('Block revert with transaction sources', () => writeBasicTestWithC
     const tx2 = await cat.prepareUTX(tx1, ['SIG(0)'],
       [
         { qty: 100, base: 0, lock: 'SIG(' + tac.pub + ')' },
-        { qty: 200, base: 0, lock: 'SIG(' + toc.pub + ')' }, // Send money also to toc, to test that his money is ketp safe during a revert
+        { qty: 200, base: 0, lock: 'SIG(' + toc.pub + ')' }, // Send money also to toc, to test that his money is kept safe during a revert
         { qty: 700, base: 0, lock: 'SIG(' + cat.pub + ')' }, // REST
       ],
       {
@@ -75,10 +76,14 @@ describe.skip('Block revert with transaction sources', () => writeBasicTestWithC
   })
 
   test('revert b#3-4 and re-commit block#3 should be ok', async (s1, cat, tac, toc) => {
-    await s1.revert()
-    await s1.revert()
-    await s1.resolve(b => b.number === 3)
-    await assertBlock3(s1, cat, tac, toc)
+    await s1.revert() // Revert b#4
+    await assertBlock3(s1, cat, tac, toc) // Current is b#3
+
+    await s1.revert() // Revert b#3
+    await assertBlock2(s1, cat, tac, toc) // Current is b#2
+
+    await s1.resolve(b => b.number === 3) // Waiting b#3 to commit
+    await assertBlock3(s1, cat, tac, toc) // Current is b#3
   })
 
   test('re-commit block#4 should be ok', async (s1, cat, tac, toc) => {
diff --git a/test/integration/fork-resolution/register-fork-blocks.ts b/test/integration/fork-resolution/register-fork-blocks.ts
index d448373d5f28797f5bb7148e6dbf79d604aed692..920d6f21bb839d26e022944c406e6bf72f0d8ca2 100644
--- a/test/integration/fork-resolution/register-fork-blocks.ts
+++ b/test/integration/fork-resolution/register-fork-blocks.ts
@@ -176,7 +176,7 @@ describe("Fork blocks", function() {
     await s2.sharePeeringWith(s1)
     await s2.writeBlock(b4a)
     const b3c = await s3.commit({ time: now + 33 })
-    await new Promise((res, rej) => {
+    await new Promise<void>((res, rej) => {
       const event = CommonConstants.DocumentError
       s2.on(event, (e:any) => {
         try {
diff --git a/test/integration/identity/identity-test.ts b/test/integration/identity/identity-test.ts
index 6966e9971ebf9b7d850756824698fa2cf949ca04..2a60a7ffb6f58ba0fcf6c4a4eef072101ec022a7 100644
--- a/test/integration/identity/identity-test.ts
+++ b/test/integration/identity/identity-test.ts
@@ -192,6 +192,13 @@ describe("Identities collision", function() {
     });
   });
 
+  it('should have certifiers-of/:pubkey of cat giving results', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/certifiers-of/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd?pubkey', { json: true }), function(res:HttpCertifications) {
+      res.should.have.property('pubkey').equal('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
+      res.should.have.property('uid').equal('cat');
+    });
+  });
+
   it('should have certifiers-of/tic giving results', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/certifiers-of/tic', { json: true }), function(res:HttpCertifications) {
       res.should.have.property('pubkey').equal('DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV');
@@ -213,6 +220,13 @@ describe("Identities collision", function() {
     });
   });
 
+  it('should have certifiers-of/:pubkey of tic giving results', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/certifiers-of/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV?pubkey', { json: true }), function(res:HttpCertifications) {
+      res.should.have.property('pubkey').equal('DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV');
+      res.should.have.property('uid').equal('tic');
+    });
+  });
+
   it('should have certifiers-of/toc giving results', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/certifiers-of/toc', { json: true }), function(res:HttpCertifications) {
       res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
@@ -234,6 +248,13 @@ describe("Identities collision", function() {
     });
   });
 
+  it('should have certifiers-of/:pubkey of toc giving results', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/certifiers-of/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo?pubkey', { json: true }), function(res:HttpCertifications) {
+      res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
+      res.should.have.property('uid').equal('toc');
+    });
+  });
+
   it('requirements of cat', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/requirements/cat', { json: true }), function(res:HttpRequirements) {
       res.should.have.property('identities').be.an.Array;
@@ -270,6 +291,46 @@ describe("Identities collision", function() {
     });
   });
 
+  it('requirements by pubkey of cat', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/requirements/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd?pubkey', { json: true }), function(res:HttpRequirements) {
+      res.should.have.property('identities').be.an.Array;
+      res.should.have.property('identities').have.length(1);
+      res.identities[0].should.have.property('pubkey').equal('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
+      res.identities[0].should.have.property('uid').equal('cat');
+      res.identities[0].should.have.property('meta').property('timestamp');
+      res.identities[0].should.have.property('wasMember').equal(true);
+      res.identities[0].should.have.property('expired').equal(false); // Because it has been a member once! So its identity will exist forever.
+      res.identities[0].should.have.property('outdistanced').equal(false);
+      res.identities[0].should.have.property('isSentry').equal(true); // dSen = 2, cat has issued and received 2 certs with tic and toc
+      res.identities[0].should.have.property('certifications').have.length(2);
+      res.identities[0].should.have.property('membershipPendingExpiresIn').equal(0);
+      res.identities[0].should.have.property('membershipExpiresIn').greaterThan(9000);
+    });
+  });
+
+  it('requirements by pubkey of man1', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/requirements/12AbjvYY5hxV4v2KrN9pnGzgFxogwrzgYyncYHHsyFDK?pubkey', { json: true }), function(res:HttpRequirements) {
+      res.should.have.property('identities').be.an.Array;
+      res.should.have.property('identities').have.length(1);
+      res.identities[0].should.have.property('pubkey').equal('12AbjvYY5hxV4v2KrN9pnGzgFxogwrzgYyncYHHsyFDK');
+      res.identities[0].should.have.property('uid').equal('man1');
+      res.identities[0].should.have.property('meta').property('timestamp');
+      res.identities[0].should.have.property('expired').equal(false);
+      res.identities[0].should.have.property('outdistanced').equal(false);
+      res.identities[0].should.have.property('isSentry').equal(false); // Not a member, also dSen = 2, but man1 has only 1 certification
+      res.identities[0].should.have.property('certifications').length(1);
+      res.identities[0].certifications[0].should.have.property('from').equal('2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc');
+      res.identities[0].certifications[0].should.have.property('to').equal('12AbjvYY5hxV4v2KrN9pnGzgFxogwrzgYyncYHHsyFDK');
+      res.identities[0].certifications[0].should.have.property('expiresIn').greaterThan(0);
+      res.identities[0].should.have.property('membershipPendingExpiresIn').greaterThan(9000);
+      res.identities[0].should.have.property('membershipExpiresIn').equal(0);
+    });
+  });
+
+  it('requirements by invalid pubkey', function() {
+    return expectError(404, "No identity matching this pubkey or uid", rp('http://127.0.0.1:7799/wot/requirements/cat?pubkey', { json: true }));
+  });
+
   it('should have certified-by/tic giving results', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/certified-by/tic', { json: true }), function(res:HttpCertifications) {
       res.should.have.property('pubkey').equal('DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV');
@@ -298,6 +359,13 @@ describe("Identities collision", function() {
     });
   });
 
+  it('should have certified-by/:pubkey of tic giving results', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/certified-by/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV?pubkey', { json: true }), function(res:HttpCertifications) {
+      res.should.have.property('pubkey').equal('DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV');
+      res.should.have.property('uid').equal('tic');
+    });
+  });
+
   it('should have certified-by/tac giving results', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/certified-by/tac', { json: true }), function(res:HttpCertifications) {
       res.should.have.property('pubkey').equal('2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc');
@@ -308,6 +376,13 @@ describe("Identities collision", function() {
     });
   });
 
+  it('should have certified-by/:pubkey of tac giving results', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/certified-by/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc?pubkey', { json: true }), function(res:HttpCertifications) {
+      res.should.have.property('pubkey').equal('2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc');
+      res.should.have.property('uid').equal('tac');
+    });
+  });
+
   it('should have certified-by/cat giving results', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/certified-by/cat', { json: true }), function(res:HttpCertifications) {
       res.should.have.property('pubkey').equal('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
@@ -343,6 +418,13 @@ describe("Identities collision", function() {
     });
   });
 
+  it('should have certified-by/:pubkey of cat giving results', function() {
+    return expectAnswer(rp('http://127.0.0.1:7799/wot/certified-by/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd?pubkey', { json: true }), function(res:HttpCertifications) {
+      res.should.have.property('pubkey').equal('HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd');
+      res.should.have.property('uid').equal('cat');
+    });
+  });
+
   it('requirements of man2', function() {
     return expectAnswer(rp('http://127.0.0.1:7799/wot/requirements/man2', { json: true }), function(res:HttpRequirements) {
       res.should.have.property('identities').be.an.Array;
diff --git a/test/integration/misc/http-api.ts b/test/integration/misc/http-api.ts
index 6ec11146cb3f3f5e4fe202068494191e3ce03bc9..1dcb732f603be605e359f61189ce64a9495a2f5e 100644
--- a/test/integration/misc/http-api.ts
+++ b/test/integration/misc/http-api.ts
@@ -350,7 +350,7 @@ function postBlock(server2:TestingServer) {
     })
       .then(async (result:HttpBlock) => {
         const numberToReach = block.number
-        await new Promise((res) => {
+        await new Promise<void>((res) => {
           const interval = setInterval(async () => {
             const current = await server2.dal.getCurrentBlockOrNull()
             if (current && current.number == numberToReach) {
diff --git a/test/integration/misc/server-import-export.ts b/test/integration/misc/server-import-export.ts
index 2f2e7bd5487336cf1fa3bde24bc46cdbd2fc9ec5..6e0ca63da8548302f7e187d0df819bf31ad89556 100644
--- a/test/integration/misc/server-import-export.ts
+++ b/test/integration/misc/server-import-export.ts
@@ -59,7 +59,7 @@ describe('Import/Export', () => {
     const archive = await s1.exportAllDataAsZIP();
     const output = require('fs').createWriteStream(s1.home + '/export.zip');
     archive.pipe(output);
-    return new Promise((resolve, reject) => {
+    return new Promise<void>((resolve, reject) => {
       archive.on('error', reject);
       output.on('close', function() {
         resolve();
diff --git a/test/integration/network/network-merkle.ts b/test/integration/network/network-merkle.ts
index d1ea5d084537f2be595610d24fb14aad9ece9638..4f624defdaaa0021ab515ad30e76f50076677bb9 100644
--- a/test/integration/network/network-merkle.ts
+++ b/test/integration/network/network-merkle.ts
@@ -13,7 +13,7 @@
 
 import {Underscore} from "../../../app/lib/common-libs/underscore"
 import {HttpMerkleOfPeers} from "../../../app/modules/bma/lib/dtos"
-import {NewTestingServer} from "../tools/toolbox"
+import {NewTestingServer, TestingServer} from "../tools/toolbox"
 import {expectAnswer, expectHttpCode} from "../tools/http-expect"
 
 const rp        = require('request-promise');
@@ -28,34 +28,37 @@ const commonConf = {
   sigQty: 1
 };
 
-const s1 = NewTestingServer(Underscore.extend({
-  name: 'bb33',
-  ipv4: '127.0.0.1',
-  port: '20501',
-  remoteport: '20501',
-  ws2p: { upnp: false },
-  pair: {
-    pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
-    sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
-  },
-  rootoffset: 10,
-  sigQty: 1, dt: 0, ud0: 120
-}, commonConf));
-
-const s2 = NewTestingServer(Underscore.extend({
-  name: 'bb12',
-  port: '20502',
-  remoteport: '20502',
-  ws2p: { upnp: false },
-  pair: {
-    pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
-    sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
-  }
-}, commonConf));
-
 describe("Network Merkle", function() {
 
+  let s1: TestingServer, s2: TestingServer;
+
   before(async () => {
+
+    s1 = NewTestingServer(Underscore.extend({
+      name: 'bb33',
+      ipv4: '127.0.0.1',
+      port: '20501',
+      remoteport: '20501',
+      ws2p: { upnp: false },
+      pair: {
+        pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+        sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+      },
+      rootoffset: 10,
+      sigQty: 1, dt: 0, ud0: 120
+    }, commonConf));
+
+    s2 = NewTestingServer(Underscore.extend({
+      name: 'bb12',
+      port: '20502',
+      remoteport: '20502',
+      ws2p: { upnp: false },
+      pair: {
+        pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo',
+        sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'
+      }
+    }, commonConf));
+
     await s1.initDalBmaConnections()
     await s2.initDalBmaConnections()
     await s1._server.PeeringService.generateSelfPeer(s1._server.conf, 0)
diff --git a/test/integration/network/peer-outdated.ts b/test/integration/network/peer-outdated.ts
index 406ca5a54f1b4a3306962ec58b003a8b64a182cf..b34718254aaa3bbafc41ae8f5732d31199c274f3 100644
--- a/test/integration/network/peer-outdated.ts
+++ b/test/integration/network/peer-outdated.ts
@@ -90,7 +90,7 @@ describe("Peer document expiry", function() {
 
   it('routing V1 peer document should raise an "outdated" event', async () => {
     const caster = new Multicaster();
-    return new Promise((resolve) => {
+    return new Promise<void>((resolve) => {
       caster
         .pipe(es.mapSync((obj:any) => {
           obj.should.have.property("outdated").equal(true);
diff --git a/test/integration/proof-of-work/continuous-proof.ts b/test/integration/proof-of-work/continuous-proof.ts
index 1db5b4f2882471bd7ca816113ddec6fa02807dfb..63fba46cd3edc61b99570088a9a407d8fb6677a1 100644
--- a/test/integration/proof-of-work/continuous-proof.ts
+++ b/test/integration/proof-of-work/continuous-proof.ts
@@ -117,9 +117,9 @@ describe("Continous proof-of-work", function() {
     s2.startBlockComputation();
     await s2.until('block', 15);
     await s2.stopBlockComputation();
-    await [
+    await Promise.all([
       CrawlerDependency.duniter.methods.pullBlocks(s3._server),
-      new Promise(res => {
+      new Promise<void>(res => {
         s3.pipe(es.mapSync((e:any) => {
           if (e.number === 15) {
             res()
@@ -129,7 +129,7 @@ describe("Continous proof-of-work", function() {
 
       }),
       s3.startBlockComputation()
-    ];
+    ]);
     const current = await s3.get('/blockchain/current')
     await s3.stopBlockComputation();
     current.number.should.be.aboveOrEqual(14)
diff --git a/test/integration/tools/test-framework.ts b/test/integration/tools/test-framework.ts
index 9f826b859b6ffdecee94553d215c244bea136096..4ea2846485cd8b5d9e44a31584d03c8c3043afb1 100644
--- a/test/integration/tools/test-framework.ts
+++ b/test/integration/tools/test-framework.ts
@@ -79,3 +79,7 @@ export function assertNull(value: any) {
 export function assertFalse(expected: boolean) {
   assert.equal(false, expected)
 }
+
+export function assertDeepEqual(value: any, expected: any) {
+  assert.deepEqual(value, expected)
+}
\ No newline at end of file
diff --git a/test/integration/tools/test-until.ts b/test/integration/tools/test-until.ts
index 4c05899eab0e5f7599930e6489edf6d440059642..eff7ba46b0890d75c5f8ac3f1ab642e39b65b7ca 100644
--- a/test/integration/tools/test-until.ts
+++ b/test/integration/tools/test-until.ts
@@ -18,7 +18,7 @@ const UNTIL_TIMEOUT = 115000;
 export function until(server:TestingServer, eventName:string, count:number) {
   let counted = 0;
   const max = count == undefined ? 1 : count;
-  return new Promise(function (resolve, reject) {
+  return new Promise<void>(function (resolve, reject) {
     let finished = false;
     server._server.on(eventName, function () {
       counted++;
diff --git a/test/integration/tools/toolbox.ts b/test/integration/tools/toolbox.ts
index 571c3c6ed6852f02a0bc6437440119eae2dc59ea..8c8e1cfc479aae50346568faa1301d6cf9f4e8ad 100644
--- a/test/integration/tools/toolbox.ts
+++ b/test/integration/tools/toolbox.ts
@@ -61,6 +61,7 @@ import {CommonConstants} from "../../../app/lib/common-libs/constants"
 import {WS2PRequester} from "../../../app/modules/ws2p/lib/WS2PRequester"
 import {WS2PDependency} from "../../../app/modules/ws2p/index"
 import {ForcedBlockValues} from "../../../app/modules/prover/lib/blockGenerator"
+import {LevelMIndexExpiresOnIndexer} from "../../../app/lib/dal/indexDAL/leveldb/indexers/LevelMIndexExpiresOnIndexer";
 
 const assert      = require('assert');
 const rp          = require('request-promise');
@@ -242,6 +243,11 @@ export const NewTestingServer = (conf:any) => {
     httpLogs: true,
     forksize: conf.forksize !== undefined ? conf.forksize : 3,
     nonWoTPeersLimit: CommonConstants.DEFAULT_NON_WOT_PEERS_LIMIT,
+    storage: {
+      transaction: true,
+      wotwizard: false,
+      ...conf.storage
+    }
   };
   if (conf.sigQty === undefined) {
     conf.sigQty = 1;
@@ -259,7 +265,7 @@ export const NewTestingServer = (conf:any) => {
 }
 
 export const serverWaitBlock = async (server:Server, number:number) => {
-  await new Promise((res) => {
+  await new Promise<void>((res) => {
     const interval = setInterval(async () => {
       const current = await server.dal.getCurrentBlockOrNull()
       if (current && current.number == number) {
@@ -275,7 +281,7 @@ export const waitToHaveBlock = async (server:Server, number:number) => {
 }
 
 export const waitForkResolution = async (server:Server, number:number) => {
-  await new Promise(res => {
+  await new Promise<void>(res => {
     server.pipe(es.mapSync((e:any) => {
       if (e.bcEvent === 'switched' && e.block.number === number) {
         res()
@@ -287,7 +293,7 @@ export const waitForkResolution = async (server:Server, number:number) => {
 }
 
 export const waitForkWS2PConnection = async (server:Server, pubkey:string) => {
-  await new Promise(res => {
+  await new Promise<void>(res => {
     server.pipe(es.mapSync((e:any) => {
       if (e.ws2p === 'connected' && e.to.pubkey === pubkey) {
         res()
@@ -299,7 +305,7 @@ export const waitForkWS2PConnection = async (server:Server, pubkey:string) => {
 }
 
 export const waitForkWS2PDisconnection = async (server:Server, pubkey:string) => {
-  await new Promise(res => {
+  await new Promise<void>((res) => {
     server.pipe(es.mapSync((e:any) => {
       if (e.ws2p === 'disconnected' && e.peer.pub === pubkey) {
         res()
@@ -386,6 +392,27 @@ export class TestingServer {
     return blocksResolved
   }
 
+  async resolveForError(): Promise<string|null> {
+    const server = this.server
+    const bcService = await server.BlockchainService
+    let errorCatch: Promise<string> = new Promise(res => {
+      server.pipe(es.mapSync((e:any) => {
+        if (e.blockResolutionError) {
+          res(e.blockResolutionError)
+        }
+      }))
+    })
+    await bcService.blockResolution()
+    return Promise.race([
+      errorCatch,
+      new Promise<null>(res => setTimeout(() => res(null), 200))
+    ])
+  }
+
+  getMindexExpiresOnIndexer(): LevelMIndexExpiresOnIndexer {
+    return (this.server.dal.mindexDAL as any).indexForExpiresOn
+  }
+
   async resolveFork(): Promise<BlockDTO|null> {
     return this.server.BlockchainService.forkResolution()
   }
@@ -451,7 +478,7 @@ export class TestingServer {
   }
 
   push(chunk: any, encoding?: string) {
-    return this.server.push(chunk, encoding)
+    return this.server.push(chunk, encoding as BufferEncoding)
   }
 
   pipe(writable:stream.Writable) {
@@ -544,7 +571,7 @@ export class TestingServer {
 
   async commitWaitError(options:any, expectedError:string) {
     const results = await Promise.all([
-      new Promise(res => {
+      new Promise<void>((res) => {
         this.server.pipe(es.mapSync((e:any) => {
           if (e.blockResolutionError === expectedError) {
             res()
diff --git a/test/integration/transactions/transactions-history.ts b/test/integration/transactions/transactions-history.ts
new file mode 100644
index 0000000000000000000000000000000000000000..1eb345f5eea807b3137b07a592c44e0ead8d4a28
--- /dev/null
+++ b/test/integration/transactions/transactions-history.ts
@@ -0,0 +1,173 @@
+// Source file from duniter: Crypto-currency software to manage libre currency such as Äž1
+// Copyright (C) 2018  Cedric Moreau <cem.moreau@gmail.com>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+
+import {TestUser} from "../tools/TestUser"
+import {CommonConstants} from "../../../app/lib/common-libs/constants"
+import {NewTestingServer, TestingServer} from "../tools/toolbox"
+import {HttpBlock, HttpTxHistory} from "../../../app/modules/bma/lib/dtos"
+import {Underscore} from "../../../app/lib/common-libs/underscore";
+
+const should    = require('should');
+
+let s1:TestingServer, cat1:TestUser, tac1:TestUser
+
+describe("Transactions history", function() {
+
+  const now = 1500000000
+  const conf = {
+    udTime0: now,
+    dt: 30,
+    avgGenTime: 5000,
+    medianTimeBlocks: 2
+  };
+
+  before(async () => {
+
+    s1 = NewTestingServer(Underscore.extend({
+      currency: 'currency_one',
+      pair: {
+        pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd',
+        sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'
+      }
+    }, conf));
+
+    cat1 = new TestUser('cat', { pub: 'HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', sec: '51w4fEShBk1jCMauWu4mLpmDVfHksKmWcygpxriqCEZizbtERA6de4STKRkQBpxmMUwsKXRjSzuQ8ECwmqN1u2DP'}, { server: s1 });
+    tac1 = new TestUser('tac', { pub: '2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', sec: '2HuRLWgKgED1bVio1tdpeXrf7zuUszv1yPHDsDj7kcMC4rVSN9RC58ogjtKNfTbH1eFz7rn38U1PywNs3m6Q7UxE'}, { server: s1 });
+
+    await s1.prepareForNetwork();
+
+    const now = parseInt(String(Date.now() / 1000))
+
+    // Publishing identities
+    await cat1.createIdentity();
+    await tac1.createIdentity();
+    await cat1.cert(tac1);
+    await tac1.cert(cat1);
+    await cat1.join();
+    await tac1.join();
+    await s1.commit();
+    await s1.commit({
+      time: now + conf.avgGenTime
+    });
+    await s1.commit();
+    await cat1.sendMoney(20, tac1);
+  })
+
+  after(() => {
+    return Promise.all([
+      s1.closeCluster()
+    ])
+  })
+
+  it('sending transactions should exist in /tx/history/:pubkey/pending', () => s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/pending', (res:HttpTxHistory) => {
+    res.history.should.have.property('sending').length(1);
+    res.history.should.have.property('pending').length(0);
+  }));
+
+  it('pending transactions should exist in /tx/history/:pubkey/pending', () => s1.expect('/tx/history/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc/pending', (res:HttpTxHistory) => {
+    res.history.should.have.property('sending').length(0);
+    res.history.should.have.property('pending').length(1);
+  }));
+
+  it('sent and received transactions should should exist', async () => {
+    await s1.commit();
+
+    // cat1 pending should be empty
+    await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/pending', (res:HttpTxHistory) => {
+      res.history.should.have.property('sending').length(0);
+      res.history.should.have.property('pending').length(0);
+    });
+    // cat1 sent should have one element
+    await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(1);
+      res.history.should.have.property('received').length(0);
+    });
+    // tac1 sending should be empty
+    await s1.expect('/tx/history/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc/pending', (res:HttpTxHistory) => {
+      res.history.should.have.property('sending').length(0);
+      res.history.should.have.property('pending').length(0);
+    });
+    // tac1 received should have one element
+    await s1.expect('/tx/history/2LvDg21dVXvetTD9GdkPLURavLYEqP3whauvPWX4c2qc', (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(0);
+      res.history.should.have.property('received').length(1);
+    });
+  })
+
+  it('get transactions by blocks slice', async () => {
+
+    const firstBlock = await s1.commit();
+
+    // cat1 sent should have one element
+    await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/blocks/0/' + firstBlock.number, (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(1);
+      res.history.should.have.property('received').length(0);
+    });
+
+    // Add a pending TX from tac1 -> cat1
+    await s1.commit({
+      time: firstBlock.time + conf.avgGenTime
+    });
+    await tac1.sendMoney(10, cat1);
+    const secondBlock = await s1.commit();
+
+    // Should not appear in sliced history
+    await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/blocks/0/' + firstBlock.number, (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(1);
+      res.history.should.have.property('received').length(0);
+    });
+    await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd/blocks/' + (firstBlock.number + 1) + '/' + secondBlock.number, (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(0);
+      res.history.should.have.property('received').length(1);
+    });
+
+    // Whole history
+    await s1.expect('/tx/history/HgTTJLAQ5sqfknMq7yLPZbehtuLSsKj9CxWN7k8QvYJd', (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(1);
+      res.history.should.have.property('received').length(1);
+    });
+  })
+
+  it('get transactions by times slice', async () => {
+
+    const medianTimeOffset = conf.avgGenTime * conf.medianTimeBlocks / 2;
+    const firstBlock = await s1.commit();
+    const startTime = firstBlock.medianTime + medianTimeOffset;
+
+    // Should not have TX yet
+    await s1.expect(`/tx/history/${cat1.pub}/times/${startTime}/${startTime + conf.avgGenTime - 1}`, (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(0);
+      res.history.should.have.property('received').length(0);
+    });
+
+    // Add a pending TX from tac1 -> cat1
+    await tac1.sendMoney(10, cat1);
+    const secondBlock = await s1.commit({
+      time: firstBlock.time + conf.avgGenTime
+    });
+    should(secondBlock).property('time').greaterThan(firstBlock.time);
+    const secondTime = secondBlock.medianTime + medianTimeOffset;
+
+    // Previous range (before TX) should still be empty
+    await s1.expect(`/tx/history/${cat1.pub}/times/${startTime}/${secondTime - 1}`, (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(0);
+      res.history.should.have.property('received').length(0);
+    });
+
+    // Should appear in next range
+    await s1.expect(`/tx/history/${cat1.pub}/times/${secondTime}/${secondTime + conf.avgGenTime}`, (res:HttpTxHistory) => {
+      res.history.should.have.property('sent').length(0);
+      res.history.should.have.property('received').length(1);
+    });
+  })
+})
diff --git a/test/integration/transactions/transactions-test.ts b/test/integration/transactions/transactions-test.ts
new file mode 100644
index 0000000000000000000000000000000000000000..f61ba887d100a2f35439e25ab4a5b0faa49412d3
--- /dev/null
+++ b/test/integration/transactions/transactions-test.ts
@@ -0,0 +1,237 @@
+// Source file from duniter: Crypto-currency software to manage libre currency such as Äž1
+// Copyright (C) 2018  Cedric Moreau <cem.moreau@gmail.com>
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as published by
+// the Free Software Foundation, either version 3 of the License, or
+// (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+
+import {TestUser} from "../tools/TestUser"
+import {NewTestingServer, TestingServer} from "../tools/toolbox"
+import {Underscore} from "../../../app/lib/common-libs/underscore"
+import {shouldFail, shouldNotFail} from "../../unit-tools"
+
+const should = require('should');
+const assert = require('assert');
+
+describe("Testing transactions", function() {
+
+  const now = 1490000000;
+  const yesterday = now - 86400;
+  const tomorrow = now + 86400;
+  const intwodays = now + (86400 * 2);
+
+  let s1:TestingServer, tic:TestUser, toc:TestUser
+
+  before(async () => {
+
+    s1 = NewTestingServer({
+      pair: {
+        pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV',
+        sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'
+      },
+      nbCores: 1,
+      dt: 7210,
+      ud0: 1200,
+      udTime0: now + 7210,
+      udReevalTime0: now + 7210,
+      avgGenTime: 7210,
+      medianTimeBlocks: 1
+    });
+
+    tic = new TestUser('tic', { pub: 'DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', sec: '468Q1XtTq7h84NorZdWBZFJrGkB18CbmbHr9tkp9snt5GiERP7ySs3wM8myLccbAAGejgMRC9rqnXuW3iAfZACm7'}, { server: s1 });
+    toc = new TestUser('toc', { pub: 'DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', sec: '64EYRvdPpTfLGGmaX5nijLXRqWXaVz8r1Z1GtaahXwVSJGQRn7tqkxLb288zwSYzELMEG5ZhXSBYSxsTsz1m9y8F'}, { server: s1 });
+
+    await s1.initDalBmaConnections();
+    // Self certifications
+    await tic.createIdentity();
+    await toc.createIdentity();
+    // Certification;
+    await tic.cert(toc);
+    await toc.cert(tic);
+    await tic.join();
+    await toc.join();
+    await s1.commit({ time: now });
+    await s1.commit({
+      time: now + 7210
+    });
+    await s1.commit({
+      time: now + 7210
+    });
+    await tic.sendMoney(510, toc);
+    await s1.expect('/tx/history/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', (res:any) => {
+      res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
+      res.should.have.property('history').property('pending').length(1);
+    });
+    await s1.commit({
+      time: now + 7220
+    });
+  })
+
+  after(() => {
+    return Promise.all([
+      s1.closeCluster()
+    ])
+  })
+
+  describe("History by time", function(){
+    it('should have a time not null', () => s1.expect('/tx/history/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', (res:any) => {
+      res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
+      res.should.have.property('history').property('received').length(1);
+      res.history.received[0].should.have.property('time').not.be.Null;
+      res.history.received[0].should.have.property('time').be.a.Number;
+    }));
+
+    it('should return a received transaction between yesterday and tomorrow', () => s1.expect('/tx/history/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo/times/' + yesterday + '/' + tomorrow, (res:any) => {
+      res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
+      res.should.have.property('history').property('received').length(1);
+      res.history.received[0].should.have.property('time').not.be.Null;
+      res.history.received[0].should.have.property('time').be.a.Number;
+    }));
+
+    it('should not return a received transaction the day after tomorrow', () => s1.expect('/tx/history/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo/times/' + tomorrow + '/' + intwodays, (res:any) => {
+      res.should.have.property('pubkey').equal('DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo');
+      res.should.have.property('history').property('received').length(0);
+    }));
+  })
+
+  describe("Sources", function(){
+
+    it('it should exist block#2 with UD of 1200', () => s1.expect('/blockchain/block/2', (block:any) => {
+      should.exists(block);
+      assert.equal(block.number, 2);
+      assert.equal(block.dividend, 1200);
+    }));
+
+    it('tic should be able to send 510 to toc', async () => {
+      await s1.expect('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV', (res:any) => {
+        should.exists(res);
+        assert.equal(res.sources.length, 1);
+        assert.equal(res.sources[0].conditions, 'SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV)')
+        const txSrc = (Underscore.findWhere(res.sources, { type: 'T' }) as any)
+        assert.equal(txSrc.amount, 690);
+      })
+      const tx = await s1.get('/tx/hash/B6DCADFB841AC05A902741A8772A70B4086D5AEAB147AD48987DDC3887DD55C8')
+      assert.notEqual(tx, null)
+      assert.deepEqual(tx, {
+        "comment": "",
+        "currency": "duniter_unit_test_currency",
+        "hash": "B6DCADFB841AC05A902741A8772A70B4086D5AEAB147AD48987DDC3887DD55C8",
+        "inputs": [
+          "1200:0:D:DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV:2"
+        ],
+        "issuers": [
+          "DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV"
+        ],
+        "locktime": 0,
+        "outputs": [
+          "510:0:SIG(DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo)",
+          "690:0:SIG(DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV)"
+        ],
+        "raw": "",
+        "signatures": [
+          "Wy2tAKp/aFH2hqZJ5qnUFUNEukFbHwaR4v9gZ/aGoySPfXovDwld9W15w8C0ojVYbma9nlU3eLkVqzVBYz3lAw=="
+        ],
+        "unlocks": [
+          "0:SIG(0)"
+        ],
+        "version": 10,
+        "writtenTime": 1490007210,
+        "written_block": 3
+      })
+    })
+
+    it('toc should have 1510 of sources', () => s1.expect('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo', (res:any) => {
+      should.exists(res);
+      assert.equal(res.sources.length, 2);
+      const txRes = (Underscore.findWhere(res.sources, { type: 'T' }) as any)
+      const duRes = (Underscore.where(res.sources, { type: 'D' }) as any)
+      assert.equal(txRes.type, 'T');
+      assert.equal(txRes.amount, 510);
+      assert.equal(duRes[0].type, 'D');
+      assert.equal(duRes[0].amount, 1200);
+    }));
+
+    it('toc should be able to send 800 to tic', async () => {
+      let tx1 = await toc.prepareITX(1710, tic);
+      await toc.sendTX(tx1);
+      await s1.commit({ time: now + 15000 });
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+    })
+  });
+
+  describe("Chaining", function(){
+
+    it('with SIG and XHX', async () => {
+      // Current state
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2);
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(2);
+      // Make the time go so another UD is available
+      await s1.commit({ time: now + 15000 });
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
+      let tx1 = await toc.prepareITX(1200, tic);
+      await toc.sendTX(tx1);
+      await s1.commit({ time: now + 15000 });
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(4);
+      // Now tic has all the money...
+      let current = await s1.get('/blockchain/current');
+      let tx2 = await tic.prepareUTX(tx1, ['SIG(2)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx3 = await tic.prepareUTX(tx1, ['SIG(1)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx4 = await tic.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1200, base: 0, lock: 'XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB)' }], { comment: 'ok', blockstamp: [current.number, current.hash].join('-') });
+      let tx5 = await tic.prepareUTX(tx1, ['XHX(2)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx6 = await tic.prepareUTX(tx1, ['XHX(4)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      await shouldFail(toc.sendTX(tx2), 'Wrong unlocker in transaction');
+      await shouldFail(toc.sendTX(tx3), 'Wrong unlocker in transaction');
+      await shouldNotFail(toc.sendTX(tx4));
+      await shouldFail(toc.sendTX(tx5), 'Wrong unlocker in transaction');
+      await shouldFail(toc.sendTX(tx6), 'Wrong unlocker in transaction');
+      await s1.commit({ time: now + 19840 }); // TX4 committed
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0); // The tx was not sent to someone, but with an XHX! So toc has nothing more than before.
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
+      let tx7 = await tic.prepareUTX(tx4, ['XHX(2872767826647264)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong1', blockstamp: [current.number, current.hash].join('-') });
+      let tx8 = await tic.prepareUTX(tx4, ['XHX(1872767826647264)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'okk', blockstamp: [current.number, current.hash].join('-') }); // tic unlocks the XHX locked amount, and gives it to toc!
+      await shouldFail(toc.sendTX(tx7), 'Wrong unlocker in transaction');
+      await shouldNotFail(toc.sendTX(tx8));
+      await s1.commit({ time: now + 19840 }); // TX8 committed
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // That's why toc now has 1 more source...
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3); // ...and why tic's number of sources hasn't changed
+    })
+
+    it('with MULTISIG', async () => {
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(3);
+      let tx1 = await toc.prepareITX(1200, tic);
+      await toc.sendTX(tx1);
+      await s1.commit({ time: now + 19840 });
+      let current = await s1.get('/blockchain/current');
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(0);
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(4);
+      // The funding transaction that can be reverted by its issuer (tic here) or consumed by toc if he knows X for H(X)
+      let tx2 = await tic.prepareUTX(tx1, ['SIG(0)'], [{ qty: 1200, base: 0, lock: '(XHX(8AFC8DF633FC158F9DB4864ABED696C1AA0FE5D617A7B5F7AB8DE7CA2EFCD4CB) && SIG(' + toc.pub + ')) || (SIG(' + tic.pub + ') && SIG(' + toc.pub + '))'  }], { comment: 'cross1', blockstamp: [current.number, current.hash].join('-') });
+      await shouldNotFail(toc.sendTX(tx2));
+      await s1.commit({ time: now + 19840 }); // TX2 commited
+      (await s1.get('/tx/sources/DKpQPUL4ckzXYdnDRvCRKAm1gNvSdmAXnTrJZ7LvM5Qo')).should.have.property('sources').length(1); // toc is also present in the target of tx2
+      (await s1.get('/tx/sources/DNann1Lh55eZMEDXeYt59bzHbA3NJR46DeQYCS2qQdLV')).should.have.property('sources').length(4); // As well as tic
+      let tx3 = await tic.prepareUTX(tx2, ['XHX(1872767826647264) SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong', blockstamp: [current.number, current.hash].join('-') });
+      let tx4 = await toc.prepareUTX(tx2, ['XHX(1872767826647264) SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'ok', blockstamp: [current.number, current.hash].join('-') });
+      let tx5 = await tic.prepareMTX(tx2, toc, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'multi OK', blockstamp: [current.number, current.hash].join('-') });
+      let tx6 = await toc.prepareMTX(tx2, tic, ['XHX(1872767826647264) SIG(1) SIG(0) SIG(0) SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'multi WRONG', blockstamp: [current.number, current.hash].join('-') });
+      // nLocktime
+      let tx7 = await tic.prepareMTX(tx2, toc, ['XHX(1872767826647264) SIG(1) SIG(0)'], [{ qty: 1200, base: 0, lock: 'SIG(' + toc.pub + ')' }], { comment: 'wrong locktime', locktime: 100, blockstamp: [current.number, current.hash].join('-') });
+      await shouldFail(toc.sendTX(tx3), 'Wrong unlocker in transaction');
+      await shouldNotFail(toc.sendTX(tx4));
+      await shouldNotFail(toc.sendTX(tx5));
+      await shouldFail(toc.sendTX(tx6), 'Wrong unlocker in transaction');
+      await shouldFail(toc.sendTX(tx7), 'Locktime not elapsed yet');
+    })
+  })
+})
diff --git a/test/integration/ws2p/ws2p_pulling.ts b/test/integration/ws2p/ws2p_pulling.ts
index 52f7aaaf448e026d850a17eace500ac704cae3e2..7794605a2d0686c95c15e95a23333818ef7960b7 100644
--- a/test/integration/ws2p/ws2p_pulling.ts
+++ b/test/integration/ws2p/ws2p_pulling.ts
@@ -74,7 +74,7 @@ describe("WS2P block pulling", function() {
     cluster2 = network.cluster2
   })
 
-  after(() => wss.close())
+  after(() => wss?.close())
 
   it('should have b#6 on s1, b#2 on s2', async () => {
     const currentS1 = await s1.BlockchainService.current()