diff --git a/Cargo.lock b/Cargo.lock
index 9494aa84c9a2a4c274bf9a1758a16ebb07284a25..4cdc79e78552cd483bd6096c7a437f0c507881fb 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -32,6 +32,27 @@ dependencies = [
  "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "backtrace"
+version = "0.3.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "backtrace-sys 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
+ "cfg-if 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustc-demangle 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "winapi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "backtrace-sys"
+version = "0.1.23"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "cc 1.0.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libc 0.2.40 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "base58"
 version = "0.1.0"
@@ -72,6 +93,15 @@ dependencies = [
  "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "bincode"
+version = "1.0.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "byteorder 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "bitflags"
 version = "0.9.1"
@@ -227,12 +257,15 @@ dependencies = [
  "duniter-network 0.1.0",
  "duniter-wotb 0.8.0-a0.6",
  "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustbreak 2.0.0-rc2 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_json 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
  "sqlite 0.23.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -278,6 +311,7 @@ dependencies = [
  "base64 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -294,10 +328,10 @@ dependencies = [
  "rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
+ "rustbreak 2.0.0-rc2 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_json 1.0.17 (registry+https://github.com/rust-lang/crates.io-index)",
- "sqlite 0.23.9 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -312,6 +346,7 @@ dependencies = [
  "regex 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde_derive 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -419,6 +454,25 @@ name = "either"
 version = "1.5.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "failure"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "backtrace 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "failure_derive"
+version = "0.1.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "foreign-types"
 version = "0.3.2"
@@ -716,6 +770,11 @@ dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "quote"
+version = "0.3.15"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "quote"
 version = "0.5.2"
@@ -838,6 +897,22 @@ dependencies = [
  "time 0.1.40 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "rustbreak"
+version = "2.0.0-rc2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "base64 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.57 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "rustc-demangle"
+version = "0.1.8"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "rustc-serialize"
 version = "0.3.24"
@@ -964,6 +1039,16 @@ name = "strsim"
 version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "syn"
+version = "0.11.11"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "syn"
 version = "0.13.10"
@@ -974,6 +1059,23 @@ dependencies = [
  "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "synom"
+version = "0.11.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
+[[package]]
+name = "synstructure"
+version = "0.6.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+dependencies = [
+ "quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "tempdir"
 version = "0.3.7"
@@ -1213,6 +1315,11 @@ name = "unicode-width"
 version = "0.1.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "unicode-xid"
+version = "0.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+
 [[package]]
 name = "unicode-xid"
 version = "0.1.0"
@@ -1330,11 +1437,14 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum ansi_term 0.11.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ee49baf6cb617b853aa8d93bf420db2383fab46d314482ca2803b40d5fde979b"
 "checksum arrayvec 0.4.7 (registry+https://github.com/rust-lang/crates.io-index)" = "a1e964f9e24d588183fcb43503abda40d288c8657dfc27311516ce2f05675aef"
 "checksum atty 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)" = "2fc4a1aa4c24c0718a250f0681885c1af91419d242f29eb8f2ab28502d80dbd1"
+"checksum backtrace 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "dbdd17cd962b570302f5297aea8648d5923e22e555c2ed2d8b2e34eca646bf6d"
+"checksum backtrace-sys 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)" = "bff67d0c06556c0b8e6b5f090f0eac52d950d9dfd1d35ba04e4ca3543eaf6a7e"
 "checksum base58 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "5024ee8015f02155eee35c711107ddd9a9bf3cb689cf2a9089c97e79b6e1ae83"
 "checksum base64 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "30e93c03064e7590d0466209155251b90c22e37fab1daf2771582598b5827557"
 "checksum base64 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "96434f987501f0ed4eb336a411e0631ecd1afa11574fe148587adc4ff96143c9"
 "checksum base64 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9263aa6a38da271eec5c91a83ce1e800f093c8535788d403d626d8d5c3f8f007"
 "checksum bincode 0.9.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9a6301db0b49fb63551bc15b5ae348147101cdf323242b93ec7546d5002ff1af"
+"checksum bincode 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bda13183df33055cbb84b847becce220d392df502ebe7a4a78d7021771ed94d0"
 "checksum bitflags 0.9.1 (registry+https://github.com/rust-lang/crates.io-index)" = "4efd02e230a02e18f92fc2735f44597385ed02ad8f831e7c1c1156ee5e1ab3a5"
 "checksum bitflags 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d0c54bb8f454c567f21197eefcdbf5679d0bd99f2ddbe52e84c77061952e6789"
 "checksum byteorder 1.2.3 (registry+https://github.com/rust-lang/crates.io-index)" = "74c0b906e9446b0a2e4f760cdb3fa4b2c48cdc6db8766a845c54b6ff063fd2e9"
@@ -1353,6 +1463,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum crossbeam-utils 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "d636a8b3bcc1b409d7ffd3facef8f21dcb4009626adbd0c5e6c4305c07253c7b"
 "checksum dtoa 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "09c3753c3db574d215cba4ea76018483895d7bff25a31b49ba45db21c48e50ab"
 "checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
+"checksum failure 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "934799b6c1de475a012a02dab0ace1ace43789ee4b99bcfbf1a2e3e8ced5de82"
+"checksum failure_derive 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "c7cdda555bb90c9bb67a3b670a0f42de8e73f5981524123ad8578aafec8ddb8b"
 "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
 "checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
 "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82"
@@ -1391,6 +1503,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
 "checksum pkg-config 0.3.11 (registry+https://github.com/rust-lang/crates.io-index)" = "110d5ee3593dbb73f56294327fe5668bcc997897097cbc76b51e7aed3f52452f"
 "checksum proc-macro2 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)" = "1b06e2f335f48d24442b35a19df506a835fb3547bc3c06ef27340da9acf5cae7"
+"checksum quote 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e920b65c65f10b2ae65c831a81a073a89edd28c7cce89475bff467ab4167a"
 "checksum quote 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9949cfe66888ffe1d53e6ec9d9f3b70714083854be20fd5e271b232a017401e8"
 "checksum rand 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "15a732abf9d20f0ad8eeb6f909bf6868722d9a06e1e50802b6a70351f40b4eb1"
 "checksum rand 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "eba5f8cb59cc50ed56be8880a5c7b496bfd9bd26394e176bc67884094145c2c5"
@@ -1404,6 +1517,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum regex-syntax 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "8f1ac0f60d675cc6cf13a20ec076568254472551051ad5dd050364d70671bf6b"
 "checksum remove_dir_all 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3488ba1b9a2084d38645c4c08276a1752dcbf2c7130d74f1569681ad5d2799c5"
 "checksum rust-crypto 0.2.36 (registry+https://github.com/rust-lang/crates.io-index)" = "f76d05d3993fd5f4af9434e8e436db163a12a9d40e1a58a726f27a01dfd12a2a"
+"checksum rustbreak 2.0.0-rc2 (registry+https://github.com/rust-lang/crates.io-index)" = "676cb04876f4391d5d2f2c9029d10cfe5fe28ad625ad8460531bed1191477083"
+"checksum rustc-demangle 0.1.8 (registry+https://github.com/rust-lang/crates.io-index)" = "76d7ba1feafada44f2d38eed812bd2489a03c0f5abb975799251518b68848649"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
 "checksum safemem 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "e27a8b19b835f7aea908818e871f5cc3a5a186550c30773be987e155e8163d8f"
 "checksum schannel 0.1.12 (registry+https://github.com/rust-lang/crates.io-index)" = "85fd9df495640643ad2d00443b3d78aae69802ad488debab4f1dd52fc1806ade"
@@ -1421,7 +1536,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum sqlite3-src 0.2.9 (registry+https://github.com/rust-lang/crates.io-index)" = "46e0bc115b563b1ee6c665ef895b56bf488522f57d1c6571887547c57c8f5a88"
 "checksum sqlite3-sys 0.12.0 (registry+https://github.com/rust-lang/crates.io-index)" = "71fec807a1534bd13eeaaec396175d67c79bdc68df55e18a452726ec62a8fb08"
 "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
+"checksum syn 0.11.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d3b891b9015c88c576343b9b3e41c2c11a51c219ef067b264bd9c8aa9b441dad"
 "checksum syn 0.13.10 (registry+https://github.com/rust-lang/crates.io-index)" = "77961dcdac942fa8bc033c16f3a790b311c8a27d00811b878ebd8cf9b7ba39d5"
+"checksum synom 0.11.3 (registry+https://github.com/rust-lang/crates.io-index)" = "a393066ed9010ebaed60b9eafa373d4b1baac186dd7e008555b0f702b51945b6"
+"checksum synstructure 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a761d12e6d8dcb4dcf952a7a89b475e3a9d69e4a69307e01a470977642914bd"
 "checksum tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8"
 "checksum term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e6b677dd1e8214ea1ef4297f85dbcbed8e8cdddb561040cc998ca2551c37561"
 "checksum termion 1.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "689a3bdfaab439fd92bc87df5c4c78417d3cbe537487274e9b0b2dce76e92096"
@@ -1447,6 +1565,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum unicode-bidi 0.3.4 (registry+https://github.com/rust-lang/crates.io-index)" = "49f2bd0c6468a8230e1db229cff8029217cf623c767ea5d60bfbd42729ea54d5"
 "checksum unicode-normalization 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "6a0180bc61fc5a987082bfa111f4cc95c4caff7f9799f3e46df09163a937aa25"
 "checksum unicode-width 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "bf3a113775714a22dcb774d8ea3655c53a32debae63a063acc00a91cc586245f"
+"checksum unicode-xid 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "8c1f860d7d29cf02cb2f3f359fd35991af3d30bac52c57d265a3c461074cb4dc"
 "checksum unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
 "checksum unreachable 1.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "382810877fe448991dfc7f0dd6e3ae5d58088fd0ea5e35189655f84e6814fa56"
 "checksum url 1.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f808aadd8cfec6ef90e4a14eb46f24511824d1ac596b9682703c87056c8678b7"
diff --git a/blockchain/Cargo.toml b/blockchain/Cargo.toml
index 3ae9f21190bb69bfeab67a1699c2c7c4e56a337d..416cd092d5c4e3ecd1194f38baa53dffce8116ee 100644
--- a/blockchain/Cargo.toml
+++ b/blockchain/Cargo.toml
@@ -18,9 +18,12 @@ duniter-module = { path = "../module" }
 duniter-network = { path = "../network" }
 duniter-wotb = { path = "../wotb" }
 log = "0.4.1"
+num_cpus = "1.8.0"
 pbr = "1.0.1"
 rand = "0.4.2"
+rustbreak = {version = "2.0.0-rc2", features = ["bin_enc"]}
 serde = "1.0.57"
 serde_derive = "1.0.57"
 serde_json = "1.0.17"
-sqlite = "0.23.9"
\ No newline at end of file
+sqlite = "0.23.9"
+threadpool = "1.7.1"
\ No newline at end of file
diff --git a/blockchain/apply_valid_block.rs b/blockchain/apply_valid_block.rs
new file mode 100644
index 0000000000000000000000000000000000000000..204148a3e13447fcc50232e7a3d738f3c25e03e5
--- /dev/null
+++ b/blockchain/apply_valid_block.rs
@@ -0,0 +1,247 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use duniter_crypto::keys::*;
+use duniter_dal::block::DALBlock;
+use duniter_dal::sources::SourceAmount;
+use duniter_dal::writers::requests::*;
+use duniter_dal::ForkId;
+use duniter_documents::blockchain::v10::documents::transaction::{TxAmount, TxBase};
+use duniter_documents::blockchain::v10::documents::BlockDocument;
+use duniter_documents::blockchain::Document;
+use duniter_documents::BlockId;
+use duniter_wotb::data::NewLinkResult;
+use duniter_wotb::{NodeId, WebOfTrust};
+use std::collections::HashMap;
+
+#[derive(Debug)]
+/// Stores all the write queries to apply to the databases in order to "apply" the block
+pub struct ValidBlockApplyReqs(
+    pub BlocksDBsWriteQuery,
+    pub Vec<WotsDBsWriteQuery>,
+    pub Vec<CurrencyDBsWriteQuery>,
+);
+
+#[derive(Debug, Copy, Clone)]
+/// Error returned by apply_valid_block()
+pub enum ApplyValidBlockError {
+    ExcludeUnknowNodeId(),
+    RevokeUnknowNodeId(),
+}
+
+pub fn apply_valid_block<W: WebOfTrust + Sync>(
+    block: &BlockDocument,
+    wot_index: &mut HashMap<PubKey, NodeId>,
+    wot: &mut W,
+    expire_certs: &HashMap<(NodeId, NodeId), BlockId>,
+    old_fork_id: Option<ForkId>,
+) -> Result<ValidBlockApplyReqs, ApplyValidBlockError> {
+    debug!(
+        "BlockchainModule : apply_valid_block({})",
+        block.blockstamp()
+    );
+    let mut wot_dbs_requests = Vec::new();
+    let mut currency_dbs_requests = Vec::new();
+    let current_blockstamp = block.blockstamp();
+    let mut identities = HashMap::with_capacity(block.identities.len());
+    for identity in block.identities.clone() {
+        identities.insert(identity.issuers()[0], identity);
+    }
+    for joiner in block.joiners.clone() {
+        let pubkey = joiner.clone().issuers()[0];
+        if let Some(idty_doc) = identities.get(&pubkey) {
+            // Newcomer
+            let wotb_id = NodeId(wot.size());
+            wot.add_node();
+            wot_index.insert(pubkey, wotb_id);
+            wot_dbs_requests.push(WotsDBsWriteQuery::CreateIdentity(
+                wotb_id,
+                current_blockstamp,
+                block.median_time,
+                Box::new(idty_doc.clone()),
+                joiner.blockstamp().id,
+            ));
+        } else {
+            // Renewer
+            let wotb_id = wot_index[&joiner.issuers()[0]];
+            wot.set_enabled(wotb_id, true);
+            wot_dbs_requests.push(WotsDBsWriteQuery::RenewalIdentity(
+                joiner.issuers()[0],
+                wotb_id,
+                block.median_time,
+                joiner.blockstamp().id,
+            ));
+        }
+    }
+    for active in block.actives.clone() {
+        let pubkey = active.issuers()[0];
+        if !identities.contains_key(&pubkey) {
+            let wotb_id = wot_index[&pubkey];
+            wot.set_enabled(wotb_id, true);
+            wot_dbs_requests.push(WotsDBsWriteQuery::RenewalIdentity(
+                pubkey,
+                wotb_id,
+                block.median_time,
+                active.blockstamp().id,
+            ));
+        }
+    }
+    for exclusion in block.excluded.clone() {
+        let wot_id = if let Some(wot_id) = wot_index.get(&exclusion) {
+            wot_id
+        } else {
+            return Err(ApplyValidBlockError::ExcludeUnknowNodeId());
+        };
+        wot.set_enabled(*wot_id, false);
+        wot_dbs_requests.push(WotsDBsWriteQuery::ExcludeIdentity(
+            exclusion,
+            block.blockstamp(),
+        ));
+    }
+    for revocation in block.revoked.clone() {
+        let compact_revoc = revocation.to_compact_document();
+        let wot_id = if let Some(wot_id) = wot_index.get(&compact_revoc.issuer) {
+            wot_id
+        } else {
+            return Err(ApplyValidBlockError::RevokeUnknowNodeId());
+        };
+        wot.set_enabled(*wot_id, false);
+        wot_dbs_requests.push(WotsDBsWriteQuery::RevokeIdentity(
+            compact_revoc.issuer,
+            block.blockstamp(),
+        ));
+    }
+    for certification in block.certifications.clone() {
+        trace!("stack_up_valid_block: apply cert...");
+        let compact_cert = certification.to_compact_document();
+        let wotb_node_from = wot_index[&compact_cert.issuer];
+        let wotb_node_to = wot_index[&compact_cert.target];
+        let result = wot.add_link(wotb_node_from, wotb_node_to);
+        match result {
+            NewLinkResult::Ok(_) => {}
+            _ => panic!(
+                "Fail to add_link {}->{} : {:?}",
+                wotb_node_from.0, wotb_node_to.0, result
+            ),
+        }
+        wot_dbs_requests.push(WotsDBsWriteQuery::CreateCert(
+            compact_cert.issuer,
+            wotb_node_from,
+            wotb_node_to,
+            compact_cert.block_number,
+            block.median_time,
+        ));
+        trace!("stack_up_valid_block: apply cert...success.");
+    }
+    for ((source, target), created_block_id) in expire_certs {
+        wot_dbs_requests.push(WotsDBsWriteQuery::ExpireCert(
+            *source,
+            *target,
+            *created_block_id,
+        ));
+    }
+    if let Some(du_amount) = block.dividend {
+        if du_amount > 0 {
+            let members_wot_ids = wot.get_enabled();
+            let mut members_pubkeys = Vec::new();
+            for (pubkey, wotb_id) in wot_index {
+                if members_wot_ids.contains(wotb_id) {
+                    members_pubkeys.push(*pubkey);
+                }
+            }
+            currency_dbs_requests.push(CurrencyDBsWriteQuery::CreateDU(
+                SourceAmount(TxAmount(du_amount as isize), TxBase(block.unit_base)),
+                block.number,
+                members_pubkeys,
+            ));
+        }
+    }
+    for tx in block.transactions.clone() {
+        currency_dbs_requests.push(CurrencyDBsWriteQuery::WriteTx(Box::new(tx.unwrap_doc())));
+    }
+
+    /*// Calculate the state of the wot
+        if !wot_events.is_empty() && verif_level != SyncVerificationLevel::FastSync() {
+            // Calculate sentries_count
+            let sentries_count = wot.get_sentries(3).len();
+            // Calculate average_density
+            let average_density = calculate_average_density::<W>(&wot);
+            let sentry_requirement =
+                get_sentry_requirement(block.members_count, G1_PARAMS.step_max);
+            // Calculate distances and connectivities
+            let (average_distance, distances, average_connectivity, connectivities) =
+                compute_distances::<W>(
+                    &wot,
+                    sentry_requirement,
+                    G1_PARAMS.step_max,
+                    G1_PARAMS.x_percent,
+                );
+            // Calculate centralities and average_centrality
+            let centralities =
+                calculate_distance_stress_centralities::<W>(&wot, G1_PARAMS.step_max);
+            let average_centrality =
+                (centralities.iter().sum::<u64>() as f64 / centralities.len() as f64) as usize;
+            // Register the state of the wot
+            let max_connectivity = currency_params.max_connectivity();
+            duniter_dal::register_wot_state(
+                db,
+                &WotState {
+                    block_number: block.number.0,
+                    block_hash: block.hash.expect("Fail to get block hash").to_string(),
+                    sentries_count,
+                    average_density,
+                    average_distance,
+                    distances,
+                    average_connectivity,
+                    connectivities: connectivities
+                        .iter()
+                        .map(|c| {
+                            if *c > max_connectivity {
+                                max_connectivity
+                            } else {
+                                *c
+                            }
+                        })
+                        .collect(),
+                    average_centrality,
+                    centralities,
+                },
+            );
+        }*/
+    // Create DALBlock
+    let mut block = block.clone();
+    let previous_blockstamp = block.previous_blockstamp();
+    let block_hash = block
+        .hash
+        .expect("Try to get hash of an uncompleted or reduce block !");
+    block.reduce();
+    let dal_block = DALBlock {
+        block,
+        fork_id: ForkId(0),
+        isolate: false,
+        expire_certs: Some(expire_certs.clone()),
+    };
+    // Return DBs requests
+    Ok(ValidBlockApplyReqs(
+        BlocksDBsWriteQuery::WriteBlock(
+            Box::new(dal_block),
+            old_fork_id,
+            previous_blockstamp,
+            block_hash,
+        ),
+        wot_dbs_requests,
+        currency_dbs_requests,
+    ))
+}
diff --git a/blockchain/check_and_apply_block.rs b/blockchain/check_and_apply_block.rs
new file mode 100644
index 0000000000000000000000000000000000000000..0a426a8ebba69861397c5445dcf7f40a2beec8e6
--- /dev/null
+++ b/blockchain/check_and_apply_block.rs
@@ -0,0 +1,187 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use std::collections::HashMap;
+
+use apply_valid_block::*;
+use duniter_crypto::keys::*;
+use duniter_dal::block::DALBlock;
+use duniter_dal::*;
+use duniter_documents::blockchain::Document;
+use duniter_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
+use duniter_network::NetworkBlock;
+use *;
+
+#[derive(Debug, Copy, Clone)]
+pub enum BlockError {
+    BlockVersionNotSupported(),
+    CompletedBlockError(CompletedBlockError),
+    DALError(DALError),
+    //CheckBlockError(),
+    ApplyValidBlockError(ApplyValidBlockError),
+    NoForkAvailable(),
+    UnknowError(),
+}
+
+impl From<CompletedBlockError> for BlockError {
+    fn from(err: CompletedBlockError) -> Self {
+        BlockError::CompletedBlockError(err)
+    }
+}
+
+impl From<DALError> for BlockError {
+    fn from(err: DALError) -> Self {
+        BlockError::DALError(err)
+    }
+}
+
+impl From<ApplyValidBlockError> for BlockError {
+    fn from(err: ApplyValidBlockError) -> Self {
+        BlockError::ApplyValidBlockError(err)
+    }
+}
+
+pub fn check_and_apply_block<W: WebOfTrust + Sync>(
+    blocks_databases: &BlocksV10DBs,
+    certs_db: &BinFileDB<CertsExpirV10Datas>,
+    block: &Block,
+    current_blockstamp: &Blockstamp,
+    wotb_index: &mut HashMap<PubKey, NodeId>,
+    wot: &mut W,
+    forks_states: &[ForkStatus],
+) -> Result<ValidBlockApplyReqs, BlockError> {
+    let (block_doc, already_have_block) = match *block {
+        Block::NetworkBlock(network_block) => match *network_block {
+            NetworkBlock::V10(ref network_block_v10) => {
+                let already_have_block = DALBlock::already_have_block(
+                    &blocks_databases.blockchain_db,
+                    &blocks_databases.forks_blocks_db,
+                    network_block_v10.uncompleted_block_doc.blockstamp(),
+                )?;
+                (&network_block_v10.uncompleted_block_doc, already_have_block)
+            }
+            _ => return Err(BlockError::BlockVersionNotSupported()),
+        },
+        Block::LocalBlock(block_doc) => (block_doc, true),
+    };
+    if (block_doc.number.0 == current_blockstamp.id.0 + 1
+        && block_doc.previous_hash.to_string() == current_blockstamp.hash.0.to_string())
+        || (block_doc.number.0 == 0 && *current_blockstamp == Blockstamp::default())
+    {
+        debug!(
+            "stackable_block : block {} chainable !",
+            block_doc.blockstamp()
+        );
+        // Detect expire_certs
+        let blocks_expiring = Vec::with_capacity(0);
+        let expire_certs = duniter_dal::certs::find_expire_certs(certs_db, blocks_expiring)?;
+        // Try stack up block
+        let mut old_fork_id = None;
+        let block_doc = match *block {
+            Block::NetworkBlock(network_block) => complete_network_block(network_block)?,
+            Block::LocalBlock(block_doc) => {
+                old_fork_id = duniter_dal::block::get_fork_id_of_blockstamp(
+                    &blocks_databases.forks_blocks_db,
+                    &block_doc.blockstamp(),
+                )?;
+                block_doc.clone()
+            }
+        };
+        return Ok(apply_valid_block(
+            &block_doc,
+            wotb_index,
+            wot,
+            &expire_certs,
+            old_fork_id,
+        )?);
+    } else if !already_have_block
+        && (block_doc.number.0 >= current_blockstamp.id.0
+            || (current_blockstamp.id.0 - block_doc.number.0) < 100)
+    {
+        debug!(
+            "stackable_block : block {} not chainable, store this for future !",
+            block_doc.blockstamp()
+        );
+        let (fork_id, new_fork) = DALBlock::assign_fork_to_new_block(
+            &blocks_databases.forks_db,
+            &PreviousBlockstamp {
+                id: BlockId(block_doc.number.0 - 1),
+                hash: BlockHash(block_doc.previous_hash),
+            },
+            &block_doc
+                .hash
+                .expect("Try to get hash of an uncompleted or reduce block"),
+        )?;
+        if let Some(fork_id) = fork_id {
+            let mut isolate = true;
+            let fork_state = if new_fork {
+                ForkStatus::Isolate()
+            } else {
+                forks_states[fork_id.0]
+            };
+            match fork_state {
+                ForkStatus::Stackable(_) | ForkStatus::RollBack(_, _) | ForkStatus::TooOld(_) => {
+                    isolate = false
+                }
+                _ => {}
+            }
+            match *block {
+                Block::NetworkBlock(network_block) => {
+                    // Completed network block
+                    let block_doc = complete_network_block(network_block)?;
+                    let dal_block = DALBlock {
+                        fork_id,
+                        isolate,
+                        block: block_doc,
+                        expire_certs: None,
+                    };
+                    duniter_dal::writers::block::write(
+                        &blocks_databases.blockchain_db,
+                        &blocks_databases.forks_db,
+                        &blocks_databases.forks_blocks_db,
+                        &dal_block,
+                        None,
+                        false,
+                    ).expect("duniter_dal::writers::block::write() : DALError")
+                }
+                Block::LocalBlock(block_doc) => {
+                    let old_fork_id = None;
+                    let dal_block = DALBlock {
+                        fork_id,
+                        isolate,
+                        block: block_doc.clone(),
+                        expire_certs: None,
+                    };
+                    duniter_dal::writers::block::write(
+                        &blocks_databases.blockchain_db,
+                        &blocks_databases.forks_db,
+                        &blocks_databases.forks_blocks_db,
+                        &dal_block,
+                        old_fork_id,
+                        false,
+                    ).expect("duniter_dal::writers::block::write() : DALError")
+                }
+            };
+        } else {
+            return Err(BlockError::NoForkAvailable());
+        }
+    } else {
+        debug!(
+            "stackable_block : block {} not chainable and already stored !",
+            block_doc.blockstamp()
+        );
+    }
+    Err(BlockError::UnknowError())
+}
diff --git a/blockchain/clippy.toml b/blockchain/clippy.toml
index 2bd93f5e4e2163651a3eaa626e2f5c4ffa201edf..1c6d687e2556b27948a8036e154546fcebec87eb 100644
--- a/blockchain/clippy.toml
+++ b/blockchain/clippy.toml
@@ -1 +1 @@
-cyclomatic-complexity-threshold = 36
\ No newline at end of file
+cyclomatic-complexity-threshold = 37
\ No newline at end of file
diff --git a/blockchain/dbex.rs b/blockchain/dbex.rs
new file mode 100644
index 0000000000000000000000000000000000000000..a1efe2b42252ea2364e5030fb23b999917cb81c7
--- /dev/null
+++ b/blockchain/dbex.rs
@@ -0,0 +1,190 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use duniter_crypto::keys::*;
+use duniter_dal::identity::DALIdentity;
+use duniter_documents::blockchain::v10::documents::transaction::*;
+use duniter_documents::Blockstamp;
+use duniter_module::DuniterConf;
+use duniter_wotb::data::rusty::RustyWebOfTrust;
+use std::time::*;
+use *;
+
+#[derive(Debug, Clone)]
+/// Query for wot databases explorer
+pub enum DBExWotQuery {
+    /// Ask for member data
+    MemberDatas(String),
+}
+
+#[derive(Debug, Clone)]
+/// Query for tx databases explorer
+pub enum DBExTxQuery {
+    /// Ask for the balance of an address (pubkey or uid)
+    Balance(String),
+}
+
+#[derive(Debug, Clone)]
+/// Query for databases explorer
+pub enum DBExQuery {
+    /// Wot query
+    WotQuery(DBExWotQuery),
+    /// Tx query
+    TxQuery(DBExTxQuery),
+}
+
+pub fn dbex(conf: &DuniterConf, query: &DBExQuery) {
+    match *query {
+        DBExQuery::WotQuery(ref wot_query) => dbex_wot(conf, wot_query),
+        DBExQuery::TxQuery(ref tx_query) => dbex_tx(conf, tx_query),
+    }
+}
+
+pub fn dbex_tx(conf: &DuniterConf, query: &DBExTxQuery) {
+    // Get db path
+    let db_path = duniter_conf::get_blockchain_db_path(conf.profile().as_str(), &conf.currency());
+
+    // Open databases
+    let load_dbs_begin = SystemTime::now();
+    //let blocks_databases = BlocksV10DBs::open(&db_path, false);
+    let currency_databases = CurrencyV10DBs::<FileBackend>::open(&db_path);
+    let wot_databases = WotsV10DBs::open(&db_path, false);
+    let load_dbs_duration = SystemTime::now()
+        .duration_since(load_dbs_begin)
+        .expect("duration_since error !");
+    println!(
+        "Databases loaded in {}.{:03} seconds.",
+        load_dbs_duration.as_secs(),
+        load_dbs_duration.subsec_nanos() / 1_000_000
+    );
+    let req_process_begin = SystemTime::now();
+
+    match *query {
+        DBExTxQuery::Balance(ref address_str) => {
+            let pubkey = if let Ok(ed25519_pubkey) = ed25519::PublicKey::from_base58(address_str) {
+                PubKey::Ed25519(ed25519_pubkey)
+            } else if let Some(pubkey) = duniter_dal::identity::get_pubkey_from_uid(
+                &wot_databases.identities_db,
+                address_str,
+            ).expect("get_uid : DALError")
+            {
+                pubkey
+            } else {
+                println!("This address doesn't exist !");
+                return;
+            };
+            let address =
+                TransactionOutputConditionGroup::Single(TransactionOutputCondition::Sig(pubkey));
+            let address_balance = duniter_dal::balance::get_address_balance(
+                &currency_databases.balances_db,
+                &address,
+            ).expect("get_address_balance : DALError")
+                .expect("Address not found in balances DB.");
+            println!(
+                "Balance={},{} Ğ1",
+                (address_balance.0).0 / 100,
+                (address_balance.0).0 % 100
+            );
+        }
+    }
+
+    let req_process_duration = SystemTime::now()
+        .duration_since(req_process_begin)
+        .expect("duration_since error");
+    println!(
+        "Request processed in  {}.{:06} seconds.",
+        req_process_duration.as_secs(),
+        req_process_duration.subsec_nanos() / 1_000
+    );
+}
+
+pub fn dbex_wot(conf: &DuniterConf, query: &DBExWotQuery) {
+    // Get db path
+    let db_path = duniter_conf::get_blockchain_db_path(conf.profile().as_str(), &conf.currency());
+
+    // Open databases
+    let load_dbs_begin = SystemTime::now();
+    //let blocks_databases = BlocksV10DBs::open(&db_path, false);
+    let wot_databases = WotsV10DBs::open(&db_path, false);
+    let load_dbs_duration = SystemTime::now()
+        .duration_since(load_dbs_begin)
+        .expect("duration_since error");
+    println!(
+        "Databases loaded in {}.{:03} seconds.",
+        load_dbs_duration.as_secs(),
+        load_dbs_duration.subsec_nanos() / 1_000_000
+    );
+    let req_process_begin = SystemTime::now();
+
+    // get wot_index
+    let wot_index = DALIdentity::get_wotb_index(&wot_databases.identities_db).expect("DALError");
+
+    // get wot_reverse_index
+    let wot_reverse_index: HashMap<NodeId, &PubKey> =
+        wot_index.iter().map(|(p, id)| (*id, p)).collect();
+
+    // Get wot path
+    let wot_path = duniter_conf::get_wot_path(conf.profile().clone().to_string(), &conf.currency());
+
+    // Open wot file
+    let (wot, wot_blockstamp): (RustyWebOfTrust, Blockstamp) =
+        open_wot_file(&WOT_FILE_FORMATER, &wot_path, *INFINITE_SIG_STOCK);
+
+    // Print wot blockstamp
+    println!("Wot : Current blockstamp = {}.", wot_blockstamp);
+
+    // Print members count
+    let members_count = wot.get_enabled().len();
+    println!(" Members count = {}.", members_count);
+
+    match *query {
+        DBExWotQuery::MemberDatas(ref uid) => {
+            if let Some(pubkey) =
+                duniter_dal::identity::get_pubkey_from_uid(&wot_databases.identities_db, uid)
+                    .expect("get_pubkey_from_uid() : DALError !")
+            {
+                let wot_id = wot_index[&pubkey];
+                println!(
+                    "{} : wot_id={}, pubkey={}.",
+                    uid,
+                    wot_id.0,
+                    pubkey.to_string()
+                );
+                let sources = wot
+                    .get_links_source(wot_id)
+                    .expect("Fail to get links source !");
+                println!("Certifiers : {}", sources.len());
+                for (i, source) in sources.iter().enumerate() {
+                    let source_uid = duniter_dal::identity::get_uid(
+                        &wot_databases.identities_db,
+                        *(wot_reverse_index[&source]),
+                    ).expect("get_uid() : DALError")
+                        .expect("Not found source_uid !");
+                    println!("{}: {}", i + 1, source_uid);
+                }
+            } else {
+                println!("Uid \"{}\" not found !", uid);
+            }
+        }
+    }
+    let req_process_duration = SystemTime::now()
+        .duration_since(req_process_begin)
+        .expect("duration_since error");
+    println!(
+        "Request processed in  {}.{:06} seconds.",
+        req_process_duration.as_secs(),
+        req_process_duration.subsec_nanos() / 1_000
+    );
+}
diff --git a/blockchain/lib.rs b/blockchain/lib.rs
index e4be85fabceebdaf3142ab5eafa54779b917c7dd..a676c924789855f9951210589febc859696e5ef4 100644
--- a/blockchain/lib.rs
+++ b/blockchain/lib.rs
@@ -34,12 +34,16 @@ extern crate duniter_message;
 extern crate duniter_module;
 extern crate duniter_network;
 extern crate duniter_wotb;
+extern crate rustbreak;
 extern crate serde;
 extern crate serde_json;
 extern crate sqlite;
 
-mod stack_up_block;
+mod apply_valid_block;
+mod check_and_apply_block;
+mod dbex;
 mod sync;
+mod ts_parsers;
 
 use std::collections::HashMap;
 use std::env;
@@ -49,30 +53,34 @@ use std::str;
 use std::sync::mpsc;
 use std::time::{Duration, SystemTime, UNIX_EPOCH};
 
-use self::stack_up_block::try_stack_up_completed_block;
+use apply_valid_block::*;
+use check_and_apply_block::*;
+pub use dbex::{DBExQuery, DBExTxQuery, DBExWotQuery};
 use duniter_crypto::keys::*;
-use duniter_dal::block::{DALBlock, WotEvent};
-use duniter_dal::constants::MAX_FORKS;
+use duniter_dal::block::DALBlock;
+use duniter_dal::currency_params::CurrencyParameters;
 use duniter_dal::dal_event::DALEvent;
 use duniter_dal::dal_requests::{DALReqBlockchain, DALRequest, DALResBlockchain, DALResponse};
 use duniter_dal::identity::DALIdentity;
-use duniter_dal::parsers::memberships::MembershipParseError;
-use duniter_dal::writers::requests::DBWriteRequest;
-use duniter_dal::{DuniterDB, ForkState};
+use duniter_dal::writers::requests::BlocksDBsWriteQuery;
+use duniter_dal::*;
 use duniter_documents::blockchain::v10::documents::{BlockDocument, V10Document};
-use duniter_documents::blockchain::{BlockchainProtocol, Document, VerificationResult};
-use duniter_documents::{BlockHash, BlockId, Blockstamp};
+use duniter_documents::blockchain::{BlockchainProtocol, Document};
+use duniter_documents::*;
 use duniter_message::DuniterMessage;
 use duniter_module::*;
 use duniter_network::{
     NetworkBlock, NetworkDocument, NetworkEvent, NetworkRequest, NetworkResponse, NodeFullId,
 };
 use duniter_wotb::data::rusty::RustyWebOfTrust;
-use duniter_wotb::operations::file::{BinaryFileFormater, FileFormater};
+use duniter_wotb::operations::file::BinaryFileFormater;
 use duniter_wotb::{NodeId, WebOfTrust};
+use rustbreak::backend::FileBackend;
 
 /// The blocks are requested by packet groups. This constant sets the block packet size.
 pub static CHUNK_SIZE: &'static u32 = &50;
+/// Necessary to instantiate the wot object before knowing the currency parameters
+pub static INFINITE_SIG_STOCK: &'static usize = &4_000_000_000;
 /// The blocks are requested by packet groups. This constant sets the number of packets per group.
 pub static MAX_BLOCKS_REQUEST: &'static u32 = &500;
 /// There can be several implementations of the wot file backup, this constant fixes the implementation used by the blockchain module.
@@ -87,15 +95,23 @@ pub struct BlockchainModule {
     pub conf_profile: String,
     /// Currency
     pub currency: Currency,
-    /// Database containing the blockchain
-    pub db: DuniterDB,
+    /// Currency parameters
+    currency_params: CurrencyParameters,
+    /// Wots Databases
+    pub wot_databases: WotsV10DBs,
+    /// Blocks Databases
+    pub blocks_databases: BlocksV10DBs,
+    /// Currency databases
+    currency_databases: CurrencyV10DBs<FileBackend>,
     /// The block under construction
     pub pending_block: Option<Box<BlockDocument>>,
+    /// Current state of all forks
+    pub forks_states: Vec<ForkStatus>,
 }
 
 #[derive(Debug, Clone)]
 /// Block
-enum Block<'a> {
+pub enum Block<'a> {
     /// Block coming from Network
     NetworkBlock(&'a NetworkBlock),
     /// Block coming from local database
@@ -115,24 +131,14 @@ pub enum SyncVerificationLevel {
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 /// Error returned by function complete_network_block()
 pub enum CompletedBlockError {
-    /// MembershipParseError
-    MembershipParseError(MembershipParseError),
     /// Invalid block inner hash
     InvalidInnerHash(),
-    /// Invalid block signature
-    InvalidSig(),
     /// Invalid block hash
     InvalidHash(),
     /// Invalid block version
     InvalidVersion(),
 }
 
-impl From<MembershipParseError> for CompletedBlockError {
-    fn from(e: MembershipParseError) -> CompletedBlockError {
-        CompletedBlockError::MembershipParseError(e)
-    }
-}
-
 impl BlockchainModule {
     /// Return module identifier
     pub fn id() -> ModuleId {
@@ -142,38 +148,63 @@ impl BlockchainModule {
     pub fn load_blockchain_conf(
         conf: &DuniterConf,
         _keys: RequiredKeysContent,
-        sync: bool,
     ) -> BlockchainModule {
         // Get db path
-        let db_path = duniter_conf::get_db_path(conf.profile().as_str(), &conf.currency(), sync);
+        let db_path =
+            duniter_conf::get_blockchain_db_path(conf.profile().as_str(), &conf.currency());
+
+        // Open databases
+        let blocks_databases = BlocksV10DBs::open(&db_path, false);
+        let wot_databases = WotsV10DBs::open(&db_path, false);
+        let currency_databases = CurrencyV10DBs::<FileBackend>::open(&db_path);
+
+        // Get current blockstamp
+        let current_blockstamp = duniter_dal::block::get_current_blockstamp(&blocks_databases)
+            .expect("Fatal error : fail to read Blockchain DB !");
 
-        // Open duniter database
-        let db = duniter_dal::open_db(&db_path, false).unwrap();
+        // Get currency parameters
+        let currency_params = duniter_dal::currency_params::get_currency_params(
+            &blocks_databases.blockchain_db,
+        ).expect("Fatal error : fail to read Blockchain DB !")
+            .unwrap_or_default();
+
+        // Get forks states
+        let forks_states = if let Some(current_blockstamp) = current_blockstamp {
+            duniter_dal::block::get_forks(&blocks_databases.forks_db, current_blockstamp)
+                .expect("Fatal error : fail to read Forks DB !")
+        } else {
+            vec![]
+        };
 
         // Instanciate BlockchainModule
         BlockchainModule {
             followers: Vec::new(),
             conf_profile: conf.profile(),
             currency: conf.currency(),
-            db,
+            currency_params,
+            blocks_databases,
+            wot_databases,
+            currency_databases,
             pending_block: None,
+            forks_states,
         }
     }
+    /// Databases explorer
+    pub fn dbex(conf: &DuniterConf, req: &DBExQuery) {
+        dbex::dbex(conf, req);
+    }
     /// Synchronize blockchain from a duniter-ts database
     pub fn sync_ts(conf: &DuniterConf, ts_profile: &str, cautious: bool) {
-        // Open local blockchain db
-        let db_path = duniter_conf::get_db_path(&conf.profile(), &conf.currency(), false);
-        let db = duniter_dal::open_db(&db_path, false).expect(&format!(
-            "Fatal error : fail to open blockchain database as path : {} !",
-            db_path.as_path().to_str().unwrap()
-        ));
+        // Get databases path
+        let db_path = duniter_conf::get_blockchain_db_path(&conf.profile(), &conf.currency());
+        // Open blocks dbs
+        let blocks_dbs = BlocksV10DBs::open(&db_path, false);
         // Get local current blockstamp
         debug!("Get local current blockstamp...");
-        let current_block: Option<BlockDocument> = duniter_dal::new_get_current_block(&db);
-        let current_blockstamp = match current_block.clone() {
-            Some(block) => block.blockstamp(),
-            None => Blockstamp::default(),
-        };
+        let current_blockstamp: Blockstamp = duniter_dal::block::get_current_blockstamp(
+            &blocks_dbs,
+        ).expect("ForksV10DB : RustBreakError !")
+            .unwrap_or_default();
         debug!("Success to get local current blockstamp.");
         // get db_ts_path
         let mut db_ts_path = match env::home_dir() {
@@ -270,42 +301,94 @@ impl BlockchainModule {
         }
     }
     fn receive_network_documents<W: WebOfTrust + Sync>(
-        &self,
+        &mut self,
         network_documents: &[NetworkDocument],
         current_blockstamp: &Blockstamp,
-        forks: &mut Vec<ForkState>,
-        wotb_index: &HashMap<PubKey, NodeId>,
-        wot: &W,
-    ) -> (Blockstamp, Vec<WotEvent>) {
+        wotb_index: &mut HashMap<PubKey, NodeId>,
+        wot: &mut W,
+    ) -> Blockstamp {
         let mut blockchain_documents = Vec::new();
         let mut current_blockstamp = *current_blockstamp;
-        let mut wot_events = Vec::new();
+        let mut save_blocks_dbs = false;
+        let mut save_wots_dbs = false;
+        let mut save_currency_dbs = false;
         for network_document in network_documents {
             match *network_document {
                 NetworkDocument::Block(ref network_block) => {
-                    let (success, _new_forks, mut new_wot_events) = self.apply_block(
+                    match check_and_apply_block(
+                        &self.blocks_databases,
+                        &self.wot_databases.certs_db,
                         &Block::NetworkBlock(network_block),
                         &current_blockstamp,
-                        forks,
                         wotb_index,
                         wot,
-                    );
-                    if success {
-                        current_blockstamp = network_block.blockstamp();
-                        wot_events.append(&mut new_wot_events);
-                        // Update isolates forks
-                        let stackables_forks =
-                            DALBlock::get_stackables_forks(&self.db, &current_blockstamp);
-                        for fork in stackables_forks {
-                            debug!("unisolate fork {}", fork);
-                            if forks.len() > fork {
-                                forks[fork] = ForkState::Full();
-                                DALBlock::unisolate_fork(&self.db, fork);
+                        &self.forks_states,
+                    ) {
+                        Ok(ValidBlockApplyReqs(block_req, wot_dbs_reqs, currency_dbs_reqs)) => {
+                            let block_doc = network_block.uncompleted_block_doc().clone();
+                            // Apply wot dbs requests
+                            wot_dbs_reqs
+                                .iter()
+                                .map(|req| {
+                                    req.apply(&self.wot_databases, &self.currency_params)
+                                            .expect(
+                                            "Fatal error : fail to apply WotsDBsWriteQuery : DALError !",
+                                        )
+                                })
+                                .collect::<()>();
+                            // Apply currency dbs requests
+                            currency_dbs_reqs
+                                .iter()
+                                .map(|req| {
+                                    req.apply(&self.currency_databases).expect(
+                                            "Fatal error : fail to apply CurrencyDBsWriteQuery : DALError !",
+                                        )
+                                })
+                                .collect::<()>();
+                            // Write block
+                            block_req.apply(&self.blocks_databases, false).expect(
+                                "Fatal error : fail to write block in BlocksDBs : DALError !",
+                            );
+                            if let BlocksDBsWriteQuery::WriteBlock(_, _, _, block_hash) = block_req
+                            {
+                                info!("StackUpValidBlock({})", block_doc.number.0);
+                                self.send_event(&DALEvent::StackUpValidBlock(
+                                    Box::new(block_doc.clone()),
+                                    Blockstamp {
+                                        id: block_doc.number,
+                                        hash: block_hash,
+                                    },
+                                ));
+                            }
+                            current_blockstamp = network_block.blockstamp();
+                            // Update forks states
+                            self.forks_states = duniter_dal::block::get_forks(
+                                &self.blocks_databases.forks_db,
+                                current_blockstamp,
+                            ).expect("get_forks() : DALError");
+                            save_blocks_dbs = true;
+                            if !wot_dbs_reqs.is_empty() {
+                                save_wots_dbs = true;
+                            }
+                            if !block_doc.transactions.is_empty()
+                                || (block_doc.dividend.is_some()
+                                    && block_doc.dividend.expect("safe unwrap") > 0)
+                            {
+                                save_currency_dbs = true;
                             }
                         }
-                    } /*else if !new_forks.is_empty() {
-                        forks = new_forks;
-                    }*/
+                        Err(_) => {
+                            warn!(
+                                "RefusedBlock({})",
+                                network_block.uncompleted_block_doc().number.0
+                            );
+                            self.send_event(&DALEvent::RefusedPendingDoc(BlockchainProtocol::V10(
+                                Box::new(V10Document::Block(Box::new(
+                                    network_block.uncompleted_block_doc().clone(),
+                                ))),
+                            )));
+                        }
+                    }
                 }
                 NetworkDocument::Identity(ref doc) => blockchain_documents.push(
                     BlockchainProtocol::V10(Box::new(V10Document::Identity(doc.deref().clone()))),
@@ -333,7 +416,17 @@ impl BlockchainModule {
         if !blockchain_documents.is_empty() {
             self.receive_documents(&blockchain_documents);
         }
-        (current_blockstamp, wot_events)
+        // Save databases
+        if save_blocks_dbs {
+            self.blocks_databases.save_dbs();
+        }
+        if save_wots_dbs {
+            self.wot_databases.save_dbs();
+        }
+        if save_currency_dbs {
+            self.currency_databases.save_dbs(true, true);
+        }
+        current_blockstamp
     }
     fn receive_documents(&self, documents: &[BlockchainProtocol]) {
         debug!("BlockchainModule : receive_documents()");
@@ -348,234 +441,73 @@ impl BlockchainModule {
         }
     }
     fn receive_blocks<W: WebOfTrust + Sync>(
-        &self,
+        &mut self,
         blocks_in_box: &[Box<NetworkBlock>],
         current_blockstamp: &Blockstamp,
-        forks: &[ForkState],
-        wotb_index: &HashMap<PubKey, NodeId>,
-        wot: &W,
-    ) -> (Blockstamp, Vec<ForkState>, Vec<WotEvent>) {
+        wotb_index: &mut HashMap<PubKey, NodeId>,
+        wot: &mut W,
+    ) -> Blockstamp {
         debug!("BlockchainModule : receive_blocks()");
         let blocks: Vec<&NetworkBlock> = blocks_in_box.into_iter().map(|b| b.deref()).collect();
         let mut current_blockstamp = *current_blockstamp;
-        let mut all_wot_events = Vec::new();
-        let mut forks = forks.to_owned();
-        let mut wot_copy: W = wot.clone();
-        let mut wotb_index_copy = wotb_index.clone();
+        let mut save_blocks_dbs = false;
+        let mut save_wots_dbs = false;
+        let mut save_currency_dbs = false;
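+        // For each block: check and apply it, run the returned DB write queries, then flag which databases need saving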
         for block in blocks {
-            let (success, _new_forks, mut wot_events) = self.apply_block::<W>(
-                &Block::NetworkBlock(block),
-                &current_blockstamp,
-                &mut forks,
-                &wotb_index_copy,
-                &wot_copy,
-            );
-            all_wot_events.append(&mut wot_events);
-            if success {
-                current_blockstamp = block.blockstamp();
-            } /*else if !new_forks.is_empty() {
-                forks = new_forks;
-            }*/
-            if !wot_events.is_empty() {
-                for wot_event in wot_events {
-                    match wot_event {
-                        WotEvent::AddNode(pubkey, wotb_id) => {
-                            wot_copy.add_node();
-                            wotb_index_copy.insert(pubkey, wotb_id);
-                        }
-                        WotEvent::RemNode(pubkey) => {
-                            wot_copy.rem_node();
-                            wotb_index_copy.remove(&pubkey);
-                        }
-                        WotEvent::AddLink(source, target) => {
-                            wot_copy.add_link(source, target);
-                        }
-                        WotEvent::RemLink(source, target) => {
-                            wot_copy.rem_link(source, target);
-                        }
-                        WotEvent::EnableNode(wotb_id) => {
-                            wot_copy.set_enabled(wotb_id, true);
-                        }
-                        WotEvent::DisableNode(wotb_id) => {
-                            wot_copy.set_enabled(wotb_id, false);
-                        }
-                    }
-                }
-            }
-        }
-        (current_blockstamp, forks, all_wot_events)
-    }
-    /*fn apply_local_block<W: WebOfTrust>(
-        db: &sqlite::connexion,
-        current_blockstamp: &Blockstamp,
-        wotb_index: &HashMap<PubKey, NodeId>,
-        wot: &W,
-    ) {
-        for f in 1..10 {
-            let potential_next_block = get_block(db, );
-        }
-    }*/
-    fn apply_block<W: WebOfTrust + Sync>(
-        &self,
-        block: &Block,
-        current_blockstamp: &Blockstamp,
-        forks: &mut Vec<ForkState>,
-        wotb_index: &HashMap<PubKey, NodeId>,
-        wot: &W,
-    ) -> (bool, Vec<ForkState>, Vec<WotEvent>) {
-        let mut already_have_block = false;
-        let block_doc = match *block {
-            Block::NetworkBlock(network_block) => match *network_block {
-                NetworkBlock::V10(ref network_block_v10) => {
-                    let (hashs, _) = DALBlock::get_blocks_hashs_all_forks(
-                        &self.db,
-                        &network_block_v10.uncompleted_block_doc.number,
-                    );
-                    for hash in hashs {
-                        if hash == network_block_v10.uncompleted_block_doc.hash.unwrap() {
-                            already_have_block = true;
-                        }
-                    }
-                    &network_block_v10.uncompleted_block_doc
-                }
-                _ => return (false, Vec::with_capacity(0), Vec::with_capacity(0)),
-            },
-            Block::LocalBlock(block_doc) => {
-                already_have_block = true;
-                block_doc
-            }
-        };
-        if (block_doc.number.0 == current_blockstamp.id.0 + 1
-            && block_doc.previous_hash.to_string() == current_blockstamp.hash.0.to_string())
-            || (block_doc.number.0 == 0 && *current_blockstamp == Blockstamp::default())
-        {
-            debug!(
-                "stackable_block : block {} chainable !",
-                block_doc.blockstamp()
-            );
-            let (success, db_requests, wot_events) = match *block {
-                Block::NetworkBlock(network_block) => self.try_stack_up_block(
-                    &network_block,
+            if let Ok(ValidBlockApplyReqs(bc_db_query, wot_dbs_queries, tx_dbs_queries)) =
+                check_and_apply_block::<W>(
+                    &self.blocks_databases,
+                    &self.wot_databases.certs_db,
+                    &Block::NetworkBlock(block),
+                    &current_blockstamp,
                     wotb_index,
                     wot,
-                    SyncVerificationLevel::Cautious(),
-                ),
-                Block::LocalBlock(block_doc) => {
-                    try_stack_up_completed_block(&block_doc, wotb_index, wot)
-                }
-            };
-            debug!(
-                "stackable_block_ : block {} chainable !",
-                block_doc.blockstamp()
-            );
-            if success {
+                    &self.forks_states,
+                ) {
+                current_blockstamp = block.blockstamp();
+                // Update forks states
+                self.forks_states = duniter_dal::block::get_forks(
+                    &self.blocks_databases.forks_db,
+                    current_blockstamp,
+                ).expect("get_forks() : DALError");
                 // Apply db requests
-                db_requests
+                bc_db_query
+                    .apply(&self.blocks_databases, false)
+                    .expect("Fatal error : Fail to apply DBWriteRequest !");
+                wot_dbs_queries
                     .iter()
-                    .map(|req| req.apply(&block_doc.currency, &self.db))
+                    .map(|req| {
+                        req.apply(&self.wot_databases, &self.currency_params)
+                            .expect("Fatal error : Fail to apply WotsDBsWriteRequest !");
+                    })
                     .collect::<()>();
-                info!("StackUpValidBlock({})", block_doc.number.0);
-                self.send_event(&DALEvent::StackUpValidBlock(Box::new(block_doc.clone())));
-                return (true, Vec::with_capacity(0), wot_events);
-            } else {
-                warn!("RefusedBlock({})", block_doc.number.0);
-                self.send_event(&DALEvent::RefusedPendingDoc(BlockchainProtocol::V10(
-                    Box::new(V10Document::Block(Box::new(block_doc.clone()))),
-                )));
-            }
-        } else if !already_have_block
-            && (block_doc.number.0 >= current_blockstamp.id.0
-                || (current_blockstamp.id.0 - block_doc.number.0) < 100)
-        {
-            debug!(
-                "stackable_block : block {} not chainable, store this for future !",
-                block_doc.blockstamp()
-            );
-            //let mut forks = forks.clone();
-            let (fork, fork_state) = match DALBlock::get_block_fork(
-                &self.db,
-                &Blockstamp {
-                    id: BlockId(block_doc.number.0 - 1),
-                    hash: BlockHash(block_doc.previous_hash),
-                },
-            ) {
-                Some(fork) => if forks.len() > fork {
-                    (fork, forks[fork])
-                } else {
-                    panic!(format!("Error: fork n° {} is indicated as non-existent whereas it exists in database !", fork));
-                },
-                None => {
-                    let mut free_fork = 0;
-                    while forks.len() > free_fork && forks[free_fork] != ForkState::Free() {
-                        free_fork += 1;
-                    }
-                    if free_fork >= *MAX_FORKS {
-                        return (false, Vec::with_capacity(0), Vec::with_capacity(0));
-                    }
-                    info!("BlockchainModule : New Isolate fork : {}", free_fork);
-                    if free_fork == forks.len() {
-                        forks.push(ForkState::Isolate());
-                        (forks.len() - 1, ForkState::Isolate())
-                    } else {
-                        forks[free_fork] = ForkState::Isolate();
-                        (free_fork, ForkState::Isolate())
-                    }
+                tx_dbs_queries
+                    .iter()
+                    .map(|req| {
+                        req.apply(&self.currency_databases)
+                            .expect("Fatal error : Fail to apply CurrencyDBsWriteRequest !");
+                    })
+                    .collect::<()>();
+                save_blocks_dbs = true;
+                if !wot_dbs_queries.is_empty() {
+                    save_wots_dbs = true;
                 }
-            };
-            let mut isolate = true;
-            match fork_state {
-                ForkState::Full() => isolate = false,
-                ForkState::Isolate() => {}
-                ForkState::Free() => {
-                    warn!("fork n° {} is indicated as free when it is not !", fork);
-                    forks[fork] = ForkState::Isolate();
+                if !tx_dbs_queries.is_empty() {
+                    save_currency_dbs = true;
                 }
             }
-            match *block {
-                Block::NetworkBlock(network_block) => match *network_block {
-                    NetworkBlock::V10(ref network_block_v10) => {
-                        duniter_dal::writers::block::write_network_block(
-                            &self.db,
-                            &network_block_v10.uncompleted_block_doc,
-                            fork,
-                            isolate,
-                            &network_block_v10.revoked,
-                            &network_block_v10.certifications,
-                        )
-                    }
-                    _ => return (false, Vec::with_capacity(0), Vec::with_capacity(0)),
-                },
-                Block::LocalBlock(block_doc) => {
-                    duniter_dal::writers::block::write(&self.db, &block_doc, fork, isolate)
-                }
-            };
-            return (false, forks.to_vec(), Vec::with_capacity(0));
-        } else {
-            debug!(
-                "stackable_block : block {} not chainable and already stored !",
-                block_doc.blockstamp()
-            );
         }
-        (false, Vec::with_capacity(0), Vec::with_capacity(0))
-    }
-    /// Try stack up block
-    pub fn try_stack_up_block<W: WebOfTrust + Sync>(
-        &self,
-        network_block: &NetworkBlock,
-        wotb_index: &HashMap<PubKey, NodeId>,
-        wot: &W,
-        verif_level: SyncVerificationLevel,
-    ) -> (bool, Vec<DBWriteRequest>, Vec<WotEvent>) {
-        let block_doc = match complete_network_block(
-            &self.currency.to_string(),
-            Some(&self.db),
-            network_block,
-            verif_level,
-        ) {
-            Ok(block_doc) => block_doc,
-            Err(_) => return (false, Vec::with_capacity(0), Vec::with_capacity(0)),
-        };
-        try_stack_up_completed_block::<W>(&block_doc, wotb_index, wot)
+        // Save databases
+        if save_blocks_dbs {
+            self.blocks_databases.save_dbs();
+        }
+        if save_wots_dbs {
+            self.wot_databases.save_dbs();
+        }
+        if save_currency_dbs {
+            self.currency_databases.save_dbs(true, true);
+        }
+        current_blockstamp
     }
     /// Start blockchain module.
     pub fn start_blockchain(&mut self, blockchain_receiver: &mpsc::Receiver<DuniterMessage>) -> () {
@@ -585,32 +517,30 @@ impl BlockchainModule {
         let wot_path = duniter_conf::get_wot_path(self.conf_profile.clone(), &self.currency);
 
         // Get wotb index
-        let mut wotb_index: HashMap<PubKey, NodeId> = DALIdentity::get_wotb_index(&self.db);
+        let mut wotb_index: HashMap<PubKey, NodeId> =
+            DALIdentity::get_wotb_index(&self.wot_databases.identities_db)
+                .expect("Fatal eror : get_wotb_index : Fail to read blockchain databases");
 
         // Open wot file
-        let (mut wot, mut _wot_blockstamp) = duniter_dal::open_wot_file::<
-            RustyWebOfTrust,
-            BinaryFileFormater,
-        >(&WOT_FILE_FORMATER, &wot_path);
-
-        // Get forks
-        let mut forks: Vec<ForkState> = duniter_dal::block::get_forks(&self.db);
-        let mut last_get_stackables_blocks = UNIX_EPOCH;
-        let mut last_request_blocks = UNIX_EPOCH;
+        let (mut wot, mut _wot_blockstamp) = open_wot_file::<RustyWebOfTrust, BinaryFileFormater>(
+            &WOT_FILE_FORMATER,
+            &wot_path,
+            self.currency_params.sig_stock,
+        );
 
         // Get current block
-        let current_block: Option<BlockDocument> = duniter_dal::new_get_current_block(&self.db);
-        let mut current_blockstamp = match current_block.clone() {
-            Some(block) => block.blockstamp(),
-            None => Blockstamp::default(),
-        };
+        let mut current_blockstamp = duniter_dal::block::get_current_blockstamp(
+            &self.blocks_databases,
+        ).expect("Fatal error : fail to read ForksV10DB !")
+            .unwrap_or_default();
 
         // Init datas
+        let mut last_get_stackables_blocks = UNIX_EPOCH;
+        let mut last_request_blocks = UNIX_EPOCH;
         let mut pending_network_requests: HashMap<ModuleReqId, NetworkRequest> = HashMap::new();
         let mut consensus = Blockstamp::default();
 
         loop {
-            let mut wot_events = Vec::new();
             // Request Consensus
             let req = NetworkRequest::GetConsensus(ModuleReqFullId(
                 BlockchainModule::id(),
@@ -620,7 +550,10 @@ impl BlockchainModule {
             pending_network_requests.insert(req_id, req);
             // Request Blocks
             let now = SystemTime::now();
-            if now.duration_since(last_request_blocks).unwrap() > Duration::new(20, 0) {
+            if now
+                .duration_since(last_request_blocks)
+                .expect("duration_since error") > Duration::new(20, 0)
+            {
                 last_request_blocks = now;
                 // Request begin blocks
                 let to = match consensus.id.0 {
@@ -660,16 +593,20 @@ impl BlockchainModule {
                                 DALReqBlockchain::CurrentBlock(ref requester_full_id) => {
                                     debug!("BlockchainModule : receive DALReqBc::CurrentBlock()");
 
-                                    if let Some(current_block) = DALBlock::get_block(
-                                        &self.currency.to_string(),
-                                        &self.db,
-                                        &current_blockstamp,
-                                    ) {
-                                        debug!("BlockchainModule : send_req_response(CurrentBlock({}))", current_block.block.blockstamp());
+                                    if let Some(current_block) =
+                                        DALBlock::get_block(
+                                            &self.blocks_databases.blockchain_db,
+                                            None,
+                                            &current_blockstamp,
+                                        ).expect(
+                                            "Fatal error : get_block : fail to read LocalBlockchainV10DB !",
+                                        ) {
+                                        debug!("BlockchainModule : send_req_response(CurrentBlock({}))", current_blockstamp);
                                         self.send_req_response(&DALResponse::Blockchain(Box::new(
                                             DALResBlockchain::CurrentBlock(
                                                 *requester_full_id,
                                                 Box::new(current_block.block),
+                                                current_blockstamp,
                                             ),
                                         )));
                                     } else {
@@ -682,16 +619,11 @@ impl BlockchainModule {
                                             pubkeys
                                                 .iter()
                                                 .map(|p| {
-                                                    if let Some(wotb_id) = wotb_index.get(p) {
-                                                        (
-                                                            *p,
-                                                            duniter_dal::get_uid(
-                                                                &self.db, *wotb_id,
-                                                            ),
-                                                        )
-                                                    } else {
-                                                        (*p, None)
-                                                    }
+                                                    (
+                                                        *p,
+                                                        duniter_dal::identity::get_uid(&self.wot_databases.identities_db, *p)
+                                                            .expect("Fatal error : get_uid : Fail to read WotV10DB !")
+                                                    )
                                                 })
                                                 .collect(),
                                         ),
@@ -704,16 +636,13 @@ impl BlockchainModule {
                     },
                     DuniterMessage::NetworkEvent(ref network_event) => match *network_event {
                         NetworkEvent::ReceiveDocuments(ref network_docs) => {
-                            let (new_current_blockstamp, mut new_wot_events) = self
-                                .receive_network_documents(
-                                    network_docs,
-                                    &current_blockstamp,
-                                    &mut forks,
-                                    &wotb_index,
-                                    &wot,
-                                );
+                            let new_current_blockstamp = self.receive_network_documents(
+                                network_docs,
+                                &current_blockstamp,
+                                &mut wotb_index,
+                                &mut wot,
+                            );
                             current_blockstamp = new_current_blockstamp;
-                            wot_events.append(&mut new_wot_events);
                         }
                         NetworkEvent::ReqResponse(ref network_response) => {
                             debug!("BlockchainModule : receive NetworkEvent::ReqResponse() !");
@@ -734,21 +663,20 @@ impl BlockchainModule {
                                         if let NetworkResponse::Chunk(_, _, ref blocks) =
                                             *network_response.deref()
                                         {
-                                            let (
-                                                new_current_blockstamp,
-                                                new_forks,
-                                                mut new_wot_events,
-                                            ) = self.receive_blocks(
+                                            let new_current_blockstamp = self.receive_blocks(
                                                 blocks,
                                                 &current_blockstamp,
-                                                &forks,
-                                                &wotb_index,
-                                                &wot,
+                                                &mut wotb_index,
+                                                &mut wot,
                                             );
-                                            current_blockstamp = new_current_blockstamp;
-                                            wot_events.append(&mut new_wot_events);
-                                            if !new_forks.is_empty() {
-                                                forks = new_forks;
+                                            if current_blockstamp != new_current_blockstamp {
+                                                current_blockstamp = new_current_blockstamp;
+                                                // Update forks states
+                                                self.forks_states =
+                                                    duniter_dal::block::get_forks(
+                                                        &self.blocks_databases.forks_db,
+                                                        current_blockstamp,
+                                                    ).expect("get_forks() : DALError");
                                             }
                                         }
                                     }
@@ -771,49 +699,74 @@ impl BlockchainModule {
                     mpsc::RecvTimeoutError::Timeout => {}
                 },
             }
-            // Write wot
-            BlockchainModule::apply_wot_events(
-                &wot_events,
-                &wot_path,
-                &current_blockstamp,
-                &mut wot,
-                &mut wotb_index,
-            );
             // Try to apply local stackable blocks
-            let mut wot_events = Vec::new();
             let now = SystemTime::now();
-            if now.duration_since(last_get_stackables_blocks).unwrap() > Duration::new(20, 0) {
+            if now
+                .duration_since(last_get_stackables_blocks)
+                .expect("duration_since error") > Duration::new(20, 0)
+            {
                 last_get_stackables_blocks = now;
                 loop {
                     let stackable_blocks = duniter_dal::block::DALBlock::get_stackables_blocks(
-                        &self.currency.to_string(),
-                        &self.db,
+                        &self.blocks_databases.forks_db,
+                        &self.blocks_databases.forks_blocks_db,
                         &current_blockstamp,
-                    );
+                    ).expect("Fatal error : Fail to read ForksV10DB !");
                     if stackable_blocks.is_empty() {
                         break;
                     } else {
                         let mut find_valid_block = false;
                         for stackable_block in stackable_blocks {
                             debug!("stackable_block({})", stackable_block.block.number);
-                            let (success, _new_forks, mut new_wot_events) = self.apply_block(
+                            if let Ok(ValidBlockApplyReqs(
+                                bc_db_query,
+                                wot_dbs_queries,
+                                tx_dbs_queries,
+                            )) = check_and_apply_block(
+                                &self.blocks_databases,
+                                &self.wot_databases.certs_db,
                                 &Block::LocalBlock(&stackable_block.block),
                                 &current_blockstamp,
-                                &mut forks,
-                                &wotb_index,
-                                &wot,
-                            );
-                            if success {
+                                &mut wotb_index,
+                                &mut wot,
+                                &self.forks_states,
+                            ) {
+                                // Apply db requests
+                                bc_db_query
+                                    .apply(&self.blocks_databases, false)
+                                    .expect("Fatal error : Fail to apply DBWriteRequest !");
+                                wot_dbs_queries
+                                    .iter()
+                                    .map(|req| {
+                                        req.apply(&self.wot_databases, &self.currency_params)
+                                            .expect(
+                                                "Fatal error : Fail to apply WotsDBsWriteRequest !",
+                                            );
+                                    })
+                                    .collect::<()>();
+                                tx_dbs_queries
+                                    .iter()
+                                    .map(|req| {
+                                        req.apply(&self.currency_databases).expect(
+                                            "Fatal error : Fail to apply CurrencyDBsWriteRequest !",
+                                        );
+                                    })
+                                    .collect::<()>();
+                                // Save databases
+                                self.blocks_databases.save_dbs();
+                                if !wot_dbs_queries.is_empty() {
+                                    self.wot_databases.save_dbs();
+                                }
+                                if !tx_dbs_queries.is_empty() {
+                                    self.currency_databases.save_dbs(true, true);
+                                }
                                 debug!(
                                     "success to stackable_block({})",
                                     stackable_block.block.number
                                 );
+
                                 current_blockstamp = stackable_block.block.blockstamp();
-                                wot_events.append(&mut new_wot_events);
                                 find_valid_block = true;
-                                /*if !new_forks.is_empty() {
-                                    forks = new_forks;
-                                }*/
                                 break;
                             } else {
                                 warn!(
@@ -821,8 +774,13 @@ impl BlockchainModule {
                                     stackable_block.block.number
                                 );
                                 // Delete this fork
-                                DALBlock::delete_fork(&self.db, stackable_block.fork);
-                                forks[stackable_block.fork] = ForkState::Free();
+                                DALBlock::delete_fork(
+                                    &self.blocks_databases.forks_db,
+                                    &self.blocks_databases.forks_blocks_db,
+                                    stackable_block.fork_id,
+                                ).expect("delete_fork() : DALError");
+                                // Update forks states
+                                self.forks_states[stackable_block.fork_id.0] = ForkStatus::Free();
                             }
                         }
                         if !find_valid_block {
@@ -836,122 +794,33 @@ impl BlockchainModule {
                     current_blockstamp
                 );
             }
-            // Write wot
-            BlockchainModule::apply_wot_events(
+            // Apply wot events
+            /*BlockchainModule::apply_wot_events(
                 &wot_events,
                 &wot_path,
                 &current_blockstamp,
                 &mut wot,
                 &mut wotb_index,
-            );
-        }
-    }
-    fn apply_wot_events<W: WebOfTrust + Sync>(
-        wot_events: &[WotEvent],
-        wot_path: &PathBuf,
-        current_blockstamp: &Blockstamp,
-        wot: &mut W,
-        wotb_index: &mut HashMap<PubKey, NodeId>,
-    ) {
-        if !wot_events.is_empty() {
-            for wot_event in wot_events {
-                match *wot_event {
-                    WotEvent::AddNode(pubkey, wotb_id) => {
-                        wot.add_node();
-                        wotb_index.insert(pubkey, wotb_id);
-                    }
-                    WotEvent::RemNode(pubkey) => {
-                        wot.rem_node();
-                        wotb_index.remove(&pubkey);
-                    }
-                    WotEvent::AddLink(source, target) => {
-                        wot.add_link(source, target);
-                    }
-                    WotEvent::RemLink(source, target) => {
-                        wot.rem_link(source, target);
-                    }
-                    WotEvent::EnableNode(wotb_id) => {
-                        wot.set_enabled(wotb_id, true);
-                    }
-                    WotEvent::DisableNode(wotb_id) => {
-                        wot.set_enabled(wotb_id, false);
-                    }
-                }
-            }
-            // Save wot
-            WOT_FILE_FORMATER
-                .to_file(
-                    wot,
-                    current_blockstamp.to_string().as_bytes(),
-                    wot_path.as_path().to_str().unwrap(),
-                )
-                .expect("Fatal Error: Fail to write wotb in file !");
-        }
+            );*/
+        }
     }
 }
 
 /// Complete Network Block
 pub fn complete_network_block(
-    currency: &str,
-    db: Option<&DuniterDB>,
     network_block: &NetworkBlock,
-    verif_level: SyncVerificationLevel,
 ) -> Result<BlockDocument, CompletedBlockError> {
     if let NetworkBlock::V10(ref network_block_v10) = *network_block {
         let mut block_doc = network_block_v10.uncompleted_block_doc.clone();
         trace!("complete_network_block #{}...", block_doc.number);
-        if verif_level == SyncVerificationLevel::Cautious() {
-            // Indexing block_identities
-            let mut block_identities = HashMap::new();
-            block_doc
-                .identities
-                .iter()
-                .map(|idty| {
-                    if idty.issuers().is_empty() {
-                        panic!("idty without issuer !")
-                    }
-                    block_identities.insert(idty.issuers()[0], idty.clone());
-                })
-                .collect::<()>();
-            block_doc.certifications =
-                    duniter_dal::parsers::certifications::parse_certifications_from_json_value(
-                        currency,
-                        db.expect("complete_network_block() : Cautious mode need access to blockchain database !"),
-                        &block_identities,
-                        &network_block_v10.certifications,
-                    );
-            trace!("Success to complete certs.");
-            block_doc.revoked = duniter_dal::parsers::revoked::parse_revocations_from_json_value(
-                currency,
-                db.expect(
-                    "complete_network_block() : Cautious mode need access to blockchain database !",
-                ),
-                &block_identities,
-                &network_block_v10.revoked,
+        block_doc.certifications =
+            duniter_dal::parsers::certifications::parse_certifications_into_compact(
+                &network_block_v10.certifications,
             );
-        } else {
-            block_doc.certifications =
-                duniter_dal::parsers::certifications::parse_certifications_into_compact(
-                    &network_block_v10.certifications,
-                );
-            trace!("Success to complete certs.");
-            block_doc.revoked = duniter_dal::parsers::revoked::parse_revocations_into_compact(
-                &network_block_v10.revoked,
-            );
-        }
+        trace!("Success to complete certs.");
+        block_doc.revoked = duniter_dal::parsers::revoked::parse_revocations_into_compact(
+            &network_block_v10.revoked,
+        );
         trace!("Success to complete certs & revocations.");
-        // In cautions mode, verify all signatures !
-        if verif_level == SyncVerificationLevel::Cautious() {
-            for idty in block_doc.clone().identities {
-                if idty.verify_signatures() != VerificationResult::Valid() {
-                    error!(
-                        "Fail to sync block #{} : Idty with invalid singature !",
-                        block_doc.number
-                    );
-                    panic!("Idty with invalid singature !");
-                }
-            }
-        }
         let inner_hash = block_doc.inner_hash.expect(
             "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
         );
@@ -966,20 +835,12 @@ pub fn complete_network_block(
         {
             let nonce = block_doc.nonce;
             block_doc.change_nonce(nonce);
-            if verif_level == SyncVerificationLevel::FastSync()
-                || block_doc.verify_signatures() == VerificationResult::Valid()
-                || block_doc.number.0 <= 1
-            {
-                if block_doc.hash == hash {
-                    trace!("Succes to complete_network_block #{}", block_doc.number.0);
-                    Ok(block_doc)
-                } else {
-                    warn!("BlockchainModule : Refuse Bloc : invalid hash !");
-                    Err(CompletedBlockError::InvalidHash())
-                }
+            if block_doc.hash == hash {
+                trace!("Succes to complete_network_block #{}", block_doc.number.0);
+                Ok(block_doc)
             } else {
-                warn!("BlockchainModule : Refuse Bloc : invalid signature !");
-                Err(CompletedBlockError::InvalidSig())
+                warn!("BlockchainModule : Refuse Bloc : invalid hash !");
+                Err(CompletedBlockError::InvalidHash())
             }
         } else {
             warn!("BlockchainModule : Refuse Bloc : invalid inner hash !");
diff --git a/blockchain/stack_up_block.rs b/blockchain/stack_up_block.rs
deleted file mode 100644
index b3c327cdfcbbf1f717a5d0c70a26a4b4eeda2231..0000000000000000000000000000000000000000
--- a/blockchain/stack_up_block.rs
+++ /dev/null
@@ -1,178 +0,0 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as
-// published by the Free Software Foundation, either version 3 of the
-// License, or (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-// GNU Affero General Public License for more details.
-//
-// You should have received a copy of the GNU Affero General Public License
-// along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-extern crate duniter_crypto;
-extern crate duniter_dal;
-extern crate duniter_documents;
-extern crate duniter_wotb;
-
-use duniter_crypto::keys::*;
-use duniter_dal::block::{DALBlock, WotEvent};
-use duniter_dal::writers::requests::DBWriteRequest;
-use duniter_documents::blockchain::v10::documents::BlockDocument;
-use duniter_documents::blockchain::Document;
-use duniter_wotb::{NodeId, WebOfTrust};
-
-use std::collections::HashMap;
-
-pub fn try_stack_up_completed_block<W: WebOfTrust + Sync>(
-    block: &BlockDocument,
-    wotb_index: &HashMap<PubKey, NodeId>,
-    wot: &W,
-) -> (bool, Vec<DBWriteRequest>, Vec<WotEvent>) {
-    debug!(
-        "BlockchainModule : try stack up complete block {}",
-        block.blockstamp()
-    );
-    let mut db_requests = Vec::new();
-    let mut wot_events = Vec::new();
-    let mut wot_copy: W = wot.clone();
-    let mut wotb_index_copy: HashMap<PubKey, NodeId> = wotb_index.clone();
-    let current_blockstamp = block.blockstamp();
-    let mut identities = HashMap::with_capacity(block.identities.len());
-    for identity in block.identities.clone() {
-        identities.insert(identity.issuers()[0], identity);
-    }
-    for joiner in block.joiners.clone() {
-        let pubkey = joiner.clone().issuers()[0];
-        if let Some(idty_doc) = identities.get(&pubkey) {
-            // Newcomer
-            let wotb_id = NodeId(wot_copy.size());
-            wot_events.push(WotEvent::AddNode(pubkey, wotb_id));
-            wot_copy.add_node();
-            wotb_index_copy.insert(pubkey, wotb_id);
-            db_requests.push(DBWriteRequest::CreateIdentity(
-                wotb_id,
-                current_blockstamp,
-                block.median_time,
-                Box::new(idty_doc.clone()),
-            ));
-        } else {
-            // Renewer
-            let wotb_id = wotb_index_copy[&joiner.issuers()[0]];
-            wot_events.push(WotEvent::EnableNode(wotb_id));
-            wot_copy.set_enabled(wotb_id, true);
-            db_requests.push(DBWriteRequest::RenewalIdentity(
-                joiner.issuers()[0],
-                block.blockstamp(),
-                block.median_time,
-            ));
-        }
-    }
-    for active in block.actives.clone() {
-        let pubkey = active.issuers()[0];
-        if !identities.contains_key(&pubkey) {
-            let wotb_id = wotb_index_copy[&pubkey];
-            wot_events.push(WotEvent::EnableNode(wotb_id));
-            wot_copy.set_enabled(wotb_id, true);
-            db_requests.push(DBWriteRequest::RenewalIdentity(
-                pubkey,
-                block.blockstamp(),
-                block.median_time,
-            ));
-        }
-    }
-    for exclusion in block.excluded.clone() {
-        let wotb_id = wotb_index_copy[&exclusion];
-        wot_events.push(WotEvent::DisableNode(wotb_id));
-        wot_copy.set_enabled(wotb_id, false);
-        db_requests.push(DBWriteRequest::ExcludeIdentity(
-            wotb_id,
-            block.blockstamp(),
-            block.median_time,
-        ));
-    }
-    for revocation in block.revoked.clone() {
-        let compact_revoc = revocation.to_compact_document();
-        let wotb_id = wotb_index_copy[&compact_revoc.issuer];
-        wot_events.push(WotEvent::DisableNode(wotb_id));
-        wot_copy.set_enabled(wotb_id, false);
-        db_requests.push(DBWriteRequest::RevokeIdentity(
-            wotb_id,
-            block.blockstamp(),
-            block.median_time,
-        ));
-    }
-    for certification in block.certifications.clone() {
-        trace!("try_stack_up_completed_block: apply cert...");
-        let compact_cert = certification.to_compact_document();
-        let wotb_node_from = wotb_index_copy[&compact_cert.issuer];
-        let wotb_node_to = wotb_index_copy[&compact_cert.target];
-        wot_events.push(WotEvent::AddLink(wotb_node_from, wotb_node_to));
-        wot_copy.add_link(wotb_node_from, wotb_node_to);
-        db_requests.push(DBWriteRequest::CreateCert(
-            block.blockstamp(),
-            block.median_time,
-            compact_cert,
-        ));
-        trace!("try_stack_up_completed_block: apply cert...success.");
-    }
-
-    /*// Calculate the state of the wot
-        if !wot_events.is_empty() && verif_level != SyncVerificationLevel::FastSync() {
-            // Calculate sentries_count
-            let sentries_count = wot_copy.get_sentries(3).len();
-            // Calculate average_density
-            let average_density = calculate_average_density::<W>(&wot_copy);
-            let sentry_requirement =
-                get_sentry_requirement(block.members_count, G1_PARAMS.step_max);
-            // Calculate distances and connectivities
-            let (average_distance, distances, average_connectivity, connectivities) =
-                compute_distances::<W>(
-                    &wot_copy,
-                    sentry_requirement,
-                    G1_PARAMS.step_max,
-                    G1_PARAMS.x_percent,
-                );
-            // Calculate centralities and average_centrality
-            let centralities =
-                calculate_distance_stress_centralities::<W>(&wot_copy, G1_PARAMS.step_max);
-            let average_centrality =
-                (centralities.iter().sum::<u64>() as f64 / centralities.len() as f64) as usize;
-            // Register the state of the wot
-            duniter_dal::register_wot_state(
-                db,
-                &WotState {
-                    block_number: block.number.0,
-                    block_hash: block.hash.unwrap().to_string(),
-                    sentries_count,
-                    average_density,
-                    average_distance,
-                    distances,
-                    average_connectivity,
-                    connectivities: connectivities
-                        .iter()
-                        .map(|c| {
-                            if *c > *G1_CONNECTIVITY_MAX {
-                                *G1_CONNECTIVITY_MAX
-                            } else {
-                                *c
-                            }
-                        })
-                        .collect(),
-                    average_centrality,
-                    centralities,
-                },
-            );
-        }*/
-    // Write block in bdd
-    db_requests.push(DBWriteRequest::WriteBlock(Box::new(DALBlock {
-        block: block.clone(),
-        fork: 0,
-        isolate: false,
-    })));
-
-    (true, db_requests, wot_events)
-}
diff --git a/blockchain/sync.rs b/blockchain/sync.rs
index a8f536dfa161b19b103832badb4844d147dde734..5d5c29df4e1f9b0980177354b9b454686d14b90e 100644
--- a/blockchain/sync.rs
+++ b/blockchain/sync.rs
@@ -13,50 +13,61 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-extern crate duniter_conf;
-extern crate duniter_crypto;
-extern crate duniter_dal;
-extern crate duniter_documents;
-extern crate duniter_message;
-extern crate duniter_module;
-extern crate duniter_network;
+extern crate num_cpus;
 extern crate pbr;
-extern crate serde;
-extern crate serde_json;
 extern crate sqlite;
+extern crate threadpool;
 
 use self::pbr::ProgressBar;
+use self::threadpool::ThreadPool;
 use duniter_crypto::keys::*;
-use duniter_dal::parsers::identities::parse_compact_identity;
-use duniter_dal::parsers::transactions::parse_transaction;
-//use duniter_dal::writers::requests::DBWriteRequest;
-use duniter_documents::blockchain::v10::documents::membership::MembershipType;
-use duniter_documents::blockchain::v10::documents::BlockDocument;
+use duniter_dal::currency_params::CurrencyParameters;
+use duniter_dal::writers::requests::*;
+use duniter_dal::ForkId;
 use duniter_documents::{BlockHash, BlockId, Hash};
-use duniter_network::{NetworkBlock, NetworkBlockV10};
+use duniter_network::NetworkBlock;
+use duniter_wotb::operations::file::FileFormater;
 use duniter_wotb::{NodeId, WebOfTrust};
-use std::collections::HashMap;
+use rustbreak::{deser::Bincode, MemoryDatabase};
+use std::collections::{HashMap, VecDeque};
 use std::fs;
+use std::ops::Deref;
 use std::sync::mpsc;
 use std::thread;
 use std::time::SystemTime;
+use ts_parsers::*;
+use *;
 
-use super::*;
+/// Number of sync jobs
+pub static NB_SYNC_JOBS: &'static usize = &4;
 
 #[derive(Debug, Clone, PartialEq, Eq)]
+/// Block header
 pub struct BlockHeader {
     pub number: BlockId,
     pub hash: BlockHash,
     pub issuer: PubKey,
 }
 
-enum ParserWorkMess {
+#[derive(Debug)]
+/// Message for main sync thread
+enum MessForSyncThread {
     TargetBlockstamp(Blockstamp),
     NetworkBlock(NetworkBlock),
-    //DBWriteRequest(DBWriteRequest),
+    DownloadFinish(),
+    ApplyFinish(),
+}
+
+#[derive(Debug)]
+/// Message for a job thread
+enum SyncJobsMess {
+    BlocksDBsWriteQuery(BlocksDBsWriteQuery),
+    WotsDBsWriteQuery(WotsDBsWriteQuery, Box<CurrencyParameters>),
+    CurrencyDBsWriteQuery(CurrencyDBsWriteQuery),
     End(),
 }
 
+/// Sync from a duniter-ts database
 pub fn sync_ts(
     conf: &DuniterConf,
     current_blockstamp: &Blockstamp,
@@ -68,14 +79,6 @@ pub fn sync_ts(
     let currency = &conf.currency();
     let mut current_blockstamp = *current_blockstamp;
 
-    // Copy blockchain db in ramfs
-    let db_path = duniter_conf::get_db_path(profile, currency, false);
-    if db_path.as_path().exists() {
-        info!("Copy blockchain DB in ramfs...");
-        fs::copy(db_path, format!("/dev/shm/{}_durs.db", profile))
-            .expect("Fatal error : fail to copy DB in ramfs !");
-    }
-
     // Get wot path
     let wot_path = duniter_conf::get_wot_path(profile.clone().to_string(), currency);
 
@@ -83,24 +86,31 @@ pub fn sync_ts(
     let (mut wot, mut _wot_blockstamp): (RustyWebOfTrust, Blockstamp) =
         if wot_path.as_path().exists() {
             match WOT_FILE_FORMATER.from_file(
-                wot_path.as_path().to_str().unwrap(),
-                duniter_dal::constants::G1_PARAMS.sig_stock as usize,
+                wot_path
+                    .as_path()
+                    .to_str()
+                    .expect("Fail to convert path to str"),
+                *INFINITE_SIG_STOCK,
             ) {
                 Ok((wot, binary_blockstamp)) => match str::from_utf8(&binary_blockstamp) {
-                    Ok(str_blockstamp) => (wot, Blockstamp::from_string(str_blockstamp).unwrap()),
+                    Ok(str_blockstamp) => (
+                        wot,
+                        Blockstamp::from_string(str_blockstamp)
+                            .expect("Fail to deserialize wot blockstamp"),
+                    ),
                     Err(e) => panic!("Invalid UTF-8 sequence: {}", e),
                 },
                 Err(e) => panic!("Fatal Error : fail te read wot file : {:?}", e),
             }
         } else {
             (
-                RustyWebOfTrust::new(duniter_dal::constants::G1_PARAMS.sig_stock as usize),
+                RustyWebOfTrust::new(*INFINITE_SIG_STOCK),
                 Blockstamp::default(),
             )
         };
 
     // Get verification level
-    let verif_level = if cautious {
+    let _verif_level = if cautious {
         println!("Start cautious sync...");
         info!("Start cautious sync...");
         SyncVerificationLevel::Cautious()
@@ -110,14 +120,32 @@ pub fn sync_ts(
         SyncVerificationLevel::FastSync()
     };
 
-    // Create sync_thread channel
+    // Create sync_thread channels
     let (sender_sync_thread, recv_sync_thread) = mpsc::channel();
 
+    // Create ThreadPool
+    let nb_cpus = num_cpus::get();
+    let nb_workers = if nb_cpus < *NB_SYNC_JOBS {
+        nb_cpus
+    } else {
+        *NB_SYNC_JOBS
+    };
+    let pool = ThreadPool::new(nb_workers);
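+    // The pool runs the ts-db reader job plus the blocks, wot and currency DB writer jobs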
+
+    // Determine db_ts_copy_path
+    let mut db_ts_copy_path = duniter_conf::datas_path(&profile.clone(), currency);
+    db_ts_copy_path.push("tmp_db_ts_copy.db");
+
     // Lauch ts thread
-    thread::spawn(move || {
-        // open db_ts
-        let ts_db = sqlite::open(db_ts_path.as_path())
-            .expect("Fatal error : fail to open duniter-ts database !");
+    let sender_sync_thread_clone = sender_sync_thread.clone();
+    pool.execute(move || {
+        let ts_job_begin = SystemTime::now();
+        // copy db_ts
+        fs::copy(db_ts_path.as_path(), db_ts_copy_path.as_path())
+            .expect("Fatal error : fail to copy duniter-ts database !");
+        // open copy of db_ts
+        let ts_db = sqlite::open(db_ts_copy_path.as_path())
+            .expect("Fatal error : fail to open copy of duniter-ts database !");
         info!("sync_ts : Success to open duniter-ts database.");
 
         // Get ts current blockstamp
@@ -152,8 +180,8 @@ pub fn sync_ts(
         debug!("Success to ts-db current blockstamp.");
 
         // Send ts current blockstamp
-        sender_sync_thread
-            .send(ParserWorkMess::TargetBlockstamp(current_ts_blockstamp))
+        sender_sync_thread_clone
+            .send(MessForSyncThread::TargetBlockstamp(current_ts_blockstamp))
             .expect("Fatal error : sync_thread unrechable !");
 
         // Get genesis block
@@ -172,8 +200,8 @@ pub fn sync_ts(
                 .bind(&[sqlite::Value::Integer(0)])
                 .expect("Fail to get genesis block !");
             if let Some(row) = cursor.next().expect("cursor error") {
-                sender_sync_thread
-                    .send(ParserWorkMess::NetworkBlock(parse_ts_block(row)))
+                sender_sync_thread_clone
+                    .send(MessForSyncThread::NetworkBlock(parse_ts_block(row)))
                     .expect("Fatal error : sync_thread unrechable !");
             }
         }
@@ -202,27 +230,38 @@ pub fn sync_ts(
         while let Some(row) = cursor.next().expect("cursor error") {
             //let sender_sync_thread_clone = sender_sync_thread.clone();
             //pool.execute(move || {
-            sender_sync_thread
-                .send(ParserWorkMess::NetworkBlock(parse_ts_block(row)))
+            sender_sync_thread_clone
+                .send(MessForSyncThread::NetworkBlock(parse_ts_block(row)))
                 .expect("Fatal error : sync_thread unrechable !");
             //});
         }
-        sender_sync_thread
-            .send(ParserWorkMess::End())
+        fs::remove_file(db_ts_copy_path.as_path())
+            .expect("Fatal error : fail to remove db_ts_copy !");
+        sender_sync_thread_clone
+            .send(MessForSyncThread::DownloadFinish())
             .expect("Fatal error : sync_thread unrechable !");
+        let ts_job_duration = SystemTime::now()
+            .duration_since(ts_job_begin)
+            .expect("duration_since error");
+        info!(
+            "ts_job_duration={},{:03} seconds.",
+            ts_job_duration.as_secs(),
+            ts_job_duration.subsec_nanos() / 1_000_000
+        );
     });
 
     // Get target blockstamp
-    let target_blockstamp =
-        if let Ok(ParserWorkMess::TargetBlockstamp(target_blockstamp)) = recv_sync_thread.recv() {
-            target_blockstamp
-        } else {
-            panic!("Fatal error : no TargetBlockstamp !")
-        };
+    let target_blockstamp = if let Ok(MessForSyncThread::TargetBlockstamp(target_blockstamp)) =
+        recv_sync_thread.recv()
+    {
+        target_blockstamp
+    } else {
+        panic!("Fatal error : no TargetBlockstamp !")
+    };
 
     // Instanciate blockchain module
     let blockchain_module =
-        BlockchainModule::load_blockchain_conf(conf, RequiredKeysContent::None(), true);
+        BlockchainModule::load_blockchain_conf(conf, RequiredKeysContent::None());
 
     // Node is already synchronized ?
     if target_blockstamp.id.0 < current_blockstamp.id.0 {
@@ -232,86 +271,312 @@ pub fn sync_ts(
 
     // Get wotb index
     let mut wotb_index: HashMap<PubKey, NodeId> =
-        DALIdentity::get_wotb_index(&blockchain_module.db);
+        DALIdentity::get_wotb_index(&blockchain_module.wot_databases.identities_db)
+            .expect("Fatal eror : get_wotb_index : Fail to read blockchain databases");
 
     // Start sync
     let sync_start_time = SystemTime::now();
-    println!(
-        "Sync from #{} to #{} :",
+    info!(
+        "Sync from #{} to #{}...",
         current_blockstamp.id.0, target_blockstamp.id.0
     );
-    info!(
+    println!(
         "Sync from #{} to #{}...",
         current_blockstamp.id.0, target_blockstamp.id.0
     );
-    let mut pb = ProgressBar::new((target_blockstamp.id.0 + 1 - current_blockstamp.id.0).into());
+
+    // Create progress bar
+    let count_blocks = target_blockstamp.id.0 + 1 - current_blockstamp.id.0;
+    let count_chunks = if count_blocks % 250 > 0 {
+        (count_blocks / 250) + 1
+    } else {
+        count_blocks / 250
+    };
+    let mut apply_pb = ProgressBar::new(count_chunks.into());
+    apply_pb.format("╢▌▌░╟");
+    // Create workers threads channels
+    let (sender_blocks_thread, recv_blocks_thread) = mpsc::channel();
+    let (sender_tx_thread, recv_tx_thread) = mpsc::channel();
+    let (sender_wot_thread, recv_wot_thread) = mpsc::channel();
+
+    // Launch blocks_worker thread
+    let profile_copy = conf.profile().clone();
+    let currency_copy = conf.currency().clone();
+    let sender_sync_thread_clone = sender_sync_thread.clone();
+    pool.execute(move || {
+        let blocks_job_begin = SystemTime::now();
+        // Open databases
+        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency_copy);
+        let databases = BlocksV10DBs::open(&db_path, false);
+
+        // Listen db requests
+        let mut chunk_index = 0;
+        let mut blockchain_meta_datas = HashMap::new();
+        let mut all_wait_duration = Duration::from_millis(0);
+        let mut wait_begin = SystemTime::now();
+        while let Ok(SyncJobsMess::BlocksDBsWriteQuery(req)) = recv_blocks_thread.recv() {
+            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+            // Apply db request
+            req.apply(&databases, true)
+                .expect("Fatal error : Fail to apply DBWriteRequest !");
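+            // Record each (previous blockstamp -> block hash) pair; the whole map is indexed as fork 0 once all blocks are written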
+            if let BlocksDBsWriteQuery::WriteBlock(
+                ref _dal_block,
+                ref _old_fork_id,
+                ref previous_blockstamp,
+                ref previous_hash,
+            ) = req
+            {
+                blockchain_meta_datas.insert(*previous_blockstamp, *previous_hash);
+                chunk_index += 1;
+                if chunk_index == 250 {
+                    chunk_index = 0;
+                    apply_pb.inc();
+                }
+            }
+            wait_begin = SystemTime::now();
+        }
+
+        // Indexing blockchain metadata
+        info!("Indexing blockchain metadata...");
+        /*let blockchain_meta_datas: HashMap<PreviousBlockstamp, BlockHash> = databases
+            .blockchain_db
+            .read(|db| {
+                let mut blockchain_meta_datas: HashMap<
+                    PreviousBlockstamp,
+                    BlockHash,
+                > = HashMap::new();
+                for dal_block in db.values() {
+                    let block_previous_hash = if dal_block.block.number.0 == 0 {
+                        PreviousBlockstamp::default()
+                    } else {
+                        PreviousBlockstamp {
+                            id: BlockId(dal_block.block.number.0 - 1),
+                            hash: BlockHash(dal_block.block.previous_hash),
+                        }
+                    };
+                    blockchain_meta_datas
+                        .insert(block_previous_hash, dal_block.block.expect("Try to get hash of an uncompleted or reduce block !"));
+                }
+                blockchain_meta_datas
+            })
+            .expect("Indexing blockchain meta datas : DALError");*/
+        databases
+            .forks_db
+            .write(|db| {
+                db.insert(ForkId(0), blockchain_meta_datas);
+            })
+            .expect("Indexing blockchain meta datas : DALError");
+
+        // Increment progress bar (last chunk)
+        apply_pb.inc();
+        // Save blockchain and fork databases
+        info!("Save blockchain and forks databases in files...");
+        databases.save_dbs();
+
+        // Send finish signal
+        sender_sync_thread_clone
+            .send(MessForSyncThread::ApplyFinish())
+            .expect("Fatal error : sync_thread unrechable !");
+        let blocks_job_duration =
+            SystemTime::now().duration_since(blocks_job_begin).unwrap() - all_wait_duration;
+        info!(
+            "blocks_job_duration={},{:03} seconds.",
+            blocks_job_duration.as_secs(),
+            blocks_job_duration.subsec_nanos() / 1_000_000
+        );
+    });
+
+    // Launch wot_worker thread
+    let profile_copy2 = profile.clone();
+    let currency_copy2 = currency.clone();
+    let sender_sync_thread_clone2 = sender_sync_thread.clone();
+
+    pool.execute(move || {
+        let wot_job_begin = SystemTime::now();
+        // Open databases
+        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy2, &currency_copy2);
+        let databases = WotsV10DBs::open(&db_path, false);
+
+        // Listen for db write requests
+        let mut all_wait_duration = Duration::from_millis(0);
+        let mut wait_begin = SystemTime::now();
+        while let Ok(mess) = recv_wot_thread.recv() {
+            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+            match mess {
+                SyncJobsMess::WotsDBsWriteQuery(req, currency_params) => req
+                    .apply(&databases, &currency_params.deref())
+                    .expect("Fatal error : Fail to apply DBWriteRequest !"),
+                SyncJobsMess::End() => break,
+                _ => {}
+            }
+            wait_begin = SystemTime::now();
+        }
+        // Save wots databases
+        info!("Save wots databases in files...");
+        databases.save_dbs();
+
+        // Send finish signal
+        sender_sync_thread_clone2
+            .send(MessForSyncThread::ApplyFinish())
+            .expect("Fatal error : sync_thread unreachable !");
+        let wot_job_duration =
+            SystemTime::now().duration_since(wot_job_begin).unwrap() - all_wait_duration;
+        info!(
+            "wot_job_duration={},{:03} seconds.",
+            wot_job_duration.as_secs(),
+            wot_job_duration.subsec_nanos() / 1_000_000
+        );
+    });
+
+    // Launch tx_worker thread
+    let profile_copy = conf.profile().clone();
+    let currency_copy = conf.currency().clone();
+    let sender_sync_thread_clone = sender_sync_thread.clone();
+    pool.execute(move || {
+        let tx_job_begin = SystemTime::now();
+        // Open databases
+        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency_copy);
+        let databases = CurrencyV10DBs::<FileBackend>::open(&db_path);
+
+        // Listen for db write requests
+        let mut all_wait_duration = Duration::from_millis(0);
+        let mut wait_begin = SystemTime::now();
+        while let Ok(SyncJobsMess::CurrencyDBsWriteQuery(req)) = recv_tx_thread.recv() {
+            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+            // Apply db request
+            req.apply(&databases)
+                .expect("Fatal error : Fail to apply DBWriteRequest !");
+            wait_begin = SystemTime::now();
+        }
+        // Save tx, utxo, du and balances databases
+        info!("Save tx and sources database in file...");
+        databases.save_dbs(true, true);
+
+        // Send finish signal
+        sender_sync_thread_clone
+            .send(MessForSyncThread::ApplyFinish())
+            .expect("Fatal error : sync_thread unreachable !");
+        let tx_job_duration =
+            SystemTime::now().duration_since(tx_job_begin).unwrap() - all_wait_duration;
+        info!(
+            "tx_job_duration={},{:03} seconds.",
+            tx_job_duration.as_secs(),
+            tx_job_duration.subsec_nanos() / 1_000_000
+        );
+    });
+    let main_job_begin = SystemTime::now();
 
     // Apply blocks
-    while let Ok(ParserWorkMess::NetworkBlock(network_block)) = recv_sync_thread.recv() {
+    let mut blocks_not_expiring = VecDeque::with_capacity(200_000);
+    let mut last_block_expiring: isize = -1;
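+    // In-memory database of certifications indexed by the block in which they were
+    // created, used only during sync to compute certification expirations locally.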
+    let certs_db = MemoryDatabase::<CertsExpirV10Datas, Bincode>::memory(HashMap::new())
+        .expect("Fail to create memory certs_db");
+    let mut currency_params = CurrencyParameters::default();
+    let mut get_currency_params = false;
+    let mut certs_count = 0;
+
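+    // Time spent waiting on the receive channel and in the two main per-block steps,
+    // accumulated over the whole sync and logged once it completes.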
+    let mut all_wait_duration = Duration::from_millis(0);
+    let mut wait_begin = SystemTime::now();
+    let mut all_complete_block_duration = Duration::from_millis(0);
+    let mut all_apply_valid_block_duration = Duration::from_millis(0);
+    while let Ok(MessForSyncThread::NetworkBlock(network_block)) = recv_sync_thread.recv() {
+        all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
         // Complete block
-        let block_doc = complete_network_block(
-            &blockchain_module.currency.to_string(),
-            None,
-            &network_block,
-            SyncVerificationLevel::FastSync(),
-        ).expect("Receive wrong block, please reset data and resync !");
+        let complete_block_begin = SystemTime::now();
+        let block_doc = complete_network_block(&network_block)
+            .expect("Receive wrong block, please reset data and resync !");
+        all_complete_block_duration += SystemTime::now()
+            .duration_since(complete_block_begin)
+            .unwrap();
+        // Get currency params
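+        // Currency parameters are only present in the genesis block, so they are read
+        // once from block #0 ; sig_stock then bounds the number of links per wot node.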
+        if !get_currency_params {
+            if block_doc.number.0 == 0 {
+                if block_doc.parameters.is_some() {
+                    currency_params = CurrencyParameters::from((
+                        block_doc.currency.clone(),
+                        block_doc.parameters.unwrap(),
+                    ));
+                    wot.set_max_link(currency_params.sig_stock);
+                    get_currency_params = true;
+                } else {
+                    panic!("The genesis block has no currency parameters !");
+                }
+            } else {
+                panic!("The first block received is not the genesis block !");
+            }
+        }
+        // Push block median_time in blocks_not_expiring
+        blocks_not_expiring.push_back(block_doc.median_time);
+        // Get blocks_expiring
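+        // A block "expires" when its median_time is older than the current block's
+        // median_time minus sig_validity : certifications written in it may have expired.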
+        let mut blocks_expiring = Vec::new();
+        while blocks_not_expiring.front().cloned()
+            < Some(block_doc.median_time - currency_params.sig_validity)
+        {
+            last_block_expiring += 1;
+            blocks_expiring.push(BlockId(last_block_expiring as u32));
+            blocks_not_expiring.pop_front();
+        }
+        // Find expire_certs
+        let expire_certs = duniter_dal::certs::find_expire_certs(&certs_db, blocks_expiring)
+            .expect("find_expire_certs() : DALError");
         // Apply block
-        let (success, db_requests, new_wot_events) =
-            try_stack_up_completed_block::<RustyWebOfTrust>(&block_doc, &wotb_index, &wot);
-
-        blockchain_module.try_stack_up_block::<RustyWebOfTrust>(
-            &network_block,
-            &wotb_index,
-            &wot,
-            verif_level,
-        );
-        if success {
+        let apply_valid_block_begin = SystemTime::now();
+        if let Ok(ValidBlockApplyReqs(block_req, wot_db_reqs, currency_db_reqs)) =
+            apply_valid_block::<RustyWebOfTrust>(
+                &block_doc,
+                &mut wotb_index,
+                &mut wot,
+                &expire_certs,
+                None,
+            ) {
+            all_apply_valid_block_duration += SystemTime::now()
+                .duration_since(apply_valid_block_begin)
+                .unwrap();
             current_blockstamp = network_block.blockstamp();
             debug!("Apply db requests...");
-            // Apply db requests
-            db_requests
+            // Send block request to blocks worker thread
+            sender_blocks_thread
+                .send(SyncJobsMess::BlocksDBsWriteQuery(block_req.clone()))
+                .expect(
+                    "Fail to communicate with blocks worker thread, please reset data & resync !",
+                );
+            // Send wot requests to wot worker thread
+            wot_db_reqs
                 .iter()
-                .map(|req| req.apply(&conf.currency().to_string(), &blockchain_module.db))
-                .collect::<()>();
-            debug!("Finish to apply db requests.");
-            // Apply WotEvents
-            if !new_wot_events.is_empty() {
-                for wot_event in new_wot_events {
-                    match wot_event {
-                        WotEvent::AddNode(pubkey, wotb_id) => {
-                            wot.add_node();
-                            wotb_index.insert(pubkey, wotb_id);
-                        }
-                        WotEvent::RemNode(pubkey) => {
-                            wot.rem_node();
-                            wotb_index.remove(&pubkey);
-                        }
-                        WotEvent::AddLink(source, target) => {
-                            wot.add_link(source, target);
-                        }
-                        WotEvent::RemLink(source, target) => {
-                            wot.rem_link(source, target);
-                        }
-                        WotEvent::EnableNode(wotb_id) => {
-                            wot.set_enabled(wotb_id, true);
-                        }
-                        WotEvent::DisableNode(wotb_id) => {
-                            wot.set_enabled(wotb_id, false);
-                        }
+                .map(|req| {
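+                    // Mirror every new certification into the local in-memory certs_db
+                    // so that expirations can be computed in this thread.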
+                    if let WotsDBsWriteQuery::CreateCert(
+                        ref _source_pubkey,
+                        ref source,
+                        ref target,
+                        ref created_block_id,
+                        ref _median_time,
+                    ) = req
+                    {
+                        certs_count += 1;
+                        // Add cert in certs_db
+                        certs_db
+                            .write(|db| {
+                                let mut created_certs =
+                                    db.get(&created_block_id).cloned().unwrap_or_default();
+                                created_certs.insert((*source, *target));
+                                db.insert(*created_block_id, created_certs);
+                            })
+                            .expect("RustBreakError : please reset data and resync !");
                     }
-                }
-                if current_blockstamp.id.0 > target_blockstamp.id.0 - 100 {
-                    // Save wot file
-                    WOT_FILE_FORMATER
-                        .to_file(
-                            &wot,
-                            current_blockstamp.to_string().as_bytes(),
-                            wot_path.as_path().to_str().unwrap(),
-                        )
-                        .expect("Fatal Error: Fail to write wotb in file !");
-                }
-            }
-            pb.inc();
+                    sender_wot_thread
+                        .send(SyncJobsMess::WotsDBsWriteQuery(req.clone(), Box::new(currency_params)))
+                        .expect("Fail to communicate with wot worker thread, please reset data & resync !")
+                })
+                .collect::<()>();
+            // Send currency requests to tx worker thread
+            currency_db_reqs
+                .iter()
+                .map(|req| {
+                    sender_tx_thread
+                        .send(SyncJobsMess::CurrencyDBsWriteQuery(req.clone()))
+                        .expect("Fail to communicate with tx worker thread, please reset data & resync !")
+                })
+                .collect::<()>();
             debug!("Success to apply block #{}", current_blockstamp.id.0);
             if current_blockstamp.id.0 >= target_blockstamp.id.0 {
                 if current_blockstamp == target_blockstamp {
@@ -327,184 +592,73 @@ pub fn sync_ts(
                 current_blockstamp.id.0 + 1
             )
         }
+        wait_begin = SystemTime::now();
     }
+    // Send end signal to workers threads
+    sender_blocks_thread
+        .send(SyncJobsMess::End())
+        .expect("Sync : Fail to send End signal to blocks worker !");
+    info!("Sync : send End signal to blocks job.");
+    sender_wot_thread
+        .send(SyncJobsMess::End())
+        .expect("Sync : Fail to send End signal to wot worker !");
+    info!("Sync : send End signal to wot job.");
+    sender_tx_thread
+        .send(SyncJobsMess::End())
+        .expect("Sync : Fail to send End signal to writer worker !");
+    info!("Sync : send End signal to tx job.");
 
-    // Copy memory db to real db
-    info!("Save blockchain DB in profile folder...");
-    fs::copy(
-        format!("/dev/shm/{}_durs.db", profile),
-        duniter_conf::get_db_path(profile, currency, false).as_path(),
-    ).expect("Fatal error : fail to copy DB in profile folder !");
+    // Save wot file
+    WOT_FILE_FORMATER
+        .to_file(
+            &wot,
+            current_blockstamp.to_string().as_bytes(),
+            wot_path.as_path().to_str().unwrap(),
+        )
+        .expect("Fatal Error: Fail to write wotb in file !");
 
-    // Remove memory db
-    fs::remove_file(format!("/dev/shm/{}_durs.db", profile))
-        .expect("Fatal error : fail to remove memory DB !");
+    let main_job_duration =
+        SystemTime::now().duration_since(main_job_begin).unwrap() - all_wait_duration;
+    info!(
+        "main_job_duration={},{:03} seconds.",
+        main_job_duration.as_secs(),
+        main_job_duration.subsec_nanos() / 1_000_000
+    );
+    info!(
+        "all_complete_block_duration={},{:03} seconds.",
+        all_complete_block_duration.as_secs(),
+        all_complete_block_duration.subsec_nanos() / 1_000_000
+    );
+    info!(
+        "all_apply_valid_block_duration={},{:03} seconds.",
+        all_apply_valid_block_duration.as_secs(),
+        all_apply_valid_block_duration.subsec_nanos() / 1_000_000
+    );
+
+    // Wait to receive a finish signal from each worker thread
+    let mut wait_jobs = *NB_SYNC_JOBS - 1;
+    while wait_jobs > 0 {
+        if let Ok(MessForSyncThread::ApplyFinish()) = recv_sync_thread.recv() {
+            wait_jobs -= 1;
+        } else {
+            thread::sleep(Duration::from_millis(50));
+        }
+    }
+    info!("All sync jobs finished.");
 
-    // Print sync duration
+    // Log sync duration
+    println!("certs_count={}", certs_count);
     let sync_duration = SystemTime::now().duration_since(sync_start_time).unwrap();
     println!(
-        "Sync {} blocks in {}m {}s.",
-        current_blockstamp.id.0,
-        sync_duration.as_secs() / 60,
-        sync_duration.as_secs() % 60,
+        "Sync {} blocks in {}.{:03} seconds.",
+        current_blockstamp.id.0 + 1,
+        sync_duration.as_secs(),
+        sync_duration.subsec_nanos() / 1_000_000,
+    );
+    info!(
+        "Sync {} blocks in {}.{:03} seconds.",
+        current_blockstamp.id.0 + 1,
+        sync_duration.as_secs(),
+        sync_duration.subsec_nanos() / 1_000_000,
     );
-}
-
-pub fn parse_ts_block(row: &[sqlite::Value]) -> NetworkBlock {
-    // Parse block
-    let current_header = BlockHeader {
-        number: BlockId(row[16].as_integer().expect("Fail to parse block number") as u32),
-        hash: BlockHash(
-            Hash::from_hex(row[0].as_string().expect("Fail to parse block hash"))
-                .expect("Fail to parse block hash (2)"),
-        ),
-        issuer: PubKey::Ed25519(
-            ed25519::PublicKey::from_base58(
-                row[4].as_string().expect("Fail to parse block issuer"),
-            ).expect("Failt to parse block issuer (2)"),
-        ),
-    };
-    let previous_header = if current_header.number.0 > 0 {
-        Some(BlockHeader {
-            number: BlockId(current_header.number.0 - 1),
-            hash: BlockHash(
-                Hash::from_hex(
-                    row[6]
-                        .as_string()
-                        .expect("Fail to parse block previous hash"),
-                ).expect("Fail to parse block previous hash (2)"),
-            ),
-            issuer: PubKey::Ed25519(
-                ed25519::PublicKey::from_base58(
-                    row[7]
-                        .as_string()
-                        .expect("Fail to parse previous block issuer"),
-                ).expect("Fail to parse previous block issuer (2)"),
-            ),
-        })
-    } else {
-        None
-    };
-    let currency = row[3].as_string().expect("Fail to parse currency");
-    let dividend = match row[12].as_integer() {
-        Some(dividend) => Some(dividend as usize),
-        None => None,
-    };
-    let json_identities: serde_json::Value = serde_json::from_str(
-        row[20].as_string().expect("Fail to parse block identities"),
-    ).expect("Fail to parse block identities (2)");
-    let mut identities = Vec::new();
-    for raw_idty in json_identities
-        .as_array()
-        .expect("Fail to parse block identities (3)")
-    {
-        identities
-            .push(parse_compact_identity(&currency, &raw_idty).expect("Fail to parse block idty"));
-    }
-    let json_txs: serde_json::Value = serde_json::from_str(
-        row[18].as_string().expect("Fail to parse block txs"),
-    ).expect("Fail to parse block txs (2)");
-    let mut transactions = Vec::new();
-    for json_tx in json_txs.as_array().expect("Fail to parse block txs (3)") {
-        transactions.push(parse_transaction(currency, &json_tx).expect("Fail to parse block tx"));
-    }
-    let previous_hash = match previous_header.clone() {
-        Some(previous_header_) => previous_header_.hash.0,
-        None => Hash::default(),
-    };
-    let previous_issuer = match previous_header {
-        Some(previous_header_) => Some(previous_header_.issuer),
-        None => None,
-    };
-    let excluded: serde_json::Value = serde_json::from_str(
-        row[25].as_string().expect("Fail to parse excluded"),
-    ).expect("Fail to parse excluded (2)");
-    let uncompleted_block_doc = BlockDocument {
-        nonce: row[17].as_integer().expect("Fail to parse nonce") as u64,
-        number: current_header.number,
-        pow_min: row[15].as_integer().expect("Fail to parse pow_min") as usize,
-        time: row[14].as_integer().expect("Fail to parse time") as u64,
-        median_time: row[11].as_integer().expect("Fail to parse median_time") as u64,
-        members_count: row[9].as_integer().expect("Fail to parse members_count") as usize,
-        monetary_mass: row[10]
-            .as_string()
-            .expect("Fail to parse monetary_mass")
-            .parse()
-            .expect("Fail to parse monetary_mass (2)"),
-        unit_base: row[13].as_integer().expect("Fail to parse unit_base") as usize,
-        issuers_count: row[28].as_integer().expect("Fail to parse issuers_count") as usize,
-        issuers_frame: row[26].as_integer().expect("Fail to parse issuers_frame") as isize,
-        issuers_frame_var: row[27]
-            .as_integer()
-            .expect("Fail to parse issuers_frame_var") as isize,
-        currency: String::from(currency),
-        issuers: vec![PubKey::Ed25519(
-            ed25519::PublicKey::from_base58(row[4].as_string().expect("Fail to parse issuer"))
-                .expect("Fail to parse issuer '2)"),
-        )],
-        signatures: vec![Sig::Ed25519(
-            ed25519::Signature::from_base64(row[2].as_string().expect("Fail to parse signature"))
-                .expect("Fail to parse signature (2)"),
-        )],
-        hash: Some(current_header.hash),
-        parameters: None,
-        previous_hash,
-        previous_issuer,
-        inner_hash: Some(
-            Hash::from_hex(row[1].as_string().expect("Fail to parse block inner_hash"))
-                .expect("Fail to parse block inner_hash (2)"),
-        ),
-        dividend,
-        identities,
-        joiners: duniter_dal::parsers::memberships::parse_memberships(
-            currency,
-            MembershipType::In(),
-            row[21].as_string().expect("Fail to parse joiners"),
-        ).expect("Fail to parse joiners (2)"),
-        actives: duniter_dal::parsers::memberships::parse_memberships(
-            currency,
-            MembershipType::In(),
-            row[22].as_string().expect("Fail to parse actives"),
-        ).expect("Fail to parse actives (2)"),
-        leavers: duniter_dal::parsers::memberships::parse_memberships(
-            currency,
-            MembershipType::In(),
-            row[23].as_string().expect("Fail to parse leavers"),
-        ).expect("Fail to parse leavers (2)"),
-        revoked: Vec::new(),
-        excluded: excluded
-            .as_array()
-            .expect("Fail to parse excluded (3)")
-            .to_vec()
-            .into_iter()
-            .map(|e| {
-                PubKey::Ed25519(
-                    ed25519::PublicKey::from_base58(
-                        e.as_str().expect("Fail to parse excluded (4)"),
-                    ).expect("Fail to parse excluded (5)"),
-                )
-            })
-            .collect(),
-        certifications: Vec::new(),
-        transactions,
-        inner_hash_and_nonce_str: String::new(),
-    };
-    let revoked: serde_json::Value = serde_json::from_str(
-        row[24].as_string().expect("Fail to parse revoked"),
-    ).expect("Fail to parse revoked (2)");
-    let certifications: serde_json::Value = serde_json::from_str(
-        row[19].as_string().expect("Fail to parse certifications"),
-    ).expect("Fail to parse certifications (2)");
-    // return NetworkBlock
-    NetworkBlock::V10(Box::new(NetworkBlockV10 {
-        uncompleted_block_doc,
-        revoked: revoked
-            .as_array()
-            .expect("Fail to parse revoked (3)")
-            .to_vec(),
-        certifications: certifications
-            .as_array()
-            .expect("Fail to parse certifications (3)")
-            .to_vec(),
-    }))
 }
diff --git a/blockchain/ts_parsers.rs b/blockchain/ts_parsers.rs
new file mode 100644
index 0000000000000000000000000000000000000000..883179693a036ac9959785dd5e41e6c194113ca3
--- /dev/null
+++ b/blockchain/ts_parsers.rs
@@ -0,0 +1,374 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+extern crate serde_json;
+extern crate sqlite;
+
+use duniter_crypto::keys::*;
+use duniter_documents::blockchain::v10::documents::block::{
+    BlockV10Parameters, CurrencyName, TxDocOrTxHash,
+};
+use duniter_documents::blockchain::v10::documents::identity::IdentityDocumentBuilder;
+use duniter_documents::blockchain::v10::documents::membership::*;
+use duniter_documents::blockchain::v10::documents::transaction::*;
+use duniter_documents::blockchain::v10::documents::*;
+use duniter_documents::blockchain::DocumentBuilder;
+use duniter_documents::{BlockHash, BlockId, Blockstamp, Hash};
+use duniter_network::{NetworkBlock, NetworkBlockV10};
+use std::str::FromStr;
+use sync::BlockHeader;
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+/// Error returned when parsing a compact membership fails
+pub enum MembershipParseError {
+    WrongFormat(),
+}
+
+/// Parse a block from duniter-ts database
+pub fn parse_ts_block(row: &[sqlite::Value]) -> NetworkBlock {
+    let current_header = BlockHeader {
+        number: BlockId(row[16].as_integer().expect("Fail to parse block number") as u32),
+        hash: BlockHash(
+            Hash::from_hex(row[0].as_string().expect("Fail to parse block hash"))
+                .expect("Fail to parse block hash (2)"),
+        ),
+        issuer: PubKey::Ed25519(
+            ed25519::PublicKey::from_base58(
+                row[4].as_string().expect("Fail to parse block issuer"),
+            ).expect("Fail to parse block issuer (2)"),
+        ),
+    };
+    let previous_header = if current_header.number.0 > 0 {
+        Some(BlockHeader {
+            number: BlockId(current_header.number.0 - 1),
+            hash: BlockHash(
+                Hash::from_hex(
+                    row[6]
+                        .as_string()
+                        .expect("Fail to parse block previous hash"),
+                ).expect("Fail to parse block previous hash (2)"),
+            ),
+            issuer: PubKey::Ed25519(
+                ed25519::PublicKey::from_base58(
+                    row[7]
+                        .as_string()
+                        .expect("Fail to parse previous block issuer"),
+                ).expect("Fail to parse previous block issuer (2)"),
+            ),
+        })
+    } else {
+        None
+    };
+    let currency = row[3].as_string().expect("Fail to parse currency");
+    let parameters = if let Some(params_str) = row[5].as_string() {
+        if let Ok(params) = BlockV10Parameters::from_str(params_str) {
+            Some(params)
+        } else {
+            None
+        }
+    } else {
+        None
+    };
+    let dividend = match row[12].as_integer() {
+        Some(dividend) => Some(dividend as usize),
+        None => None,
+    };
+    let json_identities: serde_json::Value = serde_json::from_str(
+        row[20].as_string().expect("Fail to parse block identities"),
+    ).expect("Fail to parse block identities (2)");
+    let mut identities = Vec::new();
+    for raw_idty in json_identities
+        .as_array()
+        .expect("Fail to parse block identities (3)")
+    {
+        identities
+            .push(parse_compact_identity(&currency, &raw_idty).expect("Fail to parse block idty"));
+    }
+    let json_txs: serde_json::Value = serde_json::from_str(
+        row[18].as_string().expect("Fail to parse block txs"),
+    ).expect("Fail to parse block txs (2)");
+    let mut transactions = Vec::new();
+    for json_tx in json_txs.as_array().expect("Fail to parse block txs (3)") {
+        transactions.push(TxDocOrTxHash::TxDoc(Box::new(
+            parse_transaction(currency, &json_tx).expect("Fail to parse block tx"),
+        )));
+    }
+    let previous_hash = match previous_header.clone() {
+        Some(previous_header_) => previous_header_.hash.0,
+        None => Hash::default(),
+    };
+    let previous_issuer = match previous_header {
+        Some(previous_header_) => Some(previous_header_.issuer),
+        None => None,
+    };
+    let excluded: serde_json::Value = serde_json::from_str(
+        row[25].as_string().expect("Fail to parse excluded"),
+    ).expect("Fail to parse excluded (2)");
+    let uncompleted_block_doc = BlockDocument {
+        nonce: row[17].as_integer().expect("Fail to parse nonce") as u64,
+        number: current_header.number,
+        pow_min: row[15].as_integer().expect("Fail to parse pow_min") as usize,
+        time: row[14].as_integer().expect("Fail to parse time") as u64,
+        median_time: row[11].as_integer().expect("Fail to parse median_time") as u64,
+        members_count: row[9].as_integer().expect("Fail to parse members_count") as usize,
+        monetary_mass: row[10]
+            .as_string()
+            .expect("Fail to parse monetary_mass")
+            .parse()
+            .expect("Fail to parse monetary_mass (2)"),
+        unit_base: row[13].as_integer().expect("Fail to parse unit_base") as usize,
+        issuers_count: row[28].as_integer().expect("Fail to parse issuers_count") as usize,
+        issuers_frame: row[26].as_integer().expect("Fail to parse issuers_frame") as isize,
+        issuers_frame_var: row[27]
+            .as_integer()
+            .expect("Fail to parse issuers_frame_var") as isize,
+        currency: CurrencyName(String::from(currency)),
+        issuers: vec![PubKey::Ed25519(
+            ed25519::PublicKey::from_base58(row[4].as_string().expect("Fail to parse issuer"))
+                .expect("Fail to parse issuer (2)"),
+        )],
+        signatures: vec![Sig::Ed25519(
+            ed25519::Signature::from_base64(row[2].as_string().expect("Fail to parse signature"))
+                .expect("Fail to parse signature (2)"),
+        )],
+        hash: Some(current_header.hash),
+        parameters,
+        previous_hash,
+        previous_issuer,
+        inner_hash: Some(
+            Hash::from_hex(row[1].as_string().expect("Fail to parse block inner_hash"))
+                .expect("Fail to parse block inner_hash (2)"),
+        ),
+        dividend,
+        identities,
+        joiners: parse_memberships(
+            currency,
+            MembershipType::In(),
+            row[21].as_string().expect("Fail to parse joiners"),
+        ).expect("Fail to parse joiners (2)"),
+        actives: parse_memberships(
+            currency,
+            MembershipType::In(),
+            row[22].as_string().expect("Fail to parse actives"),
+        ).expect("Fail to parse actives (2)"),
+        leavers: parse_memberships(
+            currency,
+            MembershipType::In(),
+            row[23].as_string().expect("Fail to parse leavers"),
+        ).expect("Fail to parse leavers (2)"),
+        revoked: Vec::new(),
+        excluded: excluded
+            .as_array()
+            .expect("Fail to parse excluded (3)")
+            .to_vec()
+            .into_iter()
+            .map(|e| {
+                PubKey::Ed25519(
+                    ed25519::PublicKey::from_base58(
+                        e.as_str().expect("Fail to parse excluded (4)"),
+                    ).expect("Fail to parse excluded (5)"),
+                )
+            })
+            .collect(),
+        certifications: Vec::new(),
+        transactions,
+        inner_hash_and_nonce_str: String::new(),
+    };
+    let revoked: serde_json::Value = serde_json::from_str(
+        row[24].as_string().expect("Fail to parse revoked"),
+    ).expect("Fail to parse revoked (2)");
+    let certifications: serde_json::Value = serde_json::from_str(
+        row[19].as_string().expect("Fail to parse certifications"),
+    ).expect("Fail to parse certifications (2)");
+    // return NetworkBlock
+    NetworkBlock::V10(Box::new(NetworkBlockV10 {
+        uncompleted_block_doc,
+        revoked: revoked
+            .as_array()
+            .expect("Fail to parse revoked (3)")
+            .to_vec(),
+        certifications: certifications
+            .as_array()
+            .expect("Fail to parse certifications (3)")
+            .to_vec(),
+    }))
+}
+
+/// Parse a compact identity
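+/// Expected compact format : "ISSUER:SIGNATURE:BLOCKSTAMP:USERNAME"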
+pub fn parse_compact_identity(
+    currency: &str,
+    source: &serde_json::Value,
+) -> Option<IdentityDocument> {
+    if source.is_string() {
+        let idty_elements: Vec<&str> = source.as_str().unwrap().split(':').collect();
+        let issuer = match ed25519::PublicKey::from_base58(idty_elements[0]) {
+            Ok(pubkey) => PubKey::Ed25519(pubkey),
+            Err(_) => return None,
+        };
+        let signature = match ed25519::Signature::from_base64(idty_elements[1]) {
+            Ok(sig) => Sig::Ed25519(sig),
+            Err(_) => return None,
+        };
+        let blockstamp = match Blockstamp::from_string(idty_elements[2]) {
+            Ok(blockstamp) => blockstamp,
+            Err(_) => return None,
+        };
+        let username = idty_elements[3];
+        let idty_doc_builder = IdentityDocumentBuilder {
+            currency,
+            username,
+            blockstamp: &blockstamp,
+            issuer: &issuer,
+        };
+        Some(idty_doc_builder.build_with_signature(vec![signature]))
+    } else {
+        None
+    }
+}
+
+/// Parse memberships documents from json string
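+/// Returns None if the json value is not an array.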
+pub fn parse_memberships(
+    currency: &str,
+    membership_type: MembershipType,
+    json_datas: &str,
+) -> Option<Vec<MembershipDocument>> {
+    let raw_memberships: serde_json::Value = serde_json::from_str(json_datas).unwrap();
+    if raw_memberships.is_array() {
+        return Some(
+            parse_memberships_from_json_value(
+                currency,
+                membership_type,
+                raw_memberships.as_array().unwrap(),
+            ).iter()
+                .map(|m| {
+                    m.clone()
+                        .expect("Fatal error : Fail to parse membership from local DB !")
+                })
+                .collect(),
+        );
+    }
+    None
+}
+
+/// Parse memberships documents from array of json values
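+/// Each membership is expected in the compact format
+/// "ISSUER:SIGNATURE:MEMBERSHIP_BLOCKSTAMP:IDENTITY_BLOCKSTAMP:IDENTITY_USERNAME"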
+pub fn parse_memberships_from_json_value(
+    currency: &str,
+    membership_type: MembershipType,
+    array_memberships: &[serde_json::Value],
+) -> Vec<Result<MembershipDocument, MembershipParseError>> {
+    //let memberships: Vec<MembershipDocument> = Vec::new();
+    array_memberships
+        .iter()
+        .map(|membership| {
+            let membership_datas: Vec<&str> = membership.as_str().unwrap().split(':').collect();
+            if membership_datas.len() == 5 {
+                let membership_doc_builder = MembershipDocumentBuilder {
+                    currency,
+                    issuer: &PubKey::Ed25519(
+                        ed25519::PublicKey::from_base58(membership_datas[0]).unwrap(),
+                    ),
+                    blockstamp: &Blockstamp::from_string(membership_datas[2]).unwrap(),
+                    membership: membership_type,
+                    identity_username: membership_datas[4],
+                    identity_blockstamp: &Blockstamp::from_string(membership_datas[3]).unwrap(),
+                };
+                let membership_sig =
+                    Sig::Ed25519(ed25519::Signature::from_base64(membership_datas[1]).unwrap());
+                Ok(membership_doc_builder.build_with_signature(vec![membership_sig]))
+            } else {
+                Err(MembershipParseError::WrongFormat())
+            }
+        })
+        .collect()
+}
+
+/// Parse transaction from json value
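+/// Returns None if any field is missing or malformed.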
+pub fn parse_transaction(
+    currency: &str,
+    source: &serde_json::Value,
+) -> Option<TransactionDocument> {
+    //debug!("transaction={:#?}", source);
+    let blockstamp = match Blockstamp::from_string(source.get("blockstamp")?.as_str()?) {
+        Ok(blockstamp) => blockstamp,
+        Err(_) => {
+            return None;
+        }
+    };
+    let locktime = source.get("locktime")?.as_i64()? as u64;
+    let issuers_array = source.get("issuers")?.as_array()?;
+    let mut issuers = Vec::with_capacity(issuers_array.len());
+    for issuer in issuers_array {
+        match ed25519::PublicKey::from_base58(issuer.as_str()?) {
+            Ok(pubkey) => issuers.push(PubKey::Ed25519(pubkey)),
+            Err(_) => {
+                return None;
+            }
+        }
+    }
+    let inputs_array = source.get("inputs")?.as_array()?;
+    let mut inputs = Vec::with_capacity(inputs_array.len());
+    for input in inputs_array {
+        let input_str = input.as_str()?;
+        match TransactionInput::parse_from_str(input_str) {
+            Ok(input) => inputs.push(input),
+            Err(_) => {
+                return None;
+            }
+        }
+    }
+    let unlocks_array = source.get("unlocks")?.as_array()?;
+    let mut unlocks = Vec::with_capacity(unlocks_array.len());
+    for unlock in unlocks_array {
+        match TransactionInputUnlocks::parse_from_str(unlock.as_str()?) {
+            Ok(unlock) => unlocks.push(unlock),
+            Err(_) => {
+                return None;
+            }
+        }
+    }
+    let outputs_array = source.get("outputs")?.as_array()?;
+    let mut outputs = Vec::with_capacity(outputs_array.len());
+    for output in outputs_array {
+        match TransactionOutput::parse_from_str(output.as_str()?) {
+            Ok(output) => outputs.push(output),
+            Err(_) => {
+                return None;
+            }
+        }
+    }
+    let signatures_array = source.get("signatures")?.as_array()?;
+    let mut signatures = Vec::with_capacity(signatures_array.len());
+    for signature in signatures_array {
+        match ed25519::Signature::from_base64(signature.as_str()?) {
+            Ok(signature) => signatures.push(Sig::Ed25519(signature)),
+            Err(_) => {
+                return None;
+            }
+        }
+    }
+    let comment = source.get("comment")?.as_str()?;
+
+    let tx_doc_builder = TransactionDocumentBuilder {
+        currency,
+        blockstamp: &blockstamp,
+        locktime: &locktime,
+        issuers: &issuers,
+        inputs: &inputs,
+        unlocks: &unlocks,
+        outputs: &outputs,
+        comment,
+    };
+    Some(tx_doc_builder.build_with_signature(signatures))
+}
diff --git a/conf/lib.rs b/conf/lib.rs
index 4d35e68c90fee12d2a2129c68560533d1c87a6c3..01dc55d1d5144191d31872139ceb6b9edf3f1dab 100644
--- a/conf/lib.rs
+++ b/conf/lib.rs
@@ -327,19 +327,13 @@ pub fn write_conf_file(file_path: &PathBuf, conf: &DuniterConf) -> Result<(), st
 }
 
 /// Returns the path to the database containing the blockchain
-pub fn get_db_path(profile: &str, currency: &Currency, sync: bool) -> PathBuf {
-    if sync {
-        let mut db_path = PathBuf::new();
-        let mut db_name = String::from(profile);
-        db_name.push_str("_durs.db");
-        db_path.push("/dev/shm");
-        db_path.push(db_name);
-        db_path
-    } else {
-        let mut db_path = datas_path(profile, &currency);
-        db_path.push("blockchain.db");
-        db_path
+pub fn get_blockchain_db_path(profile: &str, currency: &Currency) -> PathBuf {
+    let mut db_path = datas_path(profile, &currency);
+    db_path.push("blockchain/");
+    if !db_path.as_path().exists() {
+        fs::create_dir(db_path.as_path()).expect("Fail to create blockchain dir !");
     }
+    db_path
 }
 
 /// Returns the path to the binary file containing the state of the web of trust
diff --git a/core/cli/en.yml b/core/cli/en.yml
index 6c0c983b81520e9ba1a7287c7566371db856a989..ff97559fd4978303483a432af5d5b24a63795822 100644
--- a/core/cli/en.yml
+++ b/core/cli/en.yml
@@ -39,18 +39,29 @@ subcommands:
                 short: c
                 long: cautious
                 help: cautious mode (check all protocol rules, very slow)
-    - msync_ts:
-        about: synchronization in memory mode via a duniter-ts database
+    - dbex:
+        about: durs databases explorer
         version: "0.1.0"
         author: Elois L. <elois@duniter.org>
-        args:
-            - TS_PROFILE:
-                help: Set the ts profile to use
-                index: 1
-            - cautious:
-                short: c
-                long: cautious
-                help: cautious mode (check all protocol rules, very slow)
+        subcommands:
+            - member:
+                about: durs databases explorer (wot members datas)
+                version: "0.1.0"
+                author: Elois L. <elois@duniter.org>
+                args:
+                    - UID:
+                        help : choose member uid
+                        index: 1
+                        required: true
+            - balance:
+                about: durs databases explorer (balances datas)
+                version: "0.1.0"
+                author: Elois L. <elois@duniter.org>
+                args:
+                    - ADDRESS:
+                        help : public key or uid
+                        index: 1
+                        required: true
     - reset:
         about: reset data or conf or all
         version: "0.1.0"
diff --git a/core/lib.rs b/core/lib.rs
index 8b16719c0f7ba3b10ad18ba3a84b5efa7873028e..773a77f785b5e4a11870c7254188f384cdbb7b53 100644
--- a/core/lib.rs
+++ b/core/lib.rs
@@ -38,9 +38,8 @@ extern crate simplelog;
 extern crate sqlite;
 extern crate threadpool;
 
-use self::threadpool::ThreadPool;
 use clap::{App, ArgMatches};
-use duniter_blockchain::BlockchainModule;
+use duniter_blockchain::{BlockchainModule, DBExQuery, DBExTxQuery, DBExWotQuery};
 use duniter_conf::DuniterKeyPairs;
 use duniter_message::DuniterMessage;
 use duniter_module::*;
@@ -52,6 +51,7 @@ use std::fs::{File, OpenOptions};
 use std::sync::mpsc;
 use std::thread;
 use std::time::Duration;
+use threadpool::ThreadPool;
 
 #[derive(Debug)]
 /// Duniter Core Datas
@@ -124,6 +124,21 @@ impl DuniterCore {
             let ts_profile = matches.value_of("TS_PROFILE").unwrap_or("duniter_default");
             sync_ts(&conf, ts_profile, matches.is_present("cautious"));
             None
+        } else if let Some(matches) = cli_args.subcommand_matches("dbex") {
+            if let Some(member_matches) = matches.subcommand_matches("member") {
+                let uid = member_matches.value_of("UID").unwrap_or("");
+                dbex(
+                    &conf,
+                    &DBExQuery::WotQuery(DBExWotQuery::MemberDatas(String::from(uid))),
+                );
+            } else if let Some(balance_matches) = matches.subcommand_matches("balance") {
+                let address = balance_matches.value_of("ADDRESS").unwrap_or("");
+                dbex(
+                    &conf,
+                    &DBExQuery::TxQuery(DBExTxQuery::Balance(String::from(address))),
+                );
+            }
+            None
         } else if let Some(matches) = cli_args.subcommand_matches("reset") {
             let mut profile_path = match env::home_dir() {
                 Some(path) => path,
@@ -135,7 +150,10 @@ impl DuniterCore {
             if !profile_path.as_path().exists() {
                 panic!(format!("Error : {} profile don't exist !", profile));
             }
-            match matches.value_of("DATAS_TYPE").unwrap() {
+            match matches
+                .value_of("DATAS_TYPE")
+                .expect("cli param DATAS_TYPE is missing !")
+            {
                 "data" => {
                     let mut currency_datas_path = profile_path.clone();
                     currency_datas_path.push("g1");
@@ -189,7 +207,6 @@ impl DuniterCore {
             let mut blockchain_module = BlockchainModule::load_blockchain_conf(
                 &self.conf,
                 RequiredKeysContent::MemberKeyPair(None),
-                false,
             );
             info!("Success to load Blockchain module.");
 
@@ -265,7 +282,7 @@ pub fn start(
         let mut modules_senders: Vec<mpsc::Sender<DuniterMessage>> = Vec::new();
         let mut modules_count_expected = None;
         while modules_count_expected.is_none()
-            || modules_senders.len() < modules_count_expected.unwrap() + 1
+            || modules_senders.len() < modules_count_expected.expect("safe unwrap") + 1
         {
             match main_receiver.recv_timeout(Duration::from_secs(20)) {
                 Ok(mess) => {
@@ -363,7 +380,13 @@ pub fn start(
 /// Launch synchronisation from a duniter-ts database
 pub fn sync_ts(conf: &DuniterConf, ts_profile: &str, cautious: bool) {
     // Launch sync-ts
-    BlockchainModule::sync_ts(&conf, ts_profile, cautious);
+    BlockchainModule::sync_ts(conf, ts_profile, cautious);
+}
+
+/// Launch databases explorer
+pub fn dbex(conf: &DuniterConf, query: &DBExQuery) {
+    // Launch databases explorer
+    BlockchainModule::dbex(conf, query);
 }
 
 /// Initialize logger
@@ -418,20 +441,17 @@ pub fn init_logger(profile: &str, soft_name: &'static str, cli_args: &ArgMatches
         ).expect("Fatal error : fail to create log file path !");
     }
 
-    CombinedLogger::init(vec![
-        TermLogger::new(LevelFilter::Error, logger_config).unwrap(),
-        WriteLogger::new(
-            log_level.to_level_filter(),
-            logger_config,
-            OpenOptions::new()
-                .write(true)
-                .append(true)
-                .open(
-                    log_file_path
-                        .to_str()
-                        .expect("Fatal error : fail to get log file path !"),
-                )
-                .expect("Fatal error : fail to open log file !"),
-        ),
-    ]).expect("Fatal error : fail to init logger !");
+    CombinedLogger::init(vec![WriteLogger::new(
+        log_level.to_level_filter(),
+        logger_config,
+        OpenOptions::new()
+            .write(true)
+            .append(true)
+            .open(
+                log_file_path
+                    .to_str()
+                    .expect("Fatal error : fail to get log file path !"),
+            )
+            .expect("Fatal error : fail to open log file !"),
+    )]).expect("Fatal error : fail to init logger !");
 }
diff --git a/crypto/Cargo.toml b/crypto/Cargo.toml
index a2979d6b5ccdba927fbf46dfcb005ceda8593c13..c313880c7a23fabbb929516a49ddb08014aba16d 100644
--- a/crypto/Cargo.toml
+++ b/crypto/Cargo.toml
@@ -16,6 +16,7 @@ base58 = "0.1.0"
 base64 = "0.9.1"
 rust-crypto = "0.2.36"
 serde = "1.0.57"
+serde_derive = "1.0.57"
 
 [features]
 # Treat warnings as a build error.
diff --git a/crypto/keys/ed25519.rs b/crypto/keys/ed25519.rs
index 89fed477caccfd37462a97d47df7c4b28d60cb0b..ef762c258616e224de2b7fcce3d6d85f5fa59dad 100644
--- a/crypto/keys/ed25519.rs
+++ b/crypto/keys/ed25519.rs
@@ -21,20 +21,17 @@
 
 extern crate serde;
 
-use std::collections::hash_map::DefaultHasher;
-use std::fmt::Debug;
-use std::fmt::Display;
-use std::fmt::Error;
-use std::fmt::Formatter;
-use std::hash::{Hash, Hasher};
-
-use self::serde::ser::{Serialize, Serializer};
+use self::serde::de::{Deserialize, Deserializer, SeqAccess, Visitor};
+use self::serde::ser::{Serialize, SerializeSeq, Serializer};
+use super::{BaseConvertionError, PrivateKey as PrivateKeyMethods, PublicKey as PublicKeyMethods};
 use base58::{FromBase58, FromBase58Error, ToBase58};
 use base64;
 use base64::DecodeError;
 use crypto;
-
-use super::{BaseConvertionError, PrivateKey as PrivateKeyMethods, PublicKey as PublicKeyMethods};
+use std::collections::hash_map::DefaultHasher;
+use std::fmt::{self, Debug, Display, Error, Formatter};
+use std::hash::{Hash, Hasher};
+use std::marker::PhantomData;
 
 /// Store a ed25519 signature.
 #[derive(Clone, Copy)]
@@ -47,6 +44,73 @@ impl Hash for Signature {
     }
 }
 
+impl Serialize for Signature {
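+    // A signature is serialized by hand as a sequence of its 64 raw bytes,
+    // because serde does not provide impls for fixed-size arrays this large.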
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: Serializer,
+    {
+        let mut seq = serializer.serialize_seq(Some(64))?;
+        for e in self.0.iter() {
+            seq.serialize_element(e)?;
+        }
+        seq.end()
+    }
+}
+
+struct SignatureVisitor {
+    marker: PhantomData<fn() -> Signature>,
+}
+
+impl SignatureVisitor {
+    fn new() -> Self {
+        SignatureVisitor {
+            marker: PhantomData,
+        }
+    }
+}
+
+impl<'de> Visitor<'de> for SignatureVisitor {
+    // The type that our Visitor is going to produce.
+    type Value = Signature;
+
+    // Format a message stating what data this Visitor expects to receive.
+    fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+        formatter.write_str("Signature datas")
+    }
+
+    // Deserialize a Signature from the sequence of bytes provided by the
+    // Deserializer. The SeqAccess input is a callback provided by the
+    // Deserializer that lets us read each element of the sequence.
+    fn visit_seq<M>(self, mut access: M) -> Result<Self::Value, M::Error>
+    where
+        M: SeqAccess<'de>,
+    {
+        let mut bytes = Vec::with_capacity(access.size_hint().unwrap_or(0));
+
+        // While there are elements remaining in the input, push them
+        // into our byte vector.
+        while let Some(value) = access.next_element()? {
+            bytes.push(value);
+        }
+
+        // A valid signature is exactly 64 bytes long (copy_from_slice panics otherwise).
+        let mut sig_datas: [u8; 64] = [0; 64];
+        sig_datas.copy_from_slice(&bytes[0..64]);
+        Ok(Signature(sig_datas))
+    }
+}
+
+// This is the trait that informs Serde how to deserialize Signature.
+impl<'de> Deserialize<'de> for Signature {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        // Instantiate our Visitor and ask the Deserializer to drive
+        // it over the input data, resulting in an instance of Signature.
+        deserializer.deserialize_seq(SignatureVisitor::new())
+    }
+}
+
 impl super::Signature for Signature {
     fn from_base64(base64_data: &str) -> Result<Signature, BaseConvertionError> {
         match base64::decode(base64_data) {
@@ -104,7 +168,7 @@ impl Eq for Signature {}
 /// Can be generated with [`KeyPairGenerator`].
 ///
 /// [`KeyPairGenerator`]: struct.KeyPairGenerator.html
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, Deserialize, PartialEq, Eq, Hash, Serialize)]
 pub struct PublicKey(pub [u8; 32]);
 
 impl ToBase58 for PublicKey {
@@ -126,15 +190,6 @@ impl Debug for PublicKey {
     }
 }
 
-impl Serialize for PublicKey {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.serialize_str(&format!("{}", self))
-    }
-}
-
 impl super::PublicKey for PublicKey {
     type Signature = Signature;
 
diff --git a/crypto/keys/mod.rs b/crypto/keys/mod.rs
index 9adcceaa6f5451f29b04bd46ccf630515e7b5876..1bd615e17ff431f1fb7ab0fd6963f7ea5459a0d8 100644
--- a/crypto/keys/mod.rs
+++ b/crypto/keys/mod.rs
@@ -48,7 +48,6 @@
 
 extern crate serde;
 
-use self::serde::ser::{Serialize, Serializer};
 use base58::ToBase58;
 use std::fmt::Debug;
 use std::fmt::Display;
@@ -109,7 +108,7 @@ pub trait Signature: Clone + Display + Debug + PartialEq + Eq + Hash {
 }
 
 /// Store a cryptographic signature.
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
 pub enum Sig {
     /// Store a ed25519 Signature
     Ed25519(ed25519::Signature),
@@ -171,7 +170,7 @@ pub trait PublicKey: Clone + Display + Debug + PartialEq + Eq + Hash + ToBase58
 }
 
 /// Store a cryptographic public key.
-#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
+#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
 pub enum PubKey {
     /// Store a ed25519 public key.
     Ed25519(ed25519::PublicKey),
@@ -203,15 +202,6 @@ impl Display for PubKey {
     }
 }
 
-impl Serialize for PubKey {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.serialize_str(&format!("{}", self))
-    }
-}
-
 impl PublicKey for PubKey {
     type Signature = Sig;
 
diff --git a/crypto/lib.rs b/crypto/lib.rs
index 2b9b8d18520c0074ef9974bc54fd4d4f6839f224..5f43cce266558af94012e3305a84eed8dc2eb16d 100644
--- a/crypto/lib.rs
+++ b/crypto/lib.rs
@@ -22,6 +22,9 @@
     unused_qualifications
 )]
 
+#[macro_use]
+extern crate serde_derive;
+
 extern crate base58;
 extern crate base64;
 extern crate crypto;
diff --git a/dal/Cargo.toml b/dal/Cargo.toml
index 1d2e4b8a2e934dbc0ab16a5de45bed5d16a65526..626ac9d121774507169f2b131a424eae9f684be6 100644
--- a/dal/Cargo.toml
+++ b/dal/Cargo.toml
@@ -17,9 +17,9 @@ duniter-wotb = { path = "../wotb" }
 lazy_static = "1.0.0"
 log = "0.4.1"
 rand = "0.4.2"
+rustbreak = {version = "2.0.0-rc2", features = ["bin_enc"]}
 rust-crypto = "0.2.36"
 regex = "1.0.0"
-sqlite = "0.23.9"
 serde = "1.0.57"
 serde_derive = "1.0.57"
 serde_json = "1.0.17"
diff --git a/dal/balance.rs b/dal/balance.rs
new file mode 100644
index 0000000000000000000000000000000000000000..848b480195cf88897d6170324f7b39e6c6adfafa
--- /dev/null
+++ b/dal/balance.rs
@@ -0,0 +1,30 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use sources::*;
+use *;
+
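+/// Get the current balance of an address (None if the address is unknown)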
+pub fn get_address_balance(
+    balances_db: &BinFileDB<BalancesV10Datas>,
+    address: &TransactionOutputConditionGroup,
+) -> Result<Option<SourceAmount>, DALError> {
+    Ok(balances_db.read(|db| {
+        if let Some(balance_and_utxos) = db.get(address) {
+            Some(balance_and_utxos.0)
+        } else {
+            None
+        }
+    })?)
+}
diff --git a/dal/block.rs b/dal/block.rs
index d7a623bd15ffa29dd619acfc73b285e737384005..c9efb14adcc2da9327b03db892f7dd3ff96047fd 100644
--- a/dal/block.rs
+++ b/dal/block.rs
@@ -1,84 +1,25 @@
-extern crate duniter_crypto;
-extern crate duniter_documents;
-extern crate duniter_wotb;
+extern crate rustbreak;
 extern crate serde;
 extern crate serde_json;
-extern crate sqlite;
 
-use self::duniter_crypto::keys::*;
-use self::duniter_documents::blockchain::v10::documents::identity::IdentityDocument;
-use self::duniter_documents::blockchain::v10::documents::membership::MembershipType;
-use self::duniter_documents::blockchain::v10::documents::BlockDocument;
-use self::duniter_documents::blockchain::Document;
-use self::duniter_documents::{BlockHash, BlockId, Blockstamp, Hash};
-use self::duniter_wotb::NodeId;
 use super::constants::MAX_FORKS;
-use super::parsers::certifications::parse_certifications;
-use super::parsers::excluded::parse_exclusions;
-use super::parsers::identities::parse_identities;
-use super::parsers::memberships::parse_memberships;
-use super::parsers::revoked::parse_revocations;
-use super::parsers::transactions::parse_compact_transactions;
-use super::{DuniterDB, ForkState};
+use duniter_crypto::keys::*;
+use duniter_documents::blockchain::v10::documents::BlockDocument;
+use duniter_documents::blockchain::Document;
+use duniter_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
+use duniter_wotb::NodeId;
 use std::collections::HashMap;
+use *;
 
-pub fn blockstamp_to_timestamp(blockstamp: &Blockstamp, db: &DuniterDB) -> Option<u64> {
-    if blockstamp.id.0 == 0 {
-        return Some(1_488_987_127);
-    }
-    let mut cursor = db
-        .0
-        .prepare("SELECT median_time FROM blocks WHERE number=? AND hash=? LIMIT 1;")
-        .expect("convert blockstamp to timestamp failure at step 0 !")
-        .cursor();
-
-    cursor
-        .bind(&[
-            sqlite::Value::Integer(i64::from(blockstamp.id.0)),
-            sqlite::Value::String(blockstamp.hash.0.to_hex()),
-        ])
-        .expect("convert blockstamp to timestamp failure at step 1 !");
-
-    if let Some(row) = cursor
-        .next()
-        .expect("convert blockstamp to timestamp failure at step 2 !")
-    {
-        return Some(
-            row[0]
-                .as_integer()
-                .expect("convert blockstamp to timestamp failure at step 3 !") as u64,
-        );
-    }
-    None
-}
-
-#[derive(Debug, Copy, Clone)]
-pub enum WotEvent {
-    AddNode(PubKey, NodeId),
-    RemNode(PubKey),
-    AddLink(NodeId, NodeId),
-    RemLink(NodeId, NodeId),
-    EnableNode(NodeId),
-    DisableNode(NodeId),
-}
-
-#[derive(Debug, Clone)]
-pub struct BlockContext {
-    pub blockstamp: Blockstamp,
-    pub wot_events: Vec<WotEvent>,
-}
-
-#[derive(Debug, Clone)]
-pub struct BlockContextV2 {
-    pub blockstamp: Blockstamp,
-    pub wot_events: Vec<WotEvent>,
-}
-
-#[derive(Debug, Clone)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct DALBlock {
-    pub fork: usize,
+    pub fork_id: ForkId,
     pub isolate: bool,
     pub block: BlockDocument,
+    /// List of certifications that expire in this block.
+    /// Warning : BlockId contain the emission block, not the written block !
+    /// HashMap<(Source, Target), CreatedBlockId>
+    pub expire_certs: Option<HashMap<(NodeId, NodeId), BlockId>>,
 }
 
 impl DALBlock {
@@ -87,400 +28,334 @@ impl DALBlock {
     }
 }
 
-pub fn get_forks(db: &DuniterDB) -> Vec<ForkState> {
-    let mut forks = Vec::new();
-    forks.push(ForkState::Full());
-    for fork in 1..*MAX_FORKS {
-        let mut cursor = db
-            .0
-            .prepare("SELECT isolate FROM blocks WHERE fork=? ORDER BY median_time DESC LIMIT 1;")
-            .expect("Fail to get block !")
-            .cursor();
-
-        cursor
-            .bind(&[sqlite::Value::Integer(fork as i64)])
-            .expect("Fail to get block !");
-
-        if let Some(row) = cursor.next().unwrap() {
-            if row[0].as_integer().unwrap() == 0 {
-                forks.push(ForkState::Full())
+pub fn get_forks(
+    forks_db: &BinFileDB<ForksV10Datas>,
+    current_blockstamp: Blockstamp,
+) -> Result<Vec<ForkStatus>, DALError> {
+    Ok(forks_db.read(|forks_db| {
+        let blockchain_meta_datas = forks_db
+            .get(&ForkId(0))
+            .expect("Fatal error : ForksV10DB not contain local chain !");
+        let mut forks = Vec::new();
+        for fork_id in 1..*MAX_FORKS {
+            if let Some(fork_meta_datas) = forks_db.get(&ForkId(fork_id)) {
+                if fork_meta_datas.is_empty() {
+                    forks.push(ForkStatus::Free());
+                } else if fork_meta_datas.contains_key(&current_blockstamp) {
+                    forks.push(ForkStatus::Stackable(ForkAlreadyCheck(false)));
+                } else {
+                    let roll_back_max = if current_blockstamp.id.0 > 101 {
+                        current_blockstamp.id.0 - 101
+                    } else {
+                        0
+                    };
+                    let mut max_common_block_id = None;
+                    let mut too_old = false;
+                    for previous_blockstamp in fork_meta_datas.keys() {
+                        if blockchain_meta_datas.contains_key(&previous_blockstamp) {
+                            if previous_blockstamp.id.0 >= roll_back_max {
+                                if previous_blockstamp.id.0
+                                    >= max_common_block_id.unwrap_or(BlockId(0)).0
+                                {
+                                    max_common_block_id = Some(previous_blockstamp.id);
+                                    too_old = false;
+                                }
+                            } else {
+                                too_old = true;
+                            }
+                        }
+                    }
+                    if too_old {
+                        forks.push(ForkStatus::TooOld(ForkAlreadyCheck(false)));
+                    } else if let Some(max_common_block_id) = max_common_block_id {
+                        forks.push(ForkStatus::RollBack(
+                            ForkAlreadyCheck(false),
+                            max_common_block_id,
+                        ));
+                    } else {
+                        forks.push(ForkStatus::Isolate());
+                    }
+                }
             } else {
-                forks.push(ForkState::Isolate())
+                forks.push(ForkStatus::Free());
             }
-        } else {
-            forks.push(ForkState::Free());
         }
-    }
-    forks
+        forks
+    })?)
 }
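
A minimal sketch of the classification rule used in `get_forks` above, with plain `u32` block numbers standing in for `Blockstamp`/`BlockId` and an illustrative `ForkClass` enum in place of the crate's `ForkStatus` (a simplification for reading purposes, not project code; `ForkAlreadyCheck` is left out):

    // Illustrative only: mirrors the Free / Stackable / RollBack / TooOld / Isolate decision.
    #[derive(Debug, PartialEq)]
    enum ForkClass {
        Free,
        Stackable,
        RollBack(u32), // deepest common block inside the 101-block roll-back window
        TooOld,
        Isolate,
    }

    fn classify_fork(current: u32, fork_parents: &[u32], local_chain: &[u32]) -> ForkClass {
        if fork_parents.is_empty() {
            return ForkClass::Free;
        }
        if fork_parents.contains(&current) {
            return ForkClass::Stackable;
        }
        let roll_back_max = current.saturating_sub(101);
        let (mut best, mut too_old) = (None, false);
        for &parent in fork_parents {
            if !local_chain.contains(&parent) {
                continue; // no common ancestry through this entry
            }
            if parent >= roll_back_max {
                if parent >= best.unwrap_or(0) {
                    best = Some(parent);
                    too_old = false;
                }
            } else {
                too_old = true;
            }
        }
        match (too_old, best) {
            (true, _) => ForkClass::TooOld,
            (false, Some(id)) => ForkClass::RollBack(id),
            (false, None) => ForkClass::Isolate,
        }
    }
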
 
-impl DALBlock {
-    pub fn unisolate_fork(db: &DuniterDB, fork: usize) {
-        db.0
-            .execute(format!("UPDATE blocks SET isolate=0 WHERE fork={};", fork))
-            .unwrap();
-    }
-    pub fn delete_fork(db: &DuniterDB, fork: usize) {
-        db.0
-            .execute(format!("DELETE FROM blocks WHERE fork={};", fork))
-            .unwrap();
-    }
-    pub fn get_block_fork(db: &DuniterDB, blockstamp: &Blockstamp) -> Option<usize> {
-        let mut cursor = db
-            .0
-            .prepare("SELECT fork FROM blocks WHERE number=? AND hash=?;")
-            .expect("Fail to get block !")
-            .cursor();
-
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(i64::from(blockstamp.id.0)),
-                sqlite::Value::String(blockstamp.hash.0.to_string()),
-            ])
-            .expect("Fail to get block !");
-
-        if let Some(row) = cursor.next().unwrap() {
-            Some(row[0].as_integer().unwrap() as usize)
+pub fn get_current_blockstamp(blocks_db: &BlocksV10DBs) -> Result<Option<Blockstamp>, DALError> {
+    let current_previous_blockstamp = blocks_db.blockchain_db.read(|db| {
+        let blockchain_len = db.len() as u32;
+        if blockchain_len == 0 {
+            None
+        } else if let Some(dal_block) = db.get(&BlockId(blockchain_len - 1)) {
+            if blockchain_len > 1 {
+                Some(Blockstamp {
+                    id: BlockId(blockchain_len - 2),
+                    hash: BlockHash(dal_block.block.previous_hash),
+                })
+            } else {
+                Some(Blockstamp::default())
+            }
         } else {
             None
         }
+    })?;
+    if current_previous_blockstamp.is_none() {
+        return Ok(None);
     }
-    pub fn get_block_hash(db: &DuniterDB, block_number: &BlockId) -> Option<BlockHash> {
-        let mut cursor = db
-            .0
-            .prepare("SELECT hash FROM blocks WHERE number=? AND fork=0;")
-            .expect("Fail to get block !")
-            .cursor();
-
-        cursor
-            .bind(&[sqlite::Value::Integer(i64::from(block_number.0))])
-            .expect("Fail to get block !");
+    let current_previous_blockstamp = current_previous_blockstamp.expect("safe unwrap");
+    if let Some(current_block_hash) = blocks_db.forks_db.read(|db| {
+        let blockchain_meta_datas = db
+            .get(&ForkId(0))
+            .expect("Fatal error: ForksDB is incoherent, please reset data and resync!");
+        blockchain_meta_datas
+            .get(&current_previous_blockstamp)
+            .cloned()
+    })? {
+        Ok(Some(Blockstamp {
+            id: BlockId(current_previous_blockstamp.id.0 + 1),
+            hash: current_block_hash,
+        }))
+    } else {
+        Ok(None)
+    }
+}
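
Read concretely, `get_current_blockstamp` rebuilds the head blockstamp from the head block's `previous_hash`: the pair (head id − 1, previous_hash) is looked up in the fork-0 metadata to recover the head's own hash. A toy version with `String` hashes and `u32` ids (the genesis special case above is collapsed here; names are illustrative only):

    use std::collections::HashMap;

    // fork 0 metadata: (previous block id, previous hash) -> block hash
    fn toy_current_blockstamp(
        chain_len: u32,
        head_previous_hash: &str,
        fork0: &HashMap<(u32, String), String>,
    ) -> Option<(u32, String)> {
        if chain_len == 0 {
            return None; // empty local blockchain
        }
        // Previous blockstamp of the head block (genesis case reduced to (0, "")).
        let previous = if chain_len > 1 {
            (chain_len - 2, head_previous_hash.to_string())
        } else {
            (0, String::new())
        };
        fork0
            .get(&previous)
            .map(|head_hash| (chain_len - 1, head_hash.clone()))
    }
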
 
-        if let Some(row) = cursor.next().unwrap() {
-            Some(BlockHash(
-                Hash::from_hex(row[0].as_string().unwrap()).unwrap(),
-            ))
+/// Get block fork id
+pub fn get_fork_id_of_blockstamp(
+    forks_blocks_db: &BinFileDB<ForksBlocksV10Datas>,
+    blockstamp: &Blockstamp,
+) -> Result<Option<ForkId>, DALError> {
+    Ok(forks_blocks_db.read(|db| {
+        if let Some(dal_block) = db.get(blockstamp) {
+            Some(dal_block.fork_id)
         } else {
             None
         }
-    }
+    })?)
+}
 
-    pub fn get_blocks_hashs_all_forks(
-        db: &DuniterDB,
+impl DALBlock {
+    pub fn delete_fork(
+        forks_db: &BinFileDB<ForksV10Datas>,
+        forks_blocks_db: &BinFileDB<ForksBlocksV10Datas>,
+        fork_id: ForkId,
+    ) -> Result<(), DALError> {
+        let fork_meta_datas = forks_db
+            .read(|forks_db| forks_db.get(&fork_id).cloned())?
+            .expect("Fatal error: tried to delete an unknown fork");
+        // Remove fork blocks
+        forks_blocks_db.write(|db| {
+            for (previous_blockstamp, hash) in fork_meta_datas {
+                let blockstamp = Blockstamp {
+                    id: BlockId(previous_blockstamp.id.0 + 1),
+                    hash,
+                };
+                db.remove(&blockstamp);
+            }
+        })?;
+        // Remove fork meta datas
+        forks_db.write_safe(|db| {
+            db.remove(&fork_id);
+        })?;
+        Ok(())
+    }
+    pub fn assign_fork_to_new_block(
+        forks_db: &BinFileDB<ForksV10Datas>,
+        new_block_previous_blockstamp: &PreviousBlockstamp,
+        new_block_hash: &BlockHash,
+    ) -> Result<(Option<ForkId>, bool), DALError> {
+        let forks_meta_datas = forks_db.read(|forks_db| forks_db.clone())?;
+        // Try to assign block to an existing fork
+        for (fork_id, fork_meta_datas) in &forks_meta_datas {
+            let mut fork_datas = fork_meta_datas.clone();
+            for (previous_blockstamp, hash) in fork_meta_datas {
+                let blockstamp = Blockstamp {
+                    id: BlockId(previous_blockstamp.id.0 + 1),
+                    hash: *hash,
+                };
+                if *new_block_previous_blockstamp == blockstamp {
+                    fork_datas.insert(*new_block_previous_blockstamp, *new_block_hash);
+                    forks_db.write(|forks_db| {
+                        forks_db.insert(*fork_id, fork_datas);
+                    })?;
+                    return Ok((Some(*fork_id), false));
+                }
+            }
+        }
+        // Find an available fork
+        let mut new_fork_id = ForkId(0);
+        for f in 0..*MAX_FORKS {
+            if !forks_meta_datas.contains_key(&ForkId(f)) {
+                new_fork_id = ForkId(f);
+                break;
+            }
+        }
+        if new_fork_id.0 == 0 {
+            if forks_meta_datas.len() >= *MAX_FORKS {
+                return Ok((None, false));
+            } else {
+                new_fork_id = ForkId(forks_meta_datas.len());
+            }
+        }
+        // Create new fork
+        let mut new_fork = HashMap::new();
+        new_fork.insert(*new_block_previous_blockstamp, *new_block_hash);
+        forks_db.write(|forks_db| {
+            forks_db.insert(new_fork_id, new_fork);
+        })?;
+        Ok((Some(new_fork_id), true))
+    }
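
A compact sketch of the assignment strategy above (extend a fork that already contains the new block's parent, otherwise take the first free slot), with fork contents reduced to sets of block numbers; the names and types here are illustrative, not the crate's:

    use std::collections::{HashMap, HashSet};

    const MAX_FORKS: usize = 100;

    // forks: fork id -> block numbers the fork already holds.
    fn toy_assign_fork(forks: &mut HashMap<usize, HashSet<u32>>, parent: u32) -> Option<usize> {
        // 1) Extend an existing fork that already contains the new block's parent.
        for (fork_id, blocks) in forks.iter_mut() {
            if blocks.contains(&parent) {
                blocks.insert(parent + 1);
                return Some(*fork_id);
            }
        }
        // 2) Otherwise open the first free fork slot (fork 0 is the local chain).
        let free_id = (1..MAX_FORKS).find(|id| !forks.contains_key(id))?;
        forks.insert(free_id, std::iter::once(parent + 1).collect());
        Some(free_id)
    }
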
+    pub fn get_block_fork(
+        forks_db: &BinFileDB<ForksV10Datas>,
+        previous_blockstamp: &PreviousBlockstamp,
+    ) -> Result<Option<ForkId>, DALError> {
+        Ok(forks_db.read(|forks_db| {
+            for (fork_id, fork_meta_datas) in forks_db {
+                if fork_meta_datas.contains_key(&previous_blockstamp) {
+                    return Some(*fork_id);
+                }
+            }
+            None
+        })?)
+    }
+    pub fn get_block_hash(
+        db: &BinFileDB<LocalBlockchainV10Datas>,
         block_number: &BlockId,
-    ) -> (Vec<BlockHash>, Vec<Hash>) {
-        let mut cursor = db
-            .0
-            .prepare("SELECT hash, previous_hash FROM blocks WHERE number=?;")
-            .expect("Fail to get block !")
-            .cursor();
-
-        cursor
-            .bind(&[sqlite::Value::Integer(i64::from(block_number.0))])
-            .expect("Fail to get block !");
+    ) -> Result<Option<BlockHash>, DALError> {
+        Ok(db.read(|db| {
+            if let Some(dal_block) = db.get(block_number) {
+                dal_block.block.hash
+            } else {
+                None
+            }
+        })?)
+    }
 
-        let mut hashs = Vec::new();
-        let mut previous_hashs = Vec::new();
-        while let Some(row) = cursor.next().unwrap() {
-            hashs.push(BlockHash(
-                Hash::from_hex(row[0].as_string().unwrap()).unwrap(),
-            ));
-            previous_hashs.push(Hash::from_hex(row[1].as_string().unwrap()).unwrap());
+    pub fn already_have_block(
+        blockchain_db: &BinFileDB<LocalBlockchainV10Datas>,
+        forks_blocks_db: &BinFileDB<ForksBlocksV10Datas>,
+        blockstamp: Blockstamp,
+    ) -> Result<bool, DALError> {
+        let already_have_block = forks_blocks_db.read(|db| db.contains_key(&blockstamp))?;
+        if !already_have_block {
+            Ok(blockchain_db.read(|db| {
+                if let Some(dal_block) = db.get(&blockstamp.id) {
+                    if dal_block.block.hash.unwrap_or_default() == blockstamp.hash {
+                        return true;
+                    }
+                }
+                false
+            })?)
+        } else {
+            Ok(true)
         }
-        (hashs, previous_hashs)
     }
 
     pub fn get_stackables_blocks(
-        currency: &str,
-        db: &DuniterDB,
+        forks_db: &BinFileDB<ForksV10Datas>,
+        forks_blocks_db: &BinFileDB<ForksBlocksV10Datas>,
         current_blockstamp: &Blockstamp,
-    ) -> Vec<DALBlock> {
+    ) -> Result<Vec<DALBlock>, DALError> {
         debug!("get_stackables_blocks() after {}", current_blockstamp);
-        let mut stackables_blocks = Vec::new();
-        let block_id = BlockId(current_blockstamp.id.0 + 1);
-        let (hashs, previous_hashs) = DALBlock::get_blocks_hashs_all_forks(db, &block_id);
-        for (hash, previous_hash) in hashs.into_iter().zip(previous_hashs) {
-            if previous_hash == current_blockstamp.hash.0 {
-                if let Some(dal_block) =
-                    DALBlock::get_block(currency, db, &Blockstamp { id: block_id, hash })
-                {
-                    stackables_blocks.push(dal_block);
-                } else {
-                    panic!(format!(
-                        "Fail to get stackable block {} !",
-                        Blockstamp { id: block_id, hash }
-                    ));
+        let stackables_blocks_hashs = forks_db.read(|db| {
+            let mut stackables_blocks_hashs = Vec::new();
+            for fork_meta_datas in db.values() {
+                if let Some(block_hash) = fork_meta_datas.get(&current_blockstamp) {
+                    stackables_blocks_hashs.push(*block_hash);
                 }
             }
-        }
-        stackables_blocks
+            stackables_blocks_hashs
+        })?;
+        let stackables_blocks = forks_blocks_db.read(|db| {
+            let mut stackables_blocks = Vec::new();
+            for stackable_block_hash in stackables_blocks_hashs {
+                if let Some(dal_block) = db.get(&Blockstamp {
+                    id: BlockId(current_blockstamp.id.0 + 1),
+                    hash: stackable_block_hash,
+                }) {
+                    stackables_blocks.push(dal_block.clone());
+                }
+            }
+            stackables_blocks
+        })?;
+        Ok(stackables_blocks)
     }
-    pub fn get_stackables_forks(db: &DuniterDB, current_blockstamp: &Blockstamp) -> Vec<usize> {
-        let mut stackables_forks = Vec::new();
-        let block_id = BlockId(current_blockstamp.id.0 + 1);
-        let (hashs, previous_hashs) = DALBlock::get_blocks_hashs_all_forks(db, &block_id);
-        for (hash, previous_hash) in hashs.into_iter().zip(previous_hashs) {
-            if previous_hash == current_blockstamp.hash.0 {
-                if let Some(fork) = DALBlock::get_block_fork(db, &Blockstamp { id: block_id, hash })
-                {
-                    if fork > 0 {
-                        stackables_forks.push(fork);
+    pub fn get_stackables_forks(
+        db: &BinFileDB<ForksV10Datas>,
+        current_blockstamp: &Blockstamp,
+    ) -> Result<Vec<usize>, DALError> {
+        Ok(db.read(|db| {
+            let mut stackables_forks = Vec::new();
+            for f in 0..*MAX_FORKS {
+                if let Some(fork_meta_datas) = db.get(&ForkId(f)) {
+                    if fork_meta_datas.get(&current_blockstamp).is_some() {
+                        stackables_forks.push(f);
                     }
                 }
             }
-        }
-        stackables_forks
+            stackables_forks
+        })?)
     }
-    pub fn get_block(currency: &str, db: &DuniterDB, blockstamp: &Blockstamp) -> Option<DALBlock> {
-        let mut cursor = db
-            .0
-            .prepare(
-                "SELECT fork, isolate, nonce, number,
-            pow_min, time, median_time, members_count,
-            monetary_mass, unit_base, issuers_count, issuers_frame,
-            issuers_frame_var, median_frame, second_tiercile_frame,
-            currency, issuer, signature, hash, previous_hash, dividend, identities, joiners,
-            actives, leavers, revoked, excluded, certifications,
-            transactions FROM blocks WHERE number=? AND hash=?;",
-            )
-            .expect("Fail to get block !")
-            .cursor();
-
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(i64::from(blockstamp.id.0)),
-                sqlite::Value::String(blockstamp.hash.0.to_string()),
-            ])
-            .expect("Fail to get block !");
-
-        if let Some(row) = cursor.next().expect("block not found in bdd !") {
-            let dividend_amount = row[20]
-                .as_integer()
-                .expect("dal::get_block() : fail to parse dividend !");
-            let dividend = if dividend_amount > 0 {
-                Some(dividend_amount as usize)
-            } else if dividend_amount == 0 {
-                None
-            } else {
-                return None;
-            };
-            let nonce = row[2]
-                .as_integer()
-                .expect("dal::get_block() : fail to parse nonce !") as u64;
-            let inner_hash = Hash::from_hex(
-                row[18]
-                    .as_string()
-                    .expect("dal::get_block() : fail to parse inner_hash !"),
-            ).expect("dal::get_block() : fail to parse inner_hash (2) !");
-            let identities = parse_identities(
-                currency,
-                row[21]
-                    .as_string()
-                    .expect("dal::get_block() : fail to parse identities !"),
-            ).expect("dal::get_block() : fail to parse identities (2) !");
-            let hashmap_identities = identities
-                .iter()
-                .map(|i| (i.issuers()[0], i.clone()))
-                .collect::<HashMap<PubKey, IdentityDocument>>();
-            Some(DALBlock {
-                fork: row[0]
-                    .as_integer()
-                    .expect("dal::get_block() : fail to parse fork !")
-                    as usize,
-                isolate: !row[1]
-                    .as_integer()
-                    .expect("dal::get_block() : fail to parse isolate !")
-                    == 0,
-                block: BlockDocument {
-                    nonce,
-                    number: BlockId(
-                        row[3]
-                            .as_integer()
-                            .expect("dal::get_block() : fail to parse number !")
-                            as u32,
-                    ),
-                    pow_min: row[4]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse pow min !")
-                        as usize,
-                    time: row[5]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse time !")
-                        as u64,
-                    median_time: row[6]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse median_time !")
-                        as u64,
-                    members_count: row[7]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse members_count !")
-                        as usize,
-                    monetary_mass: row[8]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse monetary_mass !")
-                        as usize,
-                    unit_base: row[9]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse unit_base !")
-                        as usize,
-                    issuers_count: row[10]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse issuers_count !")
-                        as usize,
-                    issuers_frame: row[11]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse issuers_frame !")
-                        as isize,
-                    issuers_frame_var: row[12]
-                        .as_integer()
-                        .expect("dal::get_block() : fail to parse issuers_frame_var !")
-                        as isize,
-                    currency: row[15]
-                        .as_string()
-                        .expect("dal::get_block() : fail to parse currency !")
-                        .to_string(),
-                    issuers: vec![PubKey::Ed25519(
-                        ed25519::PublicKey::from_base58(
-                            row[16]
-                                .as_string()
-                                .expect("dal::get_block() : fail to parse issuer !"),
-                        ).expect("dal::get_block() : fail to parse pubkey !"),
-                    )],
-                    signatures: vec![Sig::Ed25519(
-                        ed25519::Signature::from_base64(
-                            row[17]
-                                .as_string()
-                                .expect("dal::get_block() : fail to parse signature !"),
-                        ).expect("dal::get_block() : fail to parse signature (2) !"),
-                    )],
-                    hash: Some(BlockHash(
-                        Hash::from_hex(
-                            row[18]
-                                .as_string()
-                                .expect("dal::get_block() : fail to parse hash !"),
-                        ).expect("dal::get_block() : fail to parse hash (2) !"),
-                    )),
-                    parameters: None,
-                    previous_hash: Hash::from_hex(
-                        row[19]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse previous_hash !"),
-                    ).expect(
-                        "dal::get_block() : fail to parse previous_hash (2) !",
-                    ),
-                    previous_issuer: None,
-                    inner_hash: Some(inner_hash),
-                    dividend,
-                    identities: identities.clone(),
-                    joiners: parse_memberships(
-                        currency,
-                        MembershipType::In(),
-                        row[22]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse joiners !"),
-                    ).expect("dal::get_block() : fail to parse joiners (2) !"),
-                    actives: parse_memberships(
-                        currency,
-                        MembershipType::In(),
-                        row[23]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse actives !"),
-                    ).expect("dal::get_block() : fail to parse actives (2) !"),
-                    leavers: parse_memberships(
-                        currency,
-                        MembershipType::Out(),
-                        row[24]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse leavers !"),
-                    ).expect("dal::get_block() : fail to parse leavers (2) !"),
-                    revoked: parse_revocations(
-                        currency,
-                        db,
-                        &hashmap_identities,
-                        row[25]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse revoked !"),
-                    ).expect("dal::get_block() : fail to parse revoked (2) !"),
-                    excluded: parse_exclusions(
-                        row[26]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse excluded !"),
-                    ).expect("dal::get_block() : fail to parse excluded (2) !"),
-                    certifications: parse_certifications(
-                        currency,
-                        db,
-                        &hashmap_identities,
-                        row[27]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse certifications !"),
-                    ).expect(
-                        "dal::get_block() : fail to parse certifications (2) !",
-                    ),
-                    transactions: parse_compact_transactions(
-                        currency,
-                        row[28]
-                            .as_string()
-                            .expect("dal::get_block() : fail to parse transactions !"),
-                    ).expect("dal::get_block() : fail to parse transactions (2) !"),
-                    inner_hash_and_nonce_str: format!(
-                        "InnerHash: {}\nNonce: {}\n",
-                        inner_hash.to_hex(),
-                        nonce
-                    ),
-                },
-                //median_frame: row[13].as_integer().unwrap_or(0) as usize,
-                //second_tiercile_frame: row[14].as_integer().unwrap_or(0) as usize,
-            })
+    pub fn get_block(
+        blockchain_db: &BinFileDB<LocalBlockchainV10Datas>,
+        forks_blocks_db: Option<&BinFileDB<ForksBlocksV10Datas>>,
+        blockstamp: &Blockstamp,
+    ) -> Result<Option<DALBlock>, DALError> {
+        let dal_block = blockchain_db.read(|db| db.get(&blockstamp.id).cloned())?;
+        if dal_block.is_none() && forks_blocks_db.is_some() {
+            Ok(forks_blocks_db
+                .expect("safe unwrap")
+                .read(|db| db.get(&blockstamp).cloned())?)
         } else {
-            None
+            Ok(dal_block)
         }
     }
 
-    pub fn get_current_frame(&self, db: &DuniterDB) -> HashMap<PubKey, usize> {
-        let frame_begin = i64::from(self.block.number.0) - (self.block.issuers_frame as i64);
-        let mut current_frame: HashMap<PubKey, usize> = HashMap::new();
-        let mut cursor = db
-            .0
-            .prepare("SELECT issuer FROM blocks WHERE fork=0 AND number>=? LIMIT ?;")
-            .expect("get current frame blocks failure at step 1 !")
-            .cursor();
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(frame_begin),
-                sqlite::Value::Integer(self.block.issuers_frame as i64),
-            ])
-            .expect("get current frame blocks failure at step 2 !");
+    pub fn get_block_in_local_blockchain(
+        db: &BinFileDB<LocalBlockchainV10Datas>,
+        block_id: BlockId,
+    ) -> Result<Option<BlockDocument>, DALError> {
+        Ok(db.read(|db| {
+            if let Some(dal_block) = db.get(&block_id) {
+                Some(dal_block.block.clone())
+            } else {
+                None
+            }
+        })?)
+    }
 
-        while let Some(row) = cursor
-            .next()
-            .expect("get current frame blocks failure at step 3 !")
-        {
-            let current_frame_copy = current_frame.clone();
-            match current_frame_copy.get(&PubKey::Ed25519(
-                ed25519::PublicKey::from_base58(row[0].as_string().unwrap()).unwrap(),
-            )) {
-                Some(blocks_count) => {
-                    if let Some(new_blocks_count) = current_frame.get_mut(&PubKey::Ed25519(
-                        ed25519::PublicKey::from_base58(row[0].as_string().unwrap()).unwrap(),
-                    )) {
-                        *new_blocks_count = *blocks_count + 1;
-                    }
-                }
-                None => {
-                    current_frame.insert(
-                        PubKey::Ed25519(
-                            ed25519::PublicKey::from_base58(row[0].as_string().unwrap()).unwrap(),
-                        ),
-                        0,
-                    );
-                }
+    pub fn get_current_frame(
+        &self,
+        db: &BinFileDB<LocalBlockchainV10Datas>,
+    ) -> Result<HashMap<PubKey, usize>, DALError> {
+        let frame_begin = self.block.number.0 - self.block.issuers_frame as u32;
+        Ok(db.read(|db| {
+            let mut current_frame: HashMap<PubKey, usize> = HashMap::new();
+            for block_number in frame_begin..self.block.number.0 {
+                let issuer = db
+                    .get(&BlockId(block_number))
+                    .expect(&format!("Failed to get block #{}!", block_number))
+                    .block
+                    .issuers()[0];
+                let issuer_count_blocks =
+                    if let Some(issuer_count_blocks) = current_frame.get(&issuer) {
+                        issuer_count_blocks + 1
+                    } else {
+                        1
+                    };
+                current_frame.insert(issuer, issuer_count_blocks);
             }
-        }
-        current_frame
+            current_frame
+        })?)
     }
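
The frame computation above just counts, per issuer, how many of the last `issuers_frame` blocks they authored; `compute_median_issuers_frame` below then sorts those counts and keeps the middle value. A toy counter with issuer pubkeys as strings (illustrative names only):

    use std::collections::HashMap;

    fn toy_issuers_frame(frame_block_issuers: &[&str]) -> HashMap<String, usize> {
        let mut counts = HashMap::new();
        for issuer in frame_block_issuers {
            *counts.entry(issuer.to_string()).or_insert(0) += 1;
        }
        counts
    }
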
 
-    pub fn compute_median_issuers_frame(&mut self, db: &DuniterDB) -> () {
-        let current_frame = self.get_current_frame(db);
+    pub fn compute_median_issuers_frame(&mut self, db: &BinFileDB<LocalBlockchainV10Datas>) -> () {
+        let current_frame = self
+            .get_current_frame(db)
+            .expect("Fatal error : fail to read LocalBlockchainV10DB !");
         if !current_frame.is_empty() {
             let mut current_frame_vec: Vec<_> = current_frame.values().cloned().collect();
             current_frame_vec.sort_unstable();
diff --git a/dal/certs.rs b/dal/certs.rs
new file mode 100644
index 0000000000000000000000000000000000000000..96269f2018d8d0aa3a50b3c2904972d0b8bc63a3
--- /dev/null
+++ b/dal/certs.rs
@@ -0,0 +1,42 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+extern crate duniter_documents;
+extern crate duniter_wotb;
+
+use duniter_documents::BlockId;
+use duniter_wotb::NodeId;
+use rustbreak::backend::Backend;
+use std::collections::HashMap;
+use std::fmt::Debug;
+use {BinDB, CertsExpirV10Datas, DALError};
+
+/// Find the certifications emitted in the given blocks that are now expiring
+pub fn find_expire_certs<B: Backend + Debug>(
+    certs_db: &BinDB<CertsExpirV10Datas, B>,
+    blocks_expiring: Vec<BlockId>,
+) -> Result<HashMap<(NodeId, NodeId), BlockId>, DALError> {
+    Ok(certs_db.read(|db| {
+        let mut all_expire_certs = HashMap::new();
+        for expire_block_id in blocks_expiring {
+            if let Some(expire_certs) = db.get(&expire_block_id) {
+                for (source, target) in expire_certs {
+                    all_expire_certs.insert((*source, *target), expire_block_id);
+                }
+            }
+        }
+        all_expire_certs
+    })?)
+}
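
The same lookup reduced to plain std types (block ids as `u32`, wot node ids as `usize`), to show the shape of the data: certifications are indexed by the block in which they were emitted, so expiry scanning is one `get` per expiring block. This is a reading aid, not the crate's API:

    use std::collections::HashMap;

    // certs_by_emission_block: emission block id -> (source, target) node pairs certified there.
    fn toy_find_expire_certs(
        certs_by_emission_block: &HashMap<u32, Vec<(usize, usize)>>,
        blocks_expiring: &[u32],
    ) -> HashMap<(usize, usize), u32> {
        let mut expired = HashMap::new();
        for block_id in blocks_expiring {
            if let Some(certs) = certs_by_emission_block.get(block_id) {
                for &(source, target) in certs {
                    expired.insert((source, target), *block_id);
                }
            }
        }
        expired
    }
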
diff --git a/dal/clippy.toml b/dal/clippy.toml
index 32d82dfbf950f5dc319caea82ce84904d3c18b3e..c7d9c1ce50bafcd14af4d0017da2da289edaa23a 100644
--- a/dal/clippy.toml
+++ b/dal/clippy.toml
@@ -1 +1,2 @@
-cyclomatic-complexity-threshold = 35
\ No newline at end of file
+cyclomatic-complexity-threshold = 35
+too-many-arguments-threshold = 10
\ No newline at end of file
diff --git a/dal/constants.rs b/dal/constants.rs
index a35986fcd9853f13f592c16643c3b7aae14f3b9b..5e210fef37fb7cff9d3bfc33f6b46738617d0194 100644
--- a/dal/constants.rs
+++ b/dal/constants.rs
@@ -1,48 +1,4 @@
-#[derive(Debug, Copy, Clone)]
-pub struct CurrencyParametersV10 {
-    pub c: f64,
-    pub dt: i64,
-    pub ud0: i64,
-    pub sig_period: u64,
-    pub sig_stock: i64,
-    pub sig_window: i64,
-    pub sig_validity: i64,
-    pub sig_qty: i64,
-    pub idty_window: i64,
-    pub ms_window: i64,
-    pub x_percent: f64,
-    pub ms_validity: u64,
-    pub step_max: u32,
-    pub median_time_blocks: i64,
-    pub avg_gen_time: i64,
-    pub dt_diff_eval: i64,
-    pub percent_rot: f64,
-    pub ud_time0: i64,
-    pub ud_reeval_time0: i64,
-    pub dt_reeval: i64,
-}
-
-pub static G1_PARAMS: &'static CurrencyParametersV10 = &CurrencyParametersV10 {
-    c: 0.0488,
-    dt: 86_400,
-    ud0: 1_000,
-    sig_period: 432_000,
-    sig_stock: 100,
-    sig_window: 5_259_600,
-    sig_validity: 63_115_200,
-    sig_qty: 5,
-    idty_window: 5_259_600,
-    ms_window: 5_259_600,
-    x_percent: 0.8,
-    ms_validity: 31_557_600,
-    step_max: 5,
-    median_time_blocks: 24,
-    avg_gen_time: 300,
-    dt_diff_eval: 12,
-    percent_rot: 0.67,
-    ud_time0: 1_488_970_800,
-    ud_reeval_time0: 1_490_094_000,
-    dt_reeval: 15_778_800,
-};
-pub static G1_CONNECTIVITY_MAX: &'static usize = &125;
-pub static MAX_FORKS: &'static usize = &50;
+pub static DEFAULT_SIG_RENEW_PERIOD: &'static u64 = &5_259_600;
+pub static DEFAULT_MS_PERIOD: &'static u64 = &5_259_600;
+pub static DEFAULT_TX_WINDOW: &'static u64 = &604_800;
+pub static MAX_FORKS: &'static usize = &100;
diff --git a/dal/currency_params.rs b/dal/currency_params.rs
new file mode 100644
index 0000000000000000000000000000000000000000..6bf742e3fcea053bf02ca97ab5c9843d91b7e6ef
--- /dev/null
+++ b/dal/currency_params.rs
@@ -0,0 +1,117 @@
+use constants::*;
+use duniter_documents::blockchain::v10::documents::block::{BlockV10Parameters, CurrencyName};
+use *;
+
+#[derive(Debug, Copy, Clone)]
+pub struct CurrencyParameters {
+    pub protocol_version: usize,
+    pub c: f64,
+    pub dt: u64,
+    pub ud0: usize,
+    pub sig_period: u64,
+    pub sig_renew_period: u64,
+    pub sig_stock: usize,
+    pub sig_window: u64,
+    pub sig_validity: u64,
+    pub sig_qty: usize,
+    pub idty_window: u64,
+    pub ms_window: u64,
+    pub tx_window: u64,
+    pub x_percent: f64,
+    pub ms_validity: u64,
+    pub ms_period: u64,
+    pub step_max: usize,
+    pub median_time_blocks: usize,
+    pub avg_gen_time: u64,
+    pub dt_diff_eval: usize,
+    pub percent_rot: f64,
+    pub ud_time0: u64,
+    pub ud_reeval_time0: u64,
+    pub dt_reeval: u64,
+}
+
+impl From<(CurrencyName, BlockV10Parameters)> for CurrencyParameters {
+    fn from(source: (CurrencyName, BlockV10Parameters)) -> CurrencyParameters {
+        let (currency_name, block_params) = source;
+        let sig_renew_period = match currency_name.0.as_str() {
+            "default_currency" => *DEFAULT_SIG_RENEW_PERIOD,
+            "g1" => 5_259_600,
+            "g1-test" => 5_259_600 / 5,
+            _ => *DEFAULT_SIG_RENEW_PERIOD,
+        };
+        let ms_period = match currency_name.0.as_str() {
+            "default_currency" => *DEFAULT_MS_PERIOD,
+            "g1" => 5_259_600,
+            "g1-test" => 5_259_600 / 5,
+            _ => *DEFAULT_MS_PERIOD,
+        };
+        let tx_window = match currency_name.0.as_str() {
+            "default_currency" => *DEFAULT_TX_WINDOW,
+            "g1" => 604_800,
+            "g1-test" => 604_800,
+            _ => *DEFAULT_TX_WINDOW,
+        };
+        CurrencyParameters {
+            protocol_version: 10,
+            c: block_params.c,
+            dt: block_params.dt,
+            ud0: block_params.ud0,
+            sig_period: block_params.sig_period,
+            sig_renew_period,
+            sig_stock: block_params.sig_stock,
+            sig_window: block_params.sig_window,
+            sig_validity: block_params.sig_validity,
+            sig_qty: block_params.sig_qty,
+            idty_window: block_params.idty_window,
+            ms_window: block_params.ms_window,
+            tx_window,
+            x_percent: block_params.x_percent,
+            ms_validity: block_params.ms_validity,
+            ms_period,
+            step_max: block_params.step_max,
+            median_time_blocks: block_params.median_time_blocks,
+            avg_gen_time: block_params.avg_gen_time,
+            dt_diff_eval: block_params.dt_diff_eval,
+            percent_rot: block_params.percent_rot,
+            ud_time0: block_params.ud_time0,
+            ud_reeval_time0: block_params.ud_reeval_time0,
+            dt_reeval: block_params.dt_reeval,
+        }
+    }
+}
+
+impl Default for CurrencyParameters {
+    fn default() -> CurrencyParameters {
+        CurrencyParameters::from((
+            CurrencyName(String::from("default_currency")),
+            BlockV10Parameters::default(),
+        ))
+    }
+}
+
+impl CurrencyParameters {
+    /// Get max value of connectivity (=1/x_percent)
+    pub fn max_connectivity(&self) -> f64 {
+        1.0 / self.x_percent
+    }
+}
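
For example, with the Ğ1 value x_percent = 0.8 (see the removed G1_PARAMS constant above), max_connectivity() = 1.0 / 0.8 = 1.25.
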
+
+/// Get currency parameters
+pub fn get_currency_params(
+    blockchain_db: &BinFileDB<LocalBlockchainV10Datas>,
+) -> Result<Option<CurrencyParameters>, DALError> {
+    Ok(blockchain_db.read(|db| {
+        if let Some(genesis_block) = db.get(&BlockId(0)) {
+            if genesis_block.block.parameters.is_some() {
+                Some(CurrencyParameters::from((
+                    genesis_block.block.currency.clone(),
+                    genesis_block.block.parameters.expect("safe unwrap"),
+                )))
+            } else {
+                panic!("The genesis block has no parameters!");
+            }
+        } else {
+            None
+        }
+    })?)
+}
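
A small usage sketch for the conversion above, assuming the items defined in this file plus `constants::*` are in scope (for instance inside a `#[cfg(test)]` module): the `default_currency` arms fall back to the `DEFAULT_*` constants.

    #[test]
    fn default_currency_params_fall_back_to_constants() {
        // CurrencyParameters::default() builds from ("default_currency", BlockV10Parameters::default()).
        let params = CurrencyParameters::default();
        assert_eq!(params.ms_period, *DEFAULT_MS_PERIOD);
        assert_eq!(params.sig_renew_period, *DEFAULT_SIG_RENEW_PERIOD);
        assert_eq!(params.tx_window, *DEFAULT_TX_WINDOW);
    }
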
diff --git a/dal/dal_event.rs b/dal/dal_event.rs
index 99e41058ea4b33366263c6b7cfef62717866f303..954be790e19b14af4c563391b3cccf93a2281a95 100644
--- a/dal/dal_event.rs
+++ b/dal/dal_event.rs
@@ -1,12 +1,10 @@
-extern crate duniter_documents;
-extern crate serde;
-
-use self::duniter_documents::blockchain::v10::documents::BlockDocument;
-use self::duniter_documents::blockchain::BlockchainProtocol;
+use duniter_documents::blockchain::v10::documents::BlockDocument;
+use duniter_documents::blockchain::BlockchainProtocol;
+use duniter_documents::Blockstamp;
 
 #[derive(Debug, Clone)]
 pub enum DALEvent {
-    StackUpValidBlock(Box<BlockDocument>),
+    StackUpValidBlock(Box<BlockDocument>, Blockstamp),
     RevertBlocks(Vec<Box<BlockDocument>>),
     NewValidPendingDoc(BlockchainProtocol),
     RefusedPendingDoc(BlockchainProtocol),
diff --git a/dal/dal_requests.rs b/dal/dal_requests.rs
index 608de5e6422e8d9a29faaa1385faf17ae005092d..851f458e382b5de09236b079ee4d9c3a33cd4bca 100644
--- a/dal/dal_requests.rs
+++ b/dal/dal_requests.rs
@@ -1,14 +1,12 @@
-extern crate duniter_crypto;
-extern crate duniter_documents;
 extern crate duniter_module;
 extern crate serde;
 
-use self::duniter_crypto::keys::*;
-use self::duniter_documents::blockchain::v10::documents::{
+use self::duniter_module::ModuleReqFullId;
+use duniter_crypto::keys::*;
+use duniter_documents::blockchain::v10::documents::{
     BlockDocument, CertificationDocument, IdentityDocument, MembershipDocument, RevocationDocument,
 };
-use self::duniter_documents::Hash;
-use self::duniter_module::ModuleReqFullId;
+use duniter_documents::{Blockstamp, Hash};
 use std::collections::HashMap;
 
 #[derive(Debug, Copy, Clone)]
@@ -50,7 +48,7 @@ pub enum DALResPendings {
 
 #[derive(Debug, Clone)]
 pub enum DALResBlockchain {
-    CurrentBlock(ModuleReqFullId, Box<BlockDocument>),
+    CurrentBlock(ModuleReqFullId, Box<BlockDocument>, Blockstamp),
     BlockByNumber(ModuleReqFullId, Box<BlockDocument>),
     Chunk(ModuleReqFullId, Vec<BlockDocument>),
     UIDs(HashMap<PubKey, Option<String>>),
diff --git a/dal/endpoint.rs b/dal/endpoint.rs
deleted file mode 100644
index 382b41dc507aaa8401f04b7c4c60d6965eeb7e5a..0000000000000000000000000000000000000000
--- a/dal/endpoint.rs
+++ /dev/null
@@ -1,165 +0,0 @@
-extern crate crypto;
-extern crate duniter_crypto;
-extern crate sqlite;
-
-use std::time::Duration;
-
-use self::crypto::digest::Digest;
-use self::crypto::sha2::Sha256;
-use self::duniter_crypto::keys::*;
-use super::DuniterDB;
-use super::WriteToDuniterDB;
-
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub enum DALEndpointApi {
-    WS2P,
-    //WS2PS,
-    //WS2PTOR,
-    //DASA,
-    //BMA,
-    //BMAS,
-}
-
-impl From<u32> for DALEndpointApi {
-    fn from(integer: u32) -> Self {
-        match integer {
-            _ => DALEndpointApi::WS2P,
-        }
-    }
-}
-
-pub fn string_to_api(api: &str) -> Option<DALEndpointApi> {
-    match api {
-        "WS2P" => Some(DALEndpointApi::WS2P),
-        //"WS2PS" => Some(DALEndpointApi::WS2PS),
-        //"WS2PTOR" => Some(DALEndpointApi::WS2PTOR),
-        //"DASA" => Some(DALEndpointApi::DASA),
-        //"BASIC_MERKLED_API" => Some(DALEndpointApi::BMA),
-        //"BMAS" => Some(DALEndpointApi::BMAS),
-        &_ => None,
-    }
-}
-
-pub fn api_to_integer(api: &DALEndpointApi) -> i64 {
-    match *api {
-        DALEndpointApi::WS2P => 0,
-        //DALEndpointApi::WS2PS => 1,
-        //DALEndpointApi::WS2PTOR => 2,
-        //DALEndpointApi::DASA => 3,
-        //DALEndpointApi::BMA => 4,
-        //DALEndpointApi::BMAS => 5,
-    }
-}
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-pub struct DALEndpoint {
-    pub hash_full_id: String,
-    pub status: u32,
-    pub node_id: u32,
-    pub pubkey: PubKey,
-    pub api: DALEndpointApi,
-    pub version: usize,
-    pub endpoint: String,
-    pub last_check: u64,
-}
-
-impl DALEndpoint {
-    pub fn new(
-        status: u32,
-        node_id: u32,
-        pubkey: PubKey,
-        api: DALEndpointApi,
-        version: usize,
-        endpoint: String,
-        last_check: Duration,
-    ) -> DALEndpoint {
-        let mut sha = Sha256::new();
-        sha.input_str(&format!(
-            "{}{}{}{}",
-            node_id,
-            pubkey,
-            api_to_integer(&api),
-            version
-        ));
-        DALEndpoint {
-            hash_full_id: sha.result_str(),
-            status,
-            node_id,
-            pubkey,
-            api,
-            version,
-            endpoint,
-            last_check: last_check.as_secs(),
-        }
-    }
-    pub fn get_endpoints_for_api(db: &DuniterDB, api: DALEndpointApi) -> Vec<DALEndpoint> {
-        let mut cursor:sqlite::Cursor = db.0
-        .prepare("SELECT hash_full_id, status, node_id, pubkey, api, version, endpoint, last_check FROM endpoints WHERE api=? ORDER BY status DESC;")
-        .expect("get_endpoints_for_api() : Error in SQL request !")
-        .cursor();
-
-        cursor
-            .bind(&[sqlite::Value::Integer(api_to_integer(&api))])
-            .expect("get_endpoints_for_api() : Error in cursor binding !");
-        let mut endpoints = Vec::new();
-        while let Some(row) = cursor
-            .next()
-            .expect("get_endpoints_for_api() : Error in cursor.next()")
-        {
-            endpoints.push(DALEndpoint {
-                hash_full_id: row[0].as_string().unwrap().to_string(),
-                status: row[1].as_integer().unwrap() as u32,
-                node_id: row[2].as_integer().unwrap() as u32,
-                pubkey: PubKey::Ed25519(ed25519::PublicKey::from_base58(row[3].as_string().unwrap()).unwrap()),
-                api: DALEndpointApi::from(row[4].as_integer().unwrap() as u32),
-                version: row[5].as_integer().unwrap() as usize,
-                endpoint: row[6].as_string().unwrap().to_string(),
-                last_check: row[7].as_integer().unwrap() as u64,
-            });
-        }
-        endpoints
-    }
-}
-
-impl WriteToDuniterDB for DALEndpoint {
-    fn write(
-        &self,
-        db: &DuniterDB,
-        _written_blockstamp: super::block_v10::BlockStampV10,
-        _written_timestamp: u64,
-    ) {
-        // Check if endpoint it's already written
-        let mut cursor: sqlite::Cursor = db.0
-            .prepare("SELECT status FROM endpoints WHERE hash_full_id=? ORDER BY status DESC;")
-            .expect("get_endpoints_for_api() : Error in SQL request !")
-            .cursor();
-        cursor
-            .bind(&[sqlite::Value::String(self.hash_full_id.clone())])
-            .expect("get_endpoints_for_api() : Error in cursor binding !");
-
-        // If endpoint it's already written, update status
-        if let Some(row) = cursor
-            .next()
-            .expect("get_endpoints_for_api() : Error in cursor.next()")
-        {
-            if row[0].as_integer().unwrap() as u32 != self.status {
-                db.0
-                    .execute(format!(
-                        "UPDATE endpoints SET status={} WHERE hash_full_id='{}'",
-                        self.status, self.hash_full_id
-                    ))
-                    .unwrap();
-            }
-        } else {
-            db.0
-            .execute(
-                format!(
-                    "INSERT INTO endpoints (hash_full_id, status, node_id, pubkey, api, version, endpoint, last_check) VALUES ('{}', {}, {}, '{}', {}, {}, '{}', {});",
-                    self.hash_full_id, self.status, self.node_id, self.pubkey.to_string(),
-                    api_to_integer(&self.api), self.version, self.endpoint, self.last_check
-                )
-            )
-            .unwrap();
-        }
-    }
-}
diff --git a/dal/identity.rs b/dal/identity.rs
index dd0dd3442dd5468b2428bee682058f7f8eaa7b62..38ee0955b4e0a2dc379c3749bcc7565684d69b51 100644
--- a/dal/identity.rs
+++ b/dal/identity.rs
@@ -1,272 +1,266 @@
-extern crate sqlite;
-
-use super::block::{blockstamp_to_timestamp, DALBlock};
-use super::DuniterDB;
+use currency_params::CurrencyParameters;
 use duniter_crypto::keys::*;
-use duniter_documents::blockchain::v10::documents::identity::IdentityDocumentBuilder;
 use duniter_documents::blockchain::v10::documents::IdentityDocument;
-use duniter_documents::blockchain::{Document, DocumentBuilder};
-use duniter_documents::Blockstamp;
+use duniter_documents::{BlockId, Blockstamp};
 use duniter_wotb::NodeId;
+use rustbreak::backend::Backend;
 use std::collections::HashMap;
+use std::fmt::Debug;
+use {BinDB, DALError, IdentitiesV10Datas, MsExpirV10Datas};
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+pub enum DALIdentityState {
+    Member(Vec<usize>),
+    ExpireMember(Vec<usize>),
+    ExplicitRevoked(Vec<usize>),
+    ExplicitExpireRevoked(Vec<usize>),
+    ImplicitRevoked(Vec<usize>),
+}
 
-#[derive(Debug, Clone)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct DALIdentity {
     pub hash: String,
-    pub state: isize,
+    pub state: DALIdentityState,
     pub joined_on: Blockstamp,
-    pub penultimate_renewed_on: Blockstamp,
-    pub last_renewed_on: Blockstamp,
-    pub expires_on: u64,
-    pub revokes_on: u64,
     pub expired_on: Option<Blockstamp>,
     pub revoked_on: Option<Blockstamp>,
     pub idty_doc: IdentityDocument,
+    pub wotb_id: NodeId,
+    pub ms_chainable_on: Vec<u64>,
+    pub cert_chainable_on: Vec<u64>,
+}
+
+pub fn get_uid<B: Backend + Debug>(
+    identities_db: &BinDB<IdentitiesV10Datas, B>,
+    pubkey: PubKey,
+) -> Result<Option<String>, DALError> {
+    Ok(identities_db.read(|db| {
+        if let Some(dal_idty) = db.get(&pubkey) {
+            Some(String::from(dal_idty.idty_doc.username()))
+        } else {
+            None
+        }
+    })?)
+}
+
+pub fn get_pubkey_from_uid<B: Backend + Debug>(
+    identities_db: &BinDB<IdentitiesV10Datas, B>,
+    uid: &str,
+) -> Result<Option<PubKey>, DALError> {
+    Ok(identities_db.read(|db| {
+        for (pubkey, dal_idty) in db {
+            if uid == dal_idty.idty_doc.username() {
+                return Some(*pubkey);
+            }
+        }
+        None
+    })?)
 }
 
 impl DALIdentity {
-    pub fn exclude_identity(
-        db: &DuniterDB,
-        wotb_id: NodeId,
-        renewal_blockstamp: Blockstamp,
+    pub fn exclude_identity<B: Backend + Debug>(
+        identities_db: &BinDB<IdentitiesV10Datas, B>,
+        pubkey: &PubKey,
+        exclusion_blockstamp: &Blockstamp,
         revert: bool,
-    ) {
-        let state = if revert { 0 } else { 1 };
-        let expired_on = if revert {
+    ) -> Result<(), DALError> {
+        let mut idty_datas = identities_db
+            .read(|db| db.get(pubkey).cloned())?
+            .expect("Fatal error: tried to exclude an unknown identity!");
+        idty_datas.state = if revert {
+            match idty_datas.state {
+                DALIdentityState::ExpireMember(renewed_counts) => {
+                    DALIdentityState::Member(renewed_counts)
+                }
+                _ => panic!("Tried to revert exclusion for a non-excluded identity!"),
+            }
+        } else {
+            match idty_datas.state {
+                DALIdentityState::Member(renewed_counts) => {
+                    DALIdentityState::ExpireMember(renewed_counts)
+                }
+                _ => panic!("Tried to exclude an already excluded/revoked identity!"),
+            }
+        };
+        idty_datas.expired_on = if revert {
             None
         } else {
-            Some(renewal_blockstamp)
+            Some(*exclusion_blockstamp)
         };
-        let mut cursor = db
-            .0
-            .prepare("UPDATE identities SET state=?, expired_on=?  WHERE wotb_id=?;")
-            .expect("Fail to exclude idty !")
-            .cursor();
-
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(i64::from(state)),
-                sqlite::Value::String(expired_on.unwrap_or_else(Blockstamp::default).to_string()),
-                sqlite::Value::Integer(wotb_id.0 as i64),
-            ])
-            .expect("Fail to exclude idty !");
+        // Write new identity datas
+        identities_db.write(|db| {
+            db.insert(*pubkey, idty_datas);
+        })?;
+        Ok(())
     }
 
-    pub fn get_wotb_index(db: &DuniterDB) -> HashMap<PubKey, NodeId> {
-        let mut wotb_index: HashMap<PubKey, NodeId> = HashMap::new();
-
-        let mut cursor = db
-            .0
-            .prepare("SELECT wotb_id, pubkey FROM identities ORDER BY wotb_id ASC;")
-            .unwrap()
-            .cursor();
-
-        while let Some(row) = cursor.next().unwrap() {
-            wotb_index.insert(
-                PubKey::Ed25519(
-                    ed25519::PublicKey::from_base58(row[1].as_string().unwrap()).unwrap(),
-                ),
-                NodeId(row[0].as_integer().unwrap() as usize),
-            );
-        }
-        wotb_index
+    pub fn get_wotb_index<B: Backend + Debug>(
+        identities_db: &BinDB<IdentitiesV10Datas, B>,
+    ) -> Result<HashMap<PubKey, NodeId>, DALError> {
+        Ok(identities_db.read(|db| {
+            let mut wotb_index: HashMap<PubKey, NodeId> = HashMap::new();
+            for (pubkey, member_datas) in db {
+                let wotb_id = member_datas.wotb_id;
+                wotb_index.insert(*pubkey, wotb_id);
+            }
+            wotb_index
+        })?)
     }
 
     pub fn create_identity(
-        db: &DuniterDB,
+        currency_params: &CurrencyParameters,
         idty_doc: &IdentityDocument,
+        wotb_id: NodeId,
         current_blockstamp: Blockstamp,
+        current_bc_time: u64,
     ) -> DALIdentity {
-        let created_on = idty_doc.blockstamp();
-        let created_time = blockstamp_to_timestamp(&created_on, &db)
-            .expect("convert blockstamp to timestamp failure !");
-
+        let mut idty_doc = idty_doc.clone();
+        idty_doc.reduce();
         DALIdentity {
             hash: "0".to_string(),
-            state: 0,
+            state: DALIdentityState::Member(vec![0]),
             joined_on: current_blockstamp,
-            penultimate_renewed_on: created_on,
-            last_renewed_on: created_on,
-            expires_on: created_time + super::constants::G1_PARAMS.ms_validity,
-            revokes_on: created_time + super::constants::G1_PARAMS.ms_validity,
             expired_on: None,
             revoked_on: None,
-            idty_doc: idty_doc.clone(),
+            idty_doc,
+            wotb_id,
+            ms_chainable_on: vec![current_bc_time + currency_params.ms_period],
+            cert_chainable_on: vec![],
         }
     }
 
-    pub fn revoke_identity(
-        db: &DuniterDB,
-        wotb_id: NodeId,
+    pub fn revoke_identity<B: Backend + Debug>(
+        identities_db: &BinDB<IdentitiesV10Datas, B>,
+        pubkey: &PubKey,
         renewal_blockstamp: &Blockstamp,
+        explicit: bool,
         revert: bool,
-    ) {
-        let state = if revert { 2 } else { 1 };
-        let revoked_on = if revert {
-            String::from("")
+    ) -> Result<(), DALError> {
+        let mut member_datas = identities_db
+            .read(|db| db.get(pubkey).cloned())?
+            .expect("Fatal error: tried to revoke an unknown identity!");
+
+        member_datas.state = if revert {
+            match member_datas.state {
+                DALIdentityState::ExplicitRevoked(renewed_counts) => {
+                    DALIdentityState::Member(renewed_counts)
+                }
+                DALIdentityState::ExplicitExpireRevoked(renewed_counts)
+                | DALIdentityState::ImplicitRevoked(renewed_counts) => {
+                    DALIdentityState::ExpireMember(renewed_counts)
+                }
+                _ => panic!("Tried to revert revoke_identity() for a non-revoked identity!"),
+            }
         } else {
-            renewal_blockstamp.to_string()
+            match member_datas.state {
+                DALIdentityState::ExpireMember(renewed_counts) => {
+                    DALIdentityState::ExplicitExpireRevoked(renewed_counts)
+                }
+                DALIdentityState::Member(renewed_counts) => if explicit {
+                    DALIdentityState::ExplicitRevoked(renewed_counts)
+                } else {
+                    DALIdentityState::ImplicitRevoked(renewed_counts)
+                },
+                _ => panic!("Tried to revoke an already revoked identity!"),
+            }
+        };
+        member_datas.revoked_on = if revert {
+            None
+        } else {
+            Some(*renewal_blockstamp)
         };
-        let mut cursor = db
-            .0
-            .prepare("UPDATE identities SET state=?, revoked_on=?  WHERE wotb_id=?;")
-            .expect("Fail to exclude idty !")
-            .cursor();
 
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(state),
-                sqlite::Value::String(revoked_on),
-                sqlite::Value::Integer(wotb_id.0 as i64),
-            ])
-            .expect("Fail to exclude idty !");
+        identities_db.write(|db| {
+            db.insert(*pubkey, member_datas);
+        })?;
+        Ok(())
     }
 
-    pub fn renewal_identity(
+    pub fn renewal_identity<B: Backend + Debug>(
         &mut self,
-        db: &DuniterDB,
+        currency_params: &CurrencyParameters,
+        identities_db: &BinDB<IdentitiesV10Datas, B>,
+        ms_db: &BinDB<MsExpirV10Datas, B>,
         pubkey: &PubKey,
-        renewal_blockstamp: &Blockstamp,
-        renawal_timestamp: u64,
+        idty_wot_id: NodeId,
+        renewal_timestamp: u64,
+        ms_created_block_id: BlockId,
         revert: bool,
-    ) {
-        let mut penultimate_renewed_block: Option<DALBlock> = None;
-        let revert_excluding = if revert {
-            penultimate_renewed_block = Some(
-                DALBlock::get_block(self.idty_doc.currency(), db, &self.penultimate_renewed_on)
-                    .expect("renewal_identity: Fail to get penultimate_renewed_block"),
-            );
-            penultimate_renewed_block.clone().unwrap().block.median_time
-                + super::constants::G1_PARAMS.ms_validity < renawal_timestamp
+    ) -> Result<(), DALError> {
+        // Get idty_datas
+        let mut idty_datas = identities_db
+            .read(|db| db.get(pubkey).cloned())?
+            .expect("Fatal error: tried to renew an unknown identity!");
+        // Calculate new state value
+        idty_datas.state = if revert {
+            match idty_datas.state {
+                DALIdentityState::Member(renewed_counts) => {
+                    let mut new_renewed_counts = renewed_counts.clone();
+                    new_renewed_counts[renewed_counts.len() - 1] -= 1;
+                    if new_renewed_counts[renewed_counts.len() - 1] > 0 {
+                        DALIdentityState::Member(new_renewed_counts)
+                    } else {
+                        DALIdentityState::ExpireMember(new_renewed_counts)
+                    }
+                }
+                _ => panic!("Tried to revert renewal_identity() for an excluded or revoked identity!"),
+            }
         } else {
-            false
+            match idty_datas.state {
+                DALIdentityState::Member(renewed_counts) => {
+                    let mut new_renewed_counts = renewed_counts.clone();
+                    new_renewed_counts[renewed_counts.len() - 1] += 1;
+                    DALIdentityState::Member(new_renewed_counts)
+                }
+                DALIdentityState::ExpireMember(renewed_counts) => {
+                    let mut new_renewed_counts = renewed_counts.clone();
+                    new_renewed_counts.push(0);
+                    DALIdentityState::Member(new_renewed_counts)
+                }
+                _ => panic!("Tried to renew a revoked identity!"),
+            }
         };
-        self.state = if revert && revert_excluding { 1 } else { 0 };
-        self.expires_on = if revert {
-            penultimate_renewed_block.unwrap().block.median_time
-                + super::constants::G1_PARAMS.ms_validity
+        // Calculate new ms_chainable_on value
+        if revert {
+            idty_datas.ms_chainable_on.pop();
         } else {
-            renawal_timestamp + super::constants::G1_PARAMS.ms_validity
-        };
-        let mut cursor = db.0
-            .prepare(
-                "UPDATE identities SET state=?, last_renewed_on=?, expires_on=?, revokes_on=?  WHERE pubkey=?;",
-            )
-            .expect("Fail to renewal idty !")
-            .cursor();
-
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(self.state as i64),
-                sqlite::Value::String(renewal_blockstamp.to_string()),
-                sqlite::Value::Integer(self.expires_on as i64),
-                sqlite::Value::Integer(
-                    (renawal_timestamp + (super::constants::G1_PARAMS.ms_validity * 2)) as i64,
-                ),
-                sqlite::Value::String(pubkey.to_string()),
-            ])
-            .expect("Fail to renewal idty !");
+            idty_datas
+                .ms_chainable_on
+                .push(renewal_timestamp + currency_params.ms_period);
+        }
+        // Write new identity datas
+        identities_db.write(|db| {
+            db.insert(*pubkey, idty_datas);
+        })?;
+        // Update MsExpirV10DB
+        ms_db.write(|db| {
+            let mut memberships = db.get(&ms_created_block_id).cloned().unwrap_or_default();
+            memberships.insert(idty_wot_id);
+            db.insert(ms_created_block_id, memberships);
+        })?;
+        Ok(())
     }
 
-    pub fn remove_identity(db: &DuniterDB, wotb_id: NodeId) -> () {
-        db.0
-            .execute(format!(
-                "DELETE FROM identities WHERE wotb_id={}",
-                wotb_id.0
-            ))
-            .unwrap();
+    pub fn remove_identity<B: Backend + Debug>(
+        db: &BinDB<IdentitiesV10Datas, B>,
+        pubkey: PubKey,
+    ) -> Result<(), DALError> {
+        db.write(|db| {
+            db.remove(&pubkey);
+        })?;
+        Ok(())
     }
 
-    pub fn get_identity(currency: &str, db: &DuniterDB, pubkey: &PubKey) -> Option<DALIdentity> {
-        let mut cursor = db
-            .0
-            .prepare(
-                "SELECT uid, hash, sig,
-                state, created_on, joined_on, penultimate_renewed_on, last_renewed_on,
-                expires_on, revokes_on, expired_on, revoked_on FROM identities WHERE pubkey=?;",
-            )
-            .expect("Fail to get idty !")
-            .cursor();
-
-        cursor
-            .bind(&[sqlite::Value::String(pubkey.to_string())])
-            .expect("Fail to get idty !");
-
-        if let Some(row) = cursor.next().expect("get_identity: cursor error") {
-            let idty_doc_builder = IdentityDocumentBuilder {
-                currency,
-                username: row[0]
-                    .as_string()
-                    .expect("get_identity: fail to parse username"),
-                blockstamp: &Blockstamp::from_string(
-                    row[4]
-                        .as_string()
-                        .expect("DB Error : idty created_on invalid !"),
-                ).expect("DB Error : idty created_on invalid (2) !"),
-                issuer: &pubkey,
-            };
-            let idty_sig = Sig::Ed25519(
-                ed25519::Signature::from_base64(
-                    row[2].as_string().expect("get_identity: fail to parse sig"),
-                ).expect("get_identity: fail to parse sig (2)"),
-            );
-            let idty_doc = idty_doc_builder.build_with_signature(vec![idty_sig]);
-
-            let expired_on = match Blockstamp::from_string(
-                row[10]
-                    .as_string()
-                    .expect("get_identity: fail to parse expire on"),
-            ) {
-                Ok(blockstamp) => Some(blockstamp),
-                Err(_) => None,
-            };
-            let revoked_on = match Blockstamp::from_string(
-                row[11]
-                    .as_string()
-                    .expect("get_identity: fail to parse revoked on"),
-            ) {
-                Ok(blockstamp) => Some(blockstamp),
-                Err(_) => None,
-            };
-            Some(DALIdentity {
-                hash: row[2]
-                    .as_string()
-                    .expect("get_identity: fail to parse hash")
-                    .to_string(),
-                state: row[3]
-                    .as_integer()
-                    .expect("get_identity: fail to parse state") as isize,
-                joined_on: Blockstamp::from_string(
-                    row[5]
-                        .as_string()
-                        .expect("DB Error : idty joined_on invalid !"),
-                ).expect("DB Error : idty joined_on invalid !"),
-                penultimate_renewed_on: Blockstamp::from_string(
-                    row[6]
-                        .as_string()
-                        .expect("DB Error : idty penultimate_renewed_on invalid !"),
-                ).expect(
-                    "DB Error : idty penultimate_renewed_on invalid (2) !",
-                ),
-                last_renewed_on: Blockstamp::from_string(
-                    row[7]
-                        .as_string()
-                        .expect("get_identity: fail to parse last_renewed_on"),
-                ).expect("get_identity: fail to parse last_renewed_on (2)"),
-                expires_on: row[8]
-                    .as_integer()
-                    .expect("get_identity: fail to parse expires_on")
-                    as u64,
-                revokes_on: row[9]
-                    .as_integer()
-                    .expect("get_identity: fail to parse revokes_on")
-                    as u64,
-                expired_on,
-                revoked_on,
-                idty_doc,
-            })
-        } else {
-            None
-        }
+    pub fn get_identity<B: Backend + Debug>(
+        db: &BinDB<IdentitiesV10Datas, B>,
+        pubkey: &PubKey,
+    ) -> Result<Option<DALIdentity>, DALError> {
+        Ok(db.read(|db| {
+            if let Some(member_datas) = db.get(&pubkey) {
+                Some(member_datas.clone())
+            } else {
+                None
+            }
+        })?)
     }
 }
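A minimal usage sketch of the new Rustbreak-backed identity accessors (illustrative only, not part of the patch). It assumes the dal crate is linked as `duniter_dal` and uses an in-memory database; `some_pubkey` is a placeholder value.

```rust
use duniter_crypto::keys::PubKey;
use duniter_dal::identity::DALIdentity;
use duniter_dal::{open_memory_db, DALError, IdentitiesV10Datas};

fn lookup_then_prune(some_pubkey: PubKey) -> Result<(), DALError> {
    // Open an in-memory IdentitiesV10 database (a file-backed BinFileDB works the same way).
    let identities_db = open_memory_db::<IdentitiesV10Datas>()?;

    // Read path: Ok(None) when the pubkey is unknown, no panic.
    match DALIdentity::get_identity(&identities_db, &some_pubkey)? {
        Some(idty) => println!("known identity: {:?}", idty),
        None => println!("unknown identity"),
    }

    // Write path: removing an absent key is simply a no-op.
    DALIdentity::remove_identity(&identities_db, some_pubkey)?;
    Ok(())
}
```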
diff --git a/dal/lib.rs b/dal/lib.rs
index bdd108611c0e95b08f2ccc9eb935d05c0fad7d5a..3327c62df8bdb567fe46dc85336fcc5c346c7801 100644
--- a/dal/lib.rs
+++ b/dal/lib.rs
@@ -13,8 +13,7 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-//! Defined the few global types used by all modules,
-//! as well as the DuniterModule trait that all modules must implement.
+//! Data Access Layer
 
 #![cfg_attr(feature = "strict", deny(warnings))]
 #![cfg_attr(feature = "cargo-clippy", allow(implicit_hasher))]
@@ -28,61 +27,256 @@
 extern crate log;
 #[macro_use]
 extern crate serde_json;
+#[macro_use]
+extern crate serde_derive;
 
 extern crate duniter_crypto;
 extern crate duniter_documents;
 extern crate duniter_wotb;
+extern crate rustbreak;
 extern crate serde;
-extern crate sqlite;
 
+pub mod balance;
 pub mod block;
+pub mod certs;
 pub mod constants;
+pub mod currency_params;
 pub mod dal_event;
 pub mod dal_requests;
 pub mod identity;
 pub mod parsers;
+pub mod sources;
 pub mod tools;
 pub mod writers;
 
 use duniter_crypto::keys::*;
-use duniter_documents::blockchain::v10::documents::BlockDocument;
-use duniter_documents::{BlockHash, BlockId, Blockstamp, Hash};
+use duniter_documents::blockchain::v10::documents::transaction::*;
+use duniter_documents::{BlockHash, BlockId, Blockstamp, Hash, PreviousBlockstamp};
 use duniter_wotb::operations::file::FileFormater;
 use duniter_wotb::{NodeId, WebOfTrust};
+use rustbreak::backend::{Backend, FileBackend, MemoryBackend};
+use rustbreak::error::{RustbreakError, RustbreakErrorKind};
+use rustbreak::{deser::Bincode, Database, FileDatabase, MemoryDatabase};
+use serde::de::DeserializeOwned;
+use serde::Serialize;
+use std::collections::{HashMap, HashSet};
+use std::default::Default;
 use std::fmt::Debug;
-use std::marker;
+use std::fs;
 use std::path::PathBuf;
 
-use self::block::DALBlock;
+use block::DALBlock;
+use identity::DALIdentity;
+use sources::{SourceAmount, UTXOContentV10, UTXOIndexV10};
+use writers::transaction::DALTxV10;
+
+#[derive(Debug, Deserialize, Copy, Clone, Ord, PartialEq, PartialOrd, Eq, Hash, Serialize)]
+/// Each fork has a unique identifier. The local blockchain (also called local branch) has ForkId equal to zero.
+pub struct ForkId(pub usize);
+
+pub type LocalBlockchainV10Datas = HashMap<BlockId, DALBlock>;
+pub type ForksV10Datas = HashMap<ForkId, HashMap<PreviousBlockstamp, BlockHash>>;
+pub type ForksBlocksV10Datas = HashMap<Blockstamp, DALBlock>;
+pub type IdentitiesV10Datas = HashMap<PubKey, DALIdentity>;
+pub type MsExpirV10Datas = HashMap<BlockId, HashSet<NodeId>>;
+pub type CertsExpirV10Datas = HashMap<BlockId, HashSet<(NodeId, NodeId)>>;
+pub type TxV10Datas = HashMap<Hash, DALTxV10>;
+pub type UTXOsV10Datas = HashMap<UTXOIndexV10, UTXOContentV10>;
+pub type DUsV10Datas = HashMap<PubKey, HashSet<BlockId>>;
+pub type BalancesV10Datas =
+    HashMap<TransactionOutputConditionGroup, (SourceAmount, HashSet<UTXOIndexV10>)>;
+
+pub type BinDB<D, B> = Database<D, B, Bincode>;
+pub type BinFileDB<D> = FileDatabase<D, Bincode>;
+pub type BinMemDB<D> = MemoryDatabase<D, Bincode>;
+
+#[derive(Debug)]
+/// Set of databases storing block information
+pub struct BlocksV10DBs {
+    /// Local blockchain database
+    pub blockchain_db: BinFileDB<LocalBlockchainV10Datas>,
+    /// Forks meta data
+    pub forks_db: BinFileDB<ForksV10Datas>,
+    /// Forks blocks
+    pub forks_blocks_db: BinFileDB<ForksBlocksV10Datas>,
+}
+
+impl BlocksV10DBs {
+    /// Open blocks databases from their respective files
+    pub fn open(db_path: &PathBuf, _memory_mode: bool) -> BlocksV10DBs {
+        BlocksV10DBs {
+            blockchain_db: open_db::<LocalBlockchainV10Datas>(&db_path, "blockchain.db")
+                .expect("Fail to open LocalBlockchainV10DB"),
+            forks_db: open_db::<ForksV10Datas>(&db_path, "forks.db")
+                .expect("Fail to open ForksV10DB"),
+            forks_blocks_db: open_db::<ForksBlocksV10Datas>(&db_path, "forks_blocks.db")
+                .expect("Fail to open ForksBlocksV10DB"),
+        }
+    }
+    /// Save blocks databases in their respective files
+    pub fn save_dbs(&self) {
+        self.blockchain_db
+            .save()
+            .expect("Fatal error : fail to save LocalBlockchainV10DB !");
+        self.forks_db
+            .save()
+            .expect("Fatal error : fail to save ForksV10DB !");
+        self.forks_blocks_db
+            .save()
+            .expect("Fatal error : fail to save ForksBlocksV10DB !");
+    }
+}
+
+#[derive(Debug)]
+/// Set of databases storing web of trust information
+pub struct WotsV10DBs {
+    /// Store identities
+    pub identities_db: BinFileDB<IdentitiesV10Datas>,
+    /// Store memberships created_block_id (used only to detect expirations)
+    pub ms_db: BinFileDB<MsExpirV10Datas>,
+    /// Store certifications created_block_id (used only to detect expirations)
+    pub certs_db: BinFileDB<CertsExpirV10Datas>,
+}
+
+impl WotsV10DBs {
+    /// Open wot databases from their respective files
+    pub fn open(db_path: &PathBuf, _memory_mode: bool) -> WotsV10DBs {
+        WotsV10DBs {
+            identities_db: open_db::<IdentitiesV10Datas>(&db_path, "identities.db")
+                .expect("Fail to open IdentitiesV10DB"),
+            ms_db: open_db::<MsExpirV10Datas>(&db_path, "ms.db")
+                .expect("Fail to open MsExpirV10DB"),
+            certs_db: open_db::<CertsExpirV10Datas>(&db_path, "certs.db")
+                .expect("Fail to open CertsExpirV10DB"),
+        }
+    }
+    /// Save wot databases in their respective files
+    pub fn save_dbs(&self) {
+        self.identities_db
+            .save()
+            .expect("Fatal error : fail to save IdentitiesV10DB !");
+        self.ms_db
+            .save()
+            .expect("Fatal error : fail to save MsExpirV10DB !");
+        self.certs_db
+            .save()
+            .expect("Fatal error : fail to save CertsExpirV10DB !");
+    }
+}
 
-pub struct DuniterDB(pub sqlite::Connection);
+#[derive(Debug)]
+/// Set of databases storing currency information
+pub struct CurrencyV10DBs<B: Backend + Debug> {
+    /// Store all DU sources
+    pub du_db: BinDB<DUsV10Datas, B>,
+    /// Store all Transactions
+    pub tx_db: BinDB<TxV10Datas, B>,
+    /// Store all UTXOs
+    pub utxos_db: BinDB<UTXOsV10Datas, B>,
+    /// Store balances of all addresses (and their UTXO indexes)
+    pub balances_db: BinDB<BalancesV10Datas, B>,
+}
 
-impl Debug for DuniterDB {
-    fn fmt(&self, f: &mut ::std::fmt::Formatter) -> ::std::fmt::Result {
-        write!(f, "DuniterDB {{ }}")
+impl CurrencyV10DBs<MemoryBackend> {
+    pub fn open_memory_mode() -> CurrencyV10DBs<MemoryBackend> {
+        CurrencyV10DBs {
+            du_db: open_memory_db::<DUsV10Datas>().expect("Fail to open DUsV10DB"),
+            tx_db: open_memory_db::<TxV10Datas>().expect("Fail to open TxV10DB"),
+            utxos_db: open_memory_db::<UTXOsV10Datas>().expect("Fail to open UTXOsV10DB"),
+            balances_db: open_memory_db::<BalancesV10Datas>().expect("Fail to open BalancesV10DB"),
+        }
+    }
+}
+
+impl CurrencyV10DBs<FileBackend> {
+    /// Open currency databases from their respective files
+    pub fn open(db_path: &PathBuf) -> CurrencyV10DBs<FileBackend> {
+        CurrencyV10DBs {
+            du_db: open_db::<DUsV10Datas>(&db_path, "du.db").expect("Fail to open DUsV10DB"),
+            tx_db: open_db::<TxV10Datas>(&db_path, "tx.db").expect("Fail to open TxV10DB"),
+            utxos_db: open_db::<UTXOsV10Datas>(&db_path, "sources.db")
+                .expect("Fail to open UTXOsV10DB"),
+            balances_db: open_db::<BalancesV10Datas>(&db_path, "balances.db")
+                .expect("Fail to open BalancesV10DB"),
+        }
+    }
+    /// Save currency databases in their respective files
+    pub fn save_dbs(&self, tx: bool, du: bool) {
+        if tx {
+            self.tx_db
+                .save()
+                .expect("Fatal error : fail to save LocalBlockchainV10DB !");
+            self.utxos_db
+                .save()
+                .expect("Fatal error : fail to save UTXOsV10DB !");
+            self.balances_db
+                .save()
+                .expect("Fatal error : fail to save BalancesV10DB !");
+        }
+        if du {
+            self.du_db
+                .save()
+                .expect("Fatal error : fail to save DUsV10DB !");
+        }
     }
 }
 
-pub trait FromJsonValue
-where
-    Self: marker::Sized,
-{
-    fn from_json_value(value: &serde_json::Value) -> Option<Self>;
+#[derive(Debug, Deserialize, Copy, Clone, PartialEq, Eq, Hash, Serialize)]
+/// Data Access Layer Error
+pub enum DALError {
+    /// Error in write operation
+    WriteError,
+    /// Error in read operation
+    ReadError,
+    /// A database is corrupted, you have to reset the data completely
+    DBCorrupted,
+    /// Error with the file system
+    FileSystemError,
+    /// A panic signal was captured during a write operation
+    WritePanic,
+    /// Unknown error
+    UnknowError,
 }
 
-pub trait WriteToDuniterDB {
-    fn write(&self, db: &DuniterDB, written_blockstamp: Blockstamp, written_timestamp: u64);
+impl From<RustbreakError> for DALError {
+    fn from(rust_break_error: RustbreakError) -> DALError {
+        match rust_break_error.kind() {
+            RustbreakErrorKind::Serialization => DALError::WriteError,
+            RustbreakErrorKind::Deserialization => DALError::ReadError,
+            RustbreakErrorKind::Poison => DALError::DBCorrupted,
+            RustbreakErrorKind::Backend => DALError::FileSystemError,
+            RustbreakErrorKind::WritePanic => DALError::WritePanic,
+            _ => DALError::UnknowError,
+        }
+    }
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-pub enum ForkState {
+/// ForkAlreadyCheck
+pub struct ForkAlreadyCheck(pub bool);
+
+#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+/// Stores a state associated with a ForkId
+pub enum ForkStatus {
+    /// This ForkId is empty (available to welcome a new fork)
     Free(),
-    Full(),
+    /// This ForkId points to a stackable fork with no rollback
+    Stackable(ForkAlreadyCheck),
+    /// This ForkId points to a fork that requires a rollback.
+    /// `BlockId` points to the last block in common
+    RollBack(ForkAlreadyCheck, BlockId),
+    /// This ForkId points to a fork that requires a rollback,
+    /// but the last block in common is too old (beyond the maximum FORK_WINDOW_SIZE)
+    TooOld(ForkAlreadyCheck),
+    /// This ForkId points to an isolated fork.
+    /// An isolated fork is a fork that has no block in common with the local blockchain.
     Isolate(),
+    /// This ForkId points to an invalid fork
+    Invalid(),
 }
 
-#[derive(Debug, Clone)]
-pub struct WotState {
+/*#[derive(Debug, Clone)]
+pub struct WotStats {
     pub block_number: u32,
     pub block_hash: String,
     pub sentries_count: usize,
@@ -93,200 +287,70 @@ pub struct WotState {
     pub connectivities: Vec<usize>,
     pub average_centrality: usize,
     pub centralities: Vec<u64>,
-}
+}*/
 
 fn _use_json_macro() -> serde_json::Value {
     json!({})
 }
 
-pub fn open_db(db_path: &PathBuf, memory_mode: bool) -> Result<DuniterDB, sqlite::Error> {
-    let conn: sqlite::Connection;
-    if memory_mode || !db_path.as_path().exists() {
-        if memory_mode {
-            conn = sqlite::open(":memory:")?;
-        } else {
-            conn = sqlite::open(db_path.as_path())?;
-        }
-        //conn.execute("PRAGMA synchronous = 0;")
-        //.expect("Fail to configure SQLite DB (PRAGMA) !");
-        conn.execute(
-            "
-        CREATE TABLE wot_history (block_number INTEGER, block_hash TEXT, sentries_count INTEGER,
-            average_density INTEGER, average_distance INTEGER,
-            distances TEXT, average_connectivity INTEGER, connectivities TEXT,
-            average_centrality INTEGER, centralities TEXT);
-        CREATE TABLE blocks (fork INTEGER, isolate INTEGER, version INTEGER, nonce INTEGER, number INTEGER,
-            pow_min INTEGER, time INTEGER, median_time INTEGER, members_count INTEGER,
-            monetary_mass INTEGER, unit_base INTEGER, issuers_count INTEGER, issuers_frame INTEGER,
-            issuers_frame_var INTEGER, median_frame INTEGER, second_tiercile_frame INTEGER,
-            currency TEXT, issuer TEXT, signature TEXT, hash TEXT, previous_hash TEXT, inner_hash TEXT, dividend INTEGER, identities TEXT, joiners TEXT,
-            actives TEXT, leavers TEXT, revoked TEXT, excluded TEXT, certifications TEXT,
-            transactions TEXT);
-        CREATE TABLE identities (wotb_id INTEGER, uid TEXT, pubkey TEXT, hash TEXT, sig TEXT,
-            state INTEGER, created_on TEXT, joined_on TEXT, penultimate_renewed_on TEXT, last_renewed_on TEXT,
-            expires_on INTEGER, revokes_on INTEGER, expired_on TEXT, revoked_on TEXT);
-        CREATE TABLE certifications (pubkey_from TEXT, pubkey_to TEXT, created_on TEXT,
-            signature TEXT, written_on TEXT, expires_on INTEGER, chainable_on INTEGER);
-        ",
-        )?;
-    } else {
-        conn = sqlite::open(db_path.as_path())?;
-    }
-    Ok(DuniterDB(conn))
-}
-
-pub fn close_db(db: &DuniterDB) {
-    db.0
-        .execute("PRAGMA optimize;")
-        .expect("Fail to optimize SQLite DB !");
-}
-
-pub fn get_uid(db: &DuniterDB, wotb_id: NodeId) -> Option<String> {
-    let mut cursor: sqlite::Cursor = db
-        .0
-        .prepare("SELECT uid FROM identities WHERE wotb_id=? AND state=0 LIMIT 1;")
-        .expect("Request SQL get_current_block is wrong !")
-        .cursor();
-    cursor
-        .bind(&[sqlite::Value::Integer(wotb_id.0 as i64)])
-        .expect("0");
-    if let Some(row) = cursor.next().expect("fait to get_uid() : cursor error") {
-        Some(String::from(
-            row[0]
-                .as_string()
-                .expect("get_uid: Fail to parse uid field in str !"),
-        ))
-    } else {
-        None
-    }
+/// Open Rustbreak memory database
+pub fn open_memory_db<D: Serialize + DeserializeOwned + Debug + Default + Clone + Send>(
+) -> Result<BinMemDB<D>, DALError> {
+    let backend = MemoryBackend::new();
+    let db = MemoryDatabase::<D, Bincode>::from_parts(D::default(), backend, Bincode);
+    Ok(db)
 }
 
-pub fn new_get_current_block(db: &DuniterDB) -> Option<BlockDocument> {
-    let mut cursor: sqlite::Cursor = db.0
-        .prepare(
-            "SELECT version, nonce, number, pow_min, time, median_time, members_count, monetary_mass, unit_base, issuers_count, issuers_frame, issuers_frame_var, median_frame, second_tiercile_frame, currency, issuer, signature, hash, dividend, joiners, actives, leavers, revoked, excluded, certifications, transactions FROM blocks
-            WHERE fork=0 ORDER BY median_time DESC LIMIT ?;",
-        )
-        .expect("Request SQL get_current_block is wrong !")
-        .cursor();
-
-    cursor.bind(&[sqlite::Value::Integer(1)]).expect("0");
-    if let Some(row) = cursor.next().expect("1") {
-        let dividend = row[18].as_integer().expect("dividend");
-        let dividend = if dividend > 0 {
-            Some(dividend as usize)
-        } else {
-            None
-        };
-        return Some(BlockDocument {
-            nonce: row[1].as_integer().expect("nonce") as u64,
-            number: BlockId(row[2].as_integer().expect("2") as u32),
-            pow_min: row[3].as_integer().expect("version") as usize,
-            time: row[4].as_integer().expect("time") as u64,
-            median_time: row[5].as_integer().expect("median_time") as u64,
-            members_count: row[6].as_integer().expect("7") as usize,
-            monetary_mass: row[7].as_integer().expect("8") as usize,
-            unit_base: row[8].as_integer().expect("unit_base") as usize,
-            issuers_count: row[9].as_integer().expect("issuers_count") as usize,
-            issuers_frame: row[10].as_integer().expect("issuers_frame") as isize,
-            issuers_frame_var: row[11].as_integer().expect("issuers_frame_var") as isize,
-            currency: row[14].as_string().expect("currency").to_string(),
-            issuers: vec![PubKey::Ed25519(
-                ed25519::PublicKey::from_base58(row[15].as_string().expect("issuer")).unwrap(),
-            )],
-            signatures: vec![Sig::Ed25519(
-                ed25519::Signature::from_base64(row[16].as_string().expect("signature")).unwrap(),
-            )],
-            hash: Some(BlockHash(
-                Hash::from_hex(row[17].as_string().expect("hash")).unwrap(),
-            )),
-            parameters: None,
-            previous_hash: Hash::default(),
-            previous_issuer: None,
-            inner_hash: None,
-            dividend,
-            identities: Vec::with_capacity(0),
-            joiners: Vec::with_capacity(0),
-            actives: Vec::with_capacity(0),
-            leavers: Vec::with_capacity(0),
-            revoked: Vec::with_capacity(0),
-            excluded: Vec::with_capacity(0),
-            certifications: Vec::with_capacity(0),
-            transactions: Vec::with_capacity(0),
-            inner_hash_and_nonce_str: String::new(),
-        });
-    }
-    None
-}
-
-pub fn get_current_block(currency: &str, db: &DuniterDB) -> Option<DALBlock> {
-    let mut cursor: sqlite::Cursor = db
-        .0
-        .prepare("SELECT number, hash FROM blocks WHERE fork=0 ORDER BY median_time DESC LIMIT ?;")
-        .expect("Request SQL get_current_block is wrong !")
-        .cursor();
-
-    cursor.bind(&[sqlite::Value::Integer(1)]).expect("0");
-
-    if let Some(row) = cursor.next().unwrap() {
-        let blockstamp = Blockstamp {
-            id: BlockId(row[0].as_integer().unwrap() as u32),
-            hash: BlockHash(Hash::from_hex(row[1].as_string().unwrap()).unwrap()),
-        };
-        DALBlock::get_block(currency, db, &blockstamp)
+/// Open Rustbreak database
+pub fn open_db<D: Serialize + DeserializeOwned + Debug + Default + Clone + Send>(
+    dbs_folder_path: &PathBuf,
+    db_file_name: &str,
+) -> Result<BinFileDB<D>, DALError> {
+    let mut db_path = dbs_folder_path.clone();
+    db_path.push(db_file_name);
+    let file_path = db_path.as_path();
+    if file_path.exists()
+        && fs::metadata(file_path)
+            .expect("fail to get file size")
+            .len() > 0
+    {
+        let backend = FileBackend::open(db_path.as_path())?;
+        let db = FileDatabase::<D, Bincode>::from_parts(D::default(), backend, Bincode);
+        db.load()?;
+        Ok(db)
     } else {
-        None
+        Ok(FileDatabase::<D, Bincode>::from_path(
+            db_path.as_path(),
+            D::default(),
+        )?)
     }
 }
 
+/// Open wot file (cf. duniter-wotb crate)
 pub fn open_wot_file<W: WebOfTrust, WF: FileFormater>(
     file_formater: &WF,
     wot_path: &PathBuf,
+    sig_stock: usize,
 ) -> (W, Blockstamp) {
     if wot_path.as_path().exists() {
         match file_formater.from_file(
-            wot_path.as_path().to_str().unwrap(),
-            constants::G1_PARAMS.sig_stock as usize,
+            wot_path
+                .as_path()
+                .to_str()
+                .expect("Fail to convert wo_path to str"),
+            sig_stock,
         ) {
             Ok((wot, binary_blockstamp)) => match ::std::str::from_utf8(&binary_blockstamp) {
-                Ok(str_blockstamp) => (wot, Blockstamp::from_string(str_blockstamp).unwrap()),
+                Ok(str_blockstamp) => (
+                    wot,
+                    Blockstamp::from_string(str_blockstamp)
+                        .expect("Fail to deserialize wot blockstamp"),
+                ),
                 Err(e) => panic!("Invalid UTF-8 sequence: {}", e),
             },
             Err(e) => panic!("Fatal Error : fail to read wot file : {:?}", e),
         }
     } else {
-        (
-            W::new(constants::G1_PARAMS.sig_stock as usize),
-            Blockstamp::default(),
-        )
-    }
-}
-
-pub fn register_wot_state(db: &DuniterDB, wot_state: &WotState) {
-    if wot_state.block_number != 1 {
-        db.0
-            .execute(format!(
-                "INSERT INTO wot_history (block_number, block_hash, sentries_count,
-                average_density, average_distance, distances,
-                average_connectivity, connectivities, average_centrality, centralities)
-                VALUES ({}, '{}', {}, {}, {}, '{}', {}, '{}', {}, '{}');",
-                wot_state.block_number,
-                wot_state.block_hash,
-                wot_state.sentries_count,
-                wot_state.average_density,
-                wot_state.average_distance,
-                serde_json::to_string(&wot_state.distances).unwrap(),
-                wot_state.average_connectivity,
-                serde_json::to_string(&wot_state.connectivities).unwrap(),
-                wot_state.average_centrality,
-                serde_json::to_string(&wot_state.centralities).unwrap(),
-            ))
-            .unwrap();
+        (W::new(sig_stock), Blockstamp::default())
     }
 }
-
-#[derive(Debug, Copy, Clone)]
-pub enum BlockchainError {
-    UnexpectedBlockNumber(),
-    UnknowError(),
-}
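A short sketch of how the grouped database structs and `open_db` above are meant to be driven (illustrative only, assuming the crate is imported as `duniter_dal`; the profile path is a placeholder).

```rust
use duniter_dal::{open_db, DALError, LocalBlockchainV10Datas, WotsV10DBs};
use std::path::PathBuf;

fn open_profile_dbs(profile_path: &PathBuf) -> Result<(), DALError> {
    // Grouped opener: panics with an explicit message if any file cannot be opened.
    let wot_dbs = WotsV10DBs::open(profile_path, false);
    let identities_count = wot_dbs.identities_db.read(|db| db.len())?;
    println!("{} identities stored", identities_count);

    // A single database can also be opened directly; Rustbreak errors
    // are converted into DALError by the From impl, so `?` just works.
    let blockchain_db = open_db::<LocalBlockchainV10Datas>(profile_path, "blockchain.db")?;
    let _block_count = blockchain_db.read(|db| db.len())?;

    // Nothing is persisted to disk until save() / save_dbs() is called.
    wot_dbs.save_dbs();
    Ok(())
}
```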
diff --git a/dal/parsers/certifications.rs b/dal/parsers/certifications.rs
index 2b37e3f44325363c115da87b0887e89be4d92c34..02f0a6d810d439a9acd2b18b05cc1e36ebe775f4 100644
--- a/dal/parsers/certifications.rs
+++ b/dal/parsers/certifications.rs
@@ -1,20 +1,10 @@
 extern crate serde;
 extern crate serde_json;
-extern crate sqlite;
 
-use super::super::block::DALBlock;
-use super::super::identity::DALIdentity;
-use super::super::DuniterDB;
 use duniter_crypto::keys::*;
-use duniter_documents::blockchain::v10::documents::certification::{
-    CertificationDocumentBuilder, CompactCertificationDocument,
-};
-use duniter_documents::blockchain::v10::documents::{
-    CertificationDocument, IdentityDocument, TextDocumentFormat,
-};
-use duniter_documents::blockchain::{Document, DocumentBuilder};
-use duniter_documents::{BlockHash, BlockId, Blockstamp, Hash};
-use std::collections::HashMap;
+use duniter_documents::blockchain::v10::documents::certification::CompactCertificationDocument;
+use duniter_documents::blockchain::v10::documents::{CertificationDocument, TextDocumentFormat};
+use duniter_documents::BlockId;
 
 pub fn parse_certifications_into_compact(
     json_certs: &[serde_json::Value],
@@ -50,94 +40,3 @@ pub fn parse_certifications_into_compact(
     }
     certifications
 }
-
-pub fn parse_certifications_from_json_value(
-    currency: &str,
-    db: &DuniterDB,
-    block_identities: &HashMap<PubKey, IdentityDocument>,
-    array_certifications: &[serde_json::Value],
-) -> Vec<TextDocumentFormat<CertificationDocument>> {
-    let mut certifications: Vec<TextDocumentFormat<CertificationDocument>> = Vec::new();
-    for certification in array_certifications.iter() {
-        let certification_datas: Vec<&str> = certification
-            .as_str()
-            .expect("Fail to parse certs : json isn't str !")
-            .split(':')
-            .collect();
-        if certification_datas.len() == 4 {
-            let target = PubKey::Ed25519(
-                ed25519::PublicKey::from_base58(certification_datas[1])
-                    .expect("Fail to parse cert target !"),
-            );
-            let target_idty_doc: IdentityDocument = match block_identities.get(&target) {
-                Some(idty_doc) => idty_doc.clone(),
-                None => {
-                    let dal_idty = DALIdentity::get_identity(currency, db, &target)
-                        .expect("target identity not found in bdd !");
-                    dal_idty.idty_doc
-                }
-            };
-            let cert_blockstamp_id = BlockId(
-                certification_datas[2]
-                    .parse()
-                    .expect("Fail to parse cert blockstamp !"),
-            );
-            let cert_builder =
-                CertificationDocumentBuilder {
-                    currency,
-                    issuer: &PubKey::Ed25519(
-                        ed25519::PublicKey::from_base58(certification_datas[0])
-                            .expect("Fail to parse cert issuer !"),
-                    ),
-                    blockstamp: &Blockstamp {
-                        id: cert_blockstamp_id,
-                        hash: if cert_blockstamp_id == BlockId(0) {
-                            BlockHash(Hash::from_hex(
-                            "E3B0C44298FC1C149AFBF4C8996FB92427AE41E4649B934CA495991B7852B855",
-                        ).expect("Fail to parse cert : invalid genesis hash"))
-                        } else {
-                            DALBlock::get_block_hash(db, &cert_blockstamp_id).expect(&format!(
-                                "Fatal Error : Block {} not found in bdd !",
-                                cert_blockstamp_id
-                            ))
-                        },
-                    },
-                    target: &target,
-                    identity_username: target_idty_doc.username(),
-                    identity_blockstamp: &target_idty_doc.blockstamp(),
-                    identity_sig: &target_idty_doc.signatures()[0],
-                };
-            let cert_sig = Sig::Ed25519(
-                ed25519::Signature::from_base64(certification_datas[3])
-                    .expect("Fail to parse cert sig !"),
-            );
-            certifications.push(TextDocumentFormat::Complete(
-                cert_builder.build_with_signature(vec![cert_sig]),
-            ));
-        }
-    }
-    certifications
-}
-
-pub fn parse_certifications(
-    currency: &str,
-    db: &DuniterDB,
-    block_identities: &HashMap<PubKey, IdentityDocument>,
-    json_datas: &str,
-) -> Option<Vec<TextDocumentFormat<CertificationDocument>>> {
-    let raw_certifications: serde_json::Value =
-        serde_json::from_str(json_datas).expect("Fail to parse certs: str isn't json !");
-
-    if raw_certifications.is_array() {
-        Some(parse_certifications_from_json_value(
-            currency,
-            db,
-            block_identities,
-            raw_certifications
-                .as_array()
-                .expect("Fail to parse certs: json datas must be an array !"),
-        ))
-    } else {
-        None
-    }
-}
diff --git a/dal/parsers/mod.rs b/dal/parsers/mod.rs
index 34b7772097b5d7ef26f8ba97b15beae677d663e7..92d83a2bdc2ea9ed7781782cce34957c5b97a466 100644
--- a/dal/parsers/mod.rs
+++ b/dal/parsers/mod.rs
@@ -1,134 +1,17 @@
-pub mod blocks;
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
 pub mod certifications;
-pub mod excluded;
-pub mod identities;
-pub mod memberships;
 pub mod revoked;
-pub mod transactions;
-
-#[cfg(test)]
-mod tests {
-    use super::transactions::*;
-    use duniter_crypto::keys::*;
-    use duniter_documents::blockchain::v10::documents::transaction::*;
-    use duniter_documents::blockchain::DocumentBuilder;
-    use duniter_documents::Blockstamp;
-
-    #[test]
-    fn parse_json_tx() {
-        let tx_json = json!({
-            "version": 10,
-            "currency": "g1",
-            "locktime": 0,
-            "hash": "3424206EF64C69E5F8C3906AAE571E378A498FCDAE0B85E9405A5205D7148EFE",
-            "blockstamp": "112533-000002150F2E805E604D9B31212D079570AAD8D3A4D8BB75F2C15A94A345B6B1",
-            "blockstampTime": 0,
-            "issuers": [
-                "51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2"
-            ],
-            "inputs": [
-                "1000:0:D:51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2:46496"
-            ],
-            "outputs": [
-                "1000:0:SIG(2yN8BRSkARcqE8NCxKMBiHfTpx1EvwULFn56Myf6qRmy)"
-            ],
-            "unlocks": [
-                "0:SIG(0)"
-            ],
-            "signatures": [
-                "5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw=="
-            ],
-            "comment": "Merci pour la calligraphie ;) de Liam"
-        });
-
-        let tx_builder = TransactionDocumentBuilder {
-            currency: "g1",
-            blockstamp: &Blockstamp::from_string(
-                "112533-000002150F2E805E604D9B31212D079570AAD8D3A4D8BB75F2C15A94A345B6B1",
-            ).unwrap(),
-            locktime: &0,
-            issuers: &vec![PubKey::Ed25519(
-                ed25519::PublicKey::from_base58("51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2")
-                    .unwrap(),
-            )],
-            inputs: &vec![
-                TransactionInput::parse_from_str(
-                    "1000:0:D:51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2:46496",
-                ).unwrap(),
-            ],
-            outputs: &vec![
-                TransactionOutput::parse_from_str(
-                    "1000:0:SIG(2yN8BRSkARcqE8NCxKMBiHfTpx1EvwULFn56Myf6qRmy)",
-                ).unwrap(),
-            ],
-            unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
-            comment: "Merci pour la calligraphie ;) de Liam",
-        };
-
-        assert_eq!(
-            parse_transaction("g1", &tx_json).expect("Fail to parse transaction !"),
-            tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw==").unwrap())])
-        );
-    }
-
-    #[test]
-    fn parse_json_tx2() {
-        let tx_json = json!({
-            "version": 10,
-            "currency": "g1",
-            "locktime": 0,
-            "hash": "F98BF7A8BF82E76F5B69E70CEF0A07A08BFDB03561955EC57B254DB1E958529C",
-            "blockstamp": "58-00005B9167EBA1E32C6EAD42AE7F72D8F14B765D3C9E47D233B553D47C5AEE0C",
-            "blockstampTime": 1488990541,
-            "issuers": [
-                "FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD"
-            ],
-            "inputs": [
-                "1000:0:D:FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD:1"
-            ],
-            "outputs": [
-                "3:0:SIG(7vU9BMDhN6fBuRa2iK3JRbC6pqQKb4qDMGsFcQuT5cz)",
-                "997:0:SIG(FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD)"
-            ],
-            "unlocks": [
-                "0:SIG(0)"
-            ],
-            "signatures": [
-                "VWbvsiybM4L2X5+o+6lIiuKNw5KrD1yGZqmV+lHtA28XoRUFzochSIgfoUqBsTAaYEHY45vSX917LDXudTEzBg=="
-            ],
-            "comment": "Un petit cafe ;-)"
-        });
-
-        let tx_builder = TransactionDocumentBuilder {
-            currency: "g1",
-            blockstamp: &Blockstamp::from_string(
-                "58-00005B9167EBA1E32C6EAD42AE7F72D8F14B765D3C9E47D233B553D47C5AEE0C",
-            ).unwrap(),
-            locktime: &0,
-            issuers: &vec![PubKey::Ed25519(
-                ed25519::PublicKey::from_base58("FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD")
-                    .unwrap(),
-            )],
-            inputs: &vec![
-                TransactionInput::parse_from_str(
-                    "1000:0:D:FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD:1",
-                ).unwrap(),
-            ],
-            outputs: &vec![
-                TransactionOutput::parse_from_str(
-                    "3:0:SIG(7vU9BMDhN6fBuRa2iK3JRbC6pqQKb4qDMGsFcQuT5cz)",
-                ).unwrap(),
-                TransactionOutput::parse_from_str(
-                    "997:0:SIG(FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD)",
-                ).unwrap(),
-            ],
-            unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
-            comment: "Un petit cafe ;-)",
-        };
-
-        assert_eq!(
-            parse_transaction("g1", &tx_json).expect("Fail to parse transaction !"),
-            tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("VWbvsiybM4L2X5+o+6lIiuKNw5KrD1yGZqmV+lHtA28XoRUFzochSIgfoUqBsTAaYEHY45vSX917LDXudTEzBg==").unwrap())])
-        );
-    }
-}
diff --git a/dal/parsers/revoked.rs b/dal/parsers/revoked.rs
index fec4ca70bb6a1a82aacb3f467229efe7204ebf8b..2bea8150fba51e0b6c38ce6e69729035e218b3cc 100644
--- a/dal/parsers/revoked.rs
+++ b/dal/parsers/revoked.rs
@@ -1,18 +1,8 @@
 extern crate serde_json;
 
 use duniter_crypto::keys::*;
-use duniter_documents::blockchain::v10::documents::revocation::{
-    CompactRevocationDocument, RevocationDocumentBuilder,
-};
-use duniter_documents::blockchain::v10::documents::{
-    IdentityDocument, RevocationDocument, TextDocumentFormat,
-};
-use duniter_documents::blockchain::{Document, DocumentBuilder};
-
-use super::super::identity::DALIdentity;
-use super::super::DuniterDB;
-
-use std::collections::HashMap;
+use duniter_documents::blockchain::v10::documents::revocation::CompactRevocationDocument;
+use duniter_documents::blockchain::v10::documents::{RevocationDocument, TextDocumentFormat};
 
 pub fn parse_revocations_into_compact(
     json_recocations: &[serde_json::Value],
@@ -39,59 +29,3 @@ pub fn parse_revocations_into_compact(
     }
     revocations
 }
-
-pub fn parse_revocations(
-    currency: &str,
-    db: &DuniterDB,
-    block_identities: &HashMap<PubKey, IdentityDocument>,
-    json_datas: &str,
-) -> Option<Vec<TextDocumentFormat<RevocationDocument>>> {
-    let raw_revocations: serde_json::Value = serde_json::from_str(json_datas).unwrap();
-
-    if raw_revocations.is_array() {
-        Some(parse_revocations_from_json_value(
-            currency,
-            db,
-            block_identities,
-            raw_revocations.as_array().unwrap(),
-        ))
-    } else {
-        None
-    }
-}
-
-pub fn parse_revocations_from_json_value(
-    currency: &str,
-    db: &DuniterDB,
-    block_identities: &HashMap<PubKey, IdentityDocument>,
-    array_revocations: &[serde_json::Value],
-) -> Vec<TextDocumentFormat<RevocationDocument>> {
-    let mut revocations: Vec<TextDocumentFormat<RevocationDocument>> = Vec::new();
-    for revocation in array_revocations.iter() {
-        let revocations_datas: Vec<&str> = revocation.as_str().unwrap().split(':').collect();
-        if revocations_datas.len() == 2 {
-            let idty_pubkey =
-                PubKey::Ed25519(ed25519::PublicKey::from_base58(revocations_datas[0]).unwrap());
-            let idty_doc: IdentityDocument = match block_identities.get(&idty_pubkey) {
-                Some(idty_doc) => idty_doc.clone(),
-                None => {
-                    let dal_idty = DALIdentity::get_identity(currency, db, &idty_pubkey).unwrap();
-                    dal_idty.idty_doc
-                }
-            };
-            let revoc_doc_builder = RevocationDocumentBuilder {
-                currency,
-                issuer: &idty_pubkey,
-                identity_username: idty_doc.username(),
-                identity_blockstamp: &idty_doc.blockstamp(),
-                identity_sig: &idty_doc.signatures()[0],
-            };
-            let revoc_sig =
-                Sig::Ed25519(ed25519::Signature::from_base64(revocations_datas[1]).unwrap());
-            revocations.push(TextDocumentFormat::Complete(
-                revoc_doc_builder.build_with_signature(vec![revoc_sig]),
-            ));
-        }
-    }
-    revocations
-}
diff --git a/dal/sources.rs b/dal/sources.rs
new file mode 100644
index 0000000000000000000000000000000000000000..dcb27bbafdee5b75b10f3f60df846b91487e085b
--- /dev/null
+++ b/dal/sources.rs
@@ -0,0 +1,97 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+extern crate duniter_crypto;
+extern crate duniter_documents;
+
+use duniter_crypto::keys::PubKey;
+use duniter_documents::blockchain::v10::documents::transaction::*;
+use duniter_documents::{BlockId, Hash};
+use std::ops::{Add, Sub};
+
+#[derive(Copy, Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+pub struct SourceAmount(pub TxAmount, pub TxBase);
+
+impl Default for SourceAmount {
+    fn default() -> SourceAmount {
+        SourceAmount(TxAmount(0), TxBase(0))
+    }
+}
+
+impl Add for SourceAmount {
+    type Output = SourceAmount;
+    fn add(self, s2: SourceAmount) -> Self::Output {
+        if self.1 == s2.1 {
+            SourceAmount(self.0 + s2.0, self.1)
+        } else {
+            panic!("Source change base not yet supported !")
+        }
+    }
+}
+
+impl Sub for SourceAmount {
+    type Output = SourceAmount;
+    fn sub(self, s2: SourceAmount) -> Self::Output {
+        if self.1 == s2.1 {
+            SourceAmount(self.0 - s2.0, self.1)
+        } else {
+            panic!("Source change base not yet supported !")
+        }
+    }
+}
+
+#[derive(Copy, Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+pub struct UTXOIndexV10(pub Hash, pub TxIndex);
+
+pub type UTXOContentV10 = TransactionOutput;
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct UTXOV10(pub UTXOIndexV10, pub UTXOContentV10);
+
+impl UTXOV10 {
+    pub fn get_conditions(&self) -> TransactionOutputConditionGroup {
+        self.1.conditions.clone()
+    }
+    pub fn get_amount(&self) -> SourceAmount {
+        SourceAmount(self.1.amount, self.1.base)
+    }
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub enum UTXO {
+    V10(UTXOV10),
+    V11(),
+}
+
+impl UTXO {
+    pub fn get_conditions(&self) -> TransactionOutputConditionGroup {
+        match *self {
+            UTXO::V10(ref utxo_v10) => utxo_v10.get_conditions(),
+            _ => panic!("UTXO version not supported !"),
+        }
+    }
+    pub fn get_amount(&self) -> SourceAmount {
+        match *self {
+            UTXO::V10(ref utxo_v10) => utxo_v10.get_amount(),
+            _ => panic!("UTXO version not supported !"),
+        }
+    }
+}
+
+#[derive(Copy, Clone, Debug, Deserialize, Eq, Hash, PartialEq, Serialize)]
+pub enum SourceIndexV10 {
+    UTXO(UTXOIndexV10),
+    DU(PubKey, BlockId),
+}
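Arithmetic on `SourceAmount` is only defined between amounts in the same base; a quick illustrative check (assuming `TxAmount` and `TxBase` are the tuple structs exposed by `duniter_documents`, and the dal crate is linked as `duniter_dal`):

```rust
use duniter_dal::sources::SourceAmount;
use duniter_documents::blockchain::v10::documents::transaction::{TxAmount, TxBase};

fn main() {
    let a = SourceAmount(TxAmount(1000), TxBase(0));
    let b = SourceAmount(TxAmount(500), TxBase(0));

    // Same base: addition and subtraction behave like plain integers.
    let sum = a + b;
    assert_eq!(sum, SourceAmount(TxAmount(1500), TxBase(0)));
    assert_eq!(sum - b, a);

    // Mixing bases, e.g. `a + SourceAmount(TxAmount(1), TxBase(1))`,
    // panics with "Source change base not yet supported !".
}
```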
diff --git a/dal/tools.rs b/dal/tools.rs
index 9a4ecee0aa5f649b94ccd172c0eee2d326659533..8943916f16b757f00aa4f5bb6ceb496675923a8d 100644
--- a/dal/tools.rs
+++ b/dal/tools.rs
@@ -40,7 +40,10 @@ pub fn calculate_average_density<T: WebOfTrust>(wot: &T) -> usize {
     let enabled_members_count = enabled_members.len();
     let mut count_actives_links: usize = 0;
     for member in &enabled_members {
-        count_actives_links += wot.issued_count(*member).unwrap();
+        count_actives_links += wot.issued_count(*member).expect(&format!(
+            "Fail to get issued_count of wot_id {}",
+            (*member).0
+        ));
     }
     ((count_actives_links as f32 / enabled_members_count as f32) * 1_000.0) as usize
 }
diff --git a/dal/writers/block.rs b/dal/writers/block.rs
index 6137156b51fce0ca2c9b0361828c0ce1d767c13d..e86bf9f6714ef94e7bdc19962a1e01d47d7f5378 100644
--- a/dal/writers/block.rs
+++ b/dal/writers/block.rs
@@ -1,74 +1,71 @@
-extern crate duniter_crypto;
-extern crate duniter_documents;
-extern crate duniter_wotb;
-extern crate serde;
-extern crate serde_json;
-extern crate sqlite;
+use block::DALBlock;
+use duniter_documents::blockchain::Document;
+use duniter_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
+use std::collections::HashMap;
+use ForkId;
+use {BinFileDB, DALError, ForksBlocksV10Datas, ForksV10Datas, LocalBlockchainV10Datas};
 
-use self::duniter_documents::blockchain::v10::documents::BlockDocument;
-use self::duniter_documents::blockchain::Document;
-use super::super::block::DALBlock;
-use super::super::DuniterDB;
+/// Write a DALBlock to the databases
+pub fn write(
+    blockchain_db: &BinFileDB<LocalBlockchainV10Datas>,
+    forks_db: &BinFileDB<ForksV10Datas>,
+    forks_blocks_db: &BinFileDB<ForksBlocksV10Datas>,
+    dal_block: &DALBlock,
+    old_fork_id: Option<ForkId>,
+    sync: bool,
+) -> Result<(), DALError> {
+    if dal_block.fork_id.0 == 0 {
+        blockchain_db.write(|db| {
+            db.insert(dal_block.block.number, dal_block.clone());
+        })?;
 
-pub fn write_network_block(
-    db: &DuniterDB,
-    block: &BlockDocument,
-    fork: usize,
-    isolate: bool,
-    revoked: &[serde_json::Value],
-    certifications: &[serde_json::Value],
-) {
-    db.0
-        .execute(
-            format!("INSERT INTO blocks (fork, isolate, version, nonce, number, pow_min, time, median_time, members_count, monetary_mass, unit_base, issuers_count, issuers_frame, issuers_frame_var, currency, issuer, signature, hash, previous_hash, inner_hash, dividend, identities, joiners, actives, leavers, revoked, excluded, certifications, transactions) VALUES ({}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, '{}', '{}', '{}', '{}', '{}', '{}', {}, '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}');",
-                fork, if isolate { 1 } else { 0 }, 10,
-                block.nonce, block.number, block.pow_min, block.time, block.median_time,
-                block.members_count, block.monetary_mass, block.unit_base, block.issuers_count,
-                block.issuers_frame, block.issuers_frame_var, block.currency, block.issuers[0],
-                block.signatures[0].to_string(), block.hash.unwrap().0.to_string(),
-                block.previous_hash.to_string(), block.inner_hash.unwrap().to_string(),
-                block.dividend.unwrap_or(0),
-                serde_json::to_string(&block.identities).unwrap(),
-                serde_json::to_string(&block.joiners).unwrap(), serde_json::to_string(&block.actives).unwrap(),
-                serde_json::to_string(&block.leavers).unwrap(), serde_json::to_string(revoked).unwrap(),
-                serde_json::to_string(&block.excluded).unwrap(), serde_json::to_string(certifications).unwrap(),
-                serde_json::to_string(&block.transactions).unwrap()
-            ))
-        .unwrap();
-}
-
-pub fn write(db: &DuniterDB, block: &BlockDocument, fork: usize, isolate: bool) {
-    let mut insert = true;
-    if fork == 0 {
-        if let Some(_fork) = DALBlock::get_block_fork(db, &block.blockstamp()) {
-            insert = false;
-            db.0
-                .execute(format!(
-                    "UPDATE blocks SET fork=0 WHERE number={} AND hash='{}';",
-                    block.number,
-                    block.hash.unwrap().0.to_string()
-                ))
-                .unwrap();
+        if old_fork_id.is_some() {
+            forks_blocks_db.write(|db| {
+                db.remove(&dal_block.block.blockstamp());
+            })?;
         }
     }
-
-    if insert {
-        db.0
-            .execute(
-                format!("INSERT INTO blocks (fork, isolate, version, nonce, number, pow_min, time, median_time, members_count, monetary_mass, unit_base, issuers_count, issuers_frame, issuers_frame_var, currency, issuer, signature, hash, previous_hash, inner_hash, dividend, identities, joiners, actives, leavers, revoked, excluded, certifications, transactions) VALUES ({}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, '{}', '{}', '{}', '{}', '{}', '{}', {}, '{}', '{}', '{}', '{}', '{}', '{}', '{}', '{}');",
-                    fork, if isolate { 1 } else { 0 }, 10,
-                    block.nonce, block.number, block.pow_min, block.time, block.median_time,
-                    block.members_count, block.monetary_mass, block.unit_base, block.issuers_count,
-                    block.issuers_frame, block.issuers_frame_var, block.currency, block.issuers[0],
-                    block.signatures[0].to_string(), block.hash.unwrap().0.to_string(),
-                    block.previous_hash.to_string(), block.inner_hash.unwrap().to_string(),
-                    block.dividend.unwrap_or(0), serde_json::to_string(&block.identities).unwrap(),
-                    serde_json::to_string(&block.joiners).unwrap(), serde_json::to_string(&block.actives).unwrap(),
-                    serde_json::to_string(&block.leavers).unwrap(), serde_json::to_string(&block.revoked).unwrap(),
-                    serde_json::to_string(&block.excluded).unwrap(), serde_json::to_string(&block.certifications).unwrap(),
-                    serde_json::to_string(&block.transactions).unwrap()
-                ),
-            )
-            .unwrap();
+    if let Some(old_fork_id) = old_fork_id {
+        forks_db.write(|db| {
+            let mut fork_meta_datas = db
+                .get(&old_fork_id)
+                .expect("old_fork_id don(t exist !")
+                .clone();
+            let previous_blockstamp = Blockstamp {
+                id: BlockId(dal_block.block.blockstamp().id.0 - 1),
+                hash: dal_block
+                    .block
+                    .hash
+                    .expect("Try to get hash of an uncompleted or reduce block !"),
+            };
+            fork_meta_datas.remove(&previous_blockstamp);
+            db.insert(old_fork_id, fork_meta_datas);
+            if dal_block.fork_id.0 > 0 {
+                let mut fork_meta_datas = db.get(&dal_block.fork_id).unwrap().clone();
+                fork_meta_datas.insert(previous_blockstamp, dal_block.block.hash.unwrap());
+                db.insert(old_fork_id, fork_meta_datas);
+            }
+        })?;
+    }
+    if !sync {
+        let mut blockchain_meta_datas: HashMap<PreviousBlockstamp, BlockHash> = forks_db
+            .read(|db| db.get(&ForkId(0)).cloned())
+            .expect("Get blockchain meta datas : DALError")
+            .unwrap_or_else(HashMap::new);
+        let block_previous_hash = if dal_block.block.number.0 == 0 {
+            PreviousBlockstamp::default()
+        } else {
+            PreviousBlockstamp {
+                id: BlockId(dal_block.block.number.0 - 1),
+                hash: BlockHash(dal_block.block.previous_hash),
+            }
+        };
+        blockchain_meta_datas.insert(block_previous_hash, dal_block.block.hash.unwrap());
+        forks_db
+            .write(|db| {
+                db.insert(ForkId(0), blockchain_meta_datas);
+            })
+            .expect("Write blockchain meta datas : DALError");
     }
+    Ok(())
 }
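An illustrative call of the new `write` function for a block on the local branch (fork 0) during a sync, assuming a `BlocksV10DBs` handle and a fully completed `DALBlock`; the crate and module paths are assumptions.

```rust
use duniter_dal::block::DALBlock;
use duniter_dal::writers::block::write;
use duniter_dal::{BlocksV10DBs, DALError};

fn apply_local_block(blocks_dbs: &BlocksV10DBs, dal_block: &DALBlock) -> Result<(), DALError> {
    write(
        &blocks_dbs.blockchain_db,
        &blocks_dbs.forks_db,
        &blocks_dbs.forks_blocks_db,
        dal_block,
        None, // the block was not previously stored under a fork
        true, // sync mode: skip the ForkId(0) meta data refresh
    )
}
```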
diff --git a/dal/writers/certification.rs b/dal/writers/certification.rs
index f175de0f681a0b4a1272ebb14eb5748102b2acf3..5f593fcbb8b7bfed507f61d7e1ff806400250f60 100644
--- a/dal/writers/certification.rs
+++ b/dal/writers/certification.rs
@@ -1,54 +1,84 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
 extern crate serde;
 extern crate serde_json;
-extern crate sqlite;
 
-use super::super::DuniterDB;
+use currency_params::CurrencyParameters;
 use duniter_crypto::keys::*;
 use duniter_documents::blockchain::v10::documents::certification::CompactCertificationDocument;
-use duniter_documents::Blockstamp;
+use duniter_documents::BlockId;
+use duniter_wotb::NodeId;
+use {BinFileDB, CertsExpirV10Datas, DALError, IdentitiesV10Datas};
 
 pub fn write_certification(
-    cert: &CompactCertificationDocument,
-    db: &DuniterDB,
-    written_blockstamp: Blockstamp,
+    currency_params: &CurrencyParameters,
+    identities_db: &BinFileDB<IdentitiesV10Datas>,
+    certs_db: &BinFileDB<CertsExpirV10Datas>,
+    source_pubkey: PubKey,
+    source: NodeId,
+    target: NodeId,
+    created_block_id: BlockId,
     written_timestamp: u64,
-) {
-    let mut cursor = db
-        .0
-        .prepare("SELECT median_time FROM blocks WHERE number=? AND fork=0 LIMIT 1;")
-        .expect("invalid write_certification sql request")
-        .cursor();
-
-    cursor
-        .bind(&[sqlite::Value::Integer(i64::from(cert.block_number.0))])
-        .expect("convert blockstamp to timestamp failure at step 1 !");
-
-    let mut created_timestamp: i64 = 0;
-    if let Some(row) = cursor
-        .next()
-        .expect("convert blockstamp to timestamp failure at step 2 !")
-    {
-        created_timestamp = row[0]
-            .as_integer()
-            .expect("Fail to write cert, impossible to get created_timestamp !");
-    }
-
-    db.0
-        .execute(
-            format!("INSERT INTO certifications (pubkey_from, pubkey_to, created_on, signature, written_on, expires_on, chainable_on) VALUES ('{}', '{}', '{}', '{}', '{}', {}, {});",
-                cert.issuer, cert.target, cert.block_number.0, cert.signature,
-                written_blockstamp.to_string(),
-                created_timestamp+super::super::constants::G1_PARAMS.sig_validity,
-                written_timestamp+super::super::constants::G1_PARAMS.sig_period
-            ))
-        .expect("Fail to execute INSERT certification !");
+) -> Result<(), DALError> {
+    // Get cert_chainable_on
+    let mut member_datas = identities_db.read(|db| {
+        db.get(&source_pubkey)
+            .expect("Database Corrupted, please reset data !")
+            .clone()
+    })?;
+    // Push new cert_chainable_on
+    member_datas
+        .cert_chainable_on
+        .push(written_timestamp + currency_params.sig_period);
+    // Write new identity datas
+    identities_db.write(|db| {
+        db.insert(source_pubkey, member_datas);
+    })?;
+    // Add cert in certs_db
+    certs_db.write(|db| {
+        let mut created_certs = db.get(&created_block_id).cloned().unwrap_or_default();
+        created_certs.insert((source, target));
+        db.insert(created_block_id, created_certs);
+    })?;
+    Ok(())
 }
 
-pub fn remove_certification(from: PubKey, to: PubKey, db: &DuniterDB) {
-    db.0
-        .execute(format!(
-            "DELETE FROM certifications WHERE pubkey_from={} AND pubkey_to={}",
-            from, to
-        ))
-        .expect("Fail to execute DELETE certification !");
+/// Revert written certification
+pub fn revert_write_cert(
+    identities_db: &BinFileDB<IdentitiesV10Datas>,
+    certs_db: &BinFileDB<CertsExpirV10Datas>,
+    compact_doc: CompactCertificationDocument,
+    source: NodeId,
+    target: NodeId,
+) -> Result<(), DALError> {
+    // Remove CertsExpirV10Datas entry
+    certs_db.write(|db| {
+        let mut certs = db
+            .get(&compact_doc.block_number)
+            .cloned()
+            .unwrap_or_default();
+        certs.remove(&(source, target));
+        db.insert(compact_doc.block_number, certs);
+    })?;
+    // Pop last cert_chainable_on
+    identities_db.write(|db| {
+        if let Some(mut member_datas) = db.get(&compact_doc.issuer).cloned() {
+            member_datas.cert_chainable_on.pop();
+            db.insert(compact_doc.issuer, member_datas);
+        }
+    })?;
+    Ok(())
 }
diff --git a/dal/writers/dividend.rs b/dal/writers/dividend.rs
new file mode 100644
index 0000000000000000000000000000000000000000..7f7a4a350bdc0872de0fac405f48fe6f0ba4dd1a
--- /dev/null
+++ b/dal/writers/dividend.rs
@@ -0,0 +1,73 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use duniter_crypto::keys::PubKey;
+use duniter_documents::blockchain::v10::documents::transaction::*;
+use duniter_documents::BlockId;
+use rustbreak::backend::Backend;
+use sources::SourceAmount;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Debug;
+use *;
+
+pub fn create_du<B: Backend + Debug>(
+    du_db: &BinDB<DUsV10Datas, B>,
+    balances_db: &BinDB<BalancesV10Datas, B>,
+    du_amount: &SourceAmount,
+    du_block_id: &BlockId,
+    members: &[PubKey],
+) -> Result<(), DALError> {
+    // Insert DU sources in DUsV10DB
+    du_db.write(|db| {
+        for pubkey in members {
+            let mut pubkey_dus = db.get(&pubkey).cloned().unwrap_or_default();
+            pubkey_dus.insert(*du_block_id);
+            db.insert(*pubkey, pubkey_dus);
+        }
+    })?;
+    // Get members balances
+    let members_balances: HashMap<PubKey, (SourceAmount, HashSet<UTXOIndexV10>)> = balances_db
+        .read(|db| {
+            let mut members_balances = HashMap::new();
+            for pubkey in members {
+                members_balances.insert(
+                    *pubkey,
+                    db.get(&TransactionOutputConditionGroup::Single(
+                        TransactionOutputCondition::Sig(*pubkey),
+                    )).cloned()
+                        .unwrap_or_default(),
+                );
+            }
+            members_balances
+        })?;
+    // Increment members balance
+    let members_balances: Vec<(PubKey, (SourceAmount, HashSet<UTXOIndexV10>))> = members_balances
+        .iter()
+        .map(|(pubkey, (balance, utxos_indexs))| {
+            let new_balance = *balance + *du_amount;
+            (*pubkey, (new_balance, utxos_indexs.clone()))
+        })
+        .collect();
+    // Write new members balance
+    balances_db.write(|db| {
+        for (pubkey, (balance, utxos_indexs)) in members_balances {
+            db.insert(
+                TransactionOutputConditionGroup::Single(TransactionOutputCondition::Sig(pubkey)),
+                (balance, utxos_indexs),
+            );
+        }
+    })?;
+    Ok(())
+}
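+
+// Illustrative usage sketch (not part of the change above): creating the DU of a block
+// against in-memory databases, mirroring the test in `dal/writers/transaction.rs`.
+// `members_pubkeys` is a hypothetical `Vec<PubKey>` listing the members at that block.
+//
+// let currency_dbs = CurrencyV10DBs::open_memory_mode();
+// create_du(
+//     &currency_dbs.du_db,
+//     &currency_dbs.balances_db,
+//     &SourceAmount(TxAmount(1000), TxBase(0)),
+//     &BlockId(1),
+//     &members_pubkeys,
+// ).expect("Fail to create DU !");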
diff --git a/dal/writers/identity.rs b/dal/writers/identity.rs
index ea106fce344f2489d635bf26b09c49446fc67c4e..214f4dbd769baee30dabe49325f8124c723bfe25 100644
--- a/dal/writers/identity.rs
+++ b/dal/writers/identity.rs
@@ -1,37 +1,25 @@
-extern crate duniter_wotb;
-extern crate sqlite;
-
 use super::super::identity::DALIdentity;
-use super::super::DuniterDB;
 use duniter_documents::blockchain::Document;
-use duniter_documents::Blockstamp;
+use duniter_documents::BlockId;
 use duniter_wotb::NodeId;
+use {BinFileDB, DALError, IdentitiesV10Datas, MsExpirV10Datas};
 
 pub fn write(
     idty: &DALIdentity,
-    wotb_id: &NodeId,
-    db: &DuniterDB,
-    _written_blockstamp: Blockstamp,
-    _written_timestamp: u64,
-) {
-    let expired_on = match idty.expired_on {
-        Some(ref tmp) => tmp.to_string(),
-        None => String::from(""),
-    };
-    let revoked_on = match idty.revoked_on {
-        Some(ref tmp) => tmp.to_string(),
-        None => String::from(""),
-    };
-    db.0
-        .execute(
-            format!("INSERT INTO identities (wotb_id, uid, pubkey, hash, sig, state, created_on, joined_on, penultimate_renewed_on, last_renewed_on, expires_on, revokes_on, expired_on, revoked_on) VALUES ({}, '{}', '{}', '{}', '{}', {}, '{}', '{}', '{}', '{}', {}, {}, '{}', '{}');",
-                (*wotb_id).0, idty.idty_doc.username(), idty.idty_doc.issuers()[0], idty.hash,
-                idty.idty_doc.signatures()[0], idty.state,
-                idty.idty_doc.blockstamp().to_string(),
-                idty.joined_on.to_string(),
-                idty.penultimate_renewed_on.to_string(),
-                idty.last_renewed_on.to_string(),
-                idty.expires_on, idty.revokes_on, expired_on, revoked_on
-            ))
-        .unwrap();
+    idty_wot_id: NodeId,
+    identities_db: &BinFileDB<IdentitiesV10Datas>,
+    ms_db: &BinFileDB<MsExpirV10Datas>,
+    ms_created_block_id: BlockId,
+) -> Result<(), DALError> {
+    // Write Identity
+    identities_db.write(|db| {
+        db.insert(idty.idty_doc.issuers()[0], idty.clone());
+    })?;
+    // Update MsExpirV10DB
+    ms_db.write(|db| {
+        let mut memberships = db.get(&ms_created_block_id).cloned().unwrap_or_default();
+        memberships.insert(idty_wot_id);
+        db.insert(ms_created_block_id, memberships);
+    })?;
+    Ok(())
 }
diff --git a/dal/writers/mod.rs b/dal/writers/mod.rs
index aba7f80035e06d003fbf4387999ea70411845262..e7310d3caab4e139439f13839035b652dbf7c39e 100644
--- a/dal/writers/mod.rs
+++ b/dal/writers/mod.rs
@@ -1,4 +1,6 @@
 pub mod block;
 pub mod certification;
+pub mod dividend;
 pub mod identity;
 pub mod requests;
+pub mod transaction;
diff --git a/dal/writers/requests.rs b/dal/writers/requests.rs
index 13a909a5976b7f084835668bdcfa625497422139..ef5ca6216a1bf9948080e0e3a60a22b24b4cf29f 100644
--- a/dal/writers/requests.rs
+++ b/dal/writers/requests.rs
@@ -1,85 +1,209 @@
-extern crate duniter_crypto;
-extern crate duniter_documents;
-extern crate duniter_wotb;
 extern crate serde;
 extern crate serde_json;
-extern crate sqlite;
 
-use self::duniter_crypto::keys::PubKey;
-use self::duniter_documents::blockchain::v10::documents::certification::CompactCertificationDocument;
-use self::duniter_documents::blockchain::v10::documents::identity::IdentityDocument;
-use self::duniter_documents::Blockstamp;
-use self::duniter_wotb::NodeId;
-use super::super::block::DALBlock;
-use super::super::identity::DALIdentity;
-use super::super::DuniterDB;
+use block::DALBlock;
+use currency_params::CurrencyParameters;
+use duniter_crypto::keys::PubKey;
+use duniter_documents::blockchain::v10::documents::identity::IdentityDocument;
+use duniter_documents::Blockstamp;
+use duniter_wotb::NodeId;
+use identity::DALIdentity;
+use rustbreak::backend::Backend;
+use sources::SourceAmount;
+use std::fmt::Debug;
 use std::ops::Deref;
+use *;
 
-#[derive(Debug)]
-/// Contain a pending write request for blockchain database
-pub enum DBWriteRequest {
-    /// Newcomer
-    CreateIdentity(NodeId, Blockstamp, u64, Box<IdentityDocument>),
-    /// Active
-    RenewalIdentity(PubKey, Blockstamp, u64),
-    /// Excluded
-    ExcludeIdentity(NodeId, Blockstamp, u64),
-    /// Revoked
-    RevokeIdentity(NodeId, Blockstamp, u64),
-    /// Certification
-    CreateCert(Blockstamp, u64, CompactCertificationDocument),
-    /// Certification expiry
-    CertExpiry(NodeId, NodeId, Blockstamp, u64),
+#[derive(Debug, Clone)]
+/// Contains a pending write request for the databases
+pub enum DBsWriteRequest {
+    /// Contains a pending write request for the blocks database
+    BlocksDB(BlocksDBsWriteQuery),
+    /// Contains a pending write request for the WoT databases
+    WotDBs(WotsDBsWriteQuery),
+    /// Contains a pending write request for the currency databases
+    CurrencyDBs(CurrencyDBsWriteQuery),
+}
+
+#[derive(Debug, Clone)]
+/// Contains a pending write request for the blocks databases
+pub enum BlocksDBsWriteQuery {
     /// Write block
-    WriteBlock(Box<DALBlock>),
+    WriteBlock(Box<DALBlock>, Option<ForkId>, PreviousBlockstamp, BlockHash),
     /// Revert block
     RevertBlock(Box<DALBlock>),
 }
 
-impl DBWriteRequest {
-    pub fn apply(&self, currency: &str, db: &DuniterDB) {
+impl BlocksDBsWriteQuery {
+    pub fn apply(&self, databases: &BlocksV10DBs, sync: bool) -> Result<(), DALError> {
+        if let BlocksDBsWriteQuery::WriteBlock(
+            ref dal_block,
+            ref old_fork_id,
+            ref _previous_blockstamp,
+            ref _block_hash,
+        ) = *self
+        {
+            let dal_block = dal_block.deref();
+            trace!("BlocksDBsWriteQuery::WriteBlock...");
+            super::block::write(
+                &databases.blockchain_db,
+                &databases.forks_db,
+                &databases.forks_blocks_db,
+                &dal_block,
+                *old_fork_id,
+                sync,
+            )?;
+            trace!("BlocksDBsWriteQuery::WriteBlock...finish");
+        }
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone)]
+/// Contains a pending write request for the WoT databases
+pub enum WotsDBsWriteQuery {
+    /// Newcomer (wotb_id, blockstamp, current_bc_time, idty_doc, ms_created_block_id)
+    CreateIdentity(NodeId, Blockstamp, u64, Box<IdentityDocument>, BlockId),
+    /// Active (pubKey, idty_wot_id, current_bc_time, ms_created_block_id)
+    RenewalIdentity(PubKey, NodeId, u64, BlockId),
+    /// Excluded
+    ExcludeIdentity(PubKey, Blockstamp),
+    /// Revoked
+    RevokeIdentity(PubKey, Blockstamp),
+    /// Certification (source_pubkey, source, target, created_block_id, median_time)
+    CreateCert(PubKey, NodeId, NodeId, BlockId, u64),
+    /// Certification expiry (source, target, created_block_id)
+    ExpireCert(NodeId, NodeId, BlockId),
+}
+
+impl WotsDBsWriteQuery {
+    pub fn apply(
+        &self,
+        databases: &WotsV10DBs,
+        currency_params: &CurrencyParameters,
+    ) -> Result<(), DALError> {
         match *self {
-            DBWriteRequest::CreateIdentity(
+            WotsDBsWriteQuery::CreateIdentity(
                 ref wotb_id,
                 ref blockstamp,
-                ref median_time,
+                ref current_bc_time,
                 ref idty_doc,
+                ref ms_created_block_id,
             ) => {
-                trace!("DBWriteRequest::CreateIdentity...");
-                let idty = DALIdentity::create_identity(db, idty_doc.deref(), *blockstamp);
-                super::identity::write(&idty, wotb_id, db, *blockstamp, *median_time);
-                trace!("DBWriteRequest::CreateIdentity...finish.");
+                trace!("WotsDBsWriteQuery::CreateIdentity...");
+                let idty = DALIdentity::create_identity(
+                    currency_params,
+                    idty_doc.deref(),
+                    *wotb_id,
+                    *blockstamp,
+                    *current_bc_time,
+                );
+                super::identity::write(
+                    &idty,
+                    *wotb_id,
+                    &databases.identities_db,
+                    &databases.ms_db,
+                    *ms_created_block_id,
+                )?;
+                trace!("WotsDBsWriteQuery::CreateIdentity...finish.");
             }
-            DBWriteRequest::RenewalIdentity(ref pubkey, ref blockstamp, ref median_time) => {
-                trace!("DBWriteRequest::RenewalIdentity...");
-                let mut idty = DALIdentity::get_identity(currency, db, pubkey)
-                    .expect("Fatal error : impossible ton renewal an identidy that don't exist !");
-                idty.renewal_identity(db, pubkey, blockstamp, *median_time, false);
-                trace!("DBWriteRequest::RenewalIdentity...");
+            WotsDBsWriteQuery::RenewalIdentity(
+                ref pubkey,
+                ref idty_wot_id,
+                ref current_bc_time,
+                ms_created_block_id,
+            ) => {
+                trace!("WotsDBsWriteQuery::RenewalIdentity...");
+                let mut idty = DALIdentity::get_identity(&databases.identities_db, pubkey)?
+                    .expect("Fatal error : impossible to renewal an identidy that don't exist !");
+                idty.renewal_identity(
+                    currency_params,
+                    &databases.identities_db,
+                    &databases.ms_db,
+                    pubkey,
+                    *idty_wot_id,
+                    *current_bc_time,
+                    ms_created_block_id,
+                    false,
+                )?;
+                trace!("DBWrWotsDBsWriteQueryiteRequest::RenewalIdentity...");
             }
-            DBWriteRequest::ExcludeIdentity(ref wotb_id, ref blockstamp, ref _median_time) => {
-                DALIdentity::exclude_identity(db, *wotb_id, *blockstamp, false);
+            WotsDBsWriteQuery::ExcludeIdentity(ref pubkey, ref blockstamp) => {
+                DALIdentity::exclude_identity(&databases.identities_db, pubkey, blockstamp, false)?;
             }
-            DBWriteRequest::RevokeIdentity(ref wotb_id, ref blockstamp, ref _median_time) => {
-                DALIdentity::revoke_identity(db, *wotb_id, blockstamp, false);
+            WotsDBsWriteQuery::RevokeIdentity(ref pubkey, ref blockstamp) => {
+                DALIdentity::revoke_identity(
+                    &databases.identities_db,
+                    pubkey,
+                    blockstamp,
+                    true,
+                    false,
+                )?;
             }
-            DBWriteRequest::CreateCert(ref blockstamp, ref median_time, ref compact_cert) => {
-                trace!("DBWriteRequest::CreateCert...");
+            WotsDBsWriteQuery::CreateCert(
+                ref source_pubkey,
+                ref source,
+                ref target,
+                ref created_block_id,
+                ref median_time,
+            ) => {
+                trace!("WotsDBsWriteQuery::CreateCert...");
                 super::certification::write_certification(
-                    compact_cert,
-                    db,
-                    *blockstamp,
+                    currency_params,
+                    &databases.identities_db,
+                    &databases.certs_db,
+                    *source_pubkey,
+                    *source,
+                    *target,
+                    *created_block_id,
                     *median_time,
-                );
-                trace!("DBWriteRequest::CreateCert...finish");
+                )?;
+                trace!("WotsDBsWriteQuery::CreateCert...finish");
+            }
+            WotsDBsWriteQuery::ExpireCert(ref _source, ref _target, ref _created_block_id) => {
+                /*super::certification::expire_cert(
+                    &databases.certs_db,
+                    *source,
+                    *target,
+                    *created_block_id,
+                )?;*/
+            }
+        }
+        Ok(())
+    }
+}
+
+#[derive(Debug, Clone)]
+/// Contains a pending write request for the currency databases
+pub enum CurrencyDBsWriteQuery {
+    /// Write transaction
+    WriteTx(Box<TransactionDocument>),
+    /// Create dividend
+    CreateDU(SourceAmount, BlockId, Vec<PubKey>),
+}
+
+impl CurrencyDBsWriteQuery {
+    pub fn apply<B: Backend + Debug>(&self, databases: &CurrencyV10DBs<B>) -> Result<(), DALError> {
+        match *self {
+            CurrencyDBsWriteQuery::WriteTx(ref tx_doc) => {
+                super::transaction::apply_and_write_tx::<B>(
+                    &databases.tx_db,
+                    &databases.utxos_db,
+                    &databases.du_db,
+                    &databases.balances_db,
+                    tx_doc.deref(),
+                )?;
             }
-            DBWriteRequest::WriteBlock(ref dal_block) => {
-                let dal_block = dal_block.deref();
-                trace!("DBWriteRequest::WriteBlock...");
-                super::block::write(db, &dal_block.block, dal_block.fork, dal_block.isolate);
-                trace!("DBWriteRequest::WriteBlock...finish");
+            CurrencyDBsWriteQuery::CreateDU(ref du_amount, ref block_id, ref members) => {
+                super::dividend::create_du::<B>(
+                    &databases.du_db,
+                    &databases.balances_db,
+                    du_amount,
+                    block_id,
+                    members,
+                )?;
             }
-            _ => {}
         }
+        Ok(())
     }
 }
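+
+// Illustrative dispatch sketch (assumption, not defined by this patch): a caller owning the
+// database handles could apply a pending `DBsWriteRequest` as follows. The bindings
+// `request`, `blocks_dbs`, `wot_dbs`, `currency_dbs` and `currency_params` are hypothetical.
+//
+// match request {
+//     DBsWriteRequest::BlocksDB(query) => query.apply(&blocks_dbs, false)?,
+//     DBsWriteRequest::WotDBs(query) => query.apply(&wot_dbs, &currency_params)?,
+//     DBsWriteRequest::CurrencyDBs(query) => query.apply(&currency_dbs)?,
+// }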
diff --git a/dal/writers/transaction.rs b/dal/writers/transaction.rs
new file mode 100644
index 0000000000000000000000000000000000000000..662dc1712148fb84e5480bf1a184612758c49c88
--- /dev/null
+++ b/dal/writers/transaction.rs
@@ -0,0 +1,337 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use duniter_documents::blockchain::v10::documents::transaction::*;
+use rustbreak::backend::Backend;
+use sources::{SourceAmount, SourceIndexV10, UTXOIndexV10, UTXOV10};
+use std::fmt::Debug;
+use *;
+
+#[derive(Debug, Copy, Clone)]
+pub enum TxError {
+    UnkonwError(),
+    DALError(DALError),
+}
+
+impl From<DALError> for TxError {
+    fn from(err: DALError) -> TxError {
+        TxError::DALError(err)
+    }
+}
+
+#[derive(Debug, Clone, Deserialize, Serialize)]
+pub struct DALTxV10 {
+    tx_doc: TransactionDocument,
+    sources_destroyed: HashSet<UTXOIndexV10>,
+}
+
+pub fn apply_and_write_tx<B: Backend + Debug>(
+    tx_db: &BinDB<TxV10Datas, B>,
+    utxos_db: &BinDB<UTXOsV10Datas, B>,
+    dus_db: &BinDB<DUsV10Datas, B>,
+    balances_db: &BinDB<BalancesV10Datas, B>,
+    tx_doc: &TransactionDocument,
+) -> Result<(), DALError> {
+    let mut tx_doc = tx_doc.clone();
+    let tx_hash = tx_doc.get_hash();
+    let mut sources_destroyed = HashSet::new();
+    // Index consumed sources
+    let consumed_sources: HashMap<SourceIndexV10, SourceAmount> = tx_doc
+        .get_inputs()
+        .iter()
+        .map(|input| match *input {
+            TransactionInput::D(tx_amout, tx_amout_base, pubkey, block_id) => (
+                SourceIndexV10::DU(pubkey, block_id),
+                SourceAmount(tx_amout, tx_amout_base),
+            ),
+            TransactionInput::T(tx_amout, tx_amout_base, hash, tx_index) => (
+                SourceIndexV10::UTXO(UTXOIndexV10(hash, tx_index)),
+                SourceAmount(tx_amout, tx_amout_base),
+            ),
+        })
+        .collect();
+    // Find the addresses of the consumed sources
+    let consumed_adress: HashMap<
+        TransactionOutputConditionGroup,
+        (SourceAmount, HashSet<UTXOIndexV10>),
+    > = utxos_db.read(|db| {
+        let mut consumed_adress: HashMap<
+            TransactionOutputConditionGroup,
+            (SourceAmount, HashSet<UTXOIndexV10>),
+        > = HashMap::new();
+        for (source_index, source_amount) in &consumed_sources {
+            if let SourceIndexV10::UTXO(utxo_index) = source_index {
+                // Get utxo
+                let utxo = db
+                    .get(&utxo_index)
+                    .expect("ApplyBLockError : unknow UTXO in inputs !");
+                // Get utxo conditions(=address)
+                let conditions = &utxo.conditions;
+                // Calculate the new balance data for the "conditions" address
+                let (mut balance, mut utxos_index) =
+                    consumed_adress.get(conditions).cloned().unwrap_or_default();
+                balance = balance + *source_amount;
+                utxos_index.insert(*utxo_index);
+                // Record the new balance data for the "conditions" address in the local map
+                consumed_adress.insert(conditions.clone(), (balance, utxos_index));
+            } else if let SourceIndexV10::DU(pubkey, _block_id) = source_index {
+                let address = TransactionOutputConditionGroup::Single(
+                    TransactionOutputCondition::Sig(*pubkey),
+                );
+                let (mut balance, utxos_index) =
+                    consumed_adress.get(&address).cloned().unwrap_or_default();
+                balance = balance + *source_amount;
+                consumed_adress.insert(address, (balance, utxos_index));
+            }
+        }
+        consumed_adress
+    })?;
+    // Recalculate the balances of the consumed addresses
+    let new_balances_consumed_adress = balances_db.read(|db| {
+        let mut new_balances_consumed_adress = Vec::new();
+        for (conditions, (amount_consumed, adress_consumed_sources)) in consumed_adress {
+            if let Some((balance, sources)) = db.get(&conditions) {
+                let mut new_balance = *balance - amount_consumed;
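+                // If the remaining amount falls below TxAmount(100) (presumably the minimal
+                // balance rule), all remaining sources of this address are flagged as
+                // destroyed and its amount is reset to zero (the base is kept).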
+                if new_balance.0 < TxAmount(100) {
+                    sources_destroyed = sources.union(&sources_destroyed).cloned().collect();
+                    new_balance = SourceAmount(TxAmount(0), new_balance.1);
+                }
+                let mut new_sources_index = sources.clone();
+                for source in adress_consumed_sources {
+                    new_sources_index.remove(&source);
+                }
+                new_balances_consumed_adress
+                    .push((conditions.clone(), (new_balance, new_sources_index)));
+            } else {
+                panic!("Apply Tx : try to consume a source, but the owner address is not found in balances db : {:?}", conditions)
+            }
+        }
+        new_balances_consumed_adress
+    })?;
+    // Write the new balances of the consumed addresses
+    balances_db.write(|db| {
+        for (conditions, (balance, sources_index)) in new_balances_consumed_adress {
+            db.insert(conditions, (balance, sources_index));
+        }
+    })?;
+    // Remove consumed sources
+    for source_index in consumed_sources.keys() {
+        if let SourceIndexV10::UTXO(utxo_index) = source_index {
+            utxos_db.write(|db| {
+                db.remove(utxo_index);
+            })?;
+        } else if let SourceIndexV10::DU(pubkey, block_id) = source_index {
+            let mut pubkey_dus: HashSet<BlockId> =
+                dus_db.read(|db| db.get(&pubkey).cloned().unwrap_or_default())?;
+            pubkey_dus.remove(block_id);
+            dus_db.write(|db| {
+                db.insert(*pubkey, pubkey_dus);
+            })?;
+        }
+    }
+    // Index created sources
+    /*let mut created_utxos: Vec<UTXOV10> = Vec::new();
+    let mut output_index = 0;
+    for output in tx_doc.get_outputs() {
+        created_utxos.push(UTXOV10(
+            UTXOIndexV10(tx_hash, TxIndex(output_index)),
+            output.clone(),
+        ));
+        output_index += 1;
+    }*/
+    let created_utxos: Vec<UTXOV10> = tx_doc
+        .get_outputs()
+        .iter()
+        .enumerate()
+        .map(|(tx_index, output)| UTXOV10(UTXOIndexV10(tx_hash, TxIndex(tx_index)), output.clone()))
+        .collect();
+    // Recalculate the balances of the supplied addresses
+    let new_balances_supplied_adress = balances_db.read(|db| {
+        let mut new_balances_supplied_adress: HashMap<
+            TransactionOutputConditionGroup,
+            (SourceAmount, HashSet<UTXOIndexV10>),
+        > = HashMap::new();
+        for source in &created_utxos {
+            let source_amount = source.get_amount();
+            let conditions = source.get_conditions();
+            let (balance, new_sources_index) = if let Some((balance, sources_index)) =
+                new_balances_supplied_adress.get(&conditions)
+            {
+                let mut new_sources_index = sources_index.clone();
+                new_sources_index.insert(source.0);
+                (*balance, new_sources_index)
+            } else if let Some((balance, sources_index)) = db.get(&conditions) {
+                let mut new_sources_index = sources_index.clone();
+                new_sources_index.insert(source.0);
+                (*balance, new_sources_index)
+            } else {
+                let mut new_sources_index = HashSet::new();
+                new_sources_index.insert(source.0);
+                (SourceAmount::default(), new_sources_index)
+            };
+            new_balances_supplied_adress
+                .insert(conditions, (balance + source_amount, new_sources_index));
+        }
+        new_balances_supplied_adress
+    })?;
+    // Insert created UTXOs
+    utxos_db.write(|db| {
+        for utxo_v10 in created_utxos {
+            db.insert(utxo_v10.0, utxo_v10.1);
+        }
+    })?;
+    // Write the new balances of the supplied addresses
+    balances_db.write(|db| {
+        for (conditions, (balance, sources_index)) in new_balances_supplied_adress {
+            db.insert(conditions, (balance, sources_index));
+        }
+    })?;
+    // Write tx
+    tx_doc.reduce();
+    tx_db.write(|db| {
+        db.insert(
+            tx_hash,
+            DALTxV10 {
+                tx_doc,
+                sources_destroyed,
+            },
+        );
+    })?;
+    Ok(())
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use duniter_documents::blockchain::{Document, DocumentBuilder, VerificationResult};
+
+    fn build_first_tx_of_g1() -> TransactionDocument {
+        let pubkey = PubKey::Ed25519(
+            ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+                .unwrap(),
+        );
+        let sig = Sig::Ed25519(ed25519::Signature::from_base64(
+            "fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw==",
+        ).unwrap());
+        let block = Blockstamp::from_string(
+            "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+        ).unwrap();
+        let builder = TransactionDocumentBuilder {
+            currency: "g1",
+            blockstamp: &block,
+            locktime: &0,
+            issuers: &vec![pubkey],
+            inputs: &vec![
+                TransactionInput::parse_from_str(
+                    "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1",
+                ).expect("fail to parse input !"),
+            ],
+            unlocks: &vec![
+                TransactionInputUnlocks::parse_from_str("0:SIG(0)")
+                    .expect("fail to parse unlock !"),
+            ],
+            outputs: &vec![
+                TransactionOutput::parse_from_str(
+                    "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+                ).expect("fail to parse output !"),
+                TransactionOutput::parse_from_str(
+                    "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)",
+                ).expect("fail to parse output !"),
+            ],
+            comment: "TEST",
+        };
+        builder.build_with_signature(vec![sig])
+    }
+
+    #[test]
+    fn apply_one_tx() {
+        // Get document of first g1 transaction
+        let tx_doc = build_first_tx_of_g1();
+        assert_eq!(tx_doc.verify_signatures(), VerificationResult::Valid());
+        // Get pubkey of receiver
+        let tortue_pubkey = PubKey::Ed25519(
+            ed25519::PublicKey::from_base58("Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm")
+                .unwrap(),
+        );
+        // Open currencys_db in memory mode
+        let currency_dbs = CurrencyV10DBs::open_memory_mode();
+        // Create first g1 DU for cgeek and tortue
+        writers::dividend::create_du(
+            &currency_dbs.du_db,
+            &currency_dbs.balances_db,
+            &SourceAmount(TxAmount(1000), TxBase(0)),
+            &BlockId(1),
+            &vec![tx_doc.issuers()[0], tortue_pubkey],
+        ).expect("Fail to create first g1 DU !");
+        // Check members balance
+        let cgeek_new_balance = currency_dbs
+            .balances_db
+            .read(|db| {
+                db.get(&TransactionOutputConditionGroup::Single(
+                    TransactionOutputCondition::Sig(tx_doc.issuers()[0]),
+                )).cloned()
+            })
+            .expect("Fail to read cgeek new balance")
+            .expect("Error : cgeek is not referenced in balances_db !");
+        assert_eq!(cgeek_new_balance.0, SourceAmount(TxAmount(1000), TxBase(0)));
+        let tortue_new_balance = currency_dbs
+            .balances_db
+            .read(|db| {
+                db.get(&TransactionOutputConditionGroup::Single(
+                    TransactionOutputCondition::Sig(tortue_pubkey),
+                )).cloned()
+            })
+            .expect("Fail to read receiver new balance")
+            .expect("Error : receiver is not referenced in balances_db !");
+        assert_eq!(
+            tortue_new_balance.0,
+            SourceAmount(TxAmount(1000), TxBase(0))
+        );
+        // Apply first g1 transaction
+        apply_and_write_tx(
+            &currency_dbs.tx_db,
+            &currency_dbs.utxos_db,
+            &currency_dbs.du_db,
+            &currency_dbs.balances_db,
+            &tx_doc,
+        ).expect("Fail to apply first g1 tx");
+        // Check issuer new balance
+        let cgeek_new_balance = currency_dbs
+            .balances_db
+            .read(|db| {
+                db.get(&TransactionOutputConditionGroup::Single(
+                    TransactionOutputCondition::Sig(tx_doc.issuers()[0]),
+                )).cloned()
+            })
+            .expect("Fail to read cgeek new balance")
+            .expect("Error : cgeek is not referenced in balances_db !");
+        assert_eq!(cgeek_new_balance.0, SourceAmount(TxAmount(999), TxBase(0)));
+
+        // Check receiver new balance
+        let receiver_new_balance = currency_dbs
+            .balances_db
+            .read(|db| {
+                db.get(&TransactionOutputConditionGroup::Single(
+                    TransactionOutputCondition::Sig(tortue_pubkey),
+                )).cloned()
+            })
+            .expect("Fail to read receiver new balance")
+            .expect("Error : receiver is not referenced in balances_db !");
+        assert_eq!(
+            receiver_new_balance.0,
+            SourceAmount(TxAmount(1001), TxBase(0))
+        );
+    }
+}
diff --git a/documents/Cargo.toml b/documents/Cargo.toml
index df9ac267ff3a03ee8f766480de436664fab732cb..539c2476d0091e09bd8fe2770ba95c209f4d49c5 100644
--- a/documents/Cargo.toml
+++ b/documents/Cargo.toml
@@ -20,6 +20,7 @@ linked-hash-map = "0.5.1"
 regex = "1.0.0"
 rust-crypto = "0.2.36"
 serde = "1.0.57"
+serde_derive = "1.0.57"
 
 [features]
 # Treat warnings as a build error.
diff --git a/documents/blockchain/v10/documents/block.rs b/documents/blockchain/v10/documents/block.rs
index 8201f50d6ead3be019eb6717ccfc51fe26117676..5cc23ba33cddede3460126e7457a694bee571d9d 100644
--- a/documents/blockchain/v10/documents/block.rs
+++ b/documents/blockchain/v10/documents/block.rs
@@ -26,31 +26,64 @@ use blockchain::v10::documents::revocation::RevocationDocument;
 use blockchain::v10::documents::transaction::TransactionDocument;
 use blockchain::v10::documents::*;
 use blockchain::{BlockchainProtocol, Document, IntoSpecializedDocument};
+use std::fmt::{Display, Error, Formatter};
+use std::ops::Deref;
 use {BlockHash, BlockId, Blockstamp, Hash};
 
+/// Currency name
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
+pub struct CurrencyName(pub String);
+
+impl Display for CurrencyName {
+    fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
+        write!(f, "{}", self.0)
+    }
+}
+
+#[derive(Debug, Clone)]
+/// Store error in block parameters parsing
+pub enum ParseParamsError {
+    /// ParseIntError
+    ParseIntError(::std::num::ParseIntError),
+    /// ParseFloatError
+    ParseFloatError(::std::num::ParseFloatError),
+}
+
+impl From<::std::num::ParseIntError> for ParseParamsError {
+    fn from(err: ::std::num::ParseIntError) -> ParseParamsError {
+        ParseParamsError::ParseIntError(err)
+    }
+}
+
+impl From<::std::num::ParseFloatError> for ParseParamsError {
+    fn from(err: ::std::num::ParseFloatError) -> ParseParamsError {
+        ParseParamsError::ParseFloatError(err)
+    }
+}
+
 /// Currency parameters
-#[derive(Debug, Copy, Clone, PartialEq)]
-pub struct BlockParameters {
+#[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)]
+pub struct BlockV10Parameters {
     /// UD target growth rate (see Relative Theorie of Money)
     pub c: f64,
     /// Duration between the creation of two DU (in seconds)
-    pub dt: i64,
+    pub dt: u64,
     /// Amount of the initial UD
-    pub ud0: i64,
+    pub ud0: usize,
     /// Minimum duration between the writing of 2 certifications from the same issuer (in seconds)
     pub sig_period: u64,
     /// Maximum number of active certifications at the same time (for the same issuer)
-    pub sig_stock: i64,
+    pub sig_stock: usize,
     /// Maximum retention period of a pending certification
-    pub sig_window: i64,
+    pub sig_window: u64,
     /// Time to expiry of written certification
-    pub sig_validity: i64,
+    pub sig_validity: u64,
     /// Minimum number of certifications required to become a member
-    pub sig_qty: i64,
+    pub sig_qty: usize,
     /// Maximum retention period of a pending identity
-    pub idty_window: i64,
+    pub idty_window: u64,
     /// Maximum retention period of a pending membership
-    pub ms_window: i64,
+    pub ms_window: u64,
     /// Percentage of referring members who must be within step_max steps of each member
     pub x_percent: f64,
     /// Time to expiry of written membership
@@ -58,27 +91,114 @@ pub struct BlockParameters {
     /// For a member to respect the distance rule,
     /// there must exist for more than x_percent % of the referring members
     /// a path of less than step_max steps from the referring member to the evaluated member.
-    pub step_max: u32,
+    pub step_max: usize,
     /// Number of blocks used for calculating median time.
-    pub median_time_blocks: i64,
+    pub median_time_blocks: usize,
     /// The average time for writing 1 block (wished time)
-    pub avg_gen_time: i64,
+    pub avg_gen_time: u64,
     /// The number of blocks required to evaluate again PoWMin value
-    pub dt_diff_eval: i64,
+    pub dt_diff_eval: usize,
     /// The percent of previous issuers to reach for personalized difficulty
     pub percent_rot: f64,
     /// Time of first UD.
-    pub ud_time0: i64,
+    pub ud_time0: u64,
     /// Time of first reevaluation of the UD.
-    pub ud_reeval_time0: i64,
+    pub ud_reeval_time0: u64,
     /// Time period between two re-evaluation of the UD.
-    pub dt_reeval: i64,
+    pub dt_reeval: u64,
+}
+
+impl Default for BlockV10Parameters {
+    fn default() -> BlockV10Parameters {
+        BlockV10Parameters {
+            c: 0.0488,
+            dt: 86_400,
+            ud0: 1_000,
+            sig_period: 432_000,
+            sig_stock: 100,
+            sig_window: 5_259_600,
+            sig_validity: 63_115_200,
+            sig_qty: 5,
+            idty_window: 5_259_600,
+            ms_window: 5_259_600,
+            x_percent: 0.8,
+            ms_validity: 31_557_600,
+            step_max: 5,
+            median_time_blocks: 24,
+            avg_gen_time: 300,
+            dt_diff_eval: 12,
+            percent_rot: 0.67,
+            ud_time0: 1_488_970_800,
+            ud_reeval_time0: 1_490_094_000,
+            dt_reeval: 15_778_800,
+        }
+    }
+}
+
+impl ::std::str::FromStr for BlockV10Parameters {
+    type Err = ParseParamsError;
+
+    fn from_str(source: &str) -> Result<Self, Self::Err> {
+        let params: Vec<&str> = source.split(':').collect();
+        Ok(BlockV10Parameters {
+            c: params[0].parse()?,
+            dt: params[1].parse()?,
+            ud0: params[2].parse()?,
+            sig_period: params[3].parse()?,
+            sig_stock: params[4].parse()?,
+            sig_window: params[5].parse()?,
+            sig_validity: params[6].parse()?,
+            sig_qty: params[7].parse()?,
+            idty_window: params[8].parse()?,
+            ms_window: params[9].parse()?,
+            x_percent: params[10].parse()?,
+            ms_validity: params[11].parse()?,
+            step_max: params[12].parse()?,
+            median_time_blocks: params[13].parse()?,
+            avg_gen_time: params[14].parse()?,
+            dt_diff_eval: params[15].parse()?,
+            percent_rot: params[16].parse()?,
+            ud_time0: params[17].parse()?,
+            ud_reeval_time0: params[18].parse()?,
+            dt_reeval: params[19].parse()?,
+        })
+    }
+}
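+
+// Illustrative sketch (assumption): `from_str` expects the raw colon-separated `Parameters`
+// field of a genesis block, with all 20 values present and in this exact order. With the
+// default values above, the round trip gives back `BlockV10Parameters::default()`:
+//
+// let params: BlockV10Parameters =
+//     "0.0488:86400:1000:432000:100:5259600:63115200:5:5259600:5259600:0.8:31557600:5:24:300:12:0.67:1488970800:1490094000:15778800"
+//         .parse()
+//         .expect("invalid genesis parameters !");
+// assert_eq!(params, BlockV10Parameters::default());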
+
+/// Store a transaction document or just its hash.
+#[derive(Debug, Clone, PartialEq, Deserialize, Serialize)]
+pub enum TxDocOrTxHash {
+    /// Transaction document
+    TxDoc(Box<TransactionDocument>),
+    /// transaction hash
+    TxHash(Hash),
+}
+
+impl TxDocOrTxHash {
+    /// Lightens the TxDocOrTxHash (for example to store it while minimizing the space required)
+    /// Lightening consists of replacing the full document with its hash.
+    pub fn reduce(&self) -> TxDocOrTxHash {
+        if let TxDocOrTxHash::TxDoc(ref tx_doc) = self {
+            let mut tx_doc = tx_doc.deref().clone();
+            TxDocOrTxHash::TxHash(tx_doc.get_hash())
+        } else {
+            self.clone()
+        }
+    }
+    /// Get TxDoc variant
+    pub fn unwrap_doc(&self) -> TransactionDocument {
+        if let TxDocOrTxHash::TxDoc(ref tx_doc) = self {
+            tx_doc.deref().clone()
+        } else {
+            panic!("Try to unwrap_doc() in a TxHash() variant of TxDocOrTxHash !")
+        }
+    }
 }
 
 /// Wrap a Block document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Debug, Clone)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct BlockDocument {
     /// Nonce
     pub nonce: u64,
@@ -103,7 +223,7 @@ pub struct BlockDocument {
     /// Current frame variation buffer
     pub issuers_frame_var: isize,
     /// Currency.
-    pub currency: String,
+    pub currency: CurrencyName,
     /// Document issuer (there should be only one).
     pub issuers: Vec<PubKey>,
     /// Document signature (there should be only one).
@@ -112,7 +232,7 @@ pub struct BlockDocument {
     /// The hash is None, when the block is generated but the proof of work has not yet started
     pub hash: Option<BlockHash>,
     /// Currency parameters (only in genesis block)
-    pub parameters: Option<BlockParameters>,
+    pub parameters: Option<BlockV10Parameters>,
     /// Hash of the previous block
     pub previous_hash: Hash,
     /// Issuer of the previous block
@@ -136,12 +256,23 @@ pub struct BlockDocument {
     /// Certifications
     pub certifications: Vec<TextDocumentFormat<CertificationDocument>>,
     /// Transactions
-    pub transactions: Vec<TransactionDocument>,
+    pub transactions: Vec<TxDocOrTxHash>,
     /// Part to sign
     pub inner_hash_and_nonce_str: String,
 }
 
 impl BlockDocument {
+    /// Return previous blockstamp
+    pub fn previous_blockstamp(&self) -> Blockstamp {
+        if self.number.0 > 0 {
+            Blockstamp {
+                id: BlockId(self.number.0 - 1),
+                hash: BlockHash(self.previous_hash),
+            }
+        } else {
+            Blockstamp::default()
+        }
+    }
     /// Compute inner hash
     pub fn compute_inner_hash(&mut self) {
         let mut sha256 = Sha256::new();
@@ -154,7 +285,9 @@ impl BlockDocument {
         self.nonce = new_nonce;
         self.inner_hash_and_nonce_str = format!(
             "InnerHash: {}\nNonce: {}\n",
-            self.inner_hash.unwrap().to_hex(),
+            self.inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduce block !")
+                .to_hex(),
             self.nonce
         );
     }
@@ -167,12 +300,28 @@ impl BlockDocument {
         let mut sha256 = Sha256::new();
         sha256.input_str(&format!(
             "InnerHash: {}\nNonce: {}\n{}\n",
-            self.inner_hash.unwrap().to_hex(),
+            self.inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduce block !")
+                .to_hex(),
             self.nonce,
             self.signatures[0]
         ));
         self.hash = Some(BlockHash(Hash::from_hex(&sha256.result_str()).unwrap()));
     }
+    /// Lightens the block (for example to store it while minimizing the space required)
+    pub fn reduce(&mut self) {
+        //self.hash = None;
+        self.inner_hash = None;
+        self.inner_hash_and_nonce_str = String::with_capacity(0);
+        self.identities
+            .iter_mut()
+            .map(|i| i.reduce())
+            .collect::<()>();
+        self.joiners.iter_mut().map(|i| i.reduce()).collect::<()>();
+        self.actives.iter_mut().map(|i| i.reduce()).collect::<()>();
+        self.leavers.iter_mut().map(|i| i.reduce()).collect::<()>();
+        self.transactions = self.transactions.iter_mut().map(|t| t.reduce()).collect();
+    }
     /// Generate compact inner text (for compute inner_hash)
     pub fn generate_compact_inner_text(&self) -> String {
         let mut identities_str = String::from("");
@@ -217,8 +366,10 @@ impl BlockDocument {
         }
         let mut transactions_str = String::from("");
         for transaction in self.transactions.clone() {
-            transactions_str.push_str("\n");
-            transactions_str.push_str(&transaction.generate_compact_text());
+            if let TxDocOrTxHash::TxDoc(transaction) = transaction {
+                transactions_str.push_str("\n");
+                transactions_str.push_str(&transaction.deref().generate_compact_text());
+            }
         }
         let mut dividend_str = String::from("");
         if let Some(dividend) = self.dividend {
@@ -288,13 +439,15 @@ impl Document for BlockDocument {
     }
 
     fn currency(&self) -> &str {
-        &self.currency
+        &self.currency.0
     }
 
     fn blockstamp(&self) -> Blockstamp {
         Blockstamp {
             id: self.number,
-            hash: self.hash.unwrap(),
+            hash: self
+                .hash
+                .expect("Fatal error : try to get blockstamp of an uncomplete or reduce block !"),
         }
     }
 
@@ -317,7 +470,9 @@ impl CompactTextDocument for BlockDocument {
         format!(
             "{}InnerHash: {}\nNonce: ",
             compact_inner_text,
-            self.inner_hash.unwrap().to_hex()
+            self.inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduce block !")
+                .to_hex()
         )
     }
 }
@@ -361,7 +516,7 @@ mod tests {
             issuers_count: 41,
             issuers_frame: 201,
             issuers_frame_var: 5,
-            currency: String::from("g1"),
+            currency: CurrencyName(String::from("g1")),
             issuers: vec![PubKey::Ed25519(ed25519::PublicKey::from_base58("2sZF6j2PkxBDNAqUde7Dgo5x3crkerZpQ4rBqqJGn8QT").unwrap())],
             signatures: vec![Sig::Ed25519(ed25519::Signature::from_base64("FsRxB+NOiL+8zTr2d3B2j2KBItDuCa0KjFMF6hXmdQzfqXAs9g3m7DlGgYLcqzqe6JXjx/Lyzqze1HBR4cS0Aw==").unwrap())],
             hash: None,
@@ -384,7 +539,10 @@ mod tests {
         block.compute_inner_hash();
         println!("{}", block.generate_compact_text());
         assert_eq!(
-            block.inner_hash.unwrap().to_hex(),
+            block
+                .inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduce block !")
+                .to_hex(),
             "95948AC4D45E46DA07CE0713EDE1CE0295C227EE4CA5557F73F56B7DD46FE89C"
         );
         // test generate_compact_text()
@@ -422,7 +580,11 @@ Nonce: "
         // Test hash computation
         block.compute_hash();
         assert_eq!(
-            block.hash.unwrap().0.to_hex(),
+            block
+                .hash
+                .expect("Try to get hash of an uncompleted or reduce block !")
+                .0
+                .to_hex(),
             "000002D3296A2D257D01F6FEE8AEC5C3E5779D04EA43F08901F41998FA97D9A1"
         );
     }
@@ -496,7 +658,7 @@ a9PHPuSfw7jW8FRQHXFsGi/bnLjbtDnTYvEVgUC9u0WlR7GVofa+Xb+l5iy6NwuEXiwvueAkf08wPVY8
             issuers_count: 42,
             issuers_frame: 211,
             issuers_frame_var: 0,
-            currency: String::from("g1"),
+            currency: CurrencyName(String::from("g1")),
             issuers: vec![PubKey::Ed25519(ed25519::PublicKey::from_base58("DA4PYtXdvQqk1nCaprXH52iMsK5Ahxs1nRWbWKLhpVkQ").unwrap())],
             signatures: vec![Sig::Ed25519(ed25519::Signature::from_base64("92id58VmkhgVNee4LDqBGSm8u/ooHzAD67JM6fhAE/CV8LCz7XrMF1DvRl+eRpmlaVkp6I+Iy8gmZ1WUM5C8BA==").unwrap())],
             hash: None,
@@ -512,14 +674,17 @@ a9PHPuSfw7jW8FRQHXFsGi/bnLjbtDnTYvEVgUC9u0WlR7GVofa+Xb+l5iy6NwuEXiwvueAkf08wPVY8
             revoked: Vec::new(),
             excluded: Vec::new(),
             certifications: vec![TextDocumentFormat::Complete(cert1)],
-            transactions: vec![tx1, tx2],
+            transactions: vec![TxDocOrTxHash::TxDoc(Box::new(tx1)), TxDocOrTxHash::TxDoc(Box::new(tx2))],
             inner_hash_and_nonce_str: String::new(),
         };
         // test inner_hash computation
         block.compute_inner_hash();
         println!("{}", block.generate_compact_text());
         assert_eq!(
-            block.inner_hash.unwrap().to_hex(),
+            block
+                .inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduce block !")
+                .to_hex(),
             "C8AB69E33ECE2612EADC7AB30D069B1F1A3D8C95EBBFD50DE583AC8E3666CCA1"
         );
         // test generate_compact_text()
@@ -574,7 +739,11 @@ Nonce: "
         // Test hash computation
         block.compute_hash();
         assert_eq!(
-            block.hash.unwrap().0.to_hex(),
+            block
+                .hash
+                .expect("Try to get hash of an uncompleted or reduce block !")
+                .0
+                .to_hex(),
             "000004F8B84A3590243BA562E5F2BA379F55A0B387C5D6FAC1022DFF7FFE6014"
         );
     }
diff --git a/documents/blockchain/v10/documents/certification.rs b/documents/blockchain/v10/documents/certification.rs
index 67ff2e877741aa672cde5fffb3df11b39779160e..ec94e36098cde2ab75eb375350338459908f4a39 100644
--- a/documents/blockchain/v10/documents/certification.rs
+++ b/documents/blockchain/v10/documents/certification.rs
@@ -15,9 +15,6 @@
 
 //! Wrappers around Certification documents.
 
-extern crate serde;
-
-use self::serde::ser::{Serialize, Serializer};
 use duniter_crypto::keys::*;
 use regex::Regex;
 
@@ -32,7 +29,7 @@ lazy_static! {
     ).unwrap();
 }
 
-#[derive(Debug, Copy, Clone)]
+#[derive(Copy, Clone, Debug, Deserialize, Serialize)]
 /// Wrap an Compact Revocation document (in block content)
 pub struct CompactCertificationDocument {
     /// Issuer
@@ -60,7 +57,7 @@ impl CompactTextDocument for CompactCertificationDocument {
 /// Wrap an Certification document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct CertificationDocument {
     /// Document as text.
     ///
@@ -148,15 +145,6 @@ impl TextDocument for CertificationDocument {
     }
 }
 
-impl Serialize for TextDocumentFormat<CertificationDocument> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.serialize_str(&self.as_compact_text())
-    }
-}
-
 impl IntoSpecializedDocument<BlockchainProtocol> for CertificationDocument {
     fn into_specialized(self) -> BlockchainProtocol {
         BlockchainProtocol::V10(Box::new(V10Document::Certification(Box::new(self))))
diff --git a/documents/blockchain/v10/documents/identity.rs b/documents/blockchain/v10/documents/identity.rs
index 310b3adc113adb3e0e3330dc2a4b4be4f51b1643..c4e8c6a7c6fde5f64ca5b6c62192836902cf66d4 100644
--- a/documents/blockchain/v10/documents/identity.rs
+++ b/documents/blockchain/v10/documents/identity.rs
@@ -15,14 +15,9 @@
 
 //! Wrappers around Identity documents.
 
-extern crate serde;
-
-use self::serde::ser::{Serialize, Serializer};
-use duniter_crypto::keys::*;
-use regex::Regex;
-
 use blockchain::v10::documents::*;
 use blockchain::{BlockchainProtocol, Document, DocumentBuilder, IntoSpecializedDocument};
+use regex::Regex;
 use Blockstamp;
 
 lazy_static! {
@@ -34,13 +29,13 @@ lazy_static! {
 /// Wrap an Identity document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Debug, Clone, PartialEq, Hash)]
+#[derive(Clone, Debug, Deserialize, Hash, PartialEq, Serialize)]
 pub struct IdentityDocument {
     /// Document as text.
     ///
     /// Is used to check signatures, and other values
     /// must be extracted from it.
-    text: String,
+    text: Option<String>,
 
     /// Currency.
     currency: String,
@@ -59,6 +54,11 @@ impl IdentityDocument {
     pub fn username(&self) -> &str {
         &self.username
     }
+
+    /// Lightens the identity (for example to store it while minimizing the space required)
+    pub fn reduce(&mut self) {
+        self.text = None;
+    }
 }
 
 impl Document for IdentityDocument {
@@ -106,7 +106,11 @@ impl TextDocument for IdentityDocument {
     type CompactTextDocument_ = IdentityDocument;
 
     fn as_text(&self) -> &str {
-        &self.text
+        if let Some(ref text) = self.text {
+            text
+        } else {
+            panic!("Try to get text of reduce identity !")
+        }
     }
 
     fn to_compact_document(&self) -> Self::CompactTextDocument_ {
@@ -114,15 +118,6 @@ impl TextDocument for IdentityDocument {
     }
 }
 
-impl Serialize for IdentityDocument {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.serialize_str(&self.generate_compact_text())
-    }
-}
-
 impl IntoSpecializedDocument<BlockchainProtocol> for IdentityDocument {
     fn into_specialized(self) -> BlockchainProtocol {
         BlockchainProtocol::V10(Box::new(V10Document::Identity(self)))
@@ -145,7 +140,7 @@ pub struct IdentityDocumentBuilder<'a> {
 impl<'a> IdentityDocumentBuilder<'a> {
     fn build_with_text_and_sigs(self, text: String, signatures: Vec<Sig>) -> IdentityDocument {
         IdentityDocument {
-            text,
+            text: Some(text),
             currency: self.currency.to_string(),
             username: self.username.to_string(),
             blockstamp: *self.blockstamp,
@@ -209,7 +204,7 @@ impl StandardTextDocumentParser for IdentityDocumentParser {
             let blockstamp = Blockstamp::from_string(blockstamp).unwrap();
 
             Ok(V10Document::Identity(IdentityDocument {
-                text: doc.to_owned(),
+                text: Some(doc.to_owned()),
                 currency: currency.to_owned(),
                 username: uid.to_owned(),
                 blockstamp,
diff --git a/documents/blockchain/v10/documents/membership.rs b/documents/blockchain/v10/documents/membership.rs
index 414ebf815cc94dc83c042a442e8ab649d8f2cdb7..2c339b68b2aa907ad7d3c2fc14010e7e7a07372e 100644
--- a/documents/blockchain/v10/documents/membership.rs
+++ b/documents/blockchain/v10/documents/membership.rs
@@ -15,9 +15,6 @@
 
 //! Wrappers around Membership documents.
 
-extern crate serde;
-
-use self::serde::ser::{Serialize, Serializer};
 use duniter_crypto::keys::*;
 use regex::Regex;
 
@@ -36,7 +33,7 @@ lazy_static! {
 }
 
 /// Type of a Membership.
-#[derive(Debug, Clone, Copy, PartialEq, Hash)]
+#[derive(Debug, Deserialize, Clone, Copy, PartialEq, Hash, Serialize)]
 pub enum MembershipType {
     /// The member wishes to opt-in.
     In(),
@@ -47,12 +44,12 @@ pub enum MembershipType {
 /// Wrap an Membership document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Debug, Clone, PartialEq, Hash)]
+#[derive(Debug, Clone, PartialEq, Hash, Deserialize, Serialize)]
 pub struct MembershipDocument {
     /// Document as text.
     ///
     /// Is used to check signatures, and other values mut be extracted from it.
-    text: String,
+    text: Option<String>,
 
     /// Name of the currency.
     currency: String,
@@ -70,6 +67,32 @@ pub struct MembershipDocument {
     signatures: Vec<Sig>,
 }
 
+#[derive(Debug, Copy, Clone, PartialEq, Hash, Deserialize, Serialize)]
+/// Membership event type (blockchain event)
+pub enum MembershipEventType {
+    /// Newcomer
+    Join(),
+    /// Renewal
+    Renewal(),
+    /// Renewal after expire or leave
+    Rejoin(),
+    /// Expire
+    Expire(),
+}
+
+#[derive(Debug, Clone, PartialEq, Hash, Deserialize, Serialize)]
+/// Membership event (blockchain event)
+pub struct MembershipEvent {
+    /// Blockstamp of block event
+    pub blockstamp: Blockstamp,
+    /// Membership document
+    pub doc: MembershipDocument,
+    /// Event type
+    pub event_type: MembershipEventType,
+    /// Chainable time
+    pub chainable_on: u64,
+}
+
 impl MembershipDocument {
     /// Membership message.
     pub fn membership(&self) -> MembershipType {
@@ -80,6 +103,11 @@ impl MembershipDocument {
     pub fn identity_username(&self) -> &str {
         &self.identity_username
     }
+
+    /// Lightens the membership (for example to store it while minimizing the space required)
+    pub fn reduce(&mut self) {
+        self.text = None;
+    }
 }
 
 impl Document for MembershipDocument {
@@ -128,7 +156,11 @@ impl TextDocument for MembershipDocument {
     type CompactTextDocument_ = MembershipDocument;
 
     fn as_text(&self) -> &str {
-        &self.text
+        if let Some(ref text) = self.text {
+            text
+        } else {
+            panic!("Try to get text of reduce membership !")
+        }
     }
 
     fn to_compact_document(&self) -> Self::CompactTextDocument_ {
@@ -136,15 +168,6 @@ impl TextDocument for MembershipDocument {
     }
 }
 
-impl Serialize for MembershipDocument {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.serialize_str(&self.generate_compact_text())
-    }
-}
-
 impl IntoSpecializedDocument<BlockchainProtocol> for MembershipDocument {
     fn into_specialized(self) -> BlockchainProtocol {
         BlockchainProtocol::V10(Box::new(V10Document::Membership(self)))
@@ -171,7 +194,7 @@ pub struct MembershipDocumentBuilder<'a> {
 impl<'a> MembershipDocumentBuilder<'a> {
     fn build_with_text_and_sigs(self, text: String, signatures: Vec<Sig>) -> MembershipDocument {
         MembershipDocument {
-            text,
+            text: Some(text),
             currency: self.currency.to_string(),
             issuers: vec![*self.issuer],
             blockstamp: *self.blockstamp,
@@ -254,7 +277,7 @@ impl StandardTextDocumentParser for MembershipDocumentParser {
             let ity_block = Blockstamp::from_string(ity_block).unwrap();
 
             Ok(V10Document::Membership(MembershipDocument {
-                text: doc.to_owned(),
+                text: Some(doc.to_owned()),
                 issuers: vec![issuer],
                 currency: currency.to_owned(),
                 blockstamp,
diff --git a/documents/blockchain/v10/documents/mod.rs b/documents/blockchain/v10/documents/mod.rs
index 091d60fc2bd200d8c80d39f16bab7ca283170dad..ec32ead1118ac403204b81bbbf3658eacb74c598 100644
--- a/documents/blockchain/v10/documents/mod.rs
+++ b/documents/blockchain/v10/documents/mod.rs
@@ -15,12 +15,9 @@
 
 //! Provide wrappers around Duniter blockchain documents for protocol version 10.
 
-extern crate crypto;
-
-use self::crypto::digest::Digest;
-
 use blockchain::v10::documents::identity::IdentityDocumentParser;
 use blockchain::{Document, DocumentBuilder, DocumentParser};
+use crypto::digest::Digest;
 use duniter_crypto::keys::*;
 use regex::Regex;
 
@@ -56,7 +53,7 @@ lazy_static! {
     static ref SIGNATURES_REGEX: Regex = Regex::new("[[:alnum:]+/=]+\n?").unwrap();
 }
 
-#[derive(Debug, Clone)]
+#[derive(Clone, Debug, Deserialize, Serialize)]
 /// Contains a document in full or compact format
 pub enum TextDocumentFormat<D: TextDocument> {
     /// Complete format (Allows to check the validity of the signature)
diff --git a/documents/blockchain/v10/documents/revocation.rs b/documents/blockchain/v10/documents/revocation.rs
index f8d3877b58b28d644561175a5e711deefe492f44..00f738d7199f71f334d4b952821be086d30f0780 100644
--- a/documents/blockchain/v10/documents/revocation.rs
+++ b/documents/blockchain/v10/documents/revocation.rs
@@ -15,9 +15,6 @@
 
 //! Wrappers around Revocation documents.
 
-extern crate serde;
-
-use self::serde::ser::{Serialize, Serializer};
 use duniter_crypto::keys::*;
 use regex::Regex;
 
@@ -34,7 +31,7 @@ lazy_static! {
     ).unwrap();
 }
 
-#[derive(Debug, Copy, Clone)]
+#[derive(Debug, Copy, Clone, Deserialize, Serialize)]
 /// Wrap an Compact Revocation document (in block content)
 pub struct CompactRevocationDocument {
     /// Issuer
@@ -56,7 +53,7 @@ impl CompactTextDocument for CompactRevocationDocument {
 /// Wrap an Revocation document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Deserialize, Serialize)]
 pub struct RevocationDocument {
     /// Document as text.
     ///
@@ -128,15 +125,6 @@ impl TextDocument for RevocationDocument {
     }
 }
 
-impl Serialize for TextDocumentFormat<RevocationDocument> {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        serializer.serialize_str(&self.as_compact_text())
-    }
-}
-
 impl IntoSpecializedDocument<BlockchainProtocol> for RevocationDocument {
     fn into_specialized(self) -> BlockchainProtocol {
         BlockchainProtocol::V10(Box::new(V10Document::Revocation(Box::new(self))))
diff --git a/documents/blockchain/v10/documents/transaction.rs b/documents/blockchain/v10/documents/transaction.rs
index 5a3483bcdc0727574745673768dd631d17456f67..aa6ddacd9db91975ccd1a4d803bf1fe2e9cbe62a 100644
--- a/documents/blockchain/v10/documents/transaction.rs
+++ b/documents/blockchain/v10/documents/transaction.rs
@@ -15,19 +15,17 @@
 
 //! Wrappers around Transaction documents.
 
-extern crate serde;
-
-use std::ops::Deref;
+use std::ops::{Add, Deref, Sub};
 
+use crypto::digest::Digest;
+use crypto::sha2::Sha256;
 use duniter_crypto::keys::*;
 use regex::Regex;
 use regex::RegexBuilder;
 
-use self::serde::ser::{Serialize, Serializer};
-
 use blockchain::v10::documents::*;
 use blockchain::{BlockchainProtocol, Document, DocumentBuilder, IntoSpecializedDocument};
-use Blockstamp;
+use {BlockId, Blockstamp, Hash};
 
 lazy_static! {
     static ref TRANSACTION_REGEX_SIZE: &'static usize = &40_000_000;
@@ -58,23 +56,49 @@ lazy_static! {
     ).unwrap();
 }
 
+/// Wrap a transaction amount
+#[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Hash, Serialize)]
+pub struct TxAmount(pub isize);
+
+impl Add for TxAmount {
+    type Output = TxAmount;
+    fn add(self, a: TxAmount) -> Self::Output {
+        TxAmount(self.0 + a.0)
+    }
+}
+
+impl Sub for TxAmount {
+    type Output = TxAmount;
+    fn sub(self, a: TxAmount) -> Self::Output {
+        TxAmount(self.0 - a.0)
+    }
+}
+
+/// Wrap a transaction amount base
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Hash, Serialize)]
+pub struct TxBase(pub usize);
+
+/// Wrap a transaction index
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
+pub struct TxIndex(pub usize);
+
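
A short sketch of the newtype arithmetic introduced above (values are illustrative):

    fn amounts_example() {
        // Only TxAmount gets Add/Sub; TxBase and TxIndex stay plain wrappers.
        let change = TxAmount(950) - TxAmount(30);
        assert_eq!(change, TxAmount(920));
        assert_eq!(change + TxAmount(80), TxAmount(1000));
    }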
 /// Wrap a transaction input
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserialize, Serialize)]
 pub enum TransactionInput {
     /// Universal Dividend Input
-    D(isize, usize, PubKey, u64),
+    D(TxAmount, TxBase, PubKey, BlockId),
     /// Previous Transaction Input
-    T(isize, usize, String, usize),
+    T(TxAmount, TxBase, Hash, TxIndex),
 }
 
 impl ToString for TransactionInput {
     fn to_string(&self) -> String {
         match *self {
             TransactionInput::D(amount, base, pubkey, block_number) => {
-                format!("{}:{}:D:{}:{}", amount, base, pubkey, block_number)
+                format!("{}:{}:D:{}:{}", amount.0, base.0, pubkey, block_number.0)
             }
             TransactionInput::T(amount, base, ref tx_hash, tx_index) => {
-                format!("{}:{}:T:{}:{}", amount, base, tx_hash, tx_index)
+                format!("{}:{}:T:{}:{}", amount.0, base.0, tx_hash, tx_index.0)
             }
         }
     }
@@ -89,14 +113,16 @@ impl TransactionInput {
             let pubkey = &caps["pubkey"];
             let block_number = &caps["block_number"];
             Ok(TransactionInput::D(
-                amount.parse().expect("fail to parse input amount !"),
-                base.parse().expect("fail to parse input base !"),
+                TxAmount(amount.parse().expect("fail to parse input amount !")),
+                TxBase(base.parse().expect("fail to parse input base !")),
                 PubKey::Ed25519(
                     ed25519::PublicKey::from_base58(pubkey).expect("fail to parse input pubkey !"),
                 ),
-                block_number
-                    .parse()
-                    .expect("fail to parse input block_number !"),
+                BlockId(
+                    block_number
+                        .parse()
+                        .expect("fail to parse input block_number !"),
+                ),
             ))
         //Ok(TransactionInput::D(10, 0, PubKey::Ed25519(ed25519::PublicKey::from_base58("FD9wujR7KABw88RyKEGBYRLz8PA6jzVCbcBAsrBXBqSa").unwrap(), 0)))
         } else if let Some(caps) = T_INPUT_REGEX.captures(source) {
@@ -105,10 +131,10 @@ impl TransactionInput {
             let tx_hash = &caps["tx_hash"];
             let tx_index = &caps["tx_index"];
             Ok(TransactionInput::T(
-                amount.parse().expect("fail to parse input amount"),
-                base.parse().expect("fail to parse base amount"),
-                String::from(tx_hash),
-                tx_index.parse().expect("fail to parse tx_index amount"),
+                TxAmount(amount.parse().expect("fail to parse input amount")),
+                TxBase(base.parse().expect("fail to parse base amount")),
+                Hash::from_hex(tx_hash).expect("fail to parse tx_hash"),
+                TxIndex(tx_index.parse().expect("fail to parse tx_index amount")),
             ))
         } else {
             println!("Fail to parse this input = {:?}", source);
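
A minimal sketch of what the retyped inputs look like after parse_from_str (the raw string is taken from the test further below):

    fn parse_input_example() {
        let input = TransactionInput::parse_from_str(
            "950:0:T:2CF1ACD8FE8DC93EE39A1D55881C50D87C55892AE8E4DB71D4EBAB3D412AA8FD:1",
        ).expect("fail to parse input !");
        // The amount, base and index now land in the TxAmount/TxBase/TxIndex newtypes,
        // and the tx hash is a real Hash instead of a String.
        if let TransactionInput::T(amount, base, _tx_hash, index) = input {
            assert_eq!(amount, TxAmount(950));
            assert_eq!(base, TxBase(0));
            assert_eq!(index, TxIndex(1));
        }
    }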
@@ -120,7 +146,7 @@ impl TransactionInput {
 }
 
 /// Wrap a transaction unlock proof
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
 pub enum TransactionUnlockProof {
     /// Indicates that the signature of the corresponding key is at the bottom of the document
     Sig(usize),
@@ -156,7 +182,7 @@ impl TransactionUnlockProof {
 }
 
 /// Wrap a transaction unlocks input
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
 pub struct TransactionInputUnlocks {
     /// Input index
     pub index: usize,
@@ -204,12 +230,12 @@ impl TransactionInputUnlocks {
 }
 
 /// Wrap a transaction ouput condition
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
 pub enum TransactionOutputCondition {
     /// The consumption of funds will require a valid signature of the specified key
     Sig(PubKey),
     /// The consumption of funds will require to provide a code with the hash indicated
-    Xhx(String),
+    Xhx(Hash),
     /// Funds may not be consumed until the blockchain reaches the timestamp indicated.
     Cltv(u64),
     /// Funds may not be consumed before the duration indicated, starting from the timestamp of the block where the transaction is written.
@@ -230,24 +256,37 @@ impl ToString for TransactionOutputCondition {
 impl TransactionOutputCondition {
     fn parse_from_str(source: &str) -> Result<TransactionOutputCondition, V10DocumentParsingError> {
         if let Some(caps) = OUTPUT_COND_SIG_REGEX.captures(source) {
-            Ok(TransactionOutputCondition::Sig(PubKey::Ed25519(
-                ed25519::PublicKey::from_base58(&caps["pubkey"])
-                    .expect("fail to parse SIG TransactionOutputCondition"),
-            )))
+            if let Ok(pubkey) = ed25519::PublicKey::from_base58(&caps["pubkey"]) {
+                Ok(TransactionOutputCondition::Sig(PubKey::Ed25519(pubkey)))
+            } else {
+                Err(V10DocumentParsingError::InvalidInnerFormat(String::from(
+                    "OutputCondition : Fail to parse SIG pubkey.",
+                )))
+            }
         } else if let Some(caps) = OUTPUT_COND_XHX_REGEX.captures(source) {
-            Ok(TransactionOutputCondition::Xhx(String::from(&caps["hash"])))
+            if let Ok(hash) = Hash::from_hex(&caps["hash"]) {
+                Ok(TransactionOutputCondition::Xhx(hash))
+            } else {
+                Err(V10DocumentParsingError::InvalidInnerFormat(String::from(
+                    "OutputCondition : Fail to parse XHX Hash.",
+                )))
+            }
         } else if let Some(caps) = OUTPUT_COND_CLTV_REGEX.captures(source) {
-            Ok(TransactionOutputCondition::Cltv(
-                caps["timestamp"]
-                    .parse()
-                    .expect("fail to parse CLTV TransactionOutputCondition"),
-            ))
+            if let Ok(timestamp) = caps["timestamp"].parse() {
+                Ok(TransactionOutputCondition::Cltv(timestamp))
+            } else {
+                Err(V10DocumentParsingError::InvalidInnerFormat(String::from(
+                    "OutputCondition : Fail to parse CLTV timestamp.",
+                )))
+            }
         } else if let Some(caps) = OUTPUT_COND_CSV_REGEX.captures(source) {
-            Ok(TransactionOutputCondition::Csv(
-                caps["duration"]
-                    .parse()
-                    .expect("fail to parse CSV TransactionOutputCondition"),
-            ))
+            if let Ok(duration) = caps["duration"].parse() {
+                Ok(TransactionOutputCondition::Csv(duration))
+            } else {
+                Err(V10DocumentParsingError::InvalidInnerFormat(String::from(
+                    "OutputCondition : Fail to parse CSV duration.",
+                )))
+            }
         } else {
             Err(V10DocumentParsingError::InvalidInnerFormat(
                 "Transaction5".to_string(),
@@ -257,7 +296,7 @@ impl TransactionOutputCondition {
 }
 
 /// Wrap a transaction ouput condition group
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
 pub enum TransactionOutputConditionGroup {
     /// Single
     Single(TransactionOutputCondition),
@@ -340,12 +379,12 @@ impl TransactionOutputConditionGroup {
 }
 
 /// Wrap a transaction ouput
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
 pub struct TransactionOutput {
     /// Amount
-    pub amount: isize,
+    pub amount: TxAmount,
     /// Base
-    pub base: usize,
+    pub base: TxBase,
     /// List of conditions for consum this output
     pub conditions: TransactionOutputConditionGroup,
 }
@@ -354,8 +393,8 @@ impl ToString for TransactionOutput {
     fn to_string(&self) -> String {
         format!(
             "{}:{}:{}",
-            self.amount,
-            self.base,
+            self.amount.0,
+            self.base.0,
             self.conditions.to_string()
         )
     }
@@ -365,8 +404,8 @@ impl TransactionOutput {
     /// Parse Transaction Output from string.
     pub fn parse_from_str(source: &str) -> Result<TransactionOutput, V10DocumentParsingError> {
         if let Some(caps) = OUTPUT_REGEX.captures(source) {
-            let amount = caps["amount"].parse().expect("fail to parse output amount");
-            let base = caps["base"].parse().expect("fail to parse base amount");
+            let amount = TxAmount(caps["amount"].parse().expect("fail to parse output amount"));
+            let base = TxBase(caps["base"].parse().expect("fail to parse base amount"));
             let conditions = TransactionOutputConditionGroup::parse_from_str(&caps["conditions"])?;
             Ok(TransactionOutput {
                 conditions,
@@ -384,13 +423,13 @@ impl TransactionOutput {
 /// Wrap a Transaction document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Debug, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq, Deserialize, Serialize)]
 pub struct TransactionDocument {
     /// Document as text.
     ///
     /// Is used to check signatures, and other values
     /// must be extracted from it.
-    text: String,
+    text: Option<String>,
 
     /// Currency.
     currency: String,
@@ -410,6 +449,47 @@ pub struct TransactionDocument {
     comment: String,
     /// Document signature (there should be only one).
     signatures: Vec<Sig>,
+    /// Transaction hash
+    hash: Option<Hash>,
+}
+
+impl TransactionDocument {
+    /// Compute transaction hash
+    pub fn compute_hash(&mut self) -> Hash {
+        let mut sha256 = Sha256::new();
+        let mut hashing_text = if let Some(ref text) = self.text {
+            text.clone()
+        } else {
+            panic!("Try to compute_hash of tx with None text !")
+        };
+        hashing_text.push_str(&self.signatures[0].to_string());
+        hashing_text.push_str("\n");
+        //println!("tx_text_hasing={}", hashing_text);
+        sha256.input_str(&hashing_text);
+        self.hash = Some(Hash::from_hex(&sha256.result_str()).unwrap());
+        self.hash.expect("Try to get hash of a reduced tx !")
+    }
+    /// Get transaction hash
+    pub fn get_hash(&mut self) -> Hash {
+        if let Some(hash) = self.hash {
+            hash
+        } else {
+            self.compute_hash()
+        }
+    }
+    /// Get transaction inputs
+    pub fn get_inputs(&self) -> &[TransactionInput] {
+        &self.inputs
+    }
+    /// Get transaction outputs
+    pub fn get_outputs(&self) -> &[TransactionOutput] {
+        &self.outputs
+    }
+    /// Lightens the transaction (for example to store it while minimizing the space required)
+    pub fn reduce(&mut self) {
+        self.text = None;
+        self.hash = None;
+    }
 }
 
 impl Document for TransactionDocument {
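
A minimal sketch of the hash caching behaviour added above (assuming `tx` was produced by the builder with its text still present):

    fn hash_caching_example(mut tx: TransactionDocument) {
        let first = tx.get_hash(); // computes the hash from the signed text and caches it
        let second = tx.get_hash(); // served from the cache, no re-hashing
        assert_eq!(first, second);
        tx.reduce(); // drops `text` and `hash` to save storage space;
                     // a later get_hash() would panic inside compute_hash()
    }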
@@ -498,7 +578,11 @@ impl TextDocument for TransactionDocument {
     type CompactTextDocument_ = TransactionDocument;
 
     fn as_text(&self) -> &str {
-        &self.text
+        if let Some(ref text) = self.text {
+            text
+        } else {
+            panic!("Try to get text of a tx with None text !")
+        }
     }
 
     fn to_compact_document(&self) -> Self::CompactTextDocument_ {
@@ -506,16 +590,6 @@ impl TextDocument for TransactionDocument {
     }
 }
 
-impl Serialize for TransactionDocument {
-    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
-    where
-        S: Serializer,
-    {
-        let compact_text = self.to_compact_document().generate_compact_text();
-        serializer.serialize_str(&compact_text.replace("\n", "$"))
-    }
-}
-
 impl IntoSpecializedDocument<BlockchainProtocol> for TransactionDocument {
     fn into_specialized(self) -> BlockchainProtocol {
         BlockchainProtocol::V10(Box::new(V10Document::Transaction(Box::new(self))))
@@ -546,7 +620,7 @@ pub struct TransactionDocumentBuilder<'a> {
 impl<'a> TransactionDocumentBuilder<'a> {
     fn build_with_text_and_sigs(self, text: String, signatures: Vec<Sig>) -> TransactionDocument {
         TransactionDocument {
-            text,
+            text: Some(text),
             currency: self.currency.to_string(),
             blockstamp: *self.blockstamp,
             locktime: *self.locktime,
@@ -556,6 +630,7 @@ impl<'a> TransactionDocumentBuilder<'a> {
             outputs: self.outputs.clone(),
             comment: String::from(self.comment),
             signatures,
+            hash: None,
         }
     }
 }
@@ -670,7 +745,7 @@ impl StandardTextDocumentParser for TransactionDocumentParser {
             }
 
             Ok(V10Document::Transaction(Box::new(TransactionDocument {
-                text: doc.to_owned(),
+                text: Some(doc.to_owned()),
                 currency: currency.to_owned(),
                 blockstamp,
                 locktime,
@@ -680,6 +755,7 @@ impl StandardTextDocumentParser for TransactionDocumentParser {
                 outputs,
                 comment,
                 signatures,
+                hash: None,
             })))
         } else {
             Err(V10DocumentParsingError::InvalidInnerFormat(
@@ -751,6 +827,54 @@ mod tests {
         );
     }
 
+    #[test]
+    fn compute_transaction_hash() {
+        let pubkey = PubKey::Ed25519(
+            ed25519::PublicKey::from_base58("FEkbc4BfJukSWnCU6Hed6dgwwTuPFTVdgz5LpL4iHr9J")
+                .unwrap(),
+        );
+
+        let sig = Sig::Ed25519(ed25519::Signature::from_base64(
+            "XEwKwKF8AI1gWPT7elR4IN+bW3Qn02Dk15TEgrKtY/S2qfZsNaodsLofqHLI24BBwZ5aadpC88ntmjo/UW9oDQ==",
+        ).unwrap());
+
+        let block = Blockstamp::from_string(
+            "60-00001FE00410FCD5991EDD18AA7DDF15F4C8393A64FA92A1DB1C1CA2E220128D",
+        ).unwrap();
+
+        let builder = TransactionDocumentBuilder {
+            currency: "g1",
+            blockstamp: &block,
+            locktime: &0,
+            issuers: &vec![pubkey],
+            inputs: &vec![
+                TransactionInput::parse_from_str(
+                    "950:0:T:2CF1ACD8FE8DC93EE39A1D55881C50D87C55892AE8E4DB71D4EBAB3D412AA8FD:1",
+                ).expect("fail to parse input !"),
+            ],
+            unlocks: &vec![
+                TransactionInputUnlocks::parse_from_str("0:SIG(0)")
+                    .expect("fail to parse unlock !"),
+            ],
+            outputs: &vec![
+                TransactionOutput::parse_from_str(
+                    "30:0:SIG(38MEAZN68Pz1DTvT3tqgxx4yQP6snJCQhPqEFxbDk4aE)",
+                ).expect("fail to parse output !"),
+                TransactionOutput::parse_from_str(
+                    "920:0:SIG(FEkbc4BfJukSWnCU6Hed6dgwwTuPFTVdgz5LpL4iHr9J)",
+                ).expect("fail to parse output !"),
+            ],
+            comment: "Pour cesium merci",
+        };
+        let mut tx_doc = builder.build_with_signature(vec![sig]);
+        assert_eq!(tx_doc.verify_signatures(), VerificationResult::Valid());
+        assert_eq!(
+            tx_doc.get_hash(),
+            Hash::from_hex("876D2430E0B66E2CE4467866D8F923D68896CACD6AA49CDD8BDD0096B834DEF1")
+                .expect("fail to parse hash")
+        );
+    }
+
     #[test]
     fn transaction_standard_regex() {
         let tx_regex: Regex = RegexBuilder::new(&TRANSACTION_REGEX_BUILDER)
diff --git a/documents/lib.rs b/documents/lib.rs
index f80ced520879f3dfb3e6e3a488513ae7fba74699..f39b5f5db3d16f2a8cf5336fcedc2939dd6ed6e3 100644
--- a/documents/lib.rs
+++ b/documents/lib.rs
@@ -16,32 +16,34 @@
 //! Implements the Duniter Documents Protocol.
 
 #![cfg_attr(feature = "strict", deny(warnings))]
+#![cfg_attr(feature = "cargo-clippy", allow(unused_collect))]
 #![deny(
     missing_docs, missing_debug_implementations, missing_copy_implementations, trivial_casts,
     trivial_numeric_casts, unsafe_code, unstable_features, unused_import_braces,
     unused_qualifications
 )]
 
+#[macro_use]
+extern crate lazy_static;
+#[macro_use]
+extern crate serde_derive;
+
 extern crate base58;
 extern crate base64;
 extern crate crypto;
 extern crate duniter_crypto;
-#[macro_use]
-extern crate lazy_static;
 extern crate linked_hash_map;
 extern crate regex;
 extern crate serde;
 
+use duniter_crypto::keys::BaseConvertionError;
 use std::cmp::Ordering;
 use std::fmt::{Debug, Display, Error, Formatter};
 
-use self::serde::ser::{Serialize, Serializer};
-use duniter_crypto::keys::BaseConvertionError;
-
 pub mod blockchain;
 
 /// A block Id.
-#[derive(Debug, Copy, Clone, Ord, PartialEq, PartialOrd, Eq, Hash)]
+#[derive(Debug, Deserialize, Copy, Clone, Ord, PartialEq, PartialOrd, Eq, Hash, Serialize)]
 pub struct BlockId(pub u32);
 
 impl Display for BlockId {
@@ -53,7 +55,7 @@ impl Display for BlockId {
 /// A hash wrapper.
 ///
 /// A hash is often provided as string composed of 64 hexadecimal character (0 to 9 then A to F).
-#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Hash)]
+#[derive(Copy, Clone, Deserialize, Eq, Ord, PartialEq, PartialOrd, Hash, Serialize)]
 pub struct Hash(pub [u8; 32]);
 
 impl Display for Hash {
@@ -122,7 +124,7 @@ impl Hash {
 }
 
 /// Wrapper of a block hash.
-#[derive(Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Hash)]
+#[derive(Copy, Clone, Default, Deserialize, Eq, Ord, PartialEq, PartialOrd, Hash, Serialize)]
 pub struct BlockHash(pub Hash);
 
 impl Display for BlockHash {
@@ -160,7 +162,7 @@ pub enum BlockUIdParseError {
 ///
 /// [`BlockId`]: struct.BlockId.html
 /// [`BlockHash`]: struct.BlockHash.html
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, Deserialize, PartialEq, Eq, Hash, Serialize)]
 pub struct Blockstamp {
     /// Block Id.
     pub id: BlockId,
@@ -168,6 +170,9 @@ pub struct Blockstamp {
     pub hash: BlockHash,
 }
 
+/// Previous blockstamp (BlockId-1, previous_hash)
+pub type PreviousBlockstamp = Blockstamp;
+
 impl Display for Blockstamp {
     fn fmt(&self, f: &mut Formatter) -> Result<(), Error> {
         write!(f, "{}-{}", self.id, self.hash)
@@ -189,14 +194,81 @@ impl Default for Blockstamp {
     }
 }
 
-impl Serialize for Blockstamp {
+/*impl Serialize for Blockstamp {
     fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
     where
         S: Serializer,
     {
         serializer.serialize_str(&format!("{}-{}", self.id, self.hash))
     }
-}
+}*/
+
+/*impl<'de> Deserialize<'de> for Blockstamp {
+    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        #[derive(Deserialize)]
+        #[serde(field_identifier, rename_all = "lowercase")]
+        enum Field {
+            Id,
+            Hash,
+        }
+
+        struct BlockstampVisitor;
+
+        impl<'de> Visitor<'de> for BlockstampVisitor {
+            type Value = Blockstamp;
+
+            fn expecting(&self, formatter: &mut Formatter) -> fmt::Result {
+                formatter.write_str("struct Blockstamp")
+            }
+
+            fn visit_seq<V>(self, mut seq: V) -> Result<Blockstamp, V::Error>
+            where
+                V: SeqAccess<'de>,
+            {
+                let id = seq
+                    .next_element()?
+                    .ok_or_else(|| de::Error::invalid_length(0, &self))?;
+                let hash = seq
+                    .next_element()?
+                    .ok_or_else(|| de::Error::invalid_length(1, &self))?;
+                Ok(Blockstamp { id, hash })
+            }
+
+            fn visit_map<V>(self, mut map: V) -> Result<Blockstamp, V::Error>
+            where
+                V: MapAccess<'de>,
+            {
+                let mut id = None;
+                let mut hash = None;
+                while let Some(key) = map.next_key()? {
+                    match key {
+                        Field::Id => {
+                            if id.is_some() {
+                                return Err(de::Error::duplicate_field("id"));
+                            }
+                            id = Some(map.next_value()?);
+                        }
+                        Field::Hash => {
+                            if hash.is_some() {
+                                return Err(de::Error::duplicate_field("hash"));
+                            }
+                            hash = Some(map.next_value()?);
+                        }
+                    }
+                }
+                let id = id.ok_or_else(|| de::Error::missing_field("id"))?;
+                let hash = hash.ok_or_else(|| de::Error::missing_field("hash"))?;
+                Ok(Blockstamp { id, hash })
+            }
+        }
+
+        const FIELDS: &'static [&'static str] = &["id", "hash"];
+        deserializer.deserialize_struct("Blockstamp", FIELDS, BlockstampVisitor)
+    }
+}*/
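
A minimal sketch of what the derived impls give instead of the commented-out string form (bincode is only an illustrative encoder):

    fn blockstamp_round_trip() {
        let blockstamp = Blockstamp::from_string(
            "60-00001FE00410FCD5991EDD18AA7DDF15F4C8393A64FA92A1DB1C1CA2E220128D",
        ).expect("Fail to parse blockstamp !");
        // The derived impls encode the (id, hash) fields directly,
        // not the "ID-HASH" string the old manual Serialize produced.
        let bytes = bincode::serialize(&blockstamp).expect("Fail to serialize blockstamp !");
        let decoded: Blockstamp = bincode::deserialize(&bytes).expect("Fail to deserialize blockstamp !");
        assert_eq!(blockstamp, decoded);
    }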
 
 impl PartialOrd for Blockstamp {
     fn partial_cmp(&self, other: &Blockstamp) -> Option<Ordering> {
@@ -232,7 +304,9 @@ impl Blockstamp {
             } else {
                 Ok(Blockstamp {
                     id: BlockId(id.unwrap()),
-                    hash: BlockHash(hash.unwrap()),
+                    hash: BlockHash(
+                        hash.expect("Try to get hash of an uncompleted or reduced block !"),
+                    ),
                 })
             }
         }
diff --git a/network/lib.rs b/network/lib.rs
index 8130c8e90fd4a81849cf8e0a63d5750b0c56e1fe..62d54d7fba9967f3b0998dbb1ffbcc816648ac54 100644
--- a/network/lib.rs
+++ b/network/lib.rs
@@ -36,8 +36,6 @@ pub mod network_endpoint;
 pub mod network_head;
 pub mod network_peer;
 
-use self::network_head::NetworkHead;
-use self::network_peer::NetworkPeer;
 use crypto::digest::Digest;
 use crypto::sha2::Sha256;
 use duniter_crypto::keys::*;
@@ -48,6 +46,8 @@ use duniter_documents::blockchain::v10::documents::{
 use duniter_documents::blockchain::Document;
 use duniter_documents::{BlockHash, BlockId, Blockstamp, Hash};
 use duniter_module::{ModuleReqFullId, ModuleReqId};
+use network_head::NetworkHead;
+use network_peer::NetworkPeer;
 use std::fmt::{Debug, Display, Error, Formatter};
 use std::ops::Deref;
 
@@ -134,6 +134,15 @@ pub enum NetworkBlock {
 }
 
 impl NetworkBlock {
+    /// Return uncompleted block document
+    pub fn uncompleted_block_doc(&self) -> BlockDocument {
+        match *self {
+            NetworkBlock::V10(ref network_block_v10) => {
+                network_block_v10.deref().uncompleted_block_doc.clone()
+            }
+            _ => panic!("Block version not supported !"),
+        }
+    }
     /// Return blockstamp
     pub fn blockstamp(&self) -> Blockstamp {
         match *self {
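
A minimal sketch of the new accessor pair on NetworkBlock (the caller is assumed to complete the block document afterwards):

    fn split_network_block(network_block: &NetworkBlock) -> (BlockDocument, Blockstamp) {
        // The returned document is still "uncompleted" (hash/inner_hash may be missing),
        // so callers verify or complete it before stacking it up.
        (
            network_block.uncompleted_block_doc(),
            network_block.blockstamp(),
        )
    }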
diff --git a/tui/lib.rs b/tui/lib.rs
index 17d8b21e3fa57e3581dfd3f521e8ceb77624a610..6458841de95c0f03a62e2c183ae6b692528d43e1 100644
--- a/tui/lib.rs
+++ b/tui/lib.rs
@@ -492,7 +492,7 @@ impl DuniterModule<DuniterMessage> for TuiModule {
                                 }
                             }
                             DuniterMessage::DALEvent(ref dal_event) => match *dal_event {
-                                DALEvent::StackUpValidBlock(ref _block) => {}
+                                DALEvent::StackUpValidBlock(ref _block, ref _blockstamp) => {}
                                 DALEvent::RevertBlocks(ref _blocks) => {}
                                 _ => {}
                             },
diff --git a/ws2p/lib.rs b/ws2p/lib.rs
index c4dffa53f807810445890dcb8adabf07bb7aee30..dc696d57c50c293ba687b5496891f207b75c016a 100644
--- a/ws2p/lib.rs
+++ b/ws2p/lib.rs
@@ -54,33 +54,32 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
 use duniter_crypto::keys::*;
 use duniter_dal::dal_event::DALEvent;
 use duniter_dal::dal_requests::{DALReqBlockchain, DALRequest, DALResBlockchain, DALResponse};
-use duniter_dal::parsers::blocks::parse_json_block;
-use duniter_documents::blockchain::Document;
 use duniter_documents::Blockstamp;
 use duniter_message::DuniterMessage;
 use duniter_module::*;
 use duniter_network::network_endpoint::*;
 use duniter_network::network_head::*;
 use duniter_network::*;
-
+use parsers::blocks::parse_json_block;
 use websocket::{ClientBuilder, Message};
 
 mod ack_message;
 mod connect_message;
 pub mod constants;
 mod ok_message;
+pub mod parsers;
 pub mod serializer;
 pub mod ws2p_connection;
 pub mod ws2p_db;
 pub mod ws2p_requests;
 
-use self::ack_message::WS2PAckMessageV1;
-use self::connect_message::WS2PConnectMessageV1;
-use self::constants::*;
-use self::ok_message::WS2POkMessageV1;
-use self::rand::Rng;
-use self::ws2p_connection::*;
-use self::ws2p_requests::network_request_to_json;
+use ack_message::WS2PAckMessageV1;
+use connect_message::WS2PConnectMessageV1;
+use constants::*;
+use ok_message::WS2POkMessageV1;
+use rand::Rng;
+use ws2p_connection::*;
+use ws2p_requests::network_request_to_json;
 
 #[derive(Debug, Clone, PartialEq, Eq, Hash)]
 pub struct WS2PConf {
@@ -239,7 +238,8 @@ impl DuniterModule<DuniterMessage> for WS2PModule {
         let mut ws2p_endpoints = HashMap::new();
         for ep in conf.sync_endpoints.clone() {
             ws2p_endpoints.insert(
-                ep.node_full_id().unwrap(),
+                ep.node_full_id()
+                    .expect("Fail to get endpoint node_full_id"),
                 (ep.clone(), WS2PConnectionState::Close),
             );
             info!("Load sync endpoint {}", ep.raw());
@@ -297,7 +297,8 @@ impl DuniterModule<DuniterMessage> for WS2PModule {
             if ep.api() == NetworkEndpointApi(String::from("WS2P")) && ep.port() != 443 {
                 count += 1;
                 ws2p_module.ws2p_endpoints.insert(
-                    ep.node_full_id().unwrap(),
+                    ep.node_full_id()
+                        .expect("WS2P: Fail to get ep.node_full_id() !"),
                     (ep.clone(), WS2PConnectionState::from(ep.status())),
                 );
             }
@@ -423,8 +424,8 @@ impl DuniterModule<DuniterMessage> for WS2PModule {
                                 _ => {}
                             },
                             DuniterMessage::DALEvent(ref dal_event) => match *dal_event {
-                                DALEvent::StackUpValidBlock(ref block) => {
-                                    current_blockstamp = block.deref().blockstamp();
+                                DALEvent::StackUpValidBlock(ref _block, ref blockstamp) => {
+                                    current_blockstamp = *blockstamp;
                                     debug!(
                                         "WS2PModule : current_blockstamp = {}",
                                         current_blockstamp
@@ -472,13 +473,14 @@ impl DuniterModule<DuniterMessage> for WS2PModule {
                                         DALResBlockchain::CurrentBlock(
                                             ref _requester_full_id,
                                             ref current_block,
+                                            ref current_blockstamp_,
                                         ) => {
-                                            let current_block = current_block.deref();
+                                            let _current_block = current_block.deref();
                                             debug!(
                                                 "WS2PModule : receive DALResBc::CurrentBlock({})",
-                                                current_block.blockstamp()
+                                                current_blockstamp
                                             );
-                                            current_blockstamp = current_block.blockstamp();
+                                            current_blockstamp = *current_blockstamp_;
                                             if ws2p_module.my_head.is_none() {
                                                 ws2p_module.my_head =
                                                     Some(WS2PModuleDatas::generate_my_head(
@@ -606,17 +608,19 @@ impl DuniterModule<DuniterMessage> for WS2PModule {
                             //ws2p_module.send_network_event(NetworkEvent::ReceivePeers(_));
                             for ep in ws2p_endpoints {
                                 if ep.port() != 443 {
-                                    match ws2p_module
-                                        .ws2p_endpoints
-                                        .get(&ep.node_full_id().unwrap())
-                                    {
+                                    match ws2p_module.ws2p_endpoints.get(
+                                        &ep.node_full_id()
+                                            .expect("WS2P: Fail to get ep.node_full_id() !"),
+                                    ) {
                                         Some(_) => {}
                                         None => {
                                             if let Some(_api) =
                                                 ws2p_db::string_to_api(&ep.api().0.clone())
                                             {
                                                 endpoints_to_update_status.insert(
-                                                    ep.node_full_id().unwrap(),
+                                                    ep.node_full_id().expect(
+                                                        "WS2P: Fail to get ep.node_full_id() !",
+                                                    ),
                                                     SystemTime::now(),
                                                 );
                                             }
@@ -946,13 +950,13 @@ impl WS2PModuleDatas {
                             for endpoint in array_endpoints {
                                 sync_endpoints.push(
                                     NetworkEndpoint::parse_from_raw(
-                                        endpoint.as_str().unwrap(),
+                                        endpoint.as_str().expect("WS2P: Fail to get ep.as_str() !"),
                                         pubkey,
                                         0,
                                         0,
                                     ).expect(&format!(
                                         "WS2PConf Error : fail to parse sync Endpoint = {:?}",
-                                        endpoint.as_str().unwrap()
+                                        endpoint.as_str().expect("WS2P: Fail to get ep.as_str() !")
                                     )),
                                 );
                             }
@@ -1054,8 +1058,9 @@ impl WS2PModuleDatas {
             let blockstamps_occurences_copy = blockstamps_occurences.clone();
             match blockstamps_occurences_copy.get(&head.blockstamp()) {
                 Some(occurences) => {
-                    let mut occurences_mut =
-                        blockstamps_occurences.get_mut(&head.blockstamp()).unwrap();
+                    let mut occurences_mut = blockstamps_occurences
+                        .get_mut(&head.blockstamp())
+                        .expect("WS2P: Fail to get_mut blockstamps_occurences !");
                     *occurences_mut += 1;
                     if *occurences > dominant_blockstamp_occurences {
                         dominant_blockstamp_occurences = *occurences;
@@ -1101,13 +1106,20 @@ impl WS2PModuleDatas {
                 _ => unreachable_endpoints.push(ep),
             }
         }
-        let mut free_outcoming_rooms =
-            self.conf.clone().unwrap().outcoming_quota - count_established_connections;
+        let mut free_outcoming_rooms = self
+            .conf
+            .clone()
+            .expect("WS2P: Fail to get conf !")
+            .outcoming_quota - count_established_connections;
         while free_outcoming_rooms > 0 {
             let ep = if !reachable_endpoints.is_empty() {
-                reachable_endpoints.pop().unwrap()
+                reachable_endpoints
+                    .pop()
+                    .expect("WS2P: Fail to pop() reachable_endpoints !")
             } else if !unreachable_endpoints.is_empty() {
-                unreachable_endpoints.pop().unwrap()
+                unreachable_endpoints
+                    .pop()
+                    .expect("WS2P: Fail to pop() unreachable_endpoints !")
             } else {
                 break;
             };
@@ -1117,21 +1129,36 @@ impl WS2PModuleDatas {
     }
     pub fn connect_to(&mut self, endpoint: &NetworkEndpoint) -> () {
         // Add endpoint to endpoints list (if there isn't already)
-        match self.ws2p_endpoints.get(&endpoint.node_full_id().unwrap()) {
+        match self.ws2p_endpoints.get(
+            &endpoint
+                .node_full_id()
+                .expect("WS2P: Fail to get ep.node_full_id() !"),
+        ) {
             Some(_) => {
                 self.ws2p_endpoints
-                    .get_mut(&endpoint.node_full_id().unwrap())
-                    .unwrap()
+                    .get_mut(
+                        &endpoint
+                            .node_full_id()
+                            .expect("WS2P: Fail to get ep.node_full_id() !"),
+                    )
+                    .expect("WS2P: Fail to get_mut() a ws2p_endpoint !")
                     .1 = WS2PConnectionState::NeverTry;
             }
             None => {
                 self.ws2p_endpoints.insert(
-                    endpoint.node_full_id().unwrap(),
+                    endpoint
+                        .node_full_id()
+                        .expect("WS2P: Fail to get ep.node_full_id() !"),
                     (endpoint.clone(), WS2PConnectionState::NeverTry),
                 );
             }
         };
-        if self.conf.clone().unwrap().outcoming_quota > self.count_established_connections() {
+        if self
+            .conf
+            .clone()
+            .expect("WS2P: Fail to get conf !")
+            .outcoming_quota > self.count_established_connections()
+        {
             self.connect_to_without_checking_quotas(&endpoint);
         }
     }
@@ -1164,23 +1191,32 @@ impl WS2PModuleDatas {
             WS2PConnectionMessagePayload::WrongUrl
             | WS2PConnectionMessagePayload::FailOpenWS
             | WS2PConnectionMessagePayload::FailToSplitWS => {
-                self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 =
-                    WS2PConnectionState::WSError;
+                self.ws2p_endpoints
+                    .get_mut(&ws2p_full_id)
+                    .expect("WS2P: Fail to get mut ep !")
+                    .1 = WS2PConnectionState::WSError;
                 return WS2PSignal::WSError(ws2p_full_id);
             }
             WS2PConnectionMessagePayload::TryToSendConnectMess => {
-                self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 =
-                    WS2PConnectionState::TryToSendConnectMess;
+                self.ws2p_endpoints
+                    .get_mut(&ws2p_full_id)
+                    .expect("WS2P: Fail to get mut ep !")
+                    .1 = WS2PConnectionState::TryToSendConnectMess;
             }
             WS2PConnectionMessagePayload::FailSendConnectMess => {
-                self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 =
-                    WS2PConnectionState::Unreachable;
+                self.ws2p_endpoints
+                    .get_mut(&ws2p_full_id)
+                    .expect("WS2P: Fail to mut ep !")
+                    .1 = WS2PConnectionState::Unreachable;
             }
             WS2PConnectionMessagePayload::WebsocketOk(sender) => {
                 self.websockets.insert(ws2p_full_id, sender);
             }
             WS2PConnectionMessagePayload::ValidConnectMessage(response, new_con_state) => {
-                self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 = new_con_state;
+                self.ws2p_endpoints
+                    .get_mut(&ws2p_full_id)
+                    .expect("WS2P: Fail to get mut ep !")
+                    .1 = new_con_state;
                 if let WS2PConnectionState::ConnectMessOk = self.ws2p_endpoints[&ws2p_full_id].1 {
                     trace!("Send: {:#?}", response);
                     self.websockets
@@ -1191,11 +1227,14 @@ impl WS2PModuleDatas {
                         ))
                         .0
                         .send_message(&Message::text(response))
-                        .unwrap();
+                        .expect("WS2P: Fail to send OK Message !");
                 }
             }
             WS2PConnectionMessagePayload::ValidAckMessage(r, new_con_state) => {
-                self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 = new_con_state;
+                self.ws2p_endpoints
+                    .get_mut(&ws2p_full_id)
+                    .expect("WS2P: Fail to get mut ep !")
+                    .1 = new_con_state;
                 if let WS2PConnectionState::AckMessOk = self.ws2p_endpoints[&ws2p_full_id].1 {
                     trace!("DEBUG : Send: {:#?}", r);
                     self.websockets
@@ -1206,11 +1245,14 @@ impl WS2PModuleDatas {
                         ))
                         .0
                         .send_message(&Message::text(r))
-                        .unwrap();
+                        .expect("WS2P: Fail to send Message in websocket !");
                 }
             }
             WS2PConnectionMessagePayload::ValidOk(new_con_state) => {
-                self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 = new_con_state;
+                self.ws2p_endpoints
+                    .get_mut(&ws2p_full_id)
+                    .expect("WS2P: Fail to get mut ep !")
+                    .1 = new_con_state;
                 match self.ws2p_endpoints[&ws2p_full_id].1 {
                     WS2PConnectionState::OkMessOkWaitingAckMess => {}
                     WS2PConnectionState::Established => {
@@ -1219,7 +1261,7 @@ impl WS2PModuleDatas {
                     _ => {
                         self.threads_senders_channels[&ws2p_full_id]
                             .send(WS2POrderForListeningThread::Close)
-                            .unwrap();
+                            .expect("WS2P: Fail to send Close signal to connection threads !");
                         self.close_connection(&ws2p_full_id, WS2PCloseConnectionReason::Unknow);
                         return WS2PSignal::Empty;
                     }
@@ -1238,7 +1280,11 @@ impl WS2PModuleDatas {
                         if head.verify()
                             && (self.my_head.is_none()
                                 || head.node_full_id()
-                                    != self.my_head.clone().unwrap().node_full_id())
+                                    != self
+                                        .my_head
+                                        .clone()
+                                        .expect("WS2P: Fail to clone my_head")
+                                        .node_full_id())
                             && head.apply(&mut self.heads_cache)
                         {
                             applied_heads.push(head);
@@ -1267,16 +1313,22 @@ impl WS2PModuleDatas {
             WS2PConnectionMessagePayload::NegociationTimeout => {
                 match self.ws2p_endpoints[&ws2p_full_id].1 {
                     WS2PConnectionState::AckMessOk | WS2PConnectionState::ConnectMessOk => {
-                        self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 =
-                            WS2PConnectionState::Denial
+                        self.ws2p_endpoints
+                            .get_mut(&ws2p_full_id)
+                            .expect("WS2P: Fail to get mut ep !")
+                            .1 = WS2PConnectionState::Denial
                     }
                     WS2PConnectionState::WaitingConnectMess => {
-                        self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 =
-                            WS2PConnectionState::NoResponse
+                        self.ws2p_endpoints
+                            .get_mut(&ws2p_full_id)
+                            .expect("WS2P: Fail to get mut ep !")
+                            .1 = WS2PConnectionState::NoResponse
                     }
                     _ => {
-                        self.ws2p_endpoints.get_mut(&ws2p_full_id).unwrap().1 =
-                            WS2PConnectionState::Unreachable
+                        self.ws2p_endpoints
+                            .get_mut(&ws2p_full_id)
+                            .expect("WS2P: Fail to get mut ep !")
+                            .1 = WS2PConnectionState::Unreachable
                     }
                 }
                 self.close_connection(&ws2p_full_id, WS2PCloseConnectionReason::NegociationTimeout);
@@ -1290,13 +1342,13 @@ impl WS2PModuleDatas {
                 "WS2P : Receive Unknow Message from {}.",
                 &self.connections_meta_datas[&ws2p_full_id]
                     .remote_pubkey
-                    .unwrap()
+                    .expect("WS2P: UnknowMessage : Fail to get remote_pubkey !")
             ),
             WS2PConnectionMessagePayload::WrongFormatMessage => warn!(
                 "WS2P : Receive Wrong Format Message from {}.",
                 &self.connections_meta_datas[&ws2p_full_id]
                     .remote_pubkey
-                    .unwrap()
+                    .expect("WS2P: WrongFormatMessage : Fail to get remote_pubkey !")
             ),
             WS2PConnectionMessagePayload::InvalidMessage => return WS2PSignal::Empty,
             WS2PConnectionMessagePayload::Close => {
@@ -1393,7 +1445,7 @@ impl WS2PModuleDatas {
     ) -> Result<(), websocket::WebSocketError> {
         self.websockets
             .get_mut(receiver_ws2p_full_id)
-            .unwrap()
+            .expect("WS2P: Fail to get mut websocket !")
             .0
             .send_message(&Message::text(
                 network_request_to_json(ws2p_request).to_string(),
@@ -1413,7 +1465,11 @@ impl WS2PModuleDatas {
     fn connect_to_without_checking_quotas(&mut self, endpoint: &NetworkEndpoint) -> () {
         // update connection state
         self.ws2p_endpoints
-            .get_mut(&endpoint.node_full_id().unwrap())
+            .get_mut(
+                &endpoint
+                    .node_full_id()
+                    .expect("WS2P: Fail to get ep.node_full_id() !"),
+            )
             .expect("Fatal error: try to connect to unlisted endpoint ! ")
             .1 = WS2PConnectionState::TryToOpenWS;
 
@@ -1425,23 +1481,31 @@ impl WS2PModuleDatas {
             "b60a14fd-0826-4ae0-83eb-1a92cd59fd5308535fd3-78f2-4678-9315-cd6e3b7871b1".to_string(),
         );
         conn_meta_datas.remote_pubkey = Some(endpoint.pubkey());
-        conn_meta_datas.remote_uuid = Some(endpoint.node_uuid().unwrap());
+        conn_meta_datas.remote_uuid = Some(
+            endpoint
+                .node_uuid()
+                .expect("WS2P: Fail to get ep.node_uuid() !"),
+        );
 
         // Prepare datas for listening thread
         let mut datas_for_listening_thread = WS2PDatasForListeningThread {
             conn_meta_datas: conn_meta_datas.clone(),
-            currency: self.currency.clone().unwrap(),
-            key_pair: self.key_pair.unwrap(),
+            currency: self.currency.clone().expect("WS2P: Fail to get currency !"),
+            key_pair: self.key_pair.expect("WS2P: Fail to get key_pair!"),
         };
 
         // Create CONNECT Message
         let mut connect_message = WS2PConnectMessageV1 {
-            currency: self.currency.clone().unwrap(),
-            pubkey: self.key_pair.unwrap().public_key(),
+            currency: self.currency.clone().expect("WS2P: Fail to get currency !"),
+            pubkey: self
+                .key_pair
+                .expect("WS2P: Fail to get key_pair!")
+                .public_key(),
             challenge: conn_meta_datas.challenge.clone(),
             signature: None,
         };
-        connect_message.signature = Some(connect_message.sign(self.key_pair.unwrap()));
+        connect_message.signature =
+            Some(connect_message.sign(self.key_pair.expect("WS2P: Fail to get key_pair !")));
         let json_connect_message =
             serde_json::to_string(&connect_message).expect("Fail to serialize CONNECT message !");
 
@@ -1619,11 +1683,11 @@ impl WS2PModuleDatas {
                                                 // Parse message
                                                 let m = Message::from(message);
                                                 let s: String = from_utf8(&m.payload)
-                                                    .unwrap()
+                                                    .expect("WS2P: Fail to convert message payload to String !")
                                                     .to_string();
                                                 let message: serde_json::Value =
                                                     serde_json::from_str(&s)
-                                                    .unwrap();
+                                                    .expect("WS2P: Fail to convert string message to json value !");
                                                 let result = sender_to_main_thread.send(
                                                     WS2PThreadSignal::WS2PConnectionMessage(
                                                         WS2PConnectionMessage(
@@ -1677,14 +1741,13 @@ mod tests {
     extern crate duniter_module;
     extern crate duniter_network;
 
-    use self::duniter_crypto::keys::PublicKey;
-    use self::duniter_crypto::keys::*;
-    use self::duniter_dal::parsers::blocks::parse_json_block;
-    use self::duniter_documents::blockchain::v10::documents::BlockDocument;
-    use self::duniter_module::DuniterModule;
-    use self::duniter_network::network_endpoint::{NetworkEndpoint, NetworkEndpointApi};
-    use self::duniter_network::NetworkBlock;
+    use super::parsers::blocks::parse_json_block;
     use super::*;
+    use duniter_crypto::keys::PublicKey;
+    use duniter_documents::blockchain::v10::documents::BlockDocument;
+    use duniter_module::DuniterModule;
+    use duniter_network::network_endpoint::{NetworkEndpoint, NetworkEndpointApi};
+    use duniter_network::NetworkBlock;
     use std::fs;
     use std::path::PathBuf;
     use std::time::{SystemTime, UNIX_EPOCH};
@@ -1843,12 +1906,19 @@ mod tests {
                 }
             };
         assert_eq!(
-            block.inner_hash.unwrap().to_hex(),
+            block
+                .inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduced block !")
+                .to_hex(),
             "61F02B1A6AE2E4B9A1FD66CE673258B4B21C0076795571EE3C9DC440DD06C46C"
         );
         block.compute_hash();
         assert_eq!(
-            block.hash.unwrap().0.to_hex(),
+            block
+                .hash
+                .expect("Try to get hash of an uncompleted or reduced block !")
+                .0
+                .to_hex(),
             "000000EF5B2AA849F4C3AF3D35E1284EA1F34A9F617EA806CE8371619023DC74"
         );
     }
diff --git a/dal/parsers/blocks.rs b/ws2p/parsers/blocks.rs
similarity index 85%
rename from dal/parsers/blocks.rs
rename to ws2p/parsers/blocks.rs
index 87712fdd9d902de8a8ffdbc90a9ac88fb037685c..1d08cc31175f1920faba9f08492a6f8e99fb6d84 100644
--- a/dal/parsers/blocks.rs
+++ b/ws2p/parsers/blocks.rs
@@ -1,18 +1,17 @@
-extern crate duniter_crypto;
-extern crate duniter_documents;
-extern crate duniter_network;
 extern crate serde_json;
 
-use self::duniter_network::{NetworkBlock, NetworkBlockV10};
 use super::excluded::parse_exclusions_from_json_value;
 use super::identities::parse_compact_identity;
 use super::transactions::parse_transaction;
 use duniter_crypto::keys::*;
-use duniter_documents::blockchain::v10::documents::membership::{
-    MembershipDocument, MembershipType,
+use duniter_documents::blockchain::v10::documents::block::{
+    BlockV10Parameters, CurrencyName, TxDocOrTxHash,
 };
+use duniter_documents::blockchain::v10::documents::membership::*;
 use duniter_documents::blockchain::v10::documents::BlockDocument;
 use duniter_documents::{BlockHash, BlockId, Hash};
+use duniter_network::{NetworkBlock, NetworkBlockV10};
+use std::str::FromStr;
 
 fn parse_previous_hash(block_number: &BlockId, source: &serde_json::Value) -> Option<Hash> {
     match source.get("previousHash")?.as_str() {
@@ -73,6 +72,15 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
         Ok(hash) => hash,
         Err(_) => return None,
     };
+    let parameters = if let Some(params_json) = source.get("parameters") {
+        if let Ok(params) = BlockV10Parameters::from_str(params_json.as_str()?) {
+            Some(params)
+        } else {
+            None
+        }
+    } else {
+        None
+    };
     let previous_hash = parse_previous_hash(&number, source)?;
     let previous_issuer = parse_previous_issuer(source);
     let inner_hash = match Hash::from_hex(source.get("inner_hash")?.as_str()?) {
@@ -92,7 +100,9 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
     let leavers = parse_memberships(&currency, MembershipType::Out(), source.get("actives")?)?;
     let mut transactions = Vec::new();
     for json_tx in source.get("transactions")?.as_array()? {
-        transactions.push(parse_transaction("g1", &json_tx)?);
+        transactions.push(TxDocOrTxHash::TxDoc(Box::new(parse_transaction(
+            "g1", &json_tx,
+        )?)));
     }
     let block_doc = BlockDocument {
         nonce: source.get("nonce")?.as_i64()? as u64,
@@ -106,11 +116,11 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
         issuers_count: source.get("issuersCount")?.as_u64()? as usize,
         issuers_frame: source.get("issuersFrame")?.as_i64()? as isize,
         issuers_frame_var: source.get("issuersFrameVar")?.as_i64()? as isize,
-        currency,
+        currency: CurrencyName(currency),
         issuers: vec![issuer],
         signatures: vec![sig],
         hash: Some(BlockHash(hash)),
-        parameters: None,
+        parameters,
         previous_hash,
         previous_issuer,
         inner_hash,
@@ -125,7 +135,9 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
         transactions,
         inner_hash_and_nonce_str: format!(
             "InnerHash: {}\nNonce: {}\n",
-            inner_hash.unwrap().to_hex(),
+            inner_hash
+                .expect("Try to get inner_hash of an uncompleted or reduced block !")
+                .to_hex(),
             source.get("nonce")?.as_u64()?
         ),
     };
diff --git a/dal/parsers/excluded.rs b/ws2p/parsers/excluded.rs
similarity index 100%
rename from dal/parsers/excluded.rs
rename to ws2p/parsers/excluded.rs
diff --git a/dal/parsers/identities.rs b/ws2p/parsers/identities.rs
similarity index 99%
rename from dal/parsers/identities.rs
rename to ws2p/parsers/identities.rs
index 4af0f43549472c9842a38bde1b49551826edd1bf..ef62335415055c325cef050850d8957efc949fb9 100644
--- a/dal/parsers/identities.rs
+++ b/ws2p/parsers/identities.rs
@@ -1,5 +1,4 @@
 extern crate serde_json;
-extern crate sqlite;
 
 use duniter_crypto::keys::*;
 use duniter_documents::blockchain::v10::documents::identity::IdentityDocumentBuilder;
diff --git a/dal/parsers/memberships.rs b/ws2p/parsers/memberships.rs
similarity index 99%
rename from dal/parsers/memberships.rs
rename to ws2p/parsers/memberships.rs
index 27229024b110948d74f0029d2602f780338c8aa0..51704255891f3b2372b25865cb593adc04b05848 100644
--- a/dal/parsers/memberships.rs
+++ b/ws2p/parsers/memberships.rs
@@ -1,5 +1,4 @@
 extern crate serde_json;
-extern crate sqlite;
 
 use duniter_crypto::keys::*;
 use duniter_documents::blockchain::v10::documents::membership::{
diff --git a/ws2p/parsers/mod.rs b/ws2p/parsers/mod.rs
new file mode 100644
index 0000000000000000000000000000000000000000..74de739010c0008f445306527df55b761c30744d
--- /dev/null
+++ b/ws2p/parsers/mod.rs
@@ -0,0 +1,147 @@
+//  Copyright (C) 2018  The Duniter Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
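+//! Parsers for the JSON documents received over the WS2P network.
+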
+pub mod blocks;
+pub mod excluded;
+pub mod identities;
+pub mod memberships;
+pub mod transactions;
+
+#[cfg(test)]
+mod tests {
+    use super::transactions::*;
+    use duniter_crypto::keys::*;
+    use duniter_documents::blockchain::v10::documents::transaction::*;
+    use duniter_documents::blockchain::DocumentBuilder;
+    use duniter_documents::Blockstamp;
+
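+    // Round-trip check: a sample Ğ1 transaction parsed from JSON must equal the equivalent hand-built document.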
+    #[test]
+    fn parse_json_tx() {
+        let tx_json = json!({
+            "version": 10,
+            "currency": "g1",
+            "locktime": 0,
+            "hash": "3424206EF64C69E5F8C3906AAE571E378A498FCDAE0B85E9405A5205D7148EFE",
+            "blockstamp": "112533-000002150F2E805E604D9B31212D079570AAD8D3A4D8BB75F2C15A94A345B6B1",
+            "blockstampTime": 0,
+            "issuers": [
+                "51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2"
+            ],
+            "inputs": [
+                "1000:0:D:51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2:46496"
+            ],
+            "outputs": [
+                "1000:0:SIG(2yN8BRSkARcqE8NCxKMBiHfTpx1EvwULFn56Myf6qRmy)"
+            ],
+            "unlocks": [
+                "0:SIG(0)"
+            ],
+            "signatures": [
+                "5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw=="
+            ],
+            "comment": "Merci pour la calligraphie ;) de Liam"
+        });
+
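+        // Hand-built document matching the JSON above, used as the expected value.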
+        let tx_builder = TransactionDocumentBuilder {
+            currency: "g1",
+            blockstamp: &Blockstamp::from_string(
+                "112533-000002150F2E805E604D9B31212D079570AAD8D3A4D8BB75F2C15A94A345B6B1",
+            ).unwrap(),
+            locktime: &0,
+            issuers: &vec![PubKey::Ed25519(
+                ed25519::PublicKey::from_base58("51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2")
+                    .unwrap(),
+            )],
+            inputs: &vec![
+                TransactionInput::parse_from_str(
+                    "1000:0:D:51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2:46496",
+                ).unwrap(),
+            ],
+            outputs: &vec![
+                TransactionOutput::parse_from_str(
+                    "1000:0:SIG(2yN8BRSkARcqE8NCxKMBiHfTpx1EvwULFn56Myf6qRmy)",
+                ).unwrap(),
+            ],
+            unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
+            comment: "Merci pour la calligraphie ;) de Liam",
+        };
+
+        assert_eq!(
+            parse_transaction("g1", &tx_json).expect("Failed to parse transaction!"),
+            tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw==").unwrap())])
+        );
+    }
+
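+    // Same round-trip check, with a transaction that has two outputs.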
+    #[test]
+    fn parse_json_tx2() {
+        let tx_json = json!({
+            "version": 10,
+            "currency": "g1",
+            "locktime": 0,
+            "hash": "F98BF7A8BF82E76F5B69E70CEF0A07A08BFDB03561955EC57B254DB1E958529C",
+            "blockstamp": "58-00005B9167EBA1E32C6EAD42AE7F72D8F14B765D3C9E47D233B553D47C5AEE0C",
+            "blockstampTime": 1488990541,
+            "issuers": [
+                "FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD"
+            ],
+            "inputs": [
+                "1000:0:D:FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD:1"
+            ],
+            "outputs": [
+                "3:0:SIG(7vU9BMDhN6fBuRa2iK3JRbC6pqQKb4qDMGsFcQuT5cz)",
+                "997:0:SIG(FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD)"
+            ],
+            "unlocks": [
+                "0:SIG(0)"
+            ],
+            "signatures": [
+                "VWbvsiybM4L2X5+o+6lIiuKNw5KrD1yGZqmV+lHtA28XoRUFzochSIgfoUqBsTAaYEHY45vSX917LDXudTEzBg=="
+            ],
+            "comment": "Un petit cafe ;-)"
+        });
+
+        let tx_builder = TransactionDocumentBuilder {
+            currency: "g1",
+            blockstamp: &Blockstamp::from_string(
+                "58-00005B9167EBA1E32C6EAD42AE7F72D8F14B765D3C9E47D233B553D47C5AEE0C",
+            ).unwrap(),
+            locktime: &0,
+            issuers: &vec![PubKey::Ed25519(
+                ed25519::PublicKey::from_base58("FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD")
+                    .unwrap(),
+            )],
+            inputs: &vec![
+                TransactionInput::parse_from_str(
+                    "1000:0:D:FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD:1",
+                ).unwrap(),
+            ],
+            outputs: &vec![
+                TransactionOutput::parse_from_str(
+                    "3:0:SIG(7vU9BMDhN6fBuRa2iK3JRbC6pqQKb4qDMGsFcQuT5cz)",
+                ).unwrap(),
+                TransactionOutput::parse_from_str(
+                    "997:0:SIG(FVUFRrk1K5TQGsY7PRLwqHgdHRoHrwb1hcucp4C2N5tD)",
+                ).unwrap(),
+            ],
+            unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
+            comment: "Un petit cafe ;-)",
+        };
+
+        assert_eq!(
+            parse_transaction("g1", &tx_json).expect("Failed to parse transaction!"),
+            tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("VWbvsiybM4L2X5+o+6lIiuKNw5KrD1yGZqmV+lHtA28XoRUFzochSIgfoUqBsTAaYEHY45vSX917LDXudTEzBg==").unwrap())])
+        );
+    }
+}
diff --git a/dal/parsers/transactions.rs b/ws2p/parsers/transactions.rs
similarity index 100%
rename from dal/parsers/transactions.rs
rename to ws2p/parsers/transactions.rs
diff --git a/ws2p/ws2p_connection.rs b/ws2p/ws2p_connection.rs
index 6305b29d4a26cd70275739a89d98b23af97ca689..84578ca8a48def9bb3084b73900b9fc828ea0a96 100644
--- a/ws2p/ws2p_connection.rs
+++ b/ws2p/ws2p_connection.rs
@@ -2,10 +2,10 @@ extern crate serde_json;
 extern crate websocket;
 
 use duniter_crypto::keys::*;
-use duniter_dal::parsers::blocks::parse_json_block;
 use duniter_module::ModuleReqId;
 use duniter_network::network_endpoint::{NetworkEndpoint, NetworkEndpointApi};
 use duniter_network::{NetworkDocument, NodeUUID};
+use parsers::blocks::parse_json_block;
 use std::fmt::Debug;
 use std::net::TcpStream;