From bc5db45dfda7ed9fbb3ce7aadf8a1e23fe260578 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 6 Jan 2019 21:21:47 +0100
Subject: [PATCH 01/26] [ref] manage block version & clarify block verify
 section

---
 lib/core/message/events.rs                    |  18 +-
 lib/core/message/responses.rs                 |  10 +-
 lib/core/network/documents.rs                 |  12 +-
 .../blockchain/blockchain-dal/block.rs        |   2 +-
 .../blockchain-dal/currency_params.rs         |   2 +-
 .../blockchain/blockchain-dal/identity.rs     |   2 +-
 lib/modules/blockchain/blockchain-dal/lib.rs  |   4 +-
 .../blockchain-dal/parsers/certifications.rs  |   6 +-
 .../blockchain-dal/parsers/revoked.rs         |   4 +-
 .../blockchain/blockchain-dal/sources.rs      |   2 +-
 .../blockchain-dal/writers/certification.rs   |   2 +-
 .../blockchain-dal/writers/dividend.rs        |   2 +-
 .../blockchain-dal/writers/identity.rs        |   2 +-
 .../blockchain-dal/writers/requests.rs        |   4 +-
 .../blockchain-dal/writers/transaction.rs     |   2 +-
 .../blockchain/apply_valid_block.rs           |   4 +-
 .../blockchain/check_and_apply_block.rs       |  19 +-
 lib/modules/blockchain/blockchain/dbex.rs     |   2 +-
 lib/modules/blockchain/blockchain/lib.rs      | 230 ++++++++----------
 .../blockchain/blockchain/revert_block.rs     |   4 +-
 .../blockchain/blockchain/ts_parsers.rs       |  10 +-
 .../blockchain/blockchain/verify_block.rs     |  57 +++++
 lib/modules/skeleton/lib.rs                   |   2 +-
 lib/modules/tui/lib.rs                        |   2 +-
 lib/modules/ws2p-v1-legacy/lib.rs             |   4 +-
 lib/modules/ws2p-v1-legacy/parsers/blocks.rs  |   7 +-
 .../ws2p-v1-legacy/parsers/identities.rs      |   2 +-
 .../ws2p-v1-legacy/parsers/memberships.rs     |   2 +-
 lib/modules/ws2p-v1-legacy/parsers/mod.rs     |   2 +-
 .../ws2p-v1-legacy/parsers/transactions.rs    |   2 +-
 lib/modules/ws2p/ws2p-messages/lib.rs         |   2 +-
 .../ws2p-messages/v2/payload_container.rs     |  12 +-
 .../ws2p/ws2p-messages/v2/req_responses.rs    |   8 +-
 .../documents/src/{v10 => documents}/block.rs |  21 +-
 .../src/{v10 => documents}/certification.rs   |   6 +-
 .../src/{v10 => documents}/identity.rs        |   6 +-
 .../src/{v10 => documents}/membership.rs      |   6 +-
 .../documents/src/{v10 => documents}/mod.rs   | 221 ++++-------------
 .../src/{v10 => documents}/revocation.rs      |   6 +-
 .../src/{v10 => documents}/transaction.rs     |   6 +-
 .../documents/src/documents_grammar.pest      |   1 -
 lib/tools/documents/src/lib.rs                |  41 +---
 .../documents/src/text_document_traits.rs     | 132 ++++++++++
 43 files changed, 478 insertions(+), 413 deletions(-)
 create mode 100644 lib/modules/blockchain/blockchain/verify_block.rs
 rename lib/tools/documents/src/{v10 => documents}/block.rs (98%)
 rename lib/tools/documents/src/{v10 => documents}/certification.rs (99%)
 rename lib/tools/documents/src/{v10 => documents}/identity.rs (99%)
 rename lib/tools/documents/src/{v10 => documents}/membership.rs (99%)
 rename lib/tools/documents/src/{v10 => documents}/mod.rs (63%)
 rename lib/tools/documents/src/{v10 => documents}/revocation.rs (99%)
 rename lib/tools/documents/src/{v10 => documents}/transaction.rs (99%)
 create mode 100644 lib/tools/documents/src/text_document_traits.rs

diff --git a/lib/core/message/events.rs b/lib/core/message/events.rs
index fad3f21a..9bb86786 100644
--- a/lib/core/message/events.rs
+++ b/lib/core/message/events.rs
@@ -14,7 +14,8 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::*;
-use dubp_documents::v10::block::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::DUBPDocument;
 use dubp_documents::*;
 use duniter_network::events::NetworkEvent;
 
@@ -24,7 +25,9 @@ pub enum DursEvent {
     /// Arbitrary datas.
     ArbitraryDatas(ArbitraryDatas),
     /// Blockchain event
-    BlockchainEvent(BlockchainEvent),
+    BlockchainEvent(Box<BlockchainEvent>),
+    /// MemPool Event (local node find next block)
+    MemPoolEvent(MemPoolEvent),
     /// Network event
     NetworkEvent(Box<NetworkEvent>),
     /// Client API event
@@ -32,7 +35,16 @@ pub enum DursEvent {
 }
 
 #[derive(Debug, Clone)]
-/// Event to be transmitted to the other modules
+/// MemPool module events
+pub enum MemPoolEvent {
+    /// FindNextBlock (local node find next block)
+    FindNextBlock(Box<BlockDocument>),
+    /// Store new Blockhain Document in Pool
+    StoreNewDocInPool(Box<DUBPDocument>),
+}
+
+#[derive(Debug, Clone)]
+/// Blockchain module events
 pub enum BlockchainEvent {
     /// Stack up new valid block in local blockchain
     StackUpValidBlock(Box<BlockDocument>, Blockstamp),
diff --git a/lib/core/message/responses.rs b/lib/core/message/responses.rs
index 54479281..6449ef54 100644
--- a/lib/core/message/responses.rs
+++ b/lib/core/message/responses.rs
@@ -13,11 +13,11 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
-use dubp_documents::v10::membership::MembershipDocument;
-use dubp_documents::v10::revocation::RevocationDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
+use dubp_documents::documents::membership::MembershipDocument;
+use dubp_documents::documents::revocation::RevocationDocument;
 use dubp_documents::BlockId;
 use dubp_documents::Blockstamp;
 use duniter_module::ModuleReqId;
diff --git a/lib/core/network/documents.rs b/lib/core/network/documents.rs
index 05d351e2..a103c4d2 100644
--- a/lib/core/network/documents.rs
+++ b/lib/core/network/documents.rs
@@ -15,12 +15,12 @@
 
 //! Defined all network documents
 
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
-use dubp_documents::v10::membership::MembershipDocument;
-use dubp_documents::v10::revocation::RevocationDocument;
-use dubp_documents::v10::transaction::TransactionDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
+use dubp_documents::documents::membership::MembershipDocument;
+use dubp_documents::documents::revocation::RevocationDocument;
+use dubp_documents::documents::transaction::TransactionDocument;
 use dubp_documents::Document;
 use dubp_documents::{BlockHash, BlockId, Blockstamp};
 use serde_json;
diff --git a/lib/modules/blockchain/blockchain-dal/block.rs b/lib/modules/blockchain/blockchain-dal/block.rs
index 60ccb8a2..c6e95e05 100644
--- a/lib/modules/blockchain/blockchain-dal/block.rs
+++ b/lib/modules/blockchain/blockchain-dal/block.rs
@@ -15,7 +15,7 @@
 
 use super::constants::MAX_FORKS;
 use crate::*;
-use dubp_documents::v10::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
 use dubp_documents::Document;
 use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
 use dup_crypto::keys::*;
diff --git a/lib/modules/blockchain/blockchain-dal/currency_params.rs b/lib/modules/blockchain/blockchain-dal/currency_params.rs
index a695f05f..8e017048 100644
--- a/lib/modules/blockchain/blockchain-dal/currency_params.rs
+++ b/lib/modules/blockchain/blockchain-dal/currency_params.rs
@@ -15,7 +15,7 @@
 
 use crate::constants::*;
 use crate::*;
-use dubp_documents::v10::block::BlockV10Parameters;
+use dubp_documents::documents::block::BlockV10Parameters;
 use dubp_documents::CurrencyName;
 
 #[derive(Debug, Copy, Clone)]
diff --git a/lib/modules/blockchain/blockchain-dal/identity.rs b/lib/modules/blockchain/blockchain-dal/identity.rs
index eec031d7..7cdb0132 100644
--- a/lib/modules/blockchain/blockchain-dal/identity.rs
+++ b/lib/modules/blockchain/blockchain-dal/identity.rs
@@ -15,7 +15,7 @@
 
 use crate::currency_params::CurrencyParameters;
 use crate::{BinDB, DALError, IdentitiesV10Datas, MsExpirV10Datas};
-use dubp_documents::v10::identity::IdentityDocument;
+use dubp_documents::documents::identity::IdentityDocument;
 use dubp_documents::{BlockId, Blockstamp};
 use dup_crypto::keys::*;
 use durs_wot::NodeId;
diff --git a/lib/modules/blockchain/blockchain-dal/lib.rs b/lib/modules/blockchain/blockchain-dal/lib.rs
index 78468397..34b43e48 100644
--- a/lib/modules/blockchain/blockchain-dal/lib.rs
+++ b/lib/modules/blockchain/blockchain-dal/lib.rs
@@ -65,8 +65,8 @@ pub mod tools;
 /// Contains all write databases functions
 pub mod writers;
 
-use dubp_documents::v10::block::BlockV10Parameters;
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::block::BlockV10Parameters;
+use dubp_documents::documents::transaction::*;
 use dubp_documents::CurrencyName;
 use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
 use dup_crypto::hashs::Hash;
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs b/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
index 868c8286..760a38d8 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
+++ b/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
@@ -13,8 +13,10 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::certification::{CertificationDocument, CompactCertificationDocument};
-use dubp_documents::v10::TextDocumentFormat;
+use dubp_documents::documents::certification::{
+    CertificationDocument, CompactCertificationDocument,
+};
+use dubp_documents::text_document_traits::TextDocumentFormat;
 use dubp_documents::BlockId;
 use dup_crypto::keys::*;
 use serde_json;
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs b/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
index c47b486a..837e84b0 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
+++ b/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
@@ -13,8 +13,8 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::revocation::{CompactRevocationDocument, RevocationDocument};
-use dubp_documents::v10::TextDocumentFormat;
+use dubp_documents::documents::revocation::{CompactRevocationDocument, RevocationDocument};
+use dubp_documents::text_document_traits::TextDocumentFormat;
 use dup_crypto::keys::*;
 use serde_json;
 
diff --git a/lib/modules/blockchain/blockchain-dal/sources.rs b/lib/modules/blockchain/blockchain-dal/sources.rs
index 3055e9e8..64e5bc68 100644
--- a/lib/modules/blockchain/blockchain-dal/sources.rs
+++ b/lib/modules/blockchain/blockchain-dal/sources.rs
@@ -13,7 +13,7 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
 use dubp_documents::BlockId;
 use dup_crypto::hashs::Hash;
 use dup_crypto::keys::PubKey;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/certification.rs b/lib/modules/blockchain/blockchain-dal/writers/certification.rs
index de0f052b..555ee97b 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/certification.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/certification.rs
@@ -15,7 +15,7 @@
 
 use crate::currency_params::CurrencyParameters;
 use crate::{BinDB, CertsExpirV10Datas, DALError, IdentitiesV10Datas};
-use dubp_documents::v10::certification::CompactCertificationDocument;
+use dubp_documents::documents::certification::CompactCertificationDocument;
 use dubp_documents::BlockId;
 use dup_crypto::keys::*;
 use durs_wot::NodeId;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/dividend.rs b/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
index 94ca2645..46ef576b 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
@@ -15,7 +15,7 @@
 
 use crate::sources::SourceAmount;
 use crate::*;
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
 use dubp_documents::BlockId;
 use dup_crypto::keys::PubKey;
 use std::collections::{HashMap, HashSet};
diff --git a/lib/modules/blockchain/blockchain-dal/writers/identity.rs b/lib/modules/blockchain/blockchain-dal/writers/identity.rs
index fe9d9983..09d04757 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/identity.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/identity.rs
@@ -16,7 +16,7 @@
 use crate::currency_params::CurrencyParameters;
 use crate::identity::{DALIdentity, DALIdentityState};
 use crate::{BinDB, DALError, IdentitiesV10Datas, MsExpirV10Datas};
-use dubp_documents::v10::identity::IdentityDocument;
+use dubp_documents::documents::identity::IdentityDocument;
 use dubp_documents::Document;
 use dubp_documents::{BlockId, Blockstamp};
 use dup_crypto::keys::PubKey;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/requests.rs b/lib/modules/blockchain/blockchain-dal/writers/requests.rs
index 2ee202dc..09be3a2f 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/requests.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/requests.rs
@@ -19,8 +19,8 @@ use crate::identity::DALIdentity;
 use crate::sources::SourceAmount;
 use crate::writers::transaction::DALTxV10;
 use crate::*;
-use dubp_documents::v10::certification::CompactCertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
+use dubp_documents::documents::certification::CompactCertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
 use dubp_documents::Blockstamp;
 use dup_crypto::keys::PubKey;
 use durs_wot::NodeId;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/transaction.rs b/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
index 338f0579..a9e0b7c5 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
@@ -13,7 +13,7 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
 
 use crate::sources::{SourceAmount, SourceIndexV10, UTXOIndexV10, UTXOV10};
 use crate::*;
diff --git a/lib/modules/blockchain/blockchain/apply_valid_block.rs b/lib/modules/blockchain/blockchain/apply_valid_block.rs
index bd443de1..5242e879 100644
--- a/lib/modules/blockchain/blockchain/apply_valid_block.rs
+++ b/lib/modules/blockchain/blockchain/apply_valid_block.rs
@@ -13,8 +13,8 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::transaction::{TxAmount, TxBase};
-use dubp_documents::v10::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::transaction::{TxAmount, TxBase};
 use dubp_documents::BlockId;
 use dubp_documents::Document;
 use dup_crypto::keys::*;
diff --git a/lib/modules/blockchain/blockchain/check_and_apply_block.rs b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
index af4ee09e..4f869287 100644
--- a/lib/modules/blockchain/blockchain/check_and_apply_block.rs
+++ b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
@@ -16,6 +16,7 @@
 use std::collections::HashMap;
 
 use crate::apply_valid_block::*;
+use crate::verify_block::*;
 use crate::*;
 use dubp_documents::Document;
 use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
@@ -29,7 +30,7 @@ pub enum BlockError {
     BlockVersionNotSupported(),
     CompletedBlockError(CompletedBlockError),
     DALError(DALError),
-    //CheckBlockError(),
+    InvalidBlock(InvalidBlockError),
     ApplyValidBlockError(ApplyValidBlockError),
     NoForkAvailable(),
     UnknowError(),
@@ -62,6 +63,7 @@ pub fn check_and_apply_block<W: WebOfTrust>(
     wot_db: &BinDB<W>,
     forks_states: &[ForkStatus],
 ) -> Result<ValidBlockApplyReqs, BlockError> {
+    // Get BlockDocument && check if already have block
     let (block_doc, already_have_block) = match *block {
         Block::NetworkBlock(network_block) => match *network_block {
             NetworkBlock::V10(ref network_block_v10) => {
@@ -76,6 +78,8 @@ pub fn check_and_apply_block<W: WebOfTrust>(
         },
         Block::LocalBlock(block_doc) => (block_doc, true),
     };
+
+    // Check block chainability
     if (block_doc.number.0 == current_blockstamp.id.0 + 1
         && block_doc.previous_hash.to_string() == current_blockstamp.hash.0.to_string())
         || (block_doc.number.0 == 0 && *current_blockstamp == Blockstamp::default())
@@ -88,6 +92,7 @@ pub fn check_and_apply_block<W: WebOfTrust>(
         let blocks_expiring = Vec::with_capacity(0);
         let expire_certs =
             durs_blockchain_dal::certs::find_expire_certs(certs_db, blocks_expiring)?;
+
         // Try stack up block
         let mut old_fork_id = None;
         let block_doc = match *block {
@@ -100,6 +105,16 @@ pub fn check_and_apply_block<W: WebOfTrust>(
                 block_doc.clone()
             }
         };
+
+        // Verify block validity (check all protocol rule, very long !)
+        verify_block_validity(
+            &block_doc,
+            &blocks_databases.blockchain_db,
+            certs_db,
+            wot_index,
+            wot_db,
+        )?;
+
         return Ok(apply_valid_block(
             &block_doc,
             wot_index,
@@ -184,7 +199,7 @@ pub fn check_and_apply_block<W: WebOfTrust>(
         }
     } else {
         debug!(
-            "stackable_block : block {} not chainable and already stored !",
+            "stackable_block : block {} not chainable and already stored or out of forkWindowSize !",
             block_doc.blockstamp()
         );
     }
diff --git a/lib/modules/blockchain/blockchain/dbex.rs b/lib/modules/blockchain/blockchain/dbex.rs
index 1a388f6c..97fdb82a 100644
--- a/lib/modules/blockchain/blockchain/dbex.rs
+++ b/lib/modules/blockchain/blockchain/dbex.rs
@@ -14,7 +14,7 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::*;
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
 use duniter_module::DuniterConf;
 use dup_crypto::keys::*;
 use durs_blockchain_dal::identity::DALIdentity;
diff --git a/lib/modules/blockchain/blockchain/lib.rs b/lib/modules/blockchain/blockchain/lib.rs
index cad6c573..8b700682 100644
--- a/lib/modules/blockchain/blockchain/lib.rs
+++ b/lib/modules/blockchain/blockchain/lib.rs
@@ -38,6 +38,7 @@ mod dbex;
 mod revert_block;
 mod sync;
 mod ts_parsers;
+mod verify_block;
 
 use std::collections::HashMap;
 use std::ops::Deref;
@@ -49,9 +50,9 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
 use crate::apply_valid_block::*;
 use crate::check_and_apply_block::*;
 pub use crate::dbex::{DBExQuery, DBExTxQuery, DBExWotQuery};
-use dubp_documents::v10::{BlockDocument, V10Document};
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::DUBPDocument;
 use dubp_documents::*;
-use dubp_documents::{DUBPDocument, Document};
 use duniter_module::*;
 use duniter_network::{
     cli::sync::SyncOpt,
@@ -115,6 +116,16 @@ pub enum Block<'a> {
     LocalBlock(&'a BlockDocument),
 }
 
+impl<'a> Block<'a> {
+    /// Return blockstamp
+    pub fn blockstamp(&self) -> Blockstamp {
+        match *self {
+            Block::NetworkBlock(ref network_block) => network_block.blockstamp(),
+            Block::LocalBlock(ref block) => block.blockstamp(),
+        }
+    }
+}
+
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
 /// When synchronizing the blockchain, checking all rules at each block really takes a long time.
 /// The user is therefore offered a fast synchronization that checks only what is strictly necessary for indexing the data.
@@ -290,7 +301,7 @@ impl BlockchainModule {
         self.router_sender
             .send(RouterThreadMessage::ModuleMessage(DursMsg::Event {
                 event_type: module_event,
-                event_content: DursEvent::BlockchainEvent(event.clone()),
+                event_content: DursEvent::BlockchainEvent(Box::new(event.clone())),
             }))
             .unwrap_or_else(|_| panic!("Fail to send BlockchainEvent to router"));
     }
@@ -316,143 +327,103 @@ impl BlockchainModule {
         wot_index: &mut HashMap<PubKey, NodeId>,
         wot_db: &BinDB<W>,
     ) -> Blockstamp {
-        let mut blockchain_documents = Vec::new();
         let mut current_blockstamp = *current_blockstamp;
-        let mut save_blocks_dbs = false;
-        let mut save_wots_dbs = false;
-        let mut save_currency_dbs = false;
+
         for network_document in network_documents {
-            match *network_document {
-                BlockchainDocument::Block(ref network_block) => {
-                    match check_and_apply_block::<W>(
-                        &self.blocks_databases,
-                        &self.wot_databases.certs_db,
-                        &Block::NetworkBlock(network_block),
-                        &current_blockstamp,
-                        wot_index,
-                        wot_db,
-                        &self.forks_states,
-                    ) {
-                        Ok(ValidBlockApplyReqs(block_req, wot_dbs_reqs, currency_dbs_reqs)) => {
-                            let block_doc = network_block.uncompleted_block_doc().clone();
-                            // Apply wot dbs requests
-                            for req in &wot_dbs_reqs {
-                                req.apply(&self.wot_databases, &self.currency_params)
-                                    .expect(
+            if let BlockchainDocument::Block(ref network_block) = network_document {
+                match check_and_apply_block::<W>(
+                    &self.blocks_databases,
+                    &self.wot_databases.certs_db,
+                    &Block::NetworkBlock(network_block),
+                    &current_blockstamp,
+                    wot_index,
+                    wot_db,
+                    &self.forks_states,
+                ) {
+                    Ok(ValidBlockApplyReqs(block_req, wot_dbs_reqs, currency_dbs_reqs)) => {
+                        let block_doc = network_block.uncompleted_block_doc().clone();
+                        let mut save_wots_dbs = false;
+                        let mut save_currency_dbs = false;
+
+                        // Apply wot dbs requests
+                        for req in &wot_dbs_reqs {
+                            req.apply(&self.wot_databases, &self.currency_params)
+                                .expect(
                                     "Fatal error : fail to apply WotsDBsWriteQuery : DALError !",
                                 );
-                            }
-                            // Apply currency dbs requests
-                            for req in currency_dbs_reqs {
-                                req.apply(&self.currency_databases).expect(
-                                    "Fatal error : fail to apply CurrencyDBsWriteQuery : DALError !",
-                                );
-                            }
-                            // Write block
-                            block_req.apply(&self.blocks_databases, false).expect(
-                                "Fatal error : fail to write block in BlocksDBs : DALError !",
-                            );
-                            if let BlocksDBsWriteQuery::WriteBlock(_, _, _, block_hash) = block_req
-                            {
-                                info!("StackUpValidBlock({})", block_doc.number.0);
-                                self.send_event(&BlockchainEvent::StackUpValidBlock(
-                                    Box::new(block_doc.clone()),
-                                    Blockstamp {
-                                        id: block_doc.number,
-                                        hash: block_hash,
-                                    },
-                                ));
-                            }
-                            current_blockstamp = network_block.blockstamp();
-                            // Update forks states
-                            self.forks_states = durs_blockchain_dal::block::get_forks(
-                                &self.blocks_databases.forks_db,
-                                current_blockstamp,
-                            )
-                            .expect("get_forks() : DALError");
-                            save_blocks_dbs = true;
-                            if !wot_dbs_reqs.is_empty() {
-                                save_wots_dbs = true;
-                            }
-                            if !block_doc.transactions.is_empty()
-                                || (block_doc.dividend.is_some()
-                                    && block_doc.dividend.expect("safe unwrap") > 0)
-                            {
-                                save_currency_dbs = true;
-                            }
                         }
-                        Err(_) => {
-                            warn!(
-                                "RefusedBlock({})",
-                                network_block.uncompleted_block_doc().number.0
+                        // Apply currency dbs requests
+                        for req in currency_dbs_reqs {
+                            req.apply(&self.currency_databases).expect(
+                                "Fatal error : fail to apply CurrencyDBsWriteQuery : DALError !",
                             );
-                            self.send_event(&BlockchainEvent::RefusedPendingDoc(
-                                DUBPDocument::V10(Box::new(V10Document::Block(Box::new(
-                                    network_block.uncompleted_block_doc().clone(),
-                                )))),
+                        }
+                        // Write block
+                        block_req
+                            .apply(&self.blocks_databases, false)
+                            .expect("Fatal error : fail to write block in BlocksDBs : DALError !");
+                        if let BlocksDBsWriteQuery::WriteBlock(_, _, _, block_hash) = block_req {
+                            info!("StackUpValidBlock({})", block_doc.number.0);
+                            self.send_event(&BlockchainEvent::StackUpValidBlock(
+                                Box::new(block_doc.clone()),
+                                Blockstamp {
+                                    id: block_doc.number,
+                                    hash: block_hash,
+                                },
                             ));
                         }
+                        current_blockstamp = network_block.blockstamp();
+                        // Update forks states
+                        self.forks_states = durs_blockchain_dal::block::get_forks(
+                            &self.blocks_databases.forks_db,
+                            current_blockstamp,
+                        )
+                        .expect("get_forks() : DALError");
+
+                        if !wot_dbs_reqs.is_empty() {
+                            save_wots_dbs = true;
+                        }
+                        if !block_doc.transactions.is_empty()
+                            || (block_doc.dividend.is_some()
+                                && block_doc.dividend.expect("safe unwrap") > 0)
+                        {
+                            save_currency_dbs = true;
+                        }
+
+                        // Save databases
+                        self.blocks_databases.save_dbs();
+                        if save_wots_dbs {
+                            self.wot_databases.save_dbs();
+                        }
+                        if save_currency_dbs {
+                            self.currency_databases.save_dbs(true, true);
+                        }
+                    }
+                    Err(_) => {
+                        warn!(
+                            "RefusedBlock({})",
+                            network_block.uncompleted_block_doc().number.0
+                        );
+                        self.send_event(&BlockchainEvent::RefusedPendingDoc(DUBPDocument::Block(
+                            Box::new(network_block.uncompleted_block_doc().clone()),
+                        )));
                     }
-                }
-                BlockchainDocument::Identity(ref doc) => blockchain_documents.push(
-                    DUBPDocument::V10(Box::new(V10Document::Identity(doc.deref().clone()))),
-                ),
-                BlockchainDocument::Membership(ref doc) => blockchain_documents.push(
-                    DUBPDocument::V10(Box::new(V10Document::Membership(doc.deref().clone()))),
-                ),
-                BlockchainDocument::Certification(ref doc) => {
-                    blockchain_documents.push(DUBPDocument::V10(Box::new(
-                        V10Document::Certification(Box::new(doc.deref().clone())),
-                    )))
-                }
-                BlockchainDocument::Revocation(ref doc) => {
-                    blockchain_documents.push(DUBPDocument::V10(Box::new(V10Document::Revocation(
-                        Box::new(doc.deref().clone()),
-                    ))))
-                }
-                BlockchainDocument::Transaction(ref doc) => {
-                    blockchain_documents.push(DUBPDocument::V10(Box::new(
-                        V10Document::Transaction(Box::new(doc.deref().clone())),
-                    )))
                 }
             }
         }
-        if !blockchain_documents.is_empty() {
-            self.receive_documents(&blockchain_documents);
-        }
-        // Save databases
-        if save_blocks_dbs {
-            self.blocks_databases.save_dbs();
-        }
-        if save_wots_dbs {
-            self.wot_databases.save_dbs();
-        }
-        if save_currency_dbs {
-            self.currency_databases.save_dbs(true, true);
-        }
+
         current_blockstamp
     }
-    fn receive_documents(&self, documents: &[DUBPDocument]) {
-        debug!("BlockchainModule : receive_documents()");
-        for document in documents {
-            trace!("BlockchainModule : Treat one document.");
-            match *document {
-                DUBPDocument::V10(ref doc_v10) => match doc_v10.deref() {
-                    _ => {}
-                },
-                _ => self.send_event(&BlockchainEvent::RefusedPendingDoc(document.clone())),
-            }
-        }
-    }
+
     fn receive_blocks<W: WebOfTrust>(
         &mut self,
-        blocks_in_box: &[Box<NetworkBlock>],
+        blocks_in_box: &[Box<Block>],
         current_blockstamp: &Blockstamp,
         wot_index: &mut HashMap<PubKey, NodeId>,
         wot: &BinDB<W>,
     ) -> Blockstamp {
         debug!("BlockchainModule : receive_blocks()");
-        let blocks: Vec<&NetworkBlock> = blocks_in_box.iter().map(|b| b.deref()).collect();
+        let blocks: Vec<&Block> = blocks_in_box.iter().map(|b| b.deref()).collect();
         let mut current_blockstamp = *current_blockstamp;
         let mut save_blocks_dbs = false;
         let mut save_wots_dbs = false;
@@ -462,7 +433,7 @@ impl BlockchainModule {
                 check_and_apply_block::<W>(
                     &self.blocks_databases,
                     &self.wot_databases.certs_db,
-                    &Block::NetworkBlock(block),
+                    &block,
                     &current_blockstamp,
                     wot_index,
                     wot,
@@ -653,8 +624,16 @@ impl BlockchainModule {
                                     _ => {}
                                 }
                             }
-                            DursEvent::ReceiveValidDocsFromClient(ref docs) => {
-                                self.receive_documents(docs);
+                            DursEvent::MemPoolEvent(ref mempool_event) => {
+                                if let MemPoolEvent::FindNextBlock(next_block_box) = mempool_event {
+                                    let new_current_blockstamp = self.receive_blocks(
+                                        &[Box::new(Block::LocalBlock(next_block_box.deref()))],
+                                        &current_blockstamp,
+                                        &mut wot_index,
+                                        &wot_db,
+                                    );
+                                    current_blockstamp = new_current_blockstamp;
+                                }
                             }
                             _ => {} // Others modules events
                         },
@@ -710,8 +689,13 @@ impl BlockchainModule {
                                             if let NetworkResponse::Chunk(_, _, ref blocks) =
                                                 *network_response.deref()
                                             {
+                                                let blocks: Vec<Box<Block>> = blocks
+                                                    .iter()
+                                                    .map(|b| Box::new(Block::NetworkBlock(b)))
+                                                    .collect();
+
                                                 let new_current_blockstamp = self.receive_blocks(
-                                                    blocks,
+                                                    &blocks,
                                                     &current_blockstamp,
                                                     &mut wot_index,
                                                     &wot_db,
diff --git a/lib/modules/blockchain/blockchain/revert_block.rs b/lib/modules/blockchain/blockchain/revert_block.rs
index b1d7f7e8..4721eda0 100644
--- a/lib/modules/blockchain/blockchain/revert_block.rs
+++ b/lib/modules/blockchain/blockchain/revert_block.rs
@@ -13,8 +13,8 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::block::TxDocOrTxHash;
-use dubp_documents::v10::transaction::{TxAmount, TxBase};
+use dubp_documents::documents::block::TxDocOrTxHash;
+use dubp_documents::documents::transaction::{TxAmount, TxBase};
 use dubp_documents::Document;
 use dup_crypto::keys::*;
 use durs_blockchain_dal::block::DALBlock;
diff --git a/lib/modules/blockchain/blockchain/ts_parsers.rs b/lib/modules/blockchain/blockchain/ts_parsers.rs
index 1abf79ea..cf9a5ddd 100644
--- a/lib/modules/blockchain/blockchain/ts_parsers.rs
+++ b/lib/modules/blockchain/blockchain/ts_parsers.rs
@@ -14,11 +14,10 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::sync::BlockHeader;
-use dubp_documents::v10::block::{BlockV10Parameters, TxDocOrTxHash};
-use dubp_documents::v10::identity::*;
-use dubp_documents::v10::membership::*;
-use dubp_documents::v10::transaction::*;
-use dubp_documents::v10::*;
+use dubp_documents::documents::block::{BlockDocument, BlockV10Parameters, TxDocOrTxHash};
+use dubp_documents::documents::identity::*;
+use dubp_documents::documents::membership::*;
+use dubp_documents::documents::transaction::*;
 use dubp_documents::CurrencyName;
 use dubp_documents::DocumentBuilder;
 use dubp_documents::{BlockHash, BlockId, Blockstamp};
@@ -118,6 +117,7 @@ pub fn parse_ts_block(row: &[sqlite::Value]) -> NetworkBlock {
             .expect("Fail to parse excluded (2)");
     let uncompleted_block_doc = BlockDocument {
         nonce: row[17].as_integer().expect("Fail to parse nonce") as u64,
+        version: row[8].as_integer().expect("Fail to parse version") as u32,
         number: current_header.number,
         pow_min: row[15].as_integer().expect("Fail to parse pow_min") as usize,
         time: row[14].as_integer().expect("Fail to parse time") as u64,
diff --git a/lib/modules/blockchain/blockchain/verify_block.rs b/lib/modules/blockchain/blockchain/verify_block.rs
new file mode 100644
index 00000000..2d67b00f
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/verify_block.rs
@@ -0,0 +1,57 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::check_and_apply_block::BlockError;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::*;
+use dup_crypto::keys::PubKey;
+use durs_blockchain_dal::block::DALBlock;
+use durs_blockchain_dal::*;
+use durs_wot::*;
+use std::collections::HashMap;
+
+#[derive(Debug, Copy, Clone)]
+pub enum InvalidBlockError {
+    NoPreviousBlock,
+    VersionDecrease,
+}
+
+pub fn verify_block_validity<W: WebOfTrust>(
+    block: &BlockDocument,
+    blockchain_db: &BinDB<LocalBlockchainV10Datas>,
+    _certs_db: &BinDB<CertsExpirV10Datas>,
+    _wot_index: &mut HashMap<PubKey, NodeId>,
+    _wot_db: &BinDB<W>,
+) -> Result<(), BlockError> {
+    // Rules that do not concern genesis block
+    if block.number.0 > 0 {
+        // Get previous block
+        let previous_block_opt =
+            DALBlock::get_block_in_local_blockchain(blockchain_db, BlockId(block.number.0 - 1))?;
+
+        // Previous block must exist
+        if previous_block_opt.is_none() {
+            return Err(BlockError::InvalidBlock(InvalidBlockError::NoPreviousBlock));
+        }
+        let previous_block = previous_block_opt.expect("safe unwrap");
+
+        // Block version must not decrease
+        if previous_block.version > block.version {
+            return Err(BlockError::InvalidBlock(InvalidBlockError::VersionDecrease));
+        }
+    }
+
+    Ok(())
+}
diff --git a/lib/modules/skeleton/lib.rs b/lib/modules/skeleton/lib.rs
index 52679a7e..74c68c88 100644
--- a/lib/modules/skeleton/lib.rs
+++ b/lib/modules/skeleton/lib.rs
@@ -267,7 +267,7 @@ impl DursModule<DuRsConf, DursMsg> for SkeletonModule {
                                 ref event_content, ..
                             } => match *event_content {
                                 DursEvent::BlockchainEvent(ref blockchain_event) => {
-                                    match *blockchain_event {
+                                    match *blockchain_event.deref() {
                                         BlockchainEvent::StackUpValidBlock(
                                             ref _block,
                                             ref _blockstamp,
diff --git a/lib/modules/tui/lib.rs b/lib/modules/tui/lib.rs
index 57c6e016..ba30d292 100644
--- a/lib/modules/tui/lib.rs
+++ b/lib/modules/tui/lib.rs
@@ -526,7 +526,7 @@ impl DursModule<DuRsConf, DursMsg> for TuiModule {
                         DursMsg::Event {
                             ref event_content, ..
                         } => match *event_content {
-                            DursEvent::BlockchainEvent(ref dal_event) => match *dal_event {
+                            DursEvent::BlockchainEvent(ref dal_event) => match *dal_event.deref() {
                                 BlockchainEvent::StackUpValidBlock(ref _block, ref _blockstamp) => {
                                 }
                                 BlockchainEvent::RevertBlocks(ref _blocks) => {}
diff --git a/lib/modules/ws2p-v1-legacy/lib.rs b/lib/modules/ws2p-v1-legacy/lib.rs
index 9235cc02..c0755d7e 100644
--- a/lib/modules/ws2p-v1-legacy/lib.rs
+++ b/lib/modules/ws2p-v1-legacy/lib.rs
@@ -471,7 +471,7 @@ impl DursModule<DuRsConf, DursMsg> for WS2PModule {
                                 ref event_content, ..
                             } => {
                                 if let DursEvent::BlockchainEvent(ref bc_event) = *event_content {
-                                    match *bc_event {
+                                    match *bc_event.deref() {
                                         BlockchainEvent::StackUpValidBlock(
                                             ref _block,
                                             ref blockstamp,
@@ -933,7 +933,7 @@ impl DursModule<DuRsConf, DursMsg> for WS2PModule {
 mod tests {
     use super::parsers::blocks::parse_json_block;
     use super::*;
-    use dubp_documents::v10::BlockDocument;
+    use dubp_documents::documents::block::BlockDocument;
     use duniter_module::DursModule;
     use duniter_network::documents::NetworkBlock;
     use dup_crypto::keys::PublicKey;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/blocks.rs b/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
index dfbe67ac..9a8dd79c 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
@@ -1,9 +1,9 @@
 use super::excluded::parse_exclusions_from_json_value;
 use super::identities::parse_compact_identity;
 use super::transactions::parse_transaction;
-use dubp_documents::v10::block::{BlockV10Parameters, TxDocOrTxHash};
-use dubp_documents::v10::membership::*;
-use dubp_documents::v10::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::block::{BlockV10Parameters, TxDocOrTxHash};
+use dubp_documents::documents::membership::*;
 use dubp_documents::CurrencyName;
 use dubp_documents::{BlockHash, BlockId};
 use duniter_network::documents::{NetworkBlock, NetworkBlockV10};
@@ -106,6 +106,7 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
     }
     let block_doc = BlockDocument {
         nonce: source.get("nonce")?.as_i64()? as u64,
+        version: source.get("version")?.as_u64()? as u32,
         number: BlockId(source.get("number")?.as_u64()? as u32),
         pow_min: source.get("powMin")?.as_u64()? as usize,
         time: source.get("time")?.as_u64()?,
diff --git a/lib/modules/ws2p-v1-legacy/parsers/identities.rs b/lib/modules/ws2p-v1-legacy/parsers/identities.rs
index 10f3608a..02aab9a9 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/identities.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/identities.rs
@@ -1,4 +1,4 @@
-use dubp_documents::v10::identity::*;
+use dubp_documents::documents::identity::*;
 use dubp_documents::Blockstamp;
 use dubp_documents::DocumentBuilder;
 use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/memberships.rs b/lib/modules/ws2p-v1-legacy/parsers/memberships.rs
index 43ceddcb..a04d2d99 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/memberships.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/memberships.rs
@@ -1,4 +1,4 @@
-use dubp_documents::v10::membership::*;
+use dubp_documents::documents::membership::*;
 use dubp_documents::Blockstamp;
 use dubp_documents::DocumentBuilder;
 use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/mod.rs b/lib/modules/ws2p-v1-legacy/parsers/mod.rs
index 5392408a..ca9a42a8 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/mod.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/mod.rs
@@ -22,7 +22,7 @@ pub mod transactions;
 #[cfg(test)]
 mod tests {
     use super::transactions::*;
-    use dubp_documents::v10::transaction::*;
+    use dubp_documents::documents::transaction::*;
     use dubp_documents::Blockstamp;
     use dubp_documents::DocumentBuilder;
     use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/transactions.rs b/lib/modules/ws2p-v1-legacy/parsers/transactions.rs
index 0a665a38..f979aa73 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/transactions.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/transactions.rs
@@ -1,4 +1,4 @@
-use dubp_documents::v10::transaction::{
+use dubp_documents::documents::transaction::{
     TransactionDocument, TransactionDocumentBuilder, TransactionInput, TransactionInputUnlocks,
     TransactionOutput,
 };
diff --git a/lib/modules/ws2p/ws2p-messages/lib.rs b/lib/modules/ws2p/ws2p-messages/lib.rs
index d853927e..a8920a3e 100644
--- a/lib/modules/ws2p/ws2p-messages/lib.rs
+++ b/lib/modules/ws2p/ws2p-messages/lib.rs
@@ -150,7 +150,7 @@ mod tests {
     use crate::v2::WS2Pv2Message;
     use bincode;
     use bincode::{deserialize, serialize};
-    use dubp_documents::v10::certification::*;
+    use dubp_documents::documents::certification::*;
     use dubp_documents::{Blockstamp, CurrencyName};
     use dup_crypto::keys::bin_signable::BinSignable;
     use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs b/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs
index d136b8ea..92bce766 100644
--- a/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs
+++ b/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs
@@ -18,12 +18,12 @@ use super::ok::WS2Pv2OkMsg;
 use super::req_responses::WS2Pv2ReqRes;
 use super::requests::WS2Pv2Request;
 use super::secret_flags::WS2Pv2SecretFlagsMsg;
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
-use dubp_documents::v10::membership::MembershipDocument;
-use dubp_documents::v10::revocation::RevocationDocument;
-use dubp_documents::v10::transaction::TransactionDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
+use dubp_documents::documents::membership::MembershipDocument;
+use dubp_documents::documents::revocation::RevocationDocument;
+use dubp_documents::documents::transaction::TransactionDocument;
 use dup_crypto::hashs::Hash;
 use durs_network_documents::network_head_v2::NetworkHeadV2;
 use durs_network_documents::network_head_v3::NetworkHeadV3;
diff --git a/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs b/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs
index 4013a219..c90e4614 100644
--- a/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs
+++ b/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs
@@ -13,10 +13,10 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CompactCertificationDocument;
-use dubp_documents::v10::identity::CompactIdentityDocument;
-use dubp_documents::v10::membership::CompactPoolMembershipDoc;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CompactCertificationDocument;
+use dubp_documents::documents::identity::CompactIdentityDocument;
+use dubp_documents::documents::membership::CompactPoolMembershipDoc;
 use dubp_documents::Blockstamp;
 use dup_crypto::hashs::Hash;
 use std::str;
diff --git a/lib/tools/documents/src/v10/block.rs b/lib/tools/documents/src/documents/block.rs
similarity index 98%
rename from lib/tools/documents/src/v10/block.rs
rename to lib/tools/documents/src/documents/block.rs
index fcb30e62..38c4a65e 100644
--- a/lib/tools/documents/src/v10/block.rs
+++ b/lib/tools/documents/src/documents/block.rs
@@ -20,13 +20,13 @@ use dup_crypto::keys::*;
 use std::ops::Deref;
 
 use crate::blockstamp::Blockstamp;
-use crate::v10::certification::CertificationDocument;
-use crate::v10::identity::IdentityDocument;
-use crate::v10::membership::MembershipDocument;
-use crate::v10::revocation::RevocationDocument;
-use crate::v10::transaction::TransactionDocument;
-use crate::v10::*;
-use crate::*;
+use crate::documents::certification::CertificationDocument;
+use crate::documents::identity::IdentityDocument;
+use crate::documents::membership::MembershipDocument;
+use crate::documents::revocation::RevocationDocument;
+use crate::documents::transaction::TransactionDocument;
+use crate::documents::*;
+use crate::text_document_traits::*;
 
 #[derive(Debug, Clone)]
 /// Store error in block parameters parsing
@@ -192,6 +192,8 @@ impl TxDocOrTxHash {
 /// Must be created by parsing a text document or using a builder.
 #[derive(Clone, Debug, Deserialize, Serialize)]
 pub struct BlockDocument {
+    /// Version
+    pub version: u32,
     /// Nonce
     pub nonce: u64,
     /// number
@@ -496,7 +498,7 @@ impl TextDocument for BlockDocument {
 
 impl IntoSpecializedDocument<DUBPDocument> for BlockDocument {
     fn into_specialized(self) -> DUBPDocument {
-        DUBPDocument::V10(Box::new(V10Document::Block(Box::new(self))))
+        DUBPDocument::Block(Box::new(self))
     }
 }
 
@@ -511,6 +513,7 @@ mod tests {
     fn generate_and_verify_empty_block() {
         let mut block = BlockDocument {
             nonce: 100_010_200_000_006_940,
+            version: 10,
             number: BlockId(174_260),
             pow_min: 68,
             time: 1_525_296_873,
@@ -612,6 +615,7 @@ a9PHPuSfw7jW8FRQHXFsGi/bnLjbtDnTYvEVgUC9u0WlR7GVofa+Xb+l5iy6NwuEXiwvueAkf08wPVY8
 
         let mut block = BlockDocument {
             nonce: 0,
+            version: 10,
             number: BlockId(107_984),
             pow_min: 88,
             time: 1_522_685_861,
@@ -792,6 +796,7 @@ nxr4exGrt16jteN9ZX3XZPP9l+X0OUbZ1o/QjE1hbWQNtVU3HhH9SJoEvNj2iVl3gCRr9u2OA9uj9vCy
 
         let mut block = BlockDocument {
             nonce: 0,
+            version: 10,
             number: BlockId(165_647),
             pow_min: 90,
             time: 1_540_633_175,
diff --git a/lib/tools/documents/src/v10/certification.rs b/lib/tools/documents/src/documents/certification.rs
similarity index 99%
rename from lib/tools/documents/src/v10/certification.rs
rename to lib/tools/documents/src/documents/certification.rs
index de874e0a..a21db71c 100644
--- a/lib/tools/documents/src/v10/certification.rs
+++ b/lib/tools/documents/src/documents/certification.rs
@@ -19,8 +19,8 @@ use dup_crypto::keys::*;
 use pest::Parser;
 
 use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
 
 #[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
 /// Wrap an Compact Revocation document (in block content)
@@ -178,7 +178,7 @@ impl TextDocument for CertificationDocument {
 
 impl IntoSpecializedDocument<DUBPDocument> for CertificationDocument {
     fn into_specialized(self) -> DUBPDocument {
-        DUBPDocument::V10(Box::new(V10Document::Certification(Box::new(self))))
+        DUBPDocument::Certification(Box::new(self))
     }
 }
 
diff --git a/lib/tools/documents/src/v10/identity.rs b/lib/tools/documents/src/documents/identity.rs
similarity index 99%
rename from lib/tools/documents/src/v10/identity.rs
rename to lib/tools/documents/src/documents/identity.rs
index 503e44f2..3b01663f 100644
--- a/lib/tools/documents/src/v10/identity.rs
+++ b/lib/tools/documents/src/documents/identity.rs
@@ -17,9 +17,9 @@
 
 use pest::Parser;
 
-use crate::v10::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
 use crate::Blockstamp;
-use crate::*;
 
 /// Wrap an Identity document.
 ///
@@ -162,7 +162,7 @@ impl TextDocument for IdentityDocument {
 
 impl IntoSpecializedDocument<DUBPDocument> for IdentityDocument {
     fn into_specialized(self) -> DUBPDocument {
-        DUBPDocument::V10(Box::new(V10Document::Identity(self)))
+        DUBPDocument::Identity(self)
     }
 }
 
diff --git a/lib/tools/documents/src/v10/membership.rs b/lib/tools/documents/src/documents/membership.rs
similarity index 99%
rename from lib/tools/documents/src/v10/membership.rs
rename to lib/tools/documents/src/documents/membership.rs
index 47c4f017..e7b9ada1 100644
--- a/lib/tools/documents/src/v10/membership.rs
+++ b/lib/tools/documents/src/documents/membership.rs
@@ -19,8 +19,8 @@ use dup_crypto::keys::*;
 use pest::Parser;
 
 use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
 
 /// Type of a Membership.
 #[derive(Debug, Deserialize, Clone, Copy, Hash, Serialize, PartialEq, Eq)]
@@ -207,7 +207,7 @@ impl TextDocument for MembershipDocument {
 
 impl IntoSpecializedDocument<DUBPDocument> for MembershipDocument {
     fn into_specialized(self) -> DUBPDocument {
-        DUBPDocument::V10(Box::new(V10Document::Membership(self)))
+        DUBPDocument::Membership(self)
     }
 }
 
diff --git a/lib/tools/documents/src/v10/mod.rs b/lib/tools/documents/src/documents/mod.rs
similarity index 63%
rename from lib/tools/documents/src/v10/mod.rs
rename to lib/tools/documents/src/documents/mod.rs
index bf96fd99..2e151097 100644
--- a/lib/tools/documents/src/v10/mod.rs
+++ b/lib/tools/documents/src/documents/mod.rs
@@ -1,4 +1,4 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
+//  Copyright (C) 2018  The Durs Project Developers.
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as
@@ -13,7 +13,19 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-//! Provide wrappers around Duniter blockchain documents for protocol version 10.
+//! Implements the Durs blockchain documents.
+
+use crate::documents::block::*;
+use crate::documents::certification::*;
+use crate::documents::identity::*;
+use crate::documents::membership::*;
+use crate::documents::revocation::*;
+use crate::documents::transaction::*;
+use crate::Rule;
+use crate::*;
+
+use pest::iterators::Pair;
+use pest::Parser;
 
 pub mod block;
 pub mod certification;
@@ -22,40 +34,9 @@ pub mod membership;
 pub mod revocation;
 pub mod transaction;
 
-use dup_crypto::keys::PrivateKey;
-use pest::Parser;
-
-pub use crate::v10::block::BlockDocument;
-use crate::v10::certification::*;
-use crate::v10::identity::*;
-use crate::v10::membership::*;
-use crate::v10::revocation::*;
-use crate::v10::transaction::*;
-use crate::ToStringObject;
-use crate::*;
-
-#[derive(Clone, Debug, Deserialize, Serialize)]
-/// Contains a document in full or compact format
-pub enum TextDocumentFormat<D: TextDocument> {
-    /// Complete format (Allows to check the validity of the signature)
-    Complete(D),
-    /// Format present in the blocks (does not always allow to verify the signature)
-    Compact(D::CompactTextDocument_),
-}
-
-impl<D: TextDocument> TextDocumentFormat<D> {
-    /// To compact document
-    pub fn to_compact_document(&self) -> D::CompactTextDocument_ {
-        match *self {
-            TextDocumentFormat::Complete(ref doc) => doc.to_compact_document(),
-            TextDocumentFormat::Compact(ref compact_doc) => (*compact_doc).clone(),
-        }
-    }
-}
-
-/// List of wrapped document types.
+/// Document of DUBP (DUniter Blockchain Protocol)
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum V10Document {
+pub enum DUBPDocument {
     /// Block document.
     Block(Box<BlockDocument>),
 
@@ -77,7 +58,7 @@ pub enum V10Document {
 
 /// List of stringified document types.
 #[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum V10DocumentString {
+pub enum DUBPDocumentStr {
     /// Block document (not yet implemented)
     Block(),
 
@@ -97,57 +78,66 @@ pub enum V10DocumentString {
     Revocation(Box<RevocationStringDocument>),
 }
 
-impl ToStringObject for V10Document {
-    type StringObject = V10DocumentString;
+impl ToStringObject for DUBPDocument {
+    type StringObject = DUBPDocumentStr;
     /// Transforms an object into a json object
     fn to_string_object(&self) -> Self::StringObject {
         match *self {
-            V10Document::Block(_) => unimplemented!(),
-            V10Document::Identity(ref doc) => V10DocumentString::Identity(doc.to_string_object()),
-            V10Document::Membership(ref doc) => {
-                V10DocumentString::Membership(doc.to_string_object())
+            DUBPDocument::Block(_) => unimplemented!(),
+            DUBPDocument::Identity(ref doc) => DUBPDocumentStr::Identity(doc.to_string_object()),
+            DUBPDocument::Membership(ref doc) => {
+                DUBPDocumentStr::Membership(doc.to_string_object())
             }
-            V10Document::Certification(ref doc) => {
-                V10DocumentString::Certification(Box::new(doc.to_string_object()))
+            DUBPDocument::Certification(ref doc) => {
+                DUBPDocumentStr::Certification(Box::new(doc.to_string_object()))
             }
-            V10Document::Revocation(ref doc) => {
-                V10DocumentString::Revocation(Box::new(doc.to_string_object()))
+            DUBPDocument::Revocation(ref doc) => {
+                DUBPDocumentStr::Revocation(Box::new(doc.to_string_object()))
             }
-            V10Document::Transaction(ref doc) => {
-                V10DocumentString::Transaction(Box::new(doc.to_string_object()))
+            DUBPDocument::Transaction(ref doc) => {
+                DUBPDocumentStr::Transaction(Box::new(doc.to_string_object()))
             }
         }
     }
 }
 
-impl TextDocumentParser<Rule> for V10Document {
-    type DocumentType = V10Document;
+impl TextDocumentParser<Rule> for DUBPDocument {
+    type DocumentType = DUBPDocument;
 
-    fn parse(doc: &str) -> Result<Self::DocumentType, TextDocumentParseError> {
-        match DocumentsParser::parse(Rule::document_v10, doc) {
-            Ok(mut document_v10_pairs) => Ok(V10Document::from_pest_pair(
-                document_v10_pairs.next().unwrap(),
-            )), // get and unwrap the `document_v10` rule; never fails
+    fn parse(doc: &str) -> Result<DUBPDocument, TextDocumentParseError> {
+        match DocumentsParser::parse(Rule::document, doc) {
+            Ok(mut doc_pairs) => Ok(DUBPDocument::from_pest_pair(doc_pairs.next().unwrap())), // get and unwrap the `document` rule; never fails
             Err(pest_error) => Err(TextDocumentParseError::PestError(format!("{}", pest_error))),
         }
     }
     fn from_pest_pair(pair: Pair<Rule>) -> Self::DocumentType {
+        let doc_vx_pair = pair.into_inner().next().unwrap(); // get and unwrap the `document_vX` rule; never fails
+
+        match doc_vx_pair.as_rule() {
+            Rule::document_v10 => DUBPDocument::from_pest_pair_v10(doc_vx_pair),
+            _ => panic!("unexpected rule: {:?}", doc_vx_pair.as_rule()), // Grammar ensures that we never reach this line
+        }
+    }
+}
+
+impl DUBPDocument {
+    pub fn from_pest_pair_v10(pair: Pair<Rule>) -> DUBPDocument {
         let doc_type_v10_pair = pair.into_inner().next().unwrap(); // get and unwrap the `{DOC_TYPE}_v10` rule; never fails
 
         match doc_type_v10_pair.as_rule() {
-            Rule::idty_v10 => V10Document::Identity(
+            Rule::idty_v10 => DUBPDocument::Identity(
                 identity::IdentityDocumentParser::from_pest_pair(doc_type_v10_pair),
             ),
-            Rule::membership_v10 => V10Document::Membership(
+            Rule::membership_v10 => DUBPDocument::Membership(
                 membership::MembershipDocumentParser::from_pest_pair(doc_type_v10_pair),
             ),
-            Rule::cert_v10 => V10Document::Certification(Box::new(
+            Rule::cert_v10 => DUBPDocument::Certification(Box::new(
                 certification::CertificationDocumentParser::from_pest_pair(doc_type_v10_pair),
             )),
-            Rule::revoc_v10 => V10Document::Revocation(Box::new(
+            Rule::revoc_v10 => DUBPDocument::Revocation(Box::new(
                 revocation::RevocationDocumentParser::from_pest_pair(doc_type_v10_pair),
             )),
-            Rule::tx_v10 => V10Document::Transaction(Box::new(
+            Rule::tx_v10 => DUBPDocument::Transaction(Box::new(
                 transaction::TransactionDocumentParser::from_pest_pair(doc_type_v10_pair),
             )),
             _ => panic!("unexpected rule: {:?}", doc_type_v10_pair.as_rule()), // Grammar ensures that we never reach this line
@@ -155,121 +145,18 @@ impl TextDocumentParser<Rule> for V10Document {
     }
 }
 
-/// Trait for a compact V10 document.
-pub trait CompactTextDocument: Sized + Clone {
-    /// Generate document compact text.
-    /// the compact format is the one used in the blocks.
-    ///
-    /// - Don't contains leading signatures
-    /// - Contains line breaks on all line.
-    fn as_compact_text(&self) -> String;
-}
-
-impl<D: TextDocument> CompactTextDocument for TextDocumentFormat<D> {
-    fn as_compact_text(&self) -> String {
-        match *self {
-            TextDocumentFormat::Complete(ref doc) => doc.generate_compact_text(),
-            TextDocumentFormat::Compact(ref doc) => doc.as_compact_text(),
-        }
-    }
-}
-
-/// Trait for a V10 document.
-pub trait TextDocument: Document<PublicKey = PubKey, CurrencyType = str> {
-    /// Type of associated compact document.
-    type CompactTextDocument_: CompactTextDocument;
-
-    /// Return document as text.
-    fn as_text(&self) -> &str;
-
-    /// Return document as text without signature.
-    fn as_text_without_signature(&self) -> &str {
-        let text = self.as_text();
-        let mut lines: Vec<&str> = self.as_text().split('\n').collect();
-        let sigs = self.signatures();
-        let mut sigs_str_len = sigs.len() - 1;
-        for _ in sigs {
-            sigs_str_len += lines.pop().unwrap_or("").len();
-        }
-        &text[0..(text.len() - sigs_str_len)]
-    }
-
-    /*/// Return document as text with leading signatures.
-    fn as_text_with_signatures(&self) -> String {
-        let mut text = self.as_text().to_string();
-
-        for sig in self.signatures() {
-            text = format!("{}{}\n", text, sig.to_base64());
-        }
-
-        text
-    }*/
-
-    /// Generate compact document.
-    /// the compact format is the one used in the blocks.
-    /// - Don't contains leading signatures
-    fn to_compact_document(&self) -> Self::CompactTextDocument_;
-
-    /// Generate document compact text.
-    /// the compact format is the one used in the blocks.
-    ///
-    /// - Don't contains leading signatures
-    /// - Contains line breaks on all line.
-    fn generate_compact_text(&self) -> String {
-        self.to_compact_document().as_compact_text()
-    }
-}
-
-/// Trait for a V10 document builder.
-pub trait TextDocumentBuilder: DocumentBuilder {
-    /// Generate document text.
-    ///
-    /// - Don't contains leading signatures
-    /// - Contains line breaks on all line.
-    fn generate_text(&self) -> String;
-
-    /// Generate final document with signatures, and also return them in an array.
-    ///
-    /// Returns :
-    ///
-    /// - Text without signatures
-    /// - Signatures
-    fn build_signed_text(&self, private_keys: Vec<PrivKey>) -> (String, Vec<Sig>) {
-        let text = self.generate_text();
-
-        let signatures: Vec<_> = {
-            let text_bytes = text.as_bytes();
-            private_keys
-                .iter()
-                .map(|key| key.sign(text_bytes))
-                .collect()
-        };
-
-        (text, signatures)
-    }
-}
-
-/// V10 Documents in separated parts
-#[derive(Debug, Clone)]
-pub struct V10DocumentParts {
-    /// Whole document in text
-    pub doc: String,
-    /// Payload
-    pub body: String,
-    /// Currency
-    pub currency: String,
-    /// Signatures
-    pub signatures: Vec<Sig>,
-}
-
 #[cfg(test)]
 mod tests {
+    use crate::blockstamp::Blockstamp;
+    use crate::*;
+
     use super::certification::CertificationDocumentParser;
     use super::identity::IdentityDocumentParser;
     use super::membership::MembershipDocumentParser;
     use super::revocation::RevocationDocumentParser;
     use super::transaction::TransactionDocumentParser;
     use super::*;
+
     use dup_crypto::keys::*;
 
     // simple text document for signature testing
diff --git a/lib/tools/documents/src/v10/revocation.rs b/lib/tools/documents/src/documents/revocation.rs
similarity index 99%
rename from lib/tools/documents/src/v10/revocation.rs
rename to lib/tools/documents/src/documents/revocation.rs
index cd98ea22..0f3cbca7 100644
--- a/lib/tools/documents/src/v10/revocation.rs
+++ b/lib/tools/documents/src/documents/revocation.rs
@@ -19,8 +19,8 @@ use dup_crypto::keys::*;
 use pest::Parser;
 
 use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
 
 #[derive(Debug, Copy, Clone, Deserialize, Serialize, PartialEq, Eq)]
 /// Wrap an Compact Revocation document (in block content)
@@ -150,7 +150,7 @@ impl TextDocument for RevocationDocument {
 
 impl IntoSpecializedDocument<DUBPDocument> for RevocationDocument {
     fn into_specialized(self) -> DUBPDocument {
-        DUBPDocument::V10(Box::new(V10Document::Revocation(Box::new(self))))
+        DUBPDocument::Revocation(Box::new(self))
     }
 }
 
diff --git a/lib/tools/documents/src/v10/transaction.rs b/lib/tools/documents/src/documents/transaction.rs
similarity index 99%
rename from lib/tools/documents/src/v10/transaction.rs
rename to lib/tools/documents/src/documents/transaction.rs
index d56f5793..610524b4 100644
--- a/lib/tools/documents/src/v10/transaction.rs
+++ b/lib/tools/documents/src/documents/transaction.rs
@@ -23,8 +23,8 @@ use std::ops::{Add, Deref, Sub};
 use std::str::FromStr;
 
 use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
 
 /// Wrap a transaction amount
 #[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Hash, Serialize)]
@@ -713,7 +713,7 @@ impl TextDocument for TransactionDocument {
 
 impl IntoSpecializedDocument<DUBPDocument> for TransactionDocument {
     fn into_specialized(self) -> DUBPDocument {
-        DUBPDocument::V10(Box::new(V10Document::Transaction(Box::new(self))))
+        DUBPDocument::Transaction(Box::new(self))
     }
 }
 
diff --git a/lib/tools/documents/src/documents_grammar.pest b/lib/tools/documents/src/documents_grammar.pest
index 12f7d497..f9a3b960 100644
--- a/lib/tools/documents/src/documents_grammar.pest
+++ b/lib/tools/documents/src/documents_grammar.pest
@@ -16,7 +16,6 @@ uid = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "-" | "_")* }
 block_id = @{ u_int }
 blockstamp =  ${ block_id ~ "-" ~ hash }
 ed25519_sig = @{ base64{88} | (base64{87} ~ "=") | (base64{86} ~ "==") }
-//^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$
 
 v10 = _{ "Version: 10" }
 
diff --git a/lib/tools/documents/src/lib.rs b/lib/tools/documents/src/lib.rs
index 20308e47..43851551 100644
--- a/lib/tools/documents/src/lib.rs
+++ b/lib/tools/documents/src/lib.rs
@@ -1,4 +1,4 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
+//  Copyright (C) 2018  The Durs Project Developers.
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as
@@ -13,7 +13,7 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-//! Implements the Duniter Documents Protocol.
+//! Implements the Durs Documents Protocol.
 
 #![cfg_attr(feature = "strict", deny(warnings))]
 #![deny(
@@ -36,14 +36,15 @@ extern crate serde_derive;
 
 pub mod blockstamp;
 mod currencies_codes;
-pub mod v10;
+pub mod documents;
+pub mod text_document_traits;
 
 use crate::currencies_codes::*;
 use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
 use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use pest::iterators::Pair;
-use pest::{Parser, RuleType};
+use pest::RuleType;
 use serde::Serialize;
 use std::cmp::Ordering;
 use std::fmt::{Debug, Display, Error, Formatter};
@@ -80,36 +81,6 @@ pub enum TextDocumentParseError {
     UnknownType,
 }
 
-/// Document of DUBP (DUniter Blockhain Protocol)
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum DUBPDocument {
-    /// Version 10.
-    V10(Box<v10::V10Document>),
-    /// Version 11. (not done yet, but defined for tests)
-    V11(),
-}
-
-impl TextDocumentParser<Rule> for DUBPDocument {
-    type DocumentType = DUBPDocument;
-
-    fn parse(doc: &str) -> Result<DUBPDocument, TextDocumentParseError> {
-        match DocumentsParser::parse(Rule::document, doc) {
-            Ok(mut doc_pairs) => Ok(DUBPDocument::from_pest_pair(doc_pairs.next().unwrap())), // get and unwrap the `document` rule; never fails
-            Err(pest_error) => Err(TextDocumentParseError::PestError(format!("{}", pest_error))),
-        }
-    }
-    fn from_pest_pair(pair: Pair<Rule>) -> DUBPDocument {
-        let doc_vx_pair = pair.into_inner().next().unwrap(); // get and unwrap the `document_vX` rule; never fails
-
-        match doc_vx_pair.as_rule() {
-            Rule::document_v10 => {
-                DUBPDocument::V10(Box::new(v10::V10Document::from_pest_pair(doc_vx_pair)))
-            }
-            _ => panic!("unexpected rule: {:?}", doc_vx_pair.as_rule()), // Grammar ensures that we never reach this line
-        }
-    }
-}
-
 /// Currency name
 #[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize, Hash)]
 pub struct CurrencyName(pub String);
@@ -333,7 +304,7 @@ impl<T: ToStringObject> ToJsonObject for T {}
 #[cfg(test)]
 mod tests {
     use super::*;
-    //use dup_crypto::keys::*;
+    use crate::documents::DUBPDocument;
 
     #[test]
     fn parse_dubp_document() {
diff --git a/lib/tools/documents/src/text_document_traits.rs b/lib/tools/documents/src/text_document_traits.rs
new file mode 100644
index 00000000..2829905b
--- /dev/null
+++ b/lib/tools/documents/src/text_document_traits.rs
@@ -0,0 +1,132 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+//! Define the Text Document Traits.
+
+use crate::*;
+use dup_crypto::keys::*;
+
+#[derive(Clone, Debug, Deserialize, Serialize)]
+/// Contains a document in full or compact format
+pub enum TextDocumentFormat<D: TextDocument> {
+    /// Complete format (Allows to check the validity of the signature)
+    Complete(D),
+    /// Format present in the blocks (does not always allow to verify the signature)
+    Compact(D::CompactTextDocument_),
+}
+
+impl<D: TextDocument> TextDocumentFormat<D> {
+    /// To compact document
+    pub fn to_compact_document(&self) -> D::CompactTextDocument_ {
+        match *self {
+            TextDocumentFormat::Complete(ref doc) => doc.to_compact_document(),
+            TextDocumentFormat::Compact(ref compact_doc) => (*compact_doc).clone(),
+        }
+    }
+}
+
+/// Trait for a compact text document.
+pub trait CompactTextDocument: Sized + Clone {
+    /// Generate document compact text.
+    /// the compact format is the one used in the blocks.
+    ///
+    /// - Don't contains leading signatures
+    /// - Contains line breaks on all line.
+    fn as_compact_text(&self) -> String;
+}
+
+impl<D: TextDocument> CompactTextDocument for TextDocumentFormat<D> {
+    fn as_compact_text(&self) -> String {
+        match *self {
+            TextDocumentFormat::Complete(ref doc) => doc.generate_compact_text(),
+            TextDocumentFormat::Compact(ref doc) => doc.as_compact_text(),
+        }
+    }
+}
+
+/// Trait for a V10 document.
+pub trait TextDocument: Document<PublicKey = PubKey, CurrencyType = str> {
+    /// Type of associated compact document.
+    type CompactTextDocument_: CompactTextDocument;
+
+    /// Return document as text.
+    fn as_text(&self) -> &str;
+
+    /// Return document as text without signature.
+    fn as_text_without_signature(&self) -> &str {
+        let text = self.as_text();
+        let mut lines: Vec<&str> = self.as_text().split('\n').collect();
+        let sigs = self.signatures();
+        let mut sigs_str_len = sigs.len() - 1;
+        for _ in sigs {
+            sigs_str_len += lines.pop().unwrap_or("").len();
+        }
+        &text[0..(text.len() - sigs_str_len)]
+    }
+
+    /*/// Return document as text with leading signatures.
+    fn as_text_with_signatures(&self) -> String {
+        let mut text = self.as_text().to_string();
+
+        for sig in self.signatures() {
+            text = format!("{}{}\n", text, sig.to_base64());
+        }
+
+        text
+    }*/
+
+    /// Generate compact document.
+    /// the compact format is the one used in the blocks.
+    /// - Don't contains leading signatures
+    fn to_compact_document(&self) -> Self::CompactTextDocument_;
+
+    /// Generate document compact text.
+    /// the compact format is the one used in the blocks.
+    ///
+    /// - Don't contains leading signatures
+    /// - Contains line breaks on all line.
+    fn generate_compact_text(&self) -> String {
+        self.to_compact_document().as_compact_text()
+    }
+}
+
+/// Trait for a V10 document builder.
+pub trait TextDocumentBuilder: DocumentBuilder {
+    /// Generate document text.
+    ///
+    /// - Don't contains leading signatures
+    /// - Contains line breaks on all line.
+    fn generate_text(&self) -> String;
+
+    /// Generate final document with signatures, and also return them in an array.
+    ///
+    /// Returns :
+    ///
+    /// - Text without signatures
+    /// - Signatures
+    fn build_signed_text(&self, private_keys: Vec<PrivKey>) -> (String, Vec<Sig>) {
+        let text = self.generate_text();
+
+        let signatures: Vec<_> = {
+            let text_bytes = text.as_bytes();
+            private_keys
+                .iter()
+                .map(|key| key.sign(text_bytes))
+                .collect()
+        };
+
+        (text, signatures)
+    }
+}
-- 
GitLab


From a28b21b9c608627f3e5f9aed011fc418a64f7728 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 12 Jan 2019 23:10:38 +0100
Subject: [PATCH 02/26] [docs] add commit type deps

---
 doc/fr/conventions-git.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/doc/fr/conventions-git.md b/doc/fr/conventions-git.md
index 33d1e4e7..37bec85b 100644
--- a/doc/fr/conventions-git.md
+++ b/doc/fr/conventions-git.md
@@ -43,6 +43,7 @@ Exemple, renomage d'un trait `Toto` en `Titi` dans la crate `durs-bidule` :
 
 * `build` : Modification des script de build, de packaging ou/et de publication des livrables.
 * `ci` : Modification de la chaine d'intégration continue.
+* `deps` : Modification des dépendances sans modification du code : ce peut être pour mettre à jour des dépendances tierces ou pour supprimer des dépendances tierces qui ne sont plus utilisées.
 * `docs` : Modification de la documentation (y compris traduction et création de nouveau contenu).
 * `feat` : Développement d'une nouvelle fonctionnalitée.
 * `fix` : Correction d'un bug
-- 
GitLab


From 7b8b6bc9e40e5df95849983c93b7da19253d0d49 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 12 Jan 2019 23:10:59 +0100
Subject: [PATCH 03/26] [deps] upgrade pest dep

---
 lib/tools/documents/Cargo.toml         | 4 ++--
 lib/tools/network-documents/Cargo.toml | 4 ++--
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/lib/tools/documents/Cargo.toml b/lib/tools/documents/Cargo.toml
index 95f2c1dd..e4ecb19a 100644
--- a/lib/tools/documents/Cargo.toml
+++ b/lib/tools/documents/Cargo.toml
@@ -17,8 +17,8 @@ base58 = "0.1.*"
 base64 = "0.9.*"
 byteorder = "1.2.3"
 dup-crypto = { path = "../crypto" }
-pest = "2.0"
-pest_derive = "2.0"
+pest = "2.1.0"
+pest_derive = "2.1.0"
 serde = "1.0.*"
 serde_derive = "1.0.*"
 serde_json = "1.0.*"
diff --git a/lib/tools/network-documents/Cargo.toml b/lib/tools/network-documents/Cargo.toml
index 280664eb..0057cd19 100644
--- a/lib/tools/network-documents/Cargo.toml
+++ b/lib/tools/network-documents/Cargo.toml
@@ -14,8 +14,8 @@ base58 = "0.1.*"
 dup-crypto = { path = "../crypto" }
 dubp-documents= { path = "../documents" }
 hex = "0.3.*"
-pest = "2.0"
-pest_derive = "2.0"
+pest = "2.1.0"
+pest_derive = "2.1.0"
 serde = "1.0.*"
 serde_derive = "1.0.*"
 serde_json = "1.0.*"
-- 
GitLab


From 07cfed6cc259a3a4d2a296b2e3dce1301f27cfef Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 12 Jan 2019 23:06:05 +0100
Subject: [PATCH 04/26] [feat] add crate lib/tools/json-pest-parser

---
 Cargo.lock                                    |  57 ++--
 Cargo.toml                                    |   1 +
 lib/tools/json-pest-parser/Cargo.toml         |  21 ++
 .../json-pest-parser/src/json_grammar.pest    |  33 +++
 lib/tools/json-pest-parser/src/lib.rs         | 263 ++++++++++++++++++
 5 files changed, 346 insertions(+), 29 deletions(-)
 create mode 100644 lib/tools/json-pest-parser/Cargo.toml
 create mode 100644 lib/tools/json-pest-parser/src/json_grammar.pest
 create mode 100644 lib/tools/json-pest-parser/src/lib.rs

diff --git a/Cargo.lock b/Cargo.lock
index bfacaf71..ccbd3ad2 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -249,8 +249,8 @@ dependencies = [
  "base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "dup-crypto 0.5.0",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -407,8 +407,8 @@ dependencies = [
  "dubp-documents 0.11.0",
  "dup-crypto 0.5.0",
  "hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -638,6 +638,16 @@ name = "itoa"
 version = "0.4.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 
+[[package]]
+name = "json-pest-parser"
+version = "0.1.0"
+dependencies = [
+ "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "kernel32-sys"
 version = "0.2.2"
@@ -823,7 +833,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "pest"
-version = "2.0.2"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -831,32 +841,32 @@ dependencies = [
 
 [[package]]
 name = "pest_derive"
-version = "2.0.1"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_generator 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "pest_generator"
-version = "2.0.0"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_meta 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
  "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "pest_meta"
-version = "2.0.3"
+version = "2.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "sha-1 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1153,16 +1163,6 @@ dependencies = [
  "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
-[[package]]
-name = "syn"
-version = "0.14.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
 [[package]]
 name = "syn"
 version = "0.15.22"
@@ -1415,10 +1415,10 @@ dependencies = [
 "checksum openssl-sys 0.9.39 (registry+https://github.com/rust-lang/crates.io-index)" = "278c1ad40a89aa1e741a1eed089a2f60b18fab8089c3139b542140fc7d674106"
 "checksum pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "deb73390ab68d81992bd994d145f697451bb0b54fd39738e72eef32458ad6907"
 "checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
-"checksum pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a677051ad923732bb5c70f2d45f8985a96e3eee2e2bff86697e3b11b0c3fcfde"
-"checksum pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b76f477146419bc539a63f4ef40e902166cb43b3e51cecc71d9136fd12c567e7"
-"checksum pest_generator 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ebee4e9680be4fd162e6f3394ae4192a6b60b1e4d17d845e631f0c68d1a3386"
-"checksum pest_meta 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1f6d5f6f0e6082578c86af197d780dc38328e3f768cec06aac9bc46d714e8221"
+"checksum pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "54f0c72a98d8ab3c99560bfd16df8059cc10e1f9a8e83e6e3b97718dd766e9c3"
+"checksum pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
+"checksum pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646"
+"checksum pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5a3492a4ed208ffc247adcdcc7ba2a95be3104f58877d0d02f0df39bf3efb5e"
 "checksum pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "676e8eb2b1b4c9043511a9b7bea0915320d7e502b0a079fb03f9635a5252b18c"
 "checksum pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a029430f0d744bc3d15dd474d591bed2402b645d024583082b9f63bb936dac6"
 "checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
@@ -1455,7 +1455,6 @@ dependencies = [
 "checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
 "checksum structopt 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "41c4a2479a078509940d82773d90ff824a8c89533ab3b59cd3ce8b0c0e369c02"
 "checksum structopt-derive 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "5352090cfae7a2c85e1a31146268b53396106c88ca5d6ccee2e3fae83b6e35c2"
-"checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741"
 "checksum syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)" = "ae8b29eb5210bc5cf63ed6149cbf9adfc82ac0be023d8735c176ee74a2db4da7"
 "checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
 "checksum term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e6b677dd1e8214ea1ef4297f85dbcbed8e8cdddb561040cc998ca2551c37561"
diff --git a/Cargo.toml b/Cargo.toml
index 400aeae3..cabad887 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -15,6 +15,7 @@ members = [
     "lib/modules/ws2p/ws2p-messages",
     "lib/tools/crypto",
     "lib/tools/documents",
+    "lib/tools/json-pest-parser",
     "lib/tools/network-documents",
     "lib/tools/wot",
 ]
diff --git a/lib/tools/json-pest-parser/Cargo.toml b/lib/tools/json-pest-parser/Cargo.toml
new file mode 100644
index 00000000..607e03a2
--- /dev/null
+++ b/lib/tools/json-pest-parser/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "json-pest-parser"
+version = "0.1.0"
+authors = ["elois <elois@duniter.org>"]
+description = "Parse JSON with pest grammar."
+repository = "https://git.duniter.org/nodes/rust/duniter-rs"
+readme = "README.md"
+keywords = ["json", "pest", "parser"]
+license = "AGPL-3.0"
+edition = "2018"
+
+[lib]
+path = "src/lib.rs"
+
+[dependencies]
+pest = "2.1.0"
+pest_derive = "2.1.0"
+
+[dev-dependencies]
+pretty_assertions = "0.5.1"
+maplit = "1.0.1"
\ No newline at end of file
diff --git a/lib/tools/json-pest-parser/src/json_grammar.pest b/lib/tools/json-pest-parser/src/json_grammar.pest
new file mode 100644
index 00000000..adc4149c
--- /dev/null
+++ b/lib/tools/json-pest-parser/src/json_grammar.pest
@@ -0,0 +1,33 @@
+WHITESPACE = _{ " " | "\t" | "\r" | "\n" }
+
+null = { "null" }
+boolean = { "true" | "false" }
+number = @{
+    "-"?
+    ~ ("0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT*)
+    ~ ("." ~ ASCII_DIGIT*)?
+    ~ (^"e" ~ ("+" | "-")? ~ ASCII_DIGIT+)?
+}
+
+string = ${ "\"" ~ inner_string ~ "\"" }
+inner_string = @{ char* }
+char = {
+    !("\"" | "\\") ~ ANY
+    | "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t")
+    | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4})
+}
+
+object = {
+    "{" ~ "}" |
+    "{" ~ pair ~ ("," ~ pair)* ~ "}"
+}
+pair = { string ~ ":" ~ value }
+
+array = {
+    "[" ~ "]" |
+    "[" ~ value ~ ("," ~ value)* ~ "]"
+}
+
+value = _{ object | array | string | number | boolean | null }
+
+json = _{ SOI ~ (object | array) ~ EOI }
diff --git a/lib/tools/json-pest-parser/src/lib.rs b/lib/tools/json-pest-parser/src/lib.rs
new file mode 100644
index 00000000..c33c9436
--- /dev/null
+++ b/lib/tools/json-pest-parser/src/lib.rs
@@ -0,0 +1,263 @@
+//  Copyright (C) 2019  Éloïs SANCHEZ
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+//! Parse JSON String.
+
+#![deny(
+    missing_debug_implementations,
+    missing_copy_implementations,
+    trivial_casts,
+    trivial_numeric_casts,
+    unsafe_code,
+    unstable_features,
+    unused_import_braces
+)]
+
+#[macro_use]
+extern crate pest_derive;
+
+#[cfg(test)]
+#[macro_use]
+extern crate maplit;
+
+#[cfg(test)]
+#[macro_use]
+extern crate pretty_assertions;
+
+use pest::error::Error;
+use pest::iterators::Pair;
+use pest::Parser;
+use std::collections::HashMap;
+
+#[derive(Parser)]
+#[grammar = "json_grammar.pest"]
+struct JSONParser;
+
+#[derive(Debug, PartialEq)]
+pub enum JSONValue<'a> {
+    Object(HashMap<&'a str, JSONValue<'a>>),
+    Array(Vec<JSONValue<'a>>),
+    String(&'a str),
+    Number(f64),
+    Boolean(bool),
+    Null,
+}
+
+impl<'a> JSONValue<'a> {
+    pub fn is_object(&self) -> bool {
+        if let JSONValue::Object(_) = self {
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn to_object(&self) -> Option<&HashMap<&'a str, JSONValue<'a>>> {
+        if let JSONValue::Object(object) = self {
+            Some(object)
+        } else {
+            None
+        }
+    }
+
+    pub fn is_array(&self) -> bool {
+        if let JSONValue::Array(_) = self {
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn to_array(&self) -> Option<&Vec<JSONValue<'a>>> {
+        if let JSONValue::Array(array) = self {
+            Some(array)
+        } else {
+            None
+        }
+    }
+
+    pub fn is_str(&self) -> bool {
+        if let JSONValue::String(_) = self {
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn to_str(&self) -> Option<&'a str> {
+        if let JSONValue::String(string) = self {
+            Some(string)
+        } else {
+            None
+        }
+    }
+
+    pub fn is_number(&self) -> bool {
+        if let JSONValue::Number(_) = self {
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn to_number(&self) -> Option<f64> {
+        if let JSONValue::Number(number) = self {
+            Some(*number)
+        } else {
+            None
+        }
+    }
+
+    pub fn is_bool(&self) -> bool {
+        if let JSONValue::Boolean(_) = self {
+            true
+        } else {
+            false
+        }
+    }
+
+    pub fn to_bool(&self) -> Option<bool> {
+        if let JSONValue::Boolean(boolean) = self {
+            Some(*boolean)
+        } else {
+            None
+        }
+    }
+
+    pub fn is_null(&self) -> bool {
+        if let JSONValue::Null = self {
+            true
+        } else {
+            false
+        }
+    }
+}
+
+impl<'a> ToString for JSONValue<'a> {
+    fn to_string(&self) -> String {
+        match self {
+            JSONValue::Object(o) => {
+                let contents: Vec<_> = o
+                    .iter()
+                    .map(|(name, value)| format!("\"{}\":{}", name, value.to_string()))
+                    .collect();
+                format!("{{{}}}", contents.join(","))
+            }
+            JSONValue::Array(a) => {
+                let contents: Vec<_> = a.iter().map(Self::to_string).collect();
+                format!("[{}]", contents.join(","))
+            }
+            JSONValue::String(s) => format!("\"{}\"", s),
+            JSONValue::Number(n) => format!("{}", n),
+            JSONValue::Boolean(b) => format!("{}", b),
+            JSONValue::Null => "null".to_owned(),
+        }
+    }
+}
+
+pub fn parse_json_string(source: &str) -> Result<JSONValue, Error<Rule>> {
+    let json = JSONParser::parse(Rule::json, source)?.next().unwrap();
+
+    Ok(parse_value(json))
+}
+
+fn parse_value(pair: Pair<Rule>) -> JSONValue {
+    match pair.as_rule() {
+        Rule::object => JSONValue::Object(
+            pair.into_inner()
+                .map(|pair| {
+                    let mut inner_rules = pair.into_inner();
+                    let name = inner_rules
+                        .next()
+                        .unwrap()
+                        .into_inner()
+                        .next()
+                        .unwrap()
+                        .as_str();
+                    let value = parse_value(inner_rules.next().unwrap());
+                    (name, value)
+                })
+                .collect(),
+        ),
+        Rule::array => JSONValue::Array(pair.into_inner().map(parse_value).collect()),
+        Rule::string => JSONValue::String(pair.into_inner().next().unwrap().as_str()),
+        Rule::number => JSONValue::Number(pair.as_str().parse().unwrap()),
+        Rule::boolean => JSONValue::Boolean(pair.as_str().parse().unwrap()),
+        Rule::null => JSONValue::Null,
+        Rule::json
+        | Rule::EOI
+        | Rule::pair
+        | Rule::value
+        | Rule::inner_string
+        | Rule::char
+        | Rule::WHITESPACE => unreachable!(),
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn test_parse_json_string() {
+        let json_string = "{
+            \"name\": \"toto\",
+            \"age\": 25,
+            \"friends\": [
+                \"titi\",
+                \"tata\"
+            ]
+        }";
+
+        let json_value = parse_json_string(json_string).expect("Fail to parse json string !");
+
+        assert_eq!(
+            JSONValue::Object(hashmap![
+                "name" => JSONValue::String("toto"),
+                "age" => JSONValue::Number(25f64),
+                "friends" => JSONValue::Array(vec![JSONValue::String("titi"), JSONValue::String("tata"),])
+            ]),
+            json_value
+        );
+
+        assert!(json_value.is_object());
+
+        let json_object = json_value.to_object().expect("safe unwrap");
+
+        assert_eq!(json_object.get("name"), Some(&JSONValue::String("toto")));
+        assert_eq!(json_object.get("age"), Some(&JSONValue::Number(25f64)));
+
+        let friends = json_object
+            .get("friends")
+            .expect("frinds field must be exist")
+            .to_array()
+            .expect("frinds field must be an array");
+
+        assert_eq!(2, friends.len());
+        assert_eq!(
+            "titi",
+            friends[0]
+                .to_str()
+                .expect("friends field must be an array of String")
+        );
+        assert_eq!(
+            "tata",
+            friends[1]
+                .to_str()
+                .expect("friends field must be an array of String")
+        );
+    }
+
+}
-- 
GitLab


From bd92b949bea7100b7d7ce624e4619c022056b8a7 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 13 Jan 2019 15:09:10 +0100
Subject: [PATCH 05/26] [feat] sync: add currency param

---
 lib/core/network/cli/sync.rs | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/lib/core/network/cli/sync.rs b/lib/core/network/cli/sync.rs
index c39158ea..60b1508c 100644
--- a/lib/core/network/cli/sync.rs
+++ b/lib/core/network/cli/sync.rs
@@ -29,6 +29,9 @@ pub struct SyncOpt {
     /// The source type
     #[structopt(short = "t", long = "type", default_value = "ts")]
     pub source_type: SyncSourceType,
+    /// Currency
+    #[structopt(short = "c", long = "currency")]
+    pub currency: Option<String>,
     /// End block
     #[structopt(short = "e", long = "end")]
     pub end: Option<u32>,
-- 
GitLab


From 1301307dfc766bb5b15f04f46182e96d7bf9ff4d Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 13 Jan 2019 23:08:07 +0100
Subject: [PATCH 06/26] [feat] add crate common-tools

---
 Cargo.toml                        |  1 +
 lib/tools/common-tools/Cargo.toml | 19 +++++++++++++++
 lib/tools/common-tools/src/lib.rs | 40 +++++++++++++++++++++++++++++++
 3 files changed, 60 insertions(+)
 create mode 100644 lib/tools/common-tools/Cargo.toml
 create mode 100644 lib/tools/common-tools/src/lib.rs

diff --git a/Cargo.toml b/Cargo.toml
index cabad887..85a45af6 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,6 +14,7 @@ members = [
     "lib/modules/ws2p/ws2p",
     "lib/modules/ws2p/ws2p-messages",
     "lib/tools/crypto",
+    "lib/tools/common-tools",
     "lib/tools/documents",
     "lib/tools/json-pest-parser",
     "lib/tools/network-documents",
diff --git a/lib/tools/common-tools/Cargo.toml b/lib/tools/common-tools/Cargo.toml
new file mode 100644
index 00000000..12bce291
--- /dev/null
+++ b/lib/tools/common-tools/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "durs-common-tools"
+version = "0.1.0"
+authors = ["elois <elois@duniter.org>"]
+description = "Common rust tools for DURS project."
+repository = "https://git.duniter.org/nodes/rust/duniter-rs"
+readme = "README.md"
+keywords = ["durs", "tools"]
+license = "AGPL-3.0"
+edition = "2018"
+
+[lib]
+path = "src/lib.rs"
+
+[dependencies]
+log = "0.4.*"
+
+[dev-dependencies]
+pretty_assertions = "0.5.1"
diff --git a/lib/tools/common-tools/src/lib.rs b/lib/tools/common-tools/src/lib.rs
new file mode 100644
index 00000000..fb446fda
--- /dev/null
+++ b/lib/tools/common-tools/src/lib.rs
@@ -0,0 +1,40 @@
+//  Copyright (C) 2019  Éloïs SANCHEZ
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+//! Common rust tools for DURS project.
+
+#![deny(
+    missing_docs,
+    missing_debug_implementations,
+    missing_copy_implementations,
+    trivial_casts,
+    trivial_numeric_casts,
+    unsafe_code,
+    unstable_features,
+    unused_import_braces
+)]
+
+#[macro_use]
+extern crate log;
+
+/// Interrupt the program and log an error message
+pub fn fatal_error(msg: &str) {
+    if cfg!(feature = "log_panics") {
+        panic!(format!("Fatal Error : {}", msg));
+    } else {
+        error!("{}", &format!("Fatal Error : {}", msg));
+        panic!(format!("Fatal Error : {}", msg));
+    }
+}
-- 
GitLab


From aeee2ea65d43773359c1ddc6239dce02a22981df Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 20 Jan 2019 23:06:45 +0100
Subject: [PATCH 07/26] [feat] crypto: use failure for BaseConversionError

---
 lib/tools/crypto/Cargo.toml      | 1 +
 lib/tools/crypto/src/keys/mod.rs | 5 ++++-
 lib/tools/crypto/src/lib.rs      | 2 ++
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/lib/tools/crypto/Cargo.toml b/lib/tools/crypto/Cargo.toml
index 530dddd8..08f9b2f6 100644
--- a/lib/tools/crypto/Cargo.toml
+++ b/lib/tools/crypto/Cargo.toml
@@ -16,6 +16,7 @@ path = "src/lib.rs"
 base58 = "0.1.*"
 base64 = "0.10.*"
 bincode = "1.0.*"
+failure = "0.1.5"
 rand = "0.5.*"
 rust-crypto-wasm = "0.3.1"
 serde = "1.0.*"
diff --git a/lib/tools/crypto/src/keys/mod.rs b/lib/tools/crypto/src/keys/mod.rs
index f0b7f5b2..4fabbfed 100644
--- a/lib/tools/crypto/src/keys/mod.rs
+++ b/lib/tools/crypto/src/keys/mod.rs
@@ -74,12 +74,15 @@ pub trait GetKeysAlgo: Clone + Debug + PartialEq + Eq {
 }
 
 /// Errors enumeration for Base58/64 strings convertion.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Fail)]
 pub enum BaseConvertionError {
+    #[fail(display = "Data has an invalid key length.")]
     /// Data have invalid key length (found, expected).
     InvalidKeyLendth(usize, usize),
+    #[fail(display = "Invalid character.")]
     /// Base58 have an invalid character.
     InvalidCharacter(char, usize),
+    #[fail(display = "Invalid base converter length.")]
     /// Base58 have invalid lendth
     InvalidBaseConverterLength(),
 }
diff --git a/lib/tools/crypto/src/lib.rs b/lib/tools/crypto/src/lib.rs
index 3a824cc8..c7d4c63c 100644
--- a/lib/tools/crypto/src/lib.rs
+++ b/lib/tools/crypto/src/lib.rs
@@ -29,6 +29,8 @@
 )]
 #![allow(non_camel_case_types)]
 
+#[macro_use]
+extern crate failure;
 #[macro_use]
 extern crate serde_derive;
 
-- 
GitLab


From a6a1f48215e1970b6f01f04b9378746161996b57 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 20 Jan 2019 23:07:43 +0100
Subject: [PATCH 08/26] [feat] json-pest-parser: use failure for ParseJsonError

---
 lib/tools/json-pest-parser/Cargo.toml |  1 +
 lib/tools/json-pest-parser/src/lib.rs | 18 ++++++++++++++----
 2 files changed, 15 insertions(+), 4 deletions(-)

diff --git a/lib/tools/json-pest-parser/Cargo.toml b/lib/tools/json-pest-parser/Cargo.toml
index 607e03a2..5dc20657 100644
--- a/lib/tools/json-pest-parser/Cargo.toml
+++ b/lib/tools/json-pest-parser/Cargo.toml
@@ -13,6 +13,7 @@ edition = "2018"
 path = "src/lib.rs"
 
 [dependencies]
+failure = "0.1.5"
 pest = "2.1.0"
 pest_derive = "2.1.0"
 
diff --git a/lib/tools/json-pest-parser/src/lib.rs b/lib/tools/json-pest-parser/src/lib.rs
index c33c9436..53689544 100644
--- a/lib/tools/json-pest-parser/src/lib.rs
+++ b/lib/tools/json-pest-parser/src/lib.rs
@@ -25,6 +25,8 @@
     unused_import_braces
 )]
 
+#[macro_use]
+extern crate failure;
 #[macro_use]
 extern crate pest_derive;
 
@@ -36,7 +38,6 @@ extern crate maplit;
 #[macro_use]
 extern crate pretty_assertions;
 
-use pest::error::Error;
 use pest::iterators::Pair;
 use pest::Parser;
 use std::collections::HashMap;
@@ -167,10 +168,19 @@ impl<'a> ToString for JSONValue<'a> {
     }
 }
 
-pub fn parse_json_string(source: &str) -> Result<JSONValue, Error<Rule>> {
-    let json = JSONParser::parse(Rule::json, source)?.next().unwrap();
+#[derive(Debug, Fail)]
+#[fail(display = "Fail to parse JSON String : {:?}", cause)]
+pub struct ParseJsonError {
+    pub cause: String,
+}
 
-    Ok(parse_value(json))
+pub fn parse_json_string(source: &str) -> Result<JSONValue, ParseJsonError> {
+    match JSONParser::parse(Rule::json, source) {
+        Ok(mut pair) => Ok(parse_value(pair.next().unwrap())),
+        Err(pest_error) => Err(ParseJsonError {
+            cause: format!("{:?}", pest_error),
+        }),
+    }
 }
 
 fn parse_value(pair: Pair<Rule>) -> JSONValue {
-- 
GitLab


From cde7090f951be664081f8b0380800ab05696246d Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 20 Jan 2019 23:09:59 +0100
Subject: [PATCH 09/26] [feat] documents: use failure for BlockstampParseError

---
 lib/tools/documents/Cargo.toml        | 1 +
 lib/tools/documents/src/blockstamp.rs | 5 ++++-
 lib/tools/documents/src/lib.rs        | 2 ++
 3 files changed, 7 insertions(+), 1 deletion(-)

diff --git a/lib/tools/documents/Cargo.toml b/lib/tools/documents/Cargo.toml
index e4ecb19a..9da42dd4 100644
--- a/lib/tools/documents/Cargo.toml
+++ b/lib/tools/documents/Cargo.toml
@@ -17,6 +17,7 @@ base58 = "0.1.*"
 base64 = "0.9.*"
 byteorder = "1.2.3"
 dup-crypto = { path = "../crypto" }
+failure = "0.1.5"
 pest = "2.1.0"
 pest_derive = "2.1.0"
 serde = "1.0.*"
diff --git a/lib/tools/documents/src/blockstamp.rs b/lib/tools/documents/src/blockstamp.rs
index f800178e..a042bac9 100644
--- a/lib/tools/documents/src/blockstamp.rs
+++ b/lib/tools/documents/src/blockstamp.rs
@@ -20,13 +20,16 @@ use crate::*;
 /// Type of errors for [`BlockUId`] parsing.
 ///
 /// [`BlockUId`]: struct.BlockUId.html
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Fail)]
 pub enum BlockstampParseError {
     /// Given string have invalid format
+    #[fail(display = "Given string has an invalid format")]
     InvalidFormat(),
     /// [`BlockId`](struct.BlockHash.html) part is not a valid number.
+    #[fail(display = "BlockId part is not a valid number.")]
     InvalidBlockId(),
     /// [`BlockHash`](struct.BlockHash.html) part is not a valid hex number.
+    #[fail(display = "BlockHash part is not a valid hex number.")]
     InvalidBlockHash(),
 }
 
diff --git a/lib/tools/documents/src/lib.rs b/lib/tools/documents/src/lib.rs
index 43851551..410d89c7 100644
--- a/lib/tools/documents/src/lib.rs
+++ b/lib/tools/documents/src/lib.rs
@@ -26,6 +26,8 @@
     unused_import_braces
 )]
 
+#[macro_use]
+extern crate failure;
 #[macro_use]
 extern crate pest_derive;
 #[cfg(test)]
-- 
GitLab


From e24575c9294cf844b09f702bf14cb4b7692f8999 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 20 Jan 2019 23:13:33 +0100
Subject: [PATCH 10/26] [feat] documents: create json block parser use new lib
 json-pest-parser

---
 lib/modules/blockchain/blockchain-dal/lib.rs  |   3 -
 lib/tools/documents/Cargo.toml                |   1 +
 lib/tools/documents/src/lib.rs                |   1 +
 lib/tools/documents/src/parsers/blocks.rs     | 171 ++++++++++++++++++
 .../documents/src}/parsers/certifications.rs  |  21 +--
 .../documents/src/parsers/excluded.rs}        |  18 +-
 lib/tools/documents/src/parsers/identities.rs |  71 ++++++++
 .../documents/src/parsers/memberships.rs      |  56 ++++++
 lib/tools/documents/src/parsers/mod.rs        |  48 +++++
 .../documents/src}/parsers/revoked.rs         |  17 +-
 10 files changed, 374 insertions(+), 33 deletions(-)
 create mode 100644 lib/tools/documents/src/parsers/blocks.rs
 rename lib/{modules/blockchain/blockchain-dal => tools/documents/src}/parsers/certifications.rs (78%)
 rename lib/{modules/blockchain/blockchain-dal/parsers/mod.rs => tools/documents/src/parsers/excluded.rs} (57%)
 create mode 100644 lib/tools/documents/src/parsers/identities.rs
 create mode 100644 lib/tools/documents/src/parsers/memberships.rs
 create mode 100644 lib/tools/documents/src/parsers/mod.rs
 rename lib/{modules/blockchain/blockchain-dal => tools/documents/src}/parsers/revoked.rs (75%)

diff --git a/lib/modules/blockchain/blockchain-dal/lib.rs b/lib/modules/blockchain/blockchain-dal/lib.rs
index 34b43e48..66cf116f 100644
--- a/lib/modules/blockchain/blockchain-dal/lib.rs
+++ b/lib/modules/blockchain/blockchain-dal/lib.rs
@@ -53,9 +53,6 @@ pub mod currency_params;
 /// Identity operations
 pub mod identity;
 
-/// Parsers
-pub mod parsers;
-
 /// Define currency sources types
 pub mod sources;
 
diff --git a/lib/tools/documents/Cargo.toml b/lib/tools/documents/Cargo.toml
index 9da42dd4..f695d275 100644
--- a/lib/tools/documents/Cargo.toml
+++ b/lib/tools/documents/Cargo.toml
@@ -18,6 +18,7 @@ base64 = "0.9.*"
 byteorder = "1.2.3"
 dup-crypto = { path = "../crypto" }
 failure = "0.1.5"
+json-pest-parser = { path = "../json-pest-parser" }
 pest = "2.1.0"
 pest_derive = "2.1.0"
 serde = "1.0.*"
diff --git a/lib/tools/documents/src/lib.rs b/lib/tools/documents/src/lib.rs
index 410d89c7..3e6d06fb 100644
--- a/lib/tools/documents/src/lib.rs
+++ b/lib/tools/documents/src/lib.rs
@@ -39,6 +39,7 @@ extern crate serde_derive;
 pub mod blockstamp;
 mod currencies_codes;
 pub mod documents;
+pub mod parsers;
 pub mod text_document_traits;
 
 use crate::currencies_codes::*;
diff --git a/lib/tools/documents/src/parsers/blocks.rs b/lib/tools/documents/src/parsers/blocks.rs
new file mode 100644
index 00000000..2b0fa0ba
--- /dev/null
+++ b/lib/tools/documents/src/parsers/blocks.rs
@@ -0,0 +1,171 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::block::BlockDocument;
+use crate::documents::membership::MembershipType;
+use crate::parsers::*;
+use crate::*;
+use dup_crypto::hashs::Hash;
+use dup_crypto::keys::*;
+use failure::Error;
+use json_pest_parser::JSONValue;
+use std::collections::HashMap;
+
+pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error> {
+    if !json_block.is_object() {
+        return Err(ParseBlockError {
+            cause: "Json block must be an object !".to_owned(),
+        }
+        .into());
+    }
+
+    let json_block = json_block.to_object().expect("safe unwrap");
+
+    let currency = get_str(json_block, "currency")?;
+
+    Ok(BlockDocument {
+        version: get_number(json_block, "version")?.trunc() as u32,
+        nonce: get_number(json_block, "nonce")?.trunc() as u64,
+        number: BlockId(get_number(json_block, "number")?.trunc() as u32),
+        pow_min: get_number(json_block, "powMin")?.trunc() as usize,
+        time: get_number(json_block, "time")?.trunc() as u64,
+        median_time: get_number(json_block, "medianTime")?.trunc() as u64,
+        members_count: get_number(json_block, "membersCount")?.trunc() as usize,
+        monetary_mass: get_number(json_block, "monetaryMass")?.trunc() as usize,
+        unit_base: get_number(json_block, "unitbase")?.trunc() as usize,
+        issuers_count: get_number(json_block, "issuersCount")?.trunc() as usize,
+        issuers_frame: get_number(json_block, "issuersFrame")?.trunc() as isize,
+        issuers_frame_var: get_number(json_block, "issuersFrameVar")?.trunc() as isize,
+        currency: CurrencyName(currency.to_owned()),
+        issuers: vec![PubKey::Ed25519(ed25519::PublicKey::from_base58(get_str(
+            json_block, "issuer",
+        )?)?)],
+        signatures: vec![Sig::Ed25519(ed25519::Signature::from_base64(get_str(
+            json_block,
+            "signature",
+        )?)?)],
+        hash: Some(BlockHash(Hash::from_hex(get_str(json_block, "hash")?)?)),
+        parameters: None,
+        previous_hash: Hash::from_hex(get_str(json_block, "previousHash")?)?,
+        previous_issuer: Some(PubKey::Ed25519(ed25519::PublicKey::from_base58(get_str(
+            json_block,
+            "previousIssuer",
+        )?)?)),
+        inner_hash: Some(Hash::from_hex(get_str(json_block, "inner_hash")?)?),
+        dividend: get_optional_usize(json_block, "dividend")?,
+        identities: crate::parsers::identities::parse_compact_identities(
+            currency,
+            get_str_array(json_block, "identities")?,
+        )?,
+        joiners: crate::parsers::memberships::parse_compact_memberships(
+            currency,
+            MembershipType::In(),
+            &get_str_array(json_block, "joiners")?,
+        )?,
+        actives: crate::parsers::memberships::parse_compact_memberships(
+            currency,
+            MembershipType::In(),
+            &get_str_array(json_block, "actives")?,
+        )?,
+        leavers: crate::parsers::memberships::parse_compact_memberships(
+            currency,
+            MembershipType::Out(),
+            &get_str_array(json_block, "leavers")?,
+        )?,
+        revoked: crate::parsers::revoked::parse_revocations_into_compact(&get_str_array(
+            json_block, "revoked",
+        )?),
+        excluded: crate::parsers::excluded::parse_excluded(&get_str_array(
+            json_block, "excluded",
+        )?)?,
+        certifications: crate::parsers::certifications::parse_certifications_into_compact(
+            &get_str_array(json_block, "certifications")?,
+        ),
+        transactions: vec![],
+        inner_hash_and_nonce_str: "".to_owned(),
+    })
+}
+
+fn get_optional_usize(
+    json_block: &HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<Option<usize>, ParseBlockError> {
+    Ok(match json_block.get(field) {
+        Some(value) => {
+            if !value.is_null() {
+                Some(
+                    value
+                        .to_number()
+                        .ok_or_else(|| ParseBlockError {
+                            cause: format!("Json block {} field must be a number !", field),
+                        })?
+                        .trunc() as usize,
+                )
+            } else {
+                None
+            }
+        }
+        None => None,
+    })
+}
+
+fn get_number(json_block: &HashMap<&str, JSONValue>, field: &str) -> Result<f64, ParseBlockError> {
+    Ok(json_block
+        .get(field)
+        .ok_or_else(|| ParseBlockError {
+            cause: format!("Json block must have {} field !", field),
+        })?
+        .to_number()
+        .ok_or_else(|| ParseBlockError {
+            cause: format!("Json block {} field must be a number !", field),
+        })?)
+}
+
+fn get_str<'a>(
+    json_block: &'a HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<&'a str, ParseBlockError> {
+    Ok(json_block
+        .get(field)
+        .ok_or_else(|| ParseBlockError {
+            cause: format!("Json block must have {} field !", field),
+        })?
+        .to_str()
+        .ok_or_else(|| ParseBlockError {
+            cause: format!("Json block {} field must be a string !", field),
+        })?)
+}
+
+fn get_str_array<'a>(
+    json_block: &'a HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<Vec<&'a str>, ParseBlockError> {
+    json_block
+        .get(field)
+        .ok_or_else(|| ParseBlockError {
+            cause: format!("Json block must have {} field !", field),
+        })?
+        .to_array()
+        .ok_or_else(|| ParseBlockError {
+            cause: format!("Json block {} field must be an array !", field),
+        })?
+        .iter()
+        .map(|v| {
+            v.to_str().ok_or_else(|| ParseBlockError {
+                cause: format!("Json block {} field must be an array of string !", field),
+            })
+        })
+        .collect()
+}
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs b/lib/tools/documents/src/parsers/certifications.rs
similarity index 78%
rename from lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
rename to lib/tools/documents/src/parsers/certifications.rs
index 760a38d8..c8653ec3 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
+++ b/lib/tools/documents/src/parsers/certifications.rs
@@ -1,4 +1,4 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
+//  Copyright (C) 2018  The Durs Project Developers.
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as
@@ -13,25 +13,18 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::documents::certification::{
-    CertificationDocument, CompactCertificationDocument,
-};
-use dubp_documents::text_document_traits::TextDocumentFormat;
-use dubp_documents::BlockId;
+use crate::documents::certification::{CertificationDocument, CompactCertificationDocument};
+use crate::text_document_traits::TextDocumentFormat;
+use crate::BlockId;
 use dup_crypto::keys::*;
-use serde_json;
 
 /// Parse array of certification json documents into vector of `CompactCertificationDocument`
 pub fn parse_certifications_into_compact(
-    json_certs: &[serde_json::Value],
+    str_certs: &[&str],
 ) -> Vec<TextDocumentFormat<CertificationDocument>> {
     let mut certifications: Vec<TextDocumentFormat<CertificationDocument>> = Vec::new();
-    for certification in json_certs.iter() {
-        let certifications_datas: Vec<&str> = certification
-            .as_str()
-            .expect("Receive block in wrong format : fail to split cert !")
-            .split(':')
-            .collect();
+    for certification in str_certs {
+        let certifications_datas: Vec<&str> = certification.split(':').collect();
         if certifications_datas.len() == 4 {
             certifications.push(TextDocumentFormat::Compact(CompactCertificationDocument {
                 issuer: PubKey::Ed25519(
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/mod.rs b/lib/tools/documents/src/parsers/excluded.rs
similarity index 57%
rename from lib/modules/blockchain/blockchain-dal/parsers/mod.rs
rename to lib/tools/documents/src/parsers/excluded.rs
index 8da75d90..7c139b59 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/mod.rs
+++ b/lib/tools/documents/src/parsers/excluded.rs
@@ -1,4 +1,4 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
+//  Copyright (C) 2018  The Durs Project Developers.
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as
@@ -13,8 +13,16 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-/// Parsers for certifications event
-pub mod certifications;
+use dup_crypto::keys::*;
+use failure::Error;
 
-/// Parsers for revoked event
-pub mod revoked;
+/// Parse array of base58-encoded pubkey strings into a vector of `PubKey`
+pub fn parse_excluded(str_pubkeys: &[&str]) -> Result<Vec<PubKey>, Error> {
+    let mut excluded: Vec<PubKey> = Vec::with_capacity(str_pubkeys.len());
+    for str_pubkey in str_pubkeys {
+        excluded.push(PubKey::Ed25519(ed25519::PublicKey::from_base58(
+            str_pubkey,
+        )?))
+    }
+    Ok(excluded)
+}
diff --git a/lib/tools/documents/src/parsers/identities.rs b/lib/tools/documents/src/parsers/identities.rs
new file mode 100644
index 00000000..0c66a678
--- /dev/null
+++ b/lib/tools/documents/src/parsers/identities.rs
@@ -0,0 +1,71 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::identity::*;
+use crate::parsers::*;
+use crate::DocumentBuilder;
+use dup_crypto::keys::*;
+
+#[derive(Debug, Fail)]
+#[fail(display = "Fail to parse identity : {:?} !", cause)]
+pub struct ParseIdentityError {
+    pub cause: String,
+}
+
+/// Parse compact identities
+pub fn parse_compact_identities(
+    currency: &str,
+    str_identities: Vec<&str>,
+) -> Result<Vec<IdentityDocument>, ParseIdentityError> {
+    let mut identities = Vec::with_capacity(str_identities.len());
+
+    for str_identity in str_identities {
+        let idty_elements: Vec<&str> = str_identity.split(':').collect();
+        let issuer = match ed25519::PublicKey::from_base58(idty_elements[0]) {
+            Ok(pubkey) => PubKey::Ed25519(pubkey),
+            Err(_) => {
+                return Err(ParseIdentityError {
+                    cause: "invalid pubkey".to_owned(),
+                });
+            }
+        };
+        let signature = match ed25519::Signature::from_base64(idty_elements[1]) {
+            Ok(sig) => Sig::Ed25519(sig),
+            Err(_) => {
+                return Err(ParseIdentityError {
+                    cause: "invalid signature".to_owned(),
+                });
+            }
+        };
+        let blockstamp = match Blockstamp::from_string(idty_elements[2]) {
+            Ok(blockstamp) => blockstamp,
+            Err(_) => {
+                return Err(ParseIdentityError {
+                    cause: "invalid blockstamp".to_owned(),
+                });
+            }
+        };
+        let username = idty_elements[3];
+        let idty_doc_builder = IdentityDocumentBuilder {
+            currency,
+            username,
+            blockstamp: &blockstamp,
+            issuer: &issuer,
+        };
+        identities.push(idty_doc_builder.build_with_signature(vec![signature]))
+    }
+
+    Ok(identities)
+}
diff --git a/lib/tools/documents/src/parsers/memberships.rs b/lib/tools/documents/src/parsers/memberships.rs
new file mode 100644
index 00000000..b1687807
--- /dev/null
+++ b/lib/tools/documents/src/parsers/memberships.rs
@@ -0,0 +1,56 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::membership::*;
+use crate::parsers::*;
+use crate::DocumentBuilder;
+use dup_crypto::keys::*;
+use failure::Error;
+
+#[derive(Debug, Fail, Copy, Clone)]
+pub enum ParseMembershipError {
+    #[fail(display = "Fail to parse membership : wrong format !")]
+    WrongFormat,
+}
+
+/// Parse memberships documents from array of str
+pub fn parse_compact_memberships(
+    currency: &str,
+    membership_type: MembershipType,
+    array_memberships: &[&str],
+) -> Result<Vec<MembershipDocument>, Error> {
+    //let memberships: Vec<MembershipDocument> = Vec::new();
+    array_memberships
+        .iter()
+        .map(|membership| {
+            let membership_datas: Vec<&str> = membership.split(':').collect();
+            if membership_datas.len() == 5 {
+                let membership_doc_builder = MembershipDocumentBuilder {
+                    currency,
+                    issuer: &PubKey::Ed25519(ed25519::PublicKey::from_base58(membership_datas[0])?),
+                    blockstamp: &Blockstamp::from_string(membership_datas[2])?,
+                    membership: membership_type,
+                    identity_username: membership_datas[4],
+                    identity_blockstamp: &Blockstamp::from_string(membership_datas[3])?,
+                };
+                let membership_sig =
+                    Sig::Ed25519(ed25519::Signature::from_base64(membership_datas[1])?);
+                Ok(membership_doc_builder.build_with_signature(vec![membership_sig]))
+            } else {
+                Err(ParseMembershipError::WrongFormat.into())
+            }
+        })
+        .collect()
+}
diff --git a/lib/tools/documents/src/parsers/mod.rs b/lib/tools/documents/src/parsers/mod.rs
new file mode 100644
index 00000000..127f6eda
--- /dev/null
+++ b/lib/tools/documents/src/parsers/mod.rs
@@ -0,0 +1,48 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+/// Parsers for block
+pub mod blocks;
+
+/// Parsers for certifications
+pub mod certifications;
+
+/// Parsers for exclusions
+pub mod excluded;
+
+/// Parsers for identities
+pub mod identities;
+
+/// Parsers for memberships
+pub mod memberships;
+
+/// Parsers for revocations
+pub mod revoked;
+
+use crate::*;
+
+#[derive(Debug, Fail)]
+#[fail(display = "Fail to parse JSON Block : {:?}", cause)]
+pub struct ParseBlockError {
+    pub cause: String,
+}
+
+impl From<BaseConvertionError> for ParseBlockError {
+    fn from(_: BaseConvertionError) -> ParseBlockError {
+        ParseBlockError {
+            cause: "base conversion error".to_owned(),
+        }
+    }
+}
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs b/lib/tools/documents/src/parsers/revoked.rs
similarity index 75%
rename from lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
rename to lib/tools/documents/src/parsers/revoked.rs
index 837e84b0..6b57990f 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
+++ b/lib/tools/documents/src/parsers/revoked.rs
@@ -1,4 +1,4 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
+//  Copyright (C) 2018  The Durs Project Developers.
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as
@@ -13,22 +13,17 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use dubp_documents::documents::revocation::{CompactRevocationDocument, RevocationDocument};
-use dubp_documents::text_document_traits::TextDocumentFormat;
+use crate::documents::revocation::{CompactRevocationDocument, RevocationDocument};
+use crate::text_document_traits::TextDocumentFormat;
 use dup_crypto::keys::*;
-use serde_json;
 
 /// Parse array of revocations json documents into vector of `CompactRevocationDocument`
 pub fn parse_revocations_into_compact(
-    json_revocations: &[serde_json::Value],
+    str_revocations: &[&str],
 ) -> Vec<TextDocumentFormat<RevocationDocument>> {
     let mut revocations: Vec<TextDocumentFormat<RevocationDocument>> = Vec::new();
-    for revocation in json_revocations.iter() {
-        let revocations_datas: Vec<&str> = revocation
-            .as_str()
-            .expect("Receive block in wrong format !")
-            .split(':')
-            .collect();
+    for revocation in str_revocations {
+        let revocations_datas: Vec<&str> = revocation.split(':').collect();
         if revocations_datas.len() == 2 {
             revocations.push(TextDocumentFormat::Compact(CompactRevocationDocument {
                 issuer: PubKey::Ed25519(
-- 
GitLab


From 8b3a175855ee56e1f5f072220ca60f9e30af3feb Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 20 Jan 2019 23:19:04 +0100
Subject: [PATCH 11/26] [ref] documents: must verify block hashes without
 mutability

---
 lib/tools/documents/src/documents/block.rs | 45 ++++++++++++++++------
 1 file changed, 34 insertions(+), 11 deletions(-)

diff --git a/lib/tools/documents/src/documents/block.rs b/lib/tools/documents/src/documents/block.rs
index 38c4a65e..cad636cf 100644
--- a/lib/tools/documents/src/documents/block.rs
+++ b/lib/tools/documents/src/documents/block.rs
@@ -279,18 +279,44 @@ impl BlockDocument {
     pub fn compute_inner_hash(&mut self) {
         self.inner_hash = Some(Hash::compute_str(&self.generate_compact_inner_text()));
     }
-    /// Fill inner_hash_and_nonce_str
-    pub fn fill_inner_hash_and_nonce_str(&mut self, new_nonce: Option<u64>) {
-        if let Some(new_nonce) = new_nonce {
-            self.nonce = new_nonce;
+    /// Verify inner hash
+    pub fn verify_inner_hash(&self) -> bool {
+        match self.inner_hash {
+            Some(inner_hash) => {
+                inner_hash == Hash::compute_str(&self.generate_compact_inner_text())
+            }
+            None => false,
         }
-        self.inner_hash_and_nonce_str = format!(
+    }
+    // Generate the character string that will be hashed
+    fn generate_will_hashed_string(&self) -> String {
+        format!(
             "InnerHash: {}\nNonce: {}\n",
             self.inner_hash
                 .expect("Try to get inner_hash of an uncompleted or reduce block !")
                 .to_hex(),
             self.nonce
-        );
+        )
+    }
+    /// Verify block hash
+    pub fn verify_hash(&self) -> bool {
+        match self.hash {
+            Some(hash) => {
+                hash == BlockHash(Hash::compute_str(&format!(
+                    "{}{}\n",
+                    self.generate_will_hashed_string(),
+                    self.signatures[0]
+                )))
+            }
+            None => false,
+        }
+    }
+    /// Fill inner_hash_and_nonce_str
+    pub fn fill_inner_hash_and_nonce_str(&mut self, new_nonce: Option<u64>) {
+        if let Some(new_nonce) = new_nonce {
+            self.nonce = new_nonce;
+        }
+        self.inner_hash_and_nonce_str = self.generate_will_hashed_string();
     }
     /// Sign block
     pub fn sign(&mut self, privkey: PrivKey) {
@@ -300,11 +326,8 @@ impl BlockDocument {
     /// Compute hash
     pub fn compute_hash(&mut self) {
         self.hash = Some(BlockHash(Hash::compute_str(&format!(
-            "InnerHash: {}\nNonce: {}\n{}\n",
-            self.inner_hash
-                .expect("Try to get inner_hash of an uncompleted or reduce block !")
-                .to_hex(),
-            self.nonce,
+            "{}{}\n",
+            self.generate_will_hashed_string(),
             self.signatures[0]
         ))));
     }
-- 
GitLab


From 91e2f6f5dc15d12bf2ceee93999e9db7181fd1c3 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sun, 20 Jan 2019 23:22:47 +0100
Subject: [PATCH 12/26] [ref] whole: remove NetworkBlock & migrate ts sync to
 json sync

---
 Cargo.lock                                    |  29 +-
 lib/core/core/lib.rs                          |   7 +-
 lib/core/network/cli/sync.rs                  |  11 +-
 lib/core/network/documents.rs                 |  61 +-
 lib/core/network/requests.rs                  |   7 +-
 lib/modules/blockchain/blockchain/Cargo.toml  |   3 +
 .../blockchain/check_and_apply_block.rs       |  44 +-
 .../blockchain/blockchain/constants.rs        |  26 +
 lib/modules/blockchain/blockchain/lib.rs      | 142 ++--
 lib/modules/blockchain/blockchain/sync.rs     | 684 ------------------
 .../blockchain/sync/apply/blocks_worker.rs    | 109 +++
 .../blockchain/blockchain/sync/apply/mod.rs   |  18 +
 .../blockchain/sync/apply/txs_worker.rs       |  59 ++
 .../blockchain/sync/apply/wot_worker.rs       |  63 ++
 .../sync/download/json_reader_worker.rs       | 240 ++++++
 .../blockchain/sync/download/mod.rs           |  16 +
 lib/modules/blockchain/blockchain/sync/mod.rs | 423 +++++++++++
 .../blockchain/blockchain/ts_parsers.rs       | 179 -----
 lib/modules/ws2p-v1-legacy/lib.rs             |  12 +-
 lib/modules/ws2p-v1-legacy/parsers/blocks.rs  |  30 +-
 lib/modules/ws2p-v1-legacy/ws2p_connection.rs |   4 +-
 21 files changed, 1102 insertions(+), 1065 deletions(-)
 create mode 100644 lib/modules/blockchain/blockchain/constants.rs
 delete mode 100644 lib/modules/blockchain/blockchain/sync.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/apply/mod.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/download/mod.rs
 create mode 100644 lib/modules/blockchain/blockchain/sync/mod.rs

diff --git a/Cargo.lock b/Cargo.lock
index ccbd3ad2..7f4aacbf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -249,6 +249,8 @@ dependencies = [
  "base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "dup-crypto 0.5.0",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "json-pest-parser 0.1.0",
  "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -328,6 +330,7 @@ dependencies = [
  "base58 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "base64 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rust-crypto-wasm 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -356,9 +359,12 @@ dependencies = [
  "duniter-network 0.1.0-a0.1",
  "dup-crypto 0.5.0",
  "durs-blockchain-dal 0.1.0-a0.1",
+ "durs-common-tools 0.1.0",
  "durs-message 0.1.0-a0.1",
  "durs-network-documents 0.3.0",
  "durs-wot 0.8.0-a0.9",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "json-pest-parser 0.1.0",
  "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -384,6 +390,14 @@ dependencies = [
  "serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
+[[package]]
+name = "durs-common-tools"
+version = "0.1.0"
+dependencies = [
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
 [[package]]
 name = "durs-message"
 version = "0.1.0-a0.1"
@@ -528,16 +542,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "failure"
-version = "0.1.3"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
 name = "failure_derive"
-version = "0.1.3"
+version = "0.1.5"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -642,6 +656,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 name = "json-pest-parser"
 version = "0.1.0"
 dependencies = [
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -985,7 +1000,7 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
@@ -1026,7 +1041,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
@@ -1377,8 +1392,8 @@ dependencies = [
 "checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
 "checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
 "checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
-"checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"
-"checksum failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"
+"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2"
+"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1"
 "checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
 "checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
 "checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
diff --git a/lib/core/core/lib.rs b/lib/core/core/lib.rs
index 5465108f..22eab0c2 100644
--- a/lib/core/core/lib.rs
+++ b/lib/core/core/lib.rs
@@ -241,7 +241,9 @@ impl<'a, 'b: 'a> DuniterCore<'b, 'a, DuRsConf> {
         init_logger(profile.as_str(), self.soft_meta_datas.soft_name, &cli_args);
 
         // Print panic! in logs
-        log_panics::init();
+        if cfg!(feature = "log_panics") {
+            log_panics::init();
+        }
 
         // Load global conf
         let (conf, keypairs) = duniter_conf::load_conf(profile.as_str());
@@ -296,10 +298,9 @@ impl<'a, 'b: 'a> DuniterCore<'b, 'a, DuRsConf> {
             let opts = SyncOpt::from_clap(matches);
             match opts.source_type {
                 SyncSourceType::Network => unimplemented!(),
-                SyncSourceType::TsSqlDb => {
+                SyncSourceType::LocalDuniter => {
                     sync_ts(profile.as_str(), &conf, &opts);
                 }
-                SyncSourceType::JsonFiles => unimplemented!(),
             }
 
             false
diff --git a/lib/core/network/cli/sync.rs b/lib/core/network/cli/sync.rs
index 60b1508c..bc8f983a 100644
--- a/lib/core/network/cli/sync.rs
+++ b/lib/core/network/cli/sync.rs
@@ -24,7 +24,7 @@ use std::str::FromStr;
 )]
 /// Synchronization from network
 pub struct SyncOpt {
-    /// The source of datas (url of the node from which to synchronize OR path to local file)
+    /// The source of datas (url of the node from which to synchronize OR path to local folder)
     pub source: Option<String>,
     /// The source type
     #[structopt(short = "t", long = "type", default_value = "ts")]
@@ -48,10 +48,8 @@ pub struct SyncOpt {
 pub enum SyncSourceType {
     /// Sync from network
     Network,
-    /// Sync from Duniter-ts sqlite bdd
-    TsSqlDb,
-    /// Sync from json blocks in files
-    JsonFiles,
+    /// Sync from local Duniter json blocks in files
+    LocalDuniter,
 }
 
 impl FromStr for SyncSourceType {
@@ -60,8 +58,7 @@ impl FromStr for SyncSourceType {
     fn from_str(source: &str) -> Result<Self, Self::Err> {
         match source {
             "n" | "network" => Ok(SyncSourceType::Network),
-            "ts" | "ts-sql" => Ok(SyncSourceType::TsSqlDb),
-            "json" => Ok(SyncSourceType::JsonFiles),
+            "ts" | "duniter" => Ok(SyncSourceType::LocalDuniter),
             &_ => Err("Unknown source type".to_owned()),
         }
     }
diff --git a/lib/core/network/documents.rs b/lib/core/network/documents.rs
index a103c4d2..cda76e59 100644
--- a/lib/core/network/documents.rs
+++ b/lib/core/network/documents.rs
@@ -21,72 +21,13 @@ use dubp_documents::documents::identity::IdentityDocument;
 use dubp_documents::documents::membership::MembershipDocument;
 use dubp_documents::documents::revocation::RevocationDocument;
 use dubp_documents::documents::transaction::TransactionDocument;
-use dubp_documents::Document;
-use dubp_documents::{BlockHash, BlockId, Blockstamp};
 use serde_json;
-use std::ops::Deref;
-
-#[derive(Debug, Clone)]
-/// Block v10 in network format (Some events require a blockchain access to reconstitute the corresponding document)
-pub struct NetworkBlockV10 {
-    /// Uncompleted block document
-    pub uncompleted_block_doc: BlockDocument,
-    /// revoked
-    pub revoked: Vec<serde_json::Value>,
-    /// certifications
-    pub certifications: Vec<serde_json::Value>,
-}
-
-#[derive(Debug, Clone)]
-/// Block in network format (Some events require a blockchain access to reconstitute the corresponding document)
-pub enum NetworkBlock {
-    /// Block V1
-    V10(Box<NetworkBlockV10>),
-    /// Block V11
-    V11(),
-}
-
-impl NetworkBlock {
-    /// Return uncompleted block document
-    pub fn uncompleted_block_doc(&self) -> BlockDocument {
-        match *self {
-            NetworkBlock::V10(ref network_block_v10) => {
-                network_block_v10.deref().uncompleted_block_doc.clone()
-            }
-            _ => panic!("Block version not supported !"),
-        }
-    }
-    /// Return blockstamp
-    pub fn blockstamp(&self) -> Blockstamp {
-        match *self {
-            NetworkBlock::V10(ref network_block_v10) => {
-                network_block_v10.deref().uncompleted_block_doc.blockstamp()
-            }
-            _ => panic!("Block version not supported !"),
-        }
-    }
-    /// Return previous blockstamp
-    pub fn previous_blockstamp(&self) -> Blockstamp {
-        match *self {
-            NetworkBlock::V10(ref network_block_v10) => Blockstamp {
-                id: BlockId(network_block_v10.deref().uncompleted_block_doc.number.0 - 1),
-                hash: BlockHash(
-                    network_block_v10
-                        .deref()
-                        .uncompleted_block_doc
-                        .previous_hash,
-                ),
-            },
-            _ => panic!("Block version not supported !"),
-        }
-    }
-}
 
 #[derive(Debug, Clone)]
 /// Network Document
 pub enum BlockchainDocument {
     /// Network Block
-    Block(NetworkBlock),
+    Block(Box<BlockDocument>),
     /// Identity Document
     Identity(Box<IdentityDocument>),
     /// Membership Document
diff --git a/lib/core/network/requests.rs b/lib/core/network/requests.rs
index 2f02a3fd..6d0033a2 100644
--- a/lib/core/network/requests.rs
+++ b/lib/core/network/requests.rs
@@ -17,6 +17,7 @@
 
 use crate::documents::*;
 use crate::*;
+use dubp_documents::documents::block::BlockDocument;
 use dubp_documents::Blockstamp;
 
 #[derive(Debug, Copy, Clone)]
@@ -72,11 +73,11 @@ pub enum OldNetworkRequestError {
 /// Type containing the response to a network request
 pub enum NetworkResponse {
     /// CurrentBlock
-    CurrentBlock(ModuleReqFullId, NodeFullId, Box<NetworkBlock>),
+    CurrentBlock(ModuleReqFullId, NodeFullId, Box<BlockDocument>),
     /// Block
-    Block(ModuleReqFullId, NodeFullId, Box<NetworkBlock>),
+    Block(ModuleReqFullId, NodeFullId, Box<BlockDocument>),
     /// Chunk
-    Chunk(ModuleReqFullId, NodeFullId, Vec<Box<NetworkBlock>>),
+    Chunk(ModuleReqFullId, NodeFullId, Vec<Box<BlockDocument>>),
     /// PendingDocuments
     PendingDocuments(ModuleReqFullId, Vec<BlockchainDocument>),
     /// Consensus
diff --git a/lib/modules/blockchain/blockchain/Cargo.toml b/lib/modules/blockchain/blockchain/Cargo.toml
index c8bf0556..611971f2 100644
--- a/lib/modules/blockchain/blockchain/Cargo.toml
+++ b/lib/modules/blockchain/blockchain/Cargo.toml
@@ -15,11 +15,14 @@ duniter-conf = { path = "../../../core/conf" }
 dup-crypto = { path = "../../../tools/crypto" }
 durs-blockchain-dal = { path = "../blockchain-dal" }
 dubp-documents= { path = "../../../tools/documents" }
+durs-common-tools = { path = "../../../tools/common-tools" }
 durs-network-documents = { path = "../../../tools/network-documents" }
 durs-message =  { path = "../../../core/message" }
 duniter-module = { path = "../../../core/module" }
 duniter-network = { path = "../../../core/network" }
 durs-wot = { path = "../../../tools/wot" }
+failure = "0.1.5"
+json-pest-parser = { path = "../../../tools/json-pest-parser" }
 log = "0.4.*"
 num_cpus = "1.8.*"
 pbr = "1.0.*"
diff --git a/lib/modules/blockchain/blockchain/check_and_apply_block.rs b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
index 4f869287..459ef535 100644
--- a/lib/modules/blockchain/blockchain/check_and_apply_block.rs
+++ b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
@@ -20,15 +20,13 @@ use crate::verify_block::*;
 use crate::*;
 use dubp_documents::Document;
 use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
-use duniter_network::documents::NetworkBlock;
 use dup_crypto::keys::*;
 use durs_blockchain_dal::block::DALBlock;
 use durs_blockchain_dal::*;
 
 #[derive(Debug, Copy, Clone)]
 pub enum BlockError {
-    BlockVersionNotSupported(),
-    CompletedBlockError(CompletedBlockError),
+    VerifyBlockHashsError(VerifyBlockHashsError),
     DALError(DALError),
     InvalidBlock(InvalidBlockError),
     ApplyValidBlockError(ApplyValidBlockError),
@@ -36,9 +34,9 @@ pub enum BlockError {
     UnknowError(),
 }
 
-impl From<CompletedBlockError> for BlockError {
-    fn from(err: CompletedBlockError) -> Self {
-        BlockError::CompletedBlockError(err)
+impl From<VerifyBlockHashsError> for BlockError {
+    fn from(err: VerifyBlockHashsError) -> Self {
+        BlockError::VerifyBlockHashsError(err)
     }
 }
 
@@ -65,20 +63,20 @@ pub fn check_and_apply_block<W: WebOfTrust>(
 ) -> Result<ValidBlockApplyReqs, BlockError> {
     // Get BlockDocument && check if already have block
     let (block_doc, already_have_block) = match *block {
-        Block::NetworkBlock(network_block) => match *network_block {
-            NetworkBlock::V10(ref network_block_v10) => {
-                let already_have_block = DALBlock::already_have_block(
-                    &blocks_databases.blockchain_db,
-                    &blocks_databases.forks_blocks_db,
-                    network_block_v10.uncompleted_block_doc.blockstamp(),
-                )?;
-                (&network_block_v10.uncompleted_block_doc, already_have_block)
-            }
-            _ => return Err(BlockError::BlockVersionNotSupported()),
-        },
+        Block::NetworkBlock(block_doc) => {
+            let already_have_block = DALBlock::already_have_block(
+                &blocks_databases.blockchain_db,
+                &blocks_databases.forks_blocks_db,
+                block_doc.blockstamp(),
+            )?;
+            (block_doc, already_have_block)
+        }
         Block::LocalBlock(block_doc) => (block_doc, true),
     };
 
+    // Verify block hashs
+    verify_block_hashs(block_doc)?;
+
     // Check block chainability
     if (block_doc.number.0 == current_blockstamp.id.0 + 1
         && block_doc.previous_hash.to_string() == current_blockstamp.hash.0.to_string())
@@ -95,14 +93,14 @@ pub fn check_and_apply_block<W: WebOfTrust>(
 
         // Try stack up block
         let mut old_fork_id = None;
-        let block_doc = match *block {
-            Block::NetworkBlock(network_block) => complete_network_block(network_block, true)?,
+        let block_doc: &BlockDocument = match *block {
+            Block::NetworkBlock(block_doc) => block_doc,
             Block::LocalBlock(block_doc) => {
                 old_fork_id = durs_blockchain_dal::block::get_fork_id_of_blockstamp(
                     &blocks_databases.forks_blocks_db,
                     &block_doc.blockstamp(),
                 )?;
-                block_doc.clone()
+                block_doc
             }
         };
 
@@ -154,13 +152,11 @@ pub fn check_and_apply_block<W: WebOfTrust>(
                 _ => {}
             }
             match *block {
-                Block::NetworkBlock(network_block) => {
-                    // Completed network block
-                    let block_doc = complete_network_block(network_block, true)?;
+                Block::NetworkBlock(block_doc) => {
                     let dal_block = DALBlock {
                         fork_id,
                         isolate,
-                        block: block_doc,
+                        block: block_doc.clone(),
                         expire_certs: None,
                     };
                     durs_blockchain_dal::writers::block::write(
diff --git a/lib/modules/blockchain/blockchain/constants.rs b/lib/modules/blockchain/blockchain/constants.rs
new file mode 100644
index 00000000..6f575d14
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/constants.rs
@@ -0,0 +1,26 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+/// Default currency
+pub static DEFAULT_CURRENCY: &'static str = "g1";
+
+/// Chunk size (in blocks)
+pub static CHUNK_SIZE: &'static usize = &250;
+
+/// Chunk file name begin
+pub static CHUNK_FILE_NAME_BEGIN: &'static str = "chunk_";
+
+/// Chunk file name end
+pub static CHUNK_FILE_NAME_END: &'static str = "-250.json";
diff --git a/lib/modules/blockchain/blockchain/lib.rs b/lib/modules/blockchain/blockchain/lib.rs
index 8b700682..53a5a505 100644
--- a/lib/modules/blockchain/blockchain/lib.rs
+++ b/lib/modules/blockchain/blockchain/lib.rs
@@ -29,15 +29,17 @@
     unused_qualifications
 )]
 
+//#[macro_use]
+//extern crate failure;
 #[macro_use]
 extern crate log;
 
 mod apply_valid_block;
 mod check_and_apply_block;
+mod constants;
 mod dbex;
 mod revert_block;
 mod sync;
-mod ts_parsers;
 mod verify_block;
 
 use std::collections::HashMap;
@@ -49,6 +51,7 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
 
 use crate::apply_valid_block::*;
 use crate::check_and_apply_block::*;
+use crate::constants::*;
 pub use crate::dbex::{DBExQuery, DBExTxQuery, DBExWotQuery};
 use dubp_documents::documents::block::BlockDocument;
 use dubp_documents::documents::DUBPDocument;
@@ -56,7 +59,7 @@ use dubp_documents::*;
 use duniter_module::*;
 use duniter_network::{
     cli::sync::SyncOpt,
-    documents::{BlockchainDocument, NetworkBlock},
+    documents::BlockchainDocument,
     events::NetworkEvent,
     requests::{NetworkResponse, OldNetworkRequest},
 };
@@ -66,6 +69,7 @@ use durs_blockchain_dal::currency_params::CurrencyParameters;
 use durs_blockchain_dal::identity::DALIdentity;
 use durs_blockchain_dal::writers::requests::BlocksDBsWriteQuery;
 use durs_blockchain_dal::*;
+use durs_common_tools::fatal_error;
 use durs_message::events::*;
 use durs_message::requests::*;
 use durs_message::responses::*;
@@ -111,7 +115,7 @@ pub struct BlockchainModule {
 /// Block
 pub enum Block<'a> {
     /// Block coming from Network
-    NetworkBlock(&'a NetworkBlock),
+    NetworkBlock(&'a BlockDocument),
     /// Block coming from local database
     LocalBlock(&'a BlockDocument),
 }
@@ -120,7 +124,7 @@ impl<'a> Block<'a> {
     /// Return blockstamp
     pub fn blockstamp(&self) -> Blockstamp {
         match *self {
-            Block::NetworkBlock(ref network_block) => network_block.blockstamp(),
+            Block::NetworkBlock(ref block) => block.blockstamp(),
             Block::LocalBlock(ref block) => block.blockstamp(),
         }
     }
@@ -137,12 +141,12 @@ pub enum SyncVerificationLevel {
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
-/// Error returned by function complete_network_block()
-pub enum CompletedBlockError {
+/// Error returned by function verify_block_hashs()
+pub enum VerifyBlockHashsError {
     /// Invalid block inner hash
     InvalidInnerHash(),
     /// Invalid block hash
-    InvalidHash(BlockId, Option<BlockHash>, Option<BlockHash>),
+    InvalidHash(BlockId, Option<BlockHash>),
     /// Invalid block version
     InvalidVersion(),
 }
@@ -204,28 +208,35 @@ impl BlockchainModule {
     pub fn dbex<DC: DuniterConf>(profile: &str, conf: &DC, csv: bool, req: &DBExQuery) {
         dbex::dbex(profile, conf, csv, req);
     }
-    /// Synchronize blockchain from a duniter-ts database
+    /// Synchronize blockchain from local duniter json files
     pub fn sync_ts<DC: DuniterConf>(profile: &str, conf: &DC, sync_opts: &SyncOpt) {
-        // get db_ts_path
-        let db_ts_path = if let Some(ref ts_path) = sync_opts.source {
-            PathBuf::from(ts_path)
+        // get json_chunks_path
+        let json_chunks_path = if let Some(ref path) = sync_opts.source {
+            PathBuf::from(path)
         } else {
-            let mut db_ts_path = match dirs::config_dir() {
+            let mut json_chunks_path = match dirs::config_dir() {
                 Some(path) => path,
                 None => panic!("Impossible to get user config directory !"),
             };
-            db_ts_path.push("duniter/");
-            db_ts_path.push("duniter_default");
-            db_ts_path.push("duniter.db");
-            db_ts_path
+            json_chunks_path.push("duniter/");
+            json_chunks_path.push("duniter_default");
+
+            let currency = if let Some(currency) = &sync_opts.currency {
+                currency
+            } else {
+                DEFAULT_CURRENCY
+            };
+
+            json_chunks_path.push(currency);
+            json_chunks_path
         };
-        if !db_ts_path.as_path().exists() {
-            panic!("Fatal error : duniter-ts database don't exist !");
+        if !json_chunks_path.as_path().exists() {
+            panic!("Fatal error : duniter json chunks folder don't exist !");
         }
-        sync::sync_ts(
+        sync::sync(
             profile,
             conf,
-            db_ts_path,
+            json_chunks_path,
             sync_opts.end,
             sync_opts.cautious_mode,
             !sync_opts.unsafe_mode,
@@ -330,18 +341,19 @@ impl BlockchainModule {
         let mut current_blockstamp = *current_blockstamp;
 
         for network_document in network_documents {
-            if let BlockchainDocument::Block(ref network_block) = network_document {
+            if let BlockchainDocument::Block(ref block_doc) = network_document {
+                let block_doc = block_doc.deref();
                 match check_and_apply_block::<W>(
                     &self.blocks_databases,
                     &self.wot_databases.certs_db,
-                    &Block::NetworkBlock(network_block),
+                    &Block::NetworkBlock(block_doc),
                     &current_blockstamp,
                     wot_index,
                     wot_db,
                     &self.forks_states,
                 ) {
                     Ok(ValidBlockApplyReqs(block_req, wot_dbs_reqs, currency_dbs_reqs)) => {
-                        let block_doc = network_block.uncompleted_block_doc().clone();
+                        let block_doc = block_doc.clone();
                         let mut save_wots_dbs = false;
                         let mut save_currency_dbs = false;
 
@@ -372,7 +384,7 @@ impl BlockchainModule {
                                 },
                             ));
                         }
-                        current_blockstamp = network_block.blockstamp();
+                        current_blockstamp = block_doc.blockstamp();
                         // Update forks states
                         self.forks_states = durs_blockchain_dal::block::get_forks(
                             &self.blocks_databases.forks_db,
@@ -400,12 +412,9 @@ impl BlockchainModule {
                         }
                     }
                     Err(_) => {
-                        warn!(
-                            "RefusedBlock({})",
-                            network_block.uncompleted_block_doc().number.0
-                        );
+                        warn!("RefusedBlock({})", block_doc.number.0);
                         self.send_event(&BlockchainEvent::RefusedPendingDoc(DUBPDocument::Block(
-                            Box::new(network_block.uncompleted_block_doc().clone()),
+                            Box::new(block_doc.clone()),
                         )));
                     }
                 }
@@ -691,7 +700,9 @@ impl BlockchainModule {
                                             {
                                                 let blocks: Vec<Box<Block>> = blocks
                                                     .iter()
-                                                    .map(|b| Box::new(Block::NetworkBlock(b)))
+                                                    .map(|b| {
+                                                        Box::new(Block::NetworkBlock(b.deref()))
+                                                    })
                                                     .collect();
 
                                                 let new_current_blockstamp = self.receive_blocks(
@@ -823,56 +834,33 @@ impl BlockchainModule {
     }
 }
 
-/// Complete Network Block
-pub fn complete_network_block(
-    network_block: &NetworkBlock,
-    verif_inner_hash: bool,
-) -> Result<BlockDocument, CompletedBlockError> {
-    if let NetworkBlock::V10(ref network_block_v10) = *network_block {
-        let mut block_doc = network_block_v10.uncompleted_block_doc.clone();
-        trace!("complete_network_block #{}...", block_doc.number);
-        block_doc.certifications =
-            durs_blockchain_dal::parsers::certifications::parse_certifications_into_compact(
-                &network_block_v10.certifications,
-            );
-        trace!("Success to complete certs.");
-        block_doc.revoked = durs_blockchain_dal::parsers::revoked::parse_revocations_into_compact(
-            &network_block_v10.revoked,
-        );
-        trace!("Success to complete certs & revocations.");
-        let inner_hash = block_doc.inner_hash.expect(
-            "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
+/// Verify block hashs
+pub fn verify_block_hashs(block_doc: &BlockDocument) -> Result<(), VerifyBlockHashsError> {
+    trace!("complete_block #{}...", block_doc.number);
+
+    if block_doc.inner_hash.is_none() {
+        fatal_error(
+            "BlockchainModule : verify_block_hashs() : fatal error : block.inner_hash = None",
         );
-        if verif_inner_hash && block_doc.number.0 > 0 {
-            block_doc.compute_inner_hash();
-        }
-        let hash = block_doc.hash;
-        block_doc.compute_hash();
-        if block_doc.inner_hash.expect(
-            "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
-        ) == inner_hash
-        {
-            block_doc.fill_inner_hash_and_nonce_str(None);
-            if !verif_inner_hash || block_doc.hash == hash {
-                trace!("Succes to complete_network_block #{}", block_doc.number.0);
-                Ok(block_doc)
-            } else {
-                warn!("BlockchainModule : Refuse Bloc : invalid hash !");
-                Err(CompletedBlockError::InvalidHash(
-                    block_doc.number,
-                    block_doc.hash,
-                    hash,
-                ))
-            }
+    }
+
+    if block_doc.verify_inner_hash() {
+        if block_doc.verify_hash() {
+            trace!("Succes to verify_block_hashs #{}", block_doc.number.0);
+            Ok(())
         } else {
-            warn!("BlockchainModule : Refuse Bloc : invalid inner hash !");
-            debug!(
-                "BlockInnerFormat={}",
-                block_doc.generate_compact_inner_text()
-            );
-            Err(CompletedBlockError::InvalidInnerHash())
+            warn!("BlockchainModule : Refuse Bloc : invalid hash !");
+            Err(VerifyBlockHashsError::InvalidHash(
+                block_doc.number,
+                block_doc.hash,
+            ))
         }
     } else {
-        Err(CompletedBlockError::InvalidVersion())
+        warn!("BlockchainModule : Refuse Bloc : invalid inner hash !");
+        debug!(
+            "BlockInnerFormat={}",
+            block_doc.generate_compact_inner_text()
+        );
+        Err(VerifyBlockHashsError::InvalidInnerHash())
     }
 }
diff --git a/lib/modules/blockchain/blockchain/sync.rs b/lib/modules/blockchain/blockchain/sync.rs
deleted file mode 100644
index 1ab46712..00000000
--- a/lib/modules/blockchain/blockchain/sync.rs
+++ /dev/null
@@ -1,684 +0,0 @@
-//  Copyright (C) 2018  The Duniter Project Developers.
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as
-// published by the Free Software Foundation, either version 3 of the
-// License, or (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-// GNU Affero General Public License for more details.
-//
-// You should have received a copy of the GNU Affero General Public License
-// along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-use crate::ts_parsers::*;
-use crate::*;
-use dubp_documents::{BlockHash, BlockId};
-use duniter_network::documents::NetworkBlock;
-use dup_crypto::hashs::Hash;
-use dup_crypto::keys::*;
-use durs_blockchain_dal::currency_params::CurrencyParameters;
-use durs_blockchain_dal::writers::requests::*;
-use durs_blockchain_dal::ForkId;
-use durs_wot::NodeId;
-use pbr::ProgressBar;
-use std::collections::{HashMap, VecDeque};
-use std::fs;
-use std::ops::Deref;
-use std::sync::mpsc;
-use std::thread;
-use std::time::SystemTime;
-use threadpool::ThreadPool;
-
-/// Number of sync jobs
-pub static NB_SYNC_JOBS: &'static usize = &4;
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-/// Block header
-pub struct BlockHeader {
-    pub number: BlockId,
-    pub hash: BlockHash,
-    pub issuer: PubKey,
-}
-
-#[derive(Debug)]
-/// Message for main sync thread
-enum MessForSyncThread {
-    Target(CurrencyName, Blockstamp),
-    NetworkBlock(NetworkBlock),
-    DownloadFinish(),
-    ApplyFinish(),
-}
-
-#[derive(Debug)]
-/// Message for a job thread
-enum SyncJobsMess {
-    BlocksDBsWriteQuery(BlocksDBsWriteQuery),
-    WotsDBsWriteQuery(WotsDBsWriteQuery, Box<CurrencyParameters>),
-    CurrencyDBsWriteQuery(CurrencyDBsWriteQuery),
-    End(),
-}
-
-/// Sync from a duniter-ts database
-pub fn sync_ts<DC: DuniterConf>(
-    profile: &str,
-    conf: &DC,
-    db_ts_path: PathBuf,
-    end: Option<u32>,
-    cautious: bool,
-    verif_inner_hash: bool,
-) {
-    // Get verification level
-    let _verif_level = if cautious {
-        println!("Start cautious sync...");
-        info!("Start cautious sync...");
-        SyncVerificationLevel::Cautious()
-    } else {
-        println!("Start fast sync...");
-        info!("Start fast sync...");
-        SyncVerificationLevel::FastSync()
-    };
-
-    // Create sync_thread channels
-    let (sender_sync_thread, recv_sync_thread) = mpsc::channel();
-
-    // Create ThreadPool
-    let nb_cpus = num_cpus::get();
-    let nb_workers = if nb_cpus < *NB_SYNC_JOBS {
-        nb_cpus
-    } else {
-        *NB_SYNC_JOBS
-    };
-    let pool = ThreadPool::new(nb_workers);
-
-    // Determine db_ts_copy_path
-    let mut db_ts_copy_path = duniter_conf::datas_path(profile, &conf.currency().clone());
-    db_ts_copy_path.push("tmp_db_ts_copy.db");
-
-    // Lauch ts thread
-    let profile_copy = String::from(profile);
-    let sender_sync_thread_clone = sender_sync_thread.clone();
-    pool.execute(move || {
-        let ts_job_begin = SystemTime::now();
-        // copy db_ts
-        fs::copy(db_ts_path.as_path(), db_ts_copy_path.as_path())
-            .expect("Fatal error : fail to copy duniter-ts database !");
-        // open copy of db_ts
-        let ts_db = sqlite::open(db_ts_copy_path.as_path())
-            .expect("Fatal error : fail to open copy of duniter-ts database !");
-        info!("sync_ts : Success to open duniter-ts database.");
-
-        // Get ts target blockstamp
-        debug!("Get ts-db current blockstamp...");
-        let mut cursor: sqlite::Cursor = if let Some(end) = end {
-            let mut cursor = ts_db
-            .prepare("SELECT hash, number, currency FROM block WHERE fork=? AND number=? LIMIT 1;")
-            .expect("Request SQL get_ts_current_block is wrong !")
-            .cursor();
-            cursor.bind(&[sqlite::Value::Integer(0), sqlite::Value::Integer(i64::from(end))]).expect("Fail to get ts target block !");
-            cursor
-        } else {
-            let mut cursor = ts_db
-            .prepare("SELECT hash, number, currency FROM block WHERE fork=? ORDER BY number DESC LIMIT 1;")
-            .expect("Request SQL get_ts_current_block is wrong !")
-            .cursor();
-            cursor.bind(&[sqlite::Value::Integer(0)]).expect("Fail to get ts current block !");
-            cursor
-        };
-
-        let (currency, current_ts_blockstamp) =
-            if let Some(row) = cursor.next().expect("cursor error") {
-                let block_id = BlockId(
-                    row[1]
-                        .as_integer()
-                        .expect("Fail to parse current ts blockstamp !") as u32,
-                );
-                let block_hash = BlockHash(
-                    Hash::from_hex(
-                        row[0]
-                            .as_string()
-                            .expect("Fail to parse current ts blockstamp !"),
-                    ).expect("Fail to parse current ts blockstamp !"),
-                );
-                (
-                    CurrencyName(String::from(
-                        row[2]
-                            .as_string()
-                            .expect("Fatal error :Fail to get currency !"),
-                    )),
-                    Blockstamp {
-                        id: block_id,
-                        hash: block_hash,
-                    },
-                )
-            } else {
-                panic!("Fail to get current ts blockstamp !");
-            };
-
-        debug!("Success to ts-db current blockstamp.");
-
-        // Get current local blockstamp
-        debug!("Get local current blockstamp...");
-        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency);
-        let blocks_databases = BlocksV10DBs::open(Some(&db_path));
-        let current_blockstamp: Blockstamp = durs_blockchain_dal::block::get_current_blockstamp(
-            &blocks_databases,
-        ).expect("ForksV10DB : RustBreakError !")
-            .unwrap_or_default();
-        debug!("Success to get local current blockstamp.");
-
-        // Send ts current blockstamp
-        sender_sync_thread_clone
-            .send(MessForSyncThread::Target(
-                currency.clone(),
-                current_ts_blockstamp,
-            ))
-            .expect("Fatal error : sync_thread unrechable !");
-
-        // Get genesis block
-        if current_blockstamp == Blockstamp::default() {
-            let mut cursor: sqlite::Cursor = ts_db
-                    .prepare(
-                        "SELECT hash, inner_hash, signature, currency, issuer, parameters, previousHash,
-                            previousIssuer, version, membersCount, monetaryMass, medianTime, dividend, unitbase,
-                            time, powMin, number, nonce, transactions, certifications, identities, joiners,
-                            actives, leavers, revoked, excluded, issuersFrame, issuersFrameVar, issuersCount
-                            FROM block WHERE fork=0 AND number=? LIMIT 1;",
-                    )
-                    .expect("Request SQL get_ts_blocks is wrong !")
-                    .cursor();
-            cursor
-                .bind(&[sqlite::Value::Integer(0)])
-                .expect("Fail to get genesis block !");
-            if let Some(row) = cursor.next().expect("cursor error") {
-                sender_sync_thread_clone
-                    .send(MessForSyncThread::NetworkBlock(parse_ts_block(row)))
-                    .expect("Fatal error : sync_thread unrechable !");
-            }
-        }
-
-        // Request ts blocks
-        let mut cursor: sqlite::Cursor = ts_db
-                .prepare(
-                    "SELECT hash, inner_hash, signature, currency, issuer, parameters, previousHash,
-                        previousIssuer, version, membersCount, monetaryMass, medianTime, dividend, unitbase,
-                        time, powMin, number, nonce, transactions, certifications, identities, joiners,
-                        actives, leavers, revoked, excluded, issuersFrame, issuersFrameVar, issuersCount
-                        FROM block WHERE fork=? AND number > ? AND number <= ? ORDER BY number ASC;",
-                )
-                .expect("Request SQL get_ts_blocks is wrong !")
-                .cursor();
-        cursor
-            .bind(&[
-                sqlite::Value::Integer(0),
-                sqlite::Value::Integer(i64::from(current_blockstamp.id.0)),
-                sqlite::Value::Integer(i64::from(current_ts_blockstamp.id.0)),
-            ])
-            .expect("0");
-
-        // Parse ts blocks
-        //let mut ts_blocks = Vec::with_capacity(current_ts_blockstamp.id.0 + 1);
-        //let pool = ThreadPool::new(4);
-        while let Some(row) = cursor.next().expect("cursor error") {
-            //let sender_sync_thread_clone = sender_sync_thread.clone();
-            //pool.execute(move || {
-            sender_sync_thread_clone
-                .send(MessForSyncThread::NetworkBlock(parse_ts_block(row)))
-                .expect("Fatal error : sync_thread unrechable !");
-            //});
-        }
-        fs::remove_file(db_ts_copy_path.as_path())
-            .expect("Fatal error : fail to remove db_ts_copy !");
-        sender_sync_thread_clone
-            .send(MessForSyncThread::DownloadFinish())
-            .expect("Fatal error : sync_thread unrechable !");
-        let ts_job_duration = SystemTime::now()
-            .duration_since(ts_job_begin)
-            .expect("duration_since error");
-        info!(
-            "ts_job_duration={},{:03} seconds.",
-            ts_job_duration.as_secs(),
-            ts_job_duration.subsec_millis()
-        );
-    });
-
-    // Get currency and target blockstamp
-    let (currency, target_blockstamp) =
-        if let Ok(MessForSyncThread::Target(currency, target_blockstamp)) = recv_sync_thread.recv()
-        {
-            (currency, target_blockstamp)
-        } else {
-            panic!("Fatal error : no TargetBlockstamp !")
-        };
-
-    // Update DuniterConf
-    let mut conf = conf.clone();
-    conf.set_currency(currency.clone());
-
-    // Get databases path
-    let db_path = duniter_conf::get_blockchain_db_path(profile, &currency);
-
-    // Write nex conf
-    duniter_conf::write_conf_file(profile, &conf).expect("Fail to write new conf !");
-
-    // Open wot db
-    let wot_db = open_wot_db::<RustyWebOfTrust>(Some(&db_path)).expect("Fail to open WotDB !");
-
-    // Open blocks databases
-    let databases = BlocksV10DBs::open(Some(&db_path));
-
-    // Open wot databases
-    let wot_databases = WotsV10DBs::open(Some(&db_path));
-
-    // Get local current blockstamp
-    debug!("Get local current blockstamp...");
-    let mut current_blockstamp: Blockstamp =
-        durs_blockchain_dal::block::get_current_blockstamp(&databases)
-            .expect("ForksV10DB : RustBreakError !")
-            .unwrap_or_default();
-    debug!("Success to get local current blockstamp.");
-
-    // Node is already synchronized ?
-    if target_blockstamp.id.0 < current_blockstamp.id.0 {
-        println!("Your duniter-rs node is already synchronized.");
-        return;
-    }
-
-    // Get wot index
-    let mut wot_index: HashMap<PubKey, NodeId> =
-        DALIdentity::get_wot_index(&wot_databases.identities_db)
-            .expect("Fatal eror : get_wot_index : Fail to read blockchain databases");
-
-    // Start sync
-    let sync_start_time = SystemTime::now();
-    info!(
-        "Sync from #{} to #{}...",
-        current_blockstamp.id.0, target_blockstamp.id.0
-    );
-    println!(
-        "Sync from #{} to #{}...",
-        current_blockstamp.id.0, target_blockstamp.id.0
-    );
-
-    // Createprogess bar
-    let count_blocks = target_blockstamp.id.0 + 1 - current_blockstamp.id.0;
-    let count_chunks = if count_blocks % 250 > 0 {
-        (count_blocks / 250) + 1
-    } else {
-        count_blocks / 250
-    };
-    let mut apply_pb = ProgressBar::new(count_chunks.into());
-    apply_pb.format("╢▌▌░╟");
-    // Create workers threads channels
-    let (sender_blocks_thread, recv_blocks_thread) = mpsc::channel();
-    let (sender_tx_thread, recv_tx_thread) = mpsc::channel();
-    let (sender_wot_thread, recv_wot_thread) = mpsc::channel();
-
-    // Launch blocks_worker thread
-    let sender_sync_thread_clone = sender_sync_thread.clone();
-    pool.execute(move || {
-        let blocks_job_begin = SystemTime::now();
-
-        // Listen db requets
-        let mut chunk_index = 0;
-        let mut blockchain_meta_datas = HashMap::new();
-        let mut all_wait_duration = Duration::from_millis(0);
-        let mut wait_begin = SystemTime::now();
-        while let Ok(SyncJobsMess::BlocksDBsWriteQuery(req)) = recv_blocks_thread.recv() {
-            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
-            // Apply db request
-            req.apply(&databases, true)
-                .expect("Fatal error : Fail to apply DBWriteRequest !");
-            if let BlocksDBsWriteQuery::WriteBlock(
-                ref _dal_block,
-                ref _old_fork_id,
-                ref previous_blockstamp,
-                ref previous_hash,
-            ) = req
-            {
-                blockchain_meta_datas.insert(*previous_blockstamp, *previous_hash);
-                chunk_index += 1;
-                if chunk_index == 250 {
-                    chunk_index = 0;
-                    apply_pb.inc();
-                }
-            }
-            wait_begin = SystemTime::now();
-        }
-
-        // Indexing blockchain meta datas
-        info!("Indexing blockchain meta datas...");
-        /*let blockchain_meta_datas: HashMap<PreviousBlockstamp, BlockHash> = databases
-        .blockchain_db
-        .read(|db| {
-            let mut blockchain_meta_datas: HashMap<
-                PreviousBlockstamp,
-                BlockHash,
-            > = HashMap::new();
-            for dal_block in db.values() {
-                let block_previous_hash = if dal_block.block.number.0 == 0 {
-                    PreviousBlockstamp::default()
-                } else {
-                    PreviousBlockstamp {
-                        id: BlockId(dal_block.block.number.0 - 1),
-                        hash: BlockHash(dal_block.block.previous_hash),
-                    }
-                };
-                blockchain_meta_datas
-                    .insert(block_previous_hash, dal_block.block.expect("Try to get hash of an uncompleted or reduce block !"));
-            }
-            blockchain_meta_datas
-        })
-        .expect("Indexing blockchain meta datas : DALError");*/
-        databases
-            .forks_db
-            .write(|db| {
-                db.insert(ForkId(0), blockchain_meta_datas);
-            })
-            .expect("Indexing blockchain meta datas : DALError");
-
-        // Increment progress bar (last chunk)
-        apply_pb.inc();
-        // Save blockchain, and fork databases
-        println!();
-        println!("Write indexs in files...");
-        info!("Save blockchain and forks databases in files...");
-        databases.save_dbs();
-
-        // Send finish signal
-        sender_sync_thread_clone
-            .send(MessForSyncThread::ApplyFinish())
-            .expect("Fatal error : sync_thread unrechable !");
-        let blocks_job_duration =
-            SystemTime::now().duration_since(blocks_job_begin).unwrap() - all_wait_duration;
-        info!(
-            "blocks_job_duration={},{:03} seconds.",
-            blocks_job_duration.as_secs(),
-            blocks_job_duration.subsec_millis()
-        );
-    });
-
-    // / Launch wot_worker thread
-    let profile_copy2 = String::from(profile);
-    let currency_copy2 = currency.clone();
-    let sender_sync_thread_clone2 = sender_sync_thread.clone();
-
-    pool.execute(move || {
-        let wot_job_begin = SystemTime::now();
-        // Open databases
-        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy2, &currency_copy2);
-        let databases = WotsV10DBs::open(Some(&db_path));
-
-        // Listen db requets
-        let mut all_wait_duration = Duration::from_millis(0);
-        let mut wait_begin = SystemTime::now();
-        while let Ok(mess) = recv_wot_thread.recv() {
-            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
-            match mess {
-                SyncJobsMess::WotsDBsWriteQuery(req, currency_params) => req
-                    .apply(&databases, &currency_params.deref())
-                    .expect("Fatal error : Fail to apply DBWriteRequest !"),
-                SyncJobsMess::End() => break,
-                _ => {}
-            }
-            wait_begin = SystemTime::now();
-        }
-        // Save wots databases
-        info!("Save wots databases in files...");
-        databases.save_dbs();
-
-        // Send finish signal
-        sender_sync_thread_clone2
-            .send(MessForSyncThread::ApplyFinish())
-            .expect("Fatal error : sync_thread unrechable !");
-        let wot_job_duration =
-            SystemTime::now().duration_since(wot_job_begin).unwrap() - all_wait_duration;
-        info!(
-            "wot_job_duration={},{:03} seconds.",
-            wot_job_duration.as_secs(),
-            wot_job_duration.subsec_millis()
-        );
-    });
-
-    // Launch tx_worker thread
-    let profile_copy = String::from(profile);
-    let currency_copy = conf.currency().clone();
-    let sender_sync_thread_clone = sender_sync_thread.clone();
-    pool.execute(move || {
-        let tx_job_begin = SystemTime::now();
-        // Open databases
-        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency_copy);
-        let databases = CurrencyV10DBs::open(Some(&db_path));
-
-        // Listen db requets
-        let mut all_wait_duration = Duration::from_millis(0);
-        let mut wait_begin = SystemTime::now();
-        while let Ok(SyncJobsMess::CurrencyDBsWriteQuery(req)) = recv_tx_thread.recv() {
-            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
-            // Apply db request
-            req.apply(&databases)
-                .expect("Fatal error : Fail to apply DBWriteRequest !");
-            wait_begin = SystemTime::now();
-        }
-        // Save tx, utxo, du and balances databases
-        info!("Save tx and sources database in file...");
-        databases.save_dbs(true, true);
-
-        // Send finish signal
-        sender_sync_thread_clone
-            .send(MessForSyncThread::ApplyFinish())
-            .expect("Fatal error : sync_thread unrechable !");
-        let tx_job_duration =
-            SystemTime::now().duration_since(tx_job_begin).unwrap() - all_wait_duration;
-        info!(
-            "tx_job_duration={},{:03} seconds.",
-            tx_job_duration.as_secs(),
-            tx_job_duration.subsec_millis()
-        );
-    });
-    let main_job_begin = SystemTime::now();
-
-    // Open currency_params_db
-    let dbs_path = duniter_conf::get_blockchain_db_path(profile, &conf.currency());
-    let currency_params_db = open_file_db::<CurrencyParamsV10Datas>(&dbs_path, "params.db")
-        .expect("Fail to open params db");
-
-    // Apply blocks
-    let mut blocks_not_expiring = VecDeque::with_capacity(200_000);
-    let mut last_block_expiring: isize = -1;
-    let certs_db =
-        BinDB::Mem(open_memory_db::<CertsExpirV10Datas>().expect("Fail to create memory certs_db"));
-    let mut currency_params = CurrencyParameters::default();
-    let mut get_currency_params = false;
-    let mut certs_count = 0;
-
-    let mut all_wait_duration = Duration::from_millis(0);
-    let mut wait_begin = SystemTime::now();
-    let mut all_complete_block_duration = Duration::from_millis(0);
-    let mut all_apply_valid_block_duration = Duration::from_millis(0);
-    while let Ok(MessForSyncThread::NetworkBlock(network_block)) = recv_sync_thread.recv() {
-        all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
-        // Complete block
-        let complete_block_begin = SystemTime::now();
-        let block_doc = complete_network_block(&network_block, verif_inner_hash)
-            .expect("Receive wrong block, please reset data and resync !");
-        all_complete_block_duration += SystemTime::now()
-            .duration_since(complete_block_begin)
-            .unwrap();
-        // Get currency params
-        if !get_currency_params && block_doc.number.0 == 0 {
-            if block_doc.parameters.is_some() {
-                currency_params_db
-                    .write(|db| {
-                        db.0 = block_doc.currency.clone();
-                        db.1 = block_doc.parameters.unwrap();
-                    })
-                    .expect("fail to write in params DB");
-                currency_params = CurrencyParameters::from((
-                    block_doc.currency.clone(),
-                    block_doc.parameters.unwrap(),
-                ));
-                get_currency_params = true;
-            } else {
-                panic!("The genesis block are None parameters !");
-            }
-        }
-        // Push block median_time in blocks_not_expiring
-        blocks_not_expiring.push_back(block_doc.median_time);
-        // Get blocks_expiring
-        let mut blocks_expiring = Vec::new();
-        while blocks_not_expiring.front().cloned()
-            < Some(block_doc.median_time - currency_params.sig_validity)
-        {
-            last_block_expiring += 1;
-            blocks_expiring.push(BlockId(last_block_expiring as u32));
-            blocks_not_expiring.pop_front();
-        }
-        // Find expire_certs
-        let expire_certs =
-            durs_blockchain_dal::certs::find_expire_certs(&certs_db, blocks_expiring)
-                .expect("find_expire_certs() : DALError");
-        // Apply block
-        let apply_valid_block_begin = SystemTime::now();
-        if let Ok(ValidBlockApplyReqs(block_req, wot_db_reqs, currency_db_reqs)) =
-            apply_valid_block::<RustyWebOfTrust>(
-                &block_doc,
-                &mut wot_index,
-                &wot_db,
-                &expire_certs,
-                None,
-            )
-        {
-            all_apply_valid_block_duration += SystemTime::now()
-                .duration_since(apply_valid_block_begin)
-                .unwrap();
-            current_blockstamp = network_block.blockstamp();
-            debug!("Apply db requests...");
-            // Send block request to blocks worker thread
-            sender_blocks_thread
-                .send(SyncJobsMess::BlocksDBsWriteQuery(block_req.clone()))
-                .expect(
-                    "Fail to communicate with blocks worker thread, please reset data & resync !",
-                );
-            // Send wot requests to wot worker thread
-            for req in wot_db_reqs {
-                if let WotsDBsWriteQuery::CreateCert(
-                    ref _source_pubkey,
-                    ref source,
-                    ref target,
-                    ref created_block_id,
-                    ref _median_time,
-                ) = req
-                {
-                    certs_count += 1;
-                    // Add cert in certs_db
-                    certs_db
-                        .write(|db| {
-                            let mut created_certs =
-                                db.get(&created_block_id).cloned().unwrap_or_default();
-                            created_certs.insert((*source, *target));
-                            db.insert(*created_block_id, created_certs);
-                        })
-                        .expect("RustBreakError : please reset data and resync !");
-                }
-                sender_wot_thread
-                    .send(SyncJobsMess::WotsDBsWriteQuery(
-                        req.clone(),
-                        Box::new(currency_params),
-                    ))
-                    .expect(
-                        "Fail to communicate with tx worker thread, please reset data & resync !",
-                    )
-            }
-            // Send blocks and wot requests to wot worker thread
-            for req in currency_db_reqs {
-                sender_tx_thread
-                    .send(SyncJobsMess::CurrencyDBsWriteQuery(req.clone()))
-                    .expect(
-                        "Fail to communicate with tx worker thread, please reset data & resync !",
-                    );
-            }
-            debug!("Success to apply block #{}", current_blockstamp.id.0);
-            if current_blockstamp.id.0 >= target_blockstamp.id.0 {
-                if current_blockstamp == target_blockstamp {
-                    // Sync completed
-                    break;
-                } else {
-                    panic!("Fatal Error : we get a fork, please reset data and sync again !");
-                }
-            }
-        } else {
-            panic!(
-                "Fatal error : fail to stack up block #{}",
-                current_blockstamp.id.0 + 1
-            )
-        }
-        wait_begin = SystemTime::now();
-    }
-    // Send end signal to workers threads
-    sender_blocks_thread
-        .send(SyncJobsMess::End())
-        .expect("Sync : Fail to send End signal to blocks worker !");
-    info!("Sync : send End signal to blocks job.");
-    sender_wot_thread
-        .send(SyncJobsMess::End())
-        .expect("Sync : Fail to send End signal to wot worker !");
-    info!("Sync : send End signal to wot job.");
-    sender_tx_thread
-        .send(SyncJobsMess::End())
-        .expect("Sync : Fail to send End signal to writer worker !");
-    info!("Sync : send End signal to tx job.");
-
-    // Save params db
-    currency_params_db.save().expect("Fail to save params db");
-
-    // Save wot file
-    wot_db.save().expect("Fail to save wot db");
-
-    let main_job_duration =
-        SystemTime::now().duration_since(main_job_begin).unwrap() - all_wait_duration;
-    info!(
-        "main_job_duration={},{:03} seconds.",
-        main_job_duration.as_secs(),
-        main_job_duration.subsec_millis()
-    );
-    info!(
-        "all_complete_block_duration={},{:03} seconds.",
-        all_complete_block_duration.as_secs(),
-        all_complete_block_duration.subsec_millis()
-    );
-    info!(
-        "all_apply_valid_block_duration={},{:03} seconds.",
-        all_apply_valid_block_duration.as_secs(),
-        all_apply_valid_block_duration.subsec_millis()
-    );
-
-    // Wait recv two finish signals
-    let mut wait_jobs = *NB_SYNC_JOBS - 1;
-    while wait_jobs > 0 {
-        match recv_sync_thread.recv() {
-            Ok(MessForSyncThread::ApplyFinish()) => wait_jobs -= 1,
-            Ok(_) => thread::sleep(Duration::from_millis(50)),
-            Err(_) => wait_jobs -= 1,
-        }
-    }
-    info!("All sync jobs finish.");
-
-    // Log sync duration
-    debug!("certs_count={}", certs_count);
-    let sync_duration = SystemTime::now().duration_since(sync_start_time).unwrap();
-    println!(
-        "Sync {} blocks in {}.{:03} seconds.",
-        current_blockstamp.id.0 + 1,
-        sync_duration.as_secs(),
-        sync_duration.subsec_millis(),
-    );
-    info!(
-        "Sync {} blocks in {}.{:03} seconds.",
-        current_blockstamp.id.0 + 1,
-        sync_duration.as_secs(),
-        sync_duration.subsec_millis(),
-    );
-}
diff --git a/lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs b/lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs
new file mode 100644
index 00000000..0d8daada
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs
@@ -0,0 +1,109 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::sync::*;
+use pbr::ProgressBar;
+use std::sync::mpsc;
+
+/// Launch the blocks-applier worker thread on the pool.
+///
+/// The worker consumes `SyncJobsMess::BlocksDBsWriteQuery` messages from `recv`
+/// and applies each one to `databases`, advancing `apply_pb` once per 250
+/// written blocks (one sync chunk). When the channel closes, it stores the
+/// collected `(previous_blockstamp -> previous_hash)` index under `ForkId(0)`,
+/// flushes the databases to disk and reports `ApplyFinish` on
+/// `sender_sync_thread`.
+pub fn execute(
+    pool: &ThreadPool,
+    sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+    recv: mpsc::Receiver<SyncJobsMess>,
+    databases: BlocksV10DBs,
+    mut apply_pb: ProgressBar<std::io::Stdout>,
+) {
+    // Launch blocks_worker thread
+    pool.execute(move || {
+        let blocks_job_begin = SystemTime::now();
+
+        // Listen db requests
+        // NOTE(review): `while let` also stops on the first message that is NOT
+        // a BlocksDBsWriteQuery, not only when the channel is closed — confirm
+        // the main thread never interleaves other message kinds.
+        let mut chunk_index = 0;
+        let mut blockchain_meta_datas = HashMap::new();
+        // Time spent blocked on `recv`, subtracted from the reported job duration.
+        let mut all_wait_duration = Duration::from_millis(0);
+        let mut wait_begin = SystemTime::now();
+        while let Ok(SyncJobsMess::BlocksDBsWriteQuery(req)) = recv.recv() {
+            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+            // Apply db request
+            req.apply(&databases, true)
+                .expect("Fatal error : Fail to apply DBWriteRequest !");
+            if let BlocksDBsWriteQuery::WriteBlock(
+                ref _dal_block,
+                ref _old_fork_id,
+                ref previous_blockstamp,
+                ref previous_hash,
+            ) = req
+            {
+                // Remember (previous_blockstamp -> previous_hash) for the fork
+                // index written below.
+                blockchain_meta_datas.insert(*previous_blockstamp, *previous_hash);
+                chunk_index += 1;
+                if chunk_index == 250 {
+                    // One full chunk (250 blocks) written: tick the progress bar.
+                    chunk_index = 0;
+                    apply_pb.inc();
+                }
+            }
+            wait_begin = SystemTime::now();
+        }
+
+        // Indexing blockchain meta datas
+        info!("Indexing blockchain meta datas...");
+        /*let blockchain_meta_datas: HashMap<PreviousBlockstamp, BlockHash> = databases
+        .blockchain_db
+        .read(|db| {
+            let mut blockchain_meta_datas: HashMap<
+                PreviousBlockstamp,
+                BlockHash,
+            > = HashMap::new();
+            for dal_block in db.values() {
+                let block_previous_hash = if dal_block.block.number.0 == 0 {
+                    PreviousBlockstamp::default()
+                } else {
+                    PreviousBlockstamp {
+                        id: BlockId(dal_block.block.number.0 - 1),
+                        hash: BlockHash(dal_block.block.previous_hash),
+                    }
+                };
+                blockchain_meta_datas
+                    .insert(block_previous_hash, dal_block.block.expect("Try to get hash of an uncompleted or reduce block !"));
+            }
+            blockchain_meta_datas
+        })
+        .expect("Indexing blockchain meta datas : DALError");*/
+        // The whole index goes under fork 0 (the main chain).
+        databases
+            .forks_db
+            .write(|db| {
+                db.insert(ForkId(0), blockchain_meta_datas);
+            })
+            .expect("Indexing blockchain meta datas : DALError");
+
+        // Increment progress bar (last chunk)
+        apply_pb.inc();
+        // Save blockchain, and fork databases
+        println!();
+        println!("Write indexs in files...");
+        info!("Save blockchain and forks databases in files...");
+        databases.save_dbs();
+
+        // Send finish signal
+        sender_sync_thread
+            .send(MessForSyncThread::ApplyFinish())
+            .expect("Fatal error : sync_thread unrechable !");
+        let blocks_job_duration =
+            SystemTime::now().duration_since(blocks_job_begin).unwrap() - all_wait_duration;
+        info!(
+            "blocks_job_duration={},{:03} seconds.",
+            blocks_job_duration.as_secs(),
+            blocks_job_duration.subsec_millis()
+        );
+    });
+}
diff --git a/lib/modules/blockchain/blockchain/sync/apply/mod.rs b/lib/modules/blockchain/blockchain/sync/apply/mod.rs
new file mode 100644
index 00000000..2826b034
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/mod.rs
@@ -0,0 +1,18 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+pub mod blocks_worker;
+pub mod txs_worker;
+pub mod wot_worker;
diff --git a/lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs b/lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs
new file mode 100644
index 00000000..ebd456c9
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs
@@ -0,0 +1,59 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::sync::*;
+use std::sync::mpsc;
+
+/// Launch the transactions/sources-applier worker thread on the pool.
+///
+/// Opens the currency databases for (`profile`, `currency`), consumes
+/// `SyncJobsMess::CurrencyDBsWriteQuery` messages from `recv` and applies each
+/// one, then saves the tx/sources databases and reports `ApplyFinish` on
+/// `sender_sync_thread`.
+pub fn execute(
+    pool: &ThreadPool,
+    profile: String,
+    currency: CurrencyName,
+    sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+    recv: mpsc::Receiver<SyncJobsMess>,
+) {
+    // Launch tx_worker thread
+    pool.execute(move || {
+        let tx_job_begin = SystemTime::now();
+        // Open databases
+        let db_path = duniter_conf::get_blockchain_db_path(&profile, &currency);
+        let databases = CurrencyV10DBs::open(Some(&db_path));
+
+        // Listen db requests
+        // NOTE(review): `while let` also stops on the first message that is NOT
+        // a CurrencyDBsWriteQuery, not only when the channel is closed.
+        // Time spent blocked on `recv`, subtracted from the reported job duration.
+        let mut all_wait_duration = Duration::from_millis(0);
+        let mut wait_begin = SystemTime::now();
+        while let Ok(SyncJobsMess::CurrencyDBsWriteQuery(req)) = recv.recv() {
+            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+            // Apply db request
+            req.apply(&databases)
+                .expect("Fatal error : Fail to apply DBWriteRequest !");
+            wait_begin = SystemTime::now();
+        }
+        // Save tx, utxo, du and balances databases
+        info!("Save tx and sources database in file...");
+        databases.save_dbs(true, true);
+
+        // Send finish signal
+        sender_sync_thread
+            .send(MessForSyncThread::ApplyFinish())
+            .expect("Fatal error : sync_thread unrechable !");
+        let tx_job_duration =
+            SystemTime::now().duration_since(tx_job_begin).unwrap() - all_wait_duration;
+        info!(
+            "tx_job_duration={},{:03} seconds.",
+            tx_job_duration.as_secs(),
+            tx_job_duration.subsec_millis()
+        );
+    });
+}
diff --git a/lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs b/lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs
new file mode 100644
index 00000000..33d2620a
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs
@@ -0,0 +1,63 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::sync::*;
+use std::sync::mpsc;
+
+/// Launch the web-of-trust-applier worker thread on the pool.
+///
+/// Opens the WoT databases for (`profile`, `currency`), consumes messages from
+/// `recv`: applies each `WotsDBsWriteQuery` (with its currency parameters),
+/// stops on `End`, and silently ignores any other variant. On exit it saves
+/// the WoT databases and reports `ApplyFinish` on `sender_sync_thread`.
+pub fn execute(
+    pool: &ThreadPool,
+    profile: String,
+    currency: CurrencyName,
+    sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+    recv: mpsc::Receiver<SyncJobsMess>,
+) {
+    // Launch wot_worker thread
+    pool.execute(move || {
+        let wot_job_begin = SystemTime::now();
+        // Open databases
+        let db_path = duniter_conf::get_blockchain_db_path(&profile, &currency);
+        let databases = WotsV10DBs::open(Some(&db_path));
+
+        // Listen db requests
+        // Time spent blocked on `recv`, subtracted from the reported job duration.
+        let mut all_wait_duration = Duration::from_millis(0);
+        let mut wait_begin = SystemTime::now();
+        while let Ok(mess) = recv.recv() {
+            all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+            match mess {
+                SyncJobsMess::WotsDBsWriteQuery(req, currency_params) => req
+                    .apply(&databases, &currency_params.deref())
+                    .expect("Fatal error : Fail to apply DBWriteRequest !"),
+                // Explicit stop signal from the main sync thread.
+                SyncJobsMess::End() => break,
+                // Other request kinds are destined to the other workers.
+                _ => {}
+            }
+            wait_begin = SystemTime::now();
+        }
+        // Save wots databases
+        info!("Save wots databases in files...");
+        databases.save_dbs();
+
+        // Send finish signal
+        sender_sync_thread
+            .send(MessForSyncThread::ApplyFinish())
+            .expect("Fatal error : sync_thread unrechable !");
+        let wot_job_duration =
+            SystemTime::now().duration_since(wot_job_begin).unwrap() - all_wait_duration;
+        info!(
+            "wot_job_duration={},{:03} seconds.",
+            wot_job_duration.as_secs(),
+            wot_job_duration.subsec_millis()
+        );
+    });
+}
diff --git a/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
new file mode 100644
index 00000000..8ee40951
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
@@ -0,0 +1,240 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::constants::*;
+use crate::sync::*;
+use dubp_documents::parsers::blocks::parse_json_block;
+use dubp_documents::Blockstamp;
+use durs_common_tools::fatal_error;
+use failure::Error;
+use std::collections::HashSet;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+use std::sync::mpsc;
+use threadpool::ThreadPool;
+
+/// Json reader worker
+/// Json reader worker
+///
+/// Launches a thread that reads block chunk files from `json_chunks_path` and
+/// streams their content to the main sync thread over `sender_sync_thread`:
+/// first a `Target(currency, blockstamp)` message taken from the last block of
+/// the last chunk (bounded by `end` when given), then every `BlockDocument`
+/// strictly newer than the local current blockstamp and not above the target,
+/// and finally `DownloadFinish`. Missing or unparsable chunk files are fatal.
+pub fn json_reader_worker(
+    pool: &ThreadPool,
+    profile: &str,
+    sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+    json_chunks_path: PathBuf,
+    end: Option<u32>,
+) {
+    // Launch json reader thread
+    let profile_copy = String::from(profile);
+    pool.execute(move || {
+        let ts_job_begin = SystemTime::now();
+
+        // Get list of json chunk files
+        let chunks_set = get_chunks_set(&json_chunks_path);
+        if chunks_set.is_empty() {
+            fatal_error("json_files_path directory is empty !");
+        }
+
+        // Get max chunk number and max block id
+        // Without an explicit `end`, assume chunks are contiguous from 0 and
+        // every chunk holds CHUNK_SIZE blocks.
+        let (max_chunk_number, max_block_id): (usize, u32) = if let Some(end) = end {
+            (end as usize / (*crate::constants::CHUNK_SIZE), end)
+        } else {
+            (
+                chunks_set.len() - 1,
+                (chunks_set.len() * (*crate::constants::CHUNK_SIZE) - 1) as u32,
+            )
+        };
+
+        // Verify if max chunk exist
+        if chunks_set.get(&max_chunk_number).is_none() {
+            fatal_error(&format!("Missing chunk file n°{}", max_chunk_number));
+        };
+
+        // Open chunk file
+        let chunk_file_content_result = open_json_chunk_file(&json_chunks_path, max_chunk_number);
+        if chunk_file_content_result.is_err() {
+            fatal_error(&format!("Fail to open chunk file n°{}", max_chunk_number));
+        }
+
+        // Parse chunk file content
+        let blocks_result = parse_json_chunk(&chunk_file_content_result.expect("safe unwrap"));
+        let last_chunk_blocks = match blocks_result {
+            Ok(blocks) => blocks,
+            Err(e) => {
+                fatal_error(&format!(
+                    "Fail to parse chunk file n°{} : {}",
+                    max_chunk_number, e,
+                ));
+                unreachable!();
+            }
+        };
+
+        if last_chunk_blocks.is_empty() {
+            fatal_error("Last chunk is empty !");
+        }
+
+        // The last block of the last chunk defines the sync target.
+        let last_block = &last_chunk_blocks[last_chunk_blocks.len() - 1];
+
+        // Send target blockstamp
+        sender_sync_thread
+            .send(MessForSyncThread::Target(
+                last_block.currency.clone(),
+                last_block.blockstamp(),
+            ))
+            .expect("Fatal error : sync_thread unrechable !");
+
+        // Get current local blockstamp
+        debug!("Get local current blockstamp...");
+        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &last_block.currency);
+        let blocks_databases = BlocksV10DBs::open(Some(&db_path));
+        let current_blockstamp: Blockstamp =
+            durs_blockchain_dal::block::get_current_blockstamp(&blocks_databases)
+                .expect("ForksV10DB : RustBreakError !")
+                .unwrap_or_default();
+        debug!("Success to get local current blockstamp.");
+
+        // Get first chunk number
+        // Resume from the chunk containing the local current block.
+        let first_chunk_number: usize =
+            current_blockstamp.id.0 as usize / *crate::constants::CHUNK_SIZE;
+
+        // Parse chunks
+        for chunk_number in first_chunk_number..=max_chunk_number {
+            if chunks_set.get(&chunk_number).is_some() {
+                // Open chunk file
+                let chunk_file_content_result =
+                    open_json_chunk_file(&json_chunks_path, chunk_number);
+                if chunk_file_content_result.is_err() {
+                    fatal_error(&format!("Fail to open chunk file n°{}", chunk_number));
+                }
+
+                // Parse chunk file content
+                let blocks_result =
+                    parse_json_chunk(&chunk_file_content_result.expect("safe unwrap"));
+                let blocks = match blocks_result {
+                    Ok(blocks) => blocks,
+                    Err(e) => {
+                        fatal_error(&format!(
+                            "Fail to parse chunk file n°{} : {}",
+                            chunk_number, e,
+                        ));
+                        panic!(); // for compilator
+                    }
+                };
+
+                // Send all blocks of this chunk
+                for block in blocks {
+                    // Verify if the block number is within the expected interval
+                    // (already-applied blocks and blocks past the target are skipped).
+                    let block_id = block.blockstamp().id;
+                    if block_id > current_blockstamp.id && block_id.0 <= max_block_id {
+                        // Send block document
+                        sender_sync_thread
+                            .send(MessForSyncThread::BlockDocument(Box::new(block)))
+                            .expect("Fatal error : sync_thread unrechable !");
+                    }
+                }
+            } else {
+                fatal_error(&format!("Missing chunk file n°{}", chunk_number));
+            }
+        }
+
+        sender_sync_thread
+            .send(MessForSyncThread::DownloadFinish())
+            .expect("Fatal error : sync_thread unrechable !");
+        let ts_job_duration = SystemTime::now()
+            .duration_since(ts_job_begin)
+            .expect("duration_since error");
+        info!(
+            "ts_job_duration={},{:03} seconds.",
+            ts_job_duration.as_secs(),
+            ts_job_duration.subsec_millis()
+        );
+    });
+}
+
+/// Parse json chunk into BlockDocument Vector
+fn parse_json_chunk(json_chunk_content: &str) -> Result<Vec<BlockDocument>, Error> {
+    let mut block_doc_vec = Vec::with_capacity(*crate::constants::CHUNK_SIZE);
+
+    let json_value = json_pest_parser::parse_json_string(json_chunk_content)?;
+    if let Some(json_object) = json_value.to_object() {
+        if let Some(blocks) = json_object.get("blocks") {
+            if let Some(blocks_array) = blocks.to_array() {
+                for json_block in blocks_array {
+                    block_doc_vec.push(parse_json_block(json_block)?);
+                }
+            } else {
+                fatal_error("Fail to parse json chunk : field \"blocks\" must be an array !");
+            }
+        } else {
+            fatal_error("Fail to parse json chunk : field \"blocks\" don't exist !");
+        }
+    } else {
+        fatal_error("Fail to parse json chunk : json root node must be an object !");
+    }
+
+    Ok(block_doc_vec)
+}
+
+/// Scan `dir` and collect the numbers of all chunk files present.
+///
+/// A chunk file is a regular file named
+/// `CHUNK_FILE_NAME_BEGIN + <number> + CHUNK_FILE_NAME_END`; entries that do
+/// not match this shape (including names too short to contain both markers)
+/// are ignored. Panics if `dir` cannot be read.
+fn get_chunks_set(dir: &Path) -> HashSet<usize> {
+    let json_chunk_file_list_result = fs::read_dir(dir);
+    if json_chunk_file_list_result.is_err() {
+        error!("Fail to read dir json_files_path !");
+        panic!("Fail to read dir json_files_path !");
+    }
+
+    let mut chunks_set = HashSet::new();
+
+    for dir_entry in json_chunk_file_list_result.expect("Dev error: err case must be treat before.")
+    {
+        if let Ok(dir_entry) = dir_entry {
+            if let Ok(file_name) = dir_entry.file_name().into_string() {
+                if let Ok(file_type) = dir_entry.file_type() {
+                    // The length guard plus `starts_with`/`ends_with` replace the
+                    // original direct byte slicing, which panicked on any file
+                    // whose name was shorter than the markers (or cut a UTF-8
+                    // char boundary).
+                    if file_type.is_file()
+                        && file_name.len()
+                            >= CHUNK_FILE_NAME_BEGIN.len() + CHUNK_FILE_NAME_END.len()
+                        && file_name.starts_with(CHUNK_FILE_NAME_BEGIN)
+                        && file_name.ends_with(CHUNK_FILE_NAME_END)
+                    {
+                        // Safe slice: both bounds lie on the char boundaries of the
+                        // matched ASCII markers checked just above.
+                        let chunk_number_result: Result<usize, std::num::ParseIntError> = file_name
+                            [CHUNK_FILE_NAME_BEGIN.len()
+                                ..file_name.len() - CHUNK_FILE_NAME_END.len()]
+                            .parse();
+
+                        if let Ok(chunk_number) = chunk_number_result {
+                            chunks_set.insert(chunk_number);
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    chunks_set
+}
+
+/// Read the whole content of JSON chunk file number `chunk_number` located in
+/// `json_chunks_path`.
+///
+/// # Errors
+/// Returns any I/O error raised while opening or reading the file.
+fn open_json_chunk_file(
+    json_chunks_path: &Path,
+    chunk_number: usize,
+) -> std::io::Result<String> {
+    // Build "<begin><number><end>" inside the chunks directory
+    // (Path::join replaces the clone-then-push of the original).
+    let chunk_file_path = json_chunks_path.join(format!(
+        "{}{}{}",
+        CHUNK_FILE_NAME_BEGIN, chunk_number, CHUNK_FILE_NAME_END
+    ));
+    // `read_to_string` pre-sizes its buffer from file metadata and replaces the
+    // manual File + BufReader dance; the redundant parentheses in the original
+    // return type `Result<(String)>` (rustc warning) are gone as well.
+    std::fs::read_to_string(chunk_file_path)
+}
diff --git a/lib/modules/blockchain/blockchain/sync/download/mod.rs b/lib/modules/blockchain/blockchain/sync/download/mod.rs
new file mode 100644
index 00000000..f1f3bd4a
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/download/mod.rs
@@ -0,0 +1,16 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+pub mod json_reader_worker;
diff --git a/lib/modules/blockchain/blockchain/sync/mod.rs b/lib/modules/blockchain/blockchain/sync/mod.rs
new file mode 100644
index 00000000..049084fd
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/mod.rs
@@ -0,0 +1,423 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+mod apply;
+mod download;
+
+use crate::*;
+use dubp_documents::{BlockHash, BlockId};
+use dup_crypto::keys::*;
+use durs_blockchain_dal::currency_params::CurrencyParameters;
+use durs_blockchain_dal::writers::requests::*;
+use durs_blockchain_dal::ForkId;
+use durs_common_tools::fatal_error;
+use durs_wot::NodeId;
+use pbr::ProgressBar;
+use std::collections::{HashMap, VecDeque};
+use std::fs;
+use std::sync::mpsc;
+use std::thread;
+use std::time::SystemTime;
+use threadpool::ThreadPool;
+
+/// Number of sync jobs
+/// (json downloader + blocks applier + wot applier + tx applier).
+pub static NB_SYNC_JOBS: &'static usize = &4;
+
+/*#[derive(Debug)]
+/// Sync source
+enum SyncSource {
+    Network(String),
+    LocalJsonFiles(PathBuf),
+}*/
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+/// Block header
+pub struct BlockHeader {
+    /// Block number
+    pub number: BlockId,
+    /// Block hash
+    pub hash: BlockHash,
+    /// Public key of the block issuer
+    pub issuer: PubKey,
+}
+
+#[derive(Debug)]
+/// Message for main sync thread
+pub enum MessForSyncThread {
+    /// Sync target: currency name and blockstamp of the last block to apply
+    Target(CurrencyName, Blockstamp),
+    /// A block document read from a chunk file, to check and apply
+    BlockDocument(Box<BlockDocument>),
+    /// The download worker has sent all its blocks
+    DownloadFinish(),
+    /// An apply worker has finished its job
+    ApplyFinish(),
+}
+
+#[derive(Debug)]
+/// Message for a job thread
+pub enum SyncJobsMess {
+    /// Write request for the blocks worker
+    BlocksDBsWriteQuery(BlocksDBsWriteQuery),
+    /// Write request for the wot worker, with the currency parameters it needs
+    WotsDBsWriteQuery(WotsDBsWriteQuery, Box<CurrencyParameters>),
+    /// Write request for the tx worker
+    CurrencyDBsWriteQuery(CurrencyDBsWriteQuery),
+    /// Stop signal for a worker
+    End(),
+}
+
+/// Sync
+pub fn sync<DC: DuniterConf>(
+    profile: &str,
+    conf: &DC,
+    //source: SyncSource,
+    json_files_path: PathBuf,
+    end: Option<u32>,
+    cautious: bool,
+    verif_inner_hash: bool,
+) {
+    // Get verification level
+    let _verif_level = if cautious {
+        println!("Start cautious sync...");
+        info!("Start cautious sync...");
+        SyncVerificationLevel::Cautious()
+    } else {
+        println!("Start fast sync...");
+        info!("Start fast sync...");
+        SyncVerificationLevel::FastSync()
+    };
+
+    // Create sync_thread channels
+    let (sender_sync_thread, recv_sync_thread) = mpsc::channel();
+
+    // Create ThreadPool
+    let nb_cpus = num_cpus::get();
+    let nb_workers = if nb_cpus < *NB_SYNC_JOBS {
+        nb_cpus
+    } else {
+        *NB_SYNC_JOBS
+    };
+    let pool = ThreadPool::new(nb_workers);
+
+    //match source {
+    //SyncSource::LocalJsonFiles(json_files_path) => {
+    // json_files_path must be a directory
+    if !json_files_path.is_dir() {
+        error!("json_files_path must be a directory");
+        panic!("json_files_path must be a directory");
+    }
+
+    // Launch json reader thread
+    download::json_reader_worker::json_reader_worker(
+        &pool,
+        profile,
+        sender_sync_thread.clone(),
+        json_files_path,
+        end,
+    );
+    //}
+    //SyncSource::Network(url) => unimplemented!(),
+    //}
+
+    // Get target blockstamp
+    let (currency, target_blockstamp) =
+        if let Ok(MessForSyncThread::Target(currency, target_blockstamp)) = recv_sync_thread.recv()
+        {
+            (currency, target_blockstamp)
+        } else {
+            fatal_error("Fatal error : no target blockstamp !");
+            panic!(); // for compilator
+        };
+
+    // Update DuniterConf
+    let mut conf = conf.clone();
+    conf.set_currency(currency.clone());
+
+    // Get databases path
+    let db_path = duniter_conf::get_blockchain_db_path(profile, &currency);
+
+    // Write new conf
+    duniter_conf::write_conf_file(profile, &conf).expect("Fail to write new conf !");
+
+    // Open wot db
+    let wot_db = open_wot_db::<RustyWebOfTrust>(Some(&db_path)).expect("Fail to open WotDB !");
+
+    // Open blocks databases
+    let databases = BlocksV10DBs::open(Some(&db_path));
+
+    // Open wot databases
+    let wot_databases = WotsV10DBs::open(Some(&db_path));
+
+    // Get local current blockstamp
+    debug!("Get local current blockstamp...");
+    let mut current_blockstamp: Blockstamp =
+        durs_blockchain_dal::block::get_current_blockstamp(&databases)
+            .expect("ForksV10DB : RustBreakError !")
+            .unwrap_or_default();
+    debug!("Success to get local current blockstamp.");
+
+    // Node is already synchronized ?
+    if target_blockstamp.id.0 < current_blockstamp.id.0 {
+        println!("Your duniter-rs node is already synchronized.");
+        return;
+    }
+
+    // Get wot index
+    let mut wot_index: HashMap<PubKey, NodeId> =
+        DALIdentity::get_wot_index(&wot_databases.identities_db)
+            .expect("Fatal eror : get_wot_index : Fail to read blockchain databases");
+
+    // Start sync
+    let sync_start_time = SystemTime::now();
+
+    // Create progress bar
+    let count_blocks = target_blockstamp.id.0 + 1 - current_blockstamp.id.0;
+    let count_chunks = if count_blocks % 250 > 0 {
+        (count_blocks / 250) + 1
+    } else {
+        count_blocks / 250
+    };
+    let mut apply_pb = ProgressBar::new(count_chunks.into());
+    apply_pb.format("╢▌▌░╟");
+
+    // Create workers threads channels
+    let (sender_blocks_thread, recv_blocks_thread) = mpsc::channel();
+    let (sender_wot_thread, recv_wot_thread) = mpsc::channel();
+    let (sender_tx_thread, recv_tx_thread) = mpsc::channel();
+
+    // Launch blocks_worker thread
+    apply::blocks_worker::execute(
+        &pool,
+        sender_sync_thread.clone(),
+        recv_blocks_thread,
+        databases,
+        apply_pb,
+    );
+
+    // Launch wot_worker thread
+    apply::wot_worker::execute(
+        &pool,
+        profile.to_owned(),
+        currency.clone(),
+        sender_sync_thread.clone(),
+        recv_wot_thread,
+    );
+
+    // Launch tx_worker thread
+    apply::txs_worker::execute(
+        &pool,
+        profile.to_owned(),
+        currency.clone(),
+        sender_sync_thread.clone(),
+        recv_tx_thread,
+    );
+
+    let main_job_begin = SystemTime::now();
+
+    // Open currency_params_db
+    let dbs_path = duniter_conf::get_blockchain_db_path(profile, &conf.currency());
+    let currency_params_db = open_file_db::<CurrencyParamsV10Datas>(&dbs_path, "params.db")
+        .expect("Fail to open params db");
+
+    // Apply blocks
+    let mut blocks_not_expiring = VecDeque::with_capacity(200_000);
+    let mut last_block_expiring: isize = -1;
+    let certs_db =
+        BinDB::Mem(open_memory_db::<CertsExpirV10Datas>().expect("Fail to create memory certs_db"));
+    let mut currency_params = CurrencyParameters::default();
+    let mut get_currency_params = false;
+    let mut certs_count = 0;
+
+    let mut all_wait_duration = Duration::from_millis(0);
+    let mut wait_begin = SystemTime::now();
+    let mut all_verif_block_hashs_duration = Duration::from_millis(0);
+    let mut all_apply_valid_block_duration = Duration::from_millis(0);
+    while let Ok(MessForSyncThread::BlockDocument(block_doc)) = recv_sync_thread.recv() {
+        all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+        let block_doc = block_doc.deref();
+        // Verify block hashs
+        let verif_block_hashs_begin = SystemTime::now();
+        if verif_inner_hash {
+            verify_block_hashs(&block_doc)
+                .expect("Receive wrong block, please reset data and resync !");
+        }
+        all_verif_block_hashs_duration += SystemTime::now()
+            .duration_since(verif_block_hashs_begin)
+            .unwrap();
+        // Get currency params
+        if !get_currency_params && block_doc.number.0 == 0 {
+            if block_doc.parameters.is_some() {
+                currency_params_db
+                    .write(|db| {
+                        db.0 = block_doc.currency.clone();
+                        db.1 = block_doc.parameters.unwrap();
+                    })
+                    .expect("fail to write in params DB");
+                currency_params = CurrencyParameters::from((
+                    block_doc.currency.clone(),
+                    block_doc.parameters.unwrap(),
+                ));
+                get_currency_params = true;
+            } else {
+                panic!("The genesis block are None parameters !");
+            }
+        }
+        // Push block median_time in blocks_not_expiring
+        blocks_not_expiring.push_back(block_doc.median_time);
+        // Get blocks_expiring
+        let mut blocks_expiring = Vec::new();
+        while blocks_not_expiring.front().cloned()
+            < Some(block_doc.median_time - currency_params.sig_validity)
+        {
+            last_block_expiring += 1;
+            blocks_expiring.push(BlockId(last_block_expiring as u32));
+            blocks_not_expiring.pop_front();
+        }
+        // Find expire_certs
+        let expire_certs =
+            durs_blockchain_dal::certs::find_expire_certs(&certs_db, blocks_expiring)
+                .expect("find_expire_certs() : DALError");
+        // Apply block
+        let apply_valid_block_begin = SystemTime::now();
+        if let Ok(ValidBlockApplyReqs(block_req, wot_db_reqs, currency_db_reqs)) =
+            apply_valid_block::<RustyWebOfTrust>(
+                &block_doc,
+                &mut wot_index,
+                &wot_db,
+                &expire_certs,
+                None,
+            )
+        {
+            all_apply_valid_block_duration += SystemTime::now()
+                .duration_since(apply_valid_block_begin)
+                .unwrap();
+            current_blockstamp = block_doc.blockstamp();
+            debug!("Apply db requests...");
+            // Send block request to blocks worker thread
+            sender_blocks_thread
+                .send(SyncJobsMess::BlocksDBsWriteQuery(block_req.clone()))
+                .expect(
+                    "Fail to communicate with blocks worker thread, please reset data & resync !",
+                );
+            // Send wot requests to wot worker thread
+            for req in wot_db_reqs {
+                if let WotsDBsWriteQuery::CreateCert(
+                    ref _source_pubkey,
+                    ref source,
+                    ref target,
+                    ref created_block_id,
+                    ref _median_time,
+                ) = req
+                {
+                    certs_count += 1;
+                    // Add cert in certs_db
+                    certs_db
+                        .write(|db| {
+                            let mut created_certs =
+                                db.get(&created_block_id).cloned().unwrap_or_default();
+                            created_certs.insert((*source, *target));
+                            db.insert(*created_block_id, created_certs);
+                        })
+                        .expect("RustBreakError : please reset data and resync !");
+                }
+                sender_wot_thread
+                    .send(SyncJobsMess::WotsDBsWriteQuery(
+                        req.clone(),
+                        Box::new(currency_params),
+                    ))
+                    .expect(
+                        "Fail to communicate with wot worker thread, please reset data & resync !",
+                    )
+            }
+            // Send currency DBs requests to the tx worker thread
+            for req in currency_db_reqs {
+                sender_tx_thread
+                    .send(SyncJobsMess::CurrencyDBsWriteQuery(req.clone()))
+                    .expect(
+                        "Fail to communicate with tx worker thread, please reset data & resync !",
+                    );
+            }
+            debug!("Success to apply block #{}", current_blockstamp.id.0);
+            if current_blockstamp.id.0 >= target_blockstamp.id.0 {
+                if current_blockstamp == target_blockstamp {
+                    // Sync completed
+                    break;
+                } else {
+                    panic!("Fatal Error : we get a fork, please reset data and sync again !");
+                }
+            }
+        } else {
+            panic!(
+                "Fatal error : fail to stack up block #{}",
+                current_blockstamp.id.0 + 1
+            )
+        }
+        wait_begin = SystemTime::now();
+    }
+    // Send end signal to workers threads
+    sender_blocks_thread
+        .send(SyncJobsMess::End())
+        .expect("Sync : Fail to send End signal to blocks worker !");
+    info!("Sync : send End signal to blocks job.");
+    sender_wot_thread
+        .send(SyncJobsMess::End())
+        .expect("Sync : Fail to send End signal to wot worker !");
+    info!("Sync : send End signal to wot job.");
+    sender_tx_thread
+        .send(SyncJobsMess::End())
+        .expect("Sync : Fail to send End signal to writer worker !");
+    info!("Sync : send End signal to tx job.");
+
+    // Save params db
+    currency_params_db.save().expect("Fail to save params db");
+
+    // Save wot file
+    wot_db.save().expect("Fail to save wot db");
+
+    let main_job_duration =
+        SystemTime::now().duration_since(main_job_begin).unwrap() - all_wait_duration;
+    info!(
+        "main_job_duration={},{:03} seconds.",
+        main_job_duration.as_secs(),
+        main_job_duration.subsec_millis()
+    );
+    info!(
+        "all_verif_block_hashs_duration={},{:03} seconds.",
+        all_verif_block_hashs_duration.as_secs(),
+        all_verif_block_hashs_duration.subsec_millis()
+    );
+    info!(
+        "all_apply_valid_block_duration={},{:03} seconds.",
+        all_apply_valid_block_duration.as_secs(),
+        all_apply_valid_block_duration.subsec_millis()
+    );
+
+    // Wait to receive the finish signals of the other sync jobs
+    let mut wait_jobs = *NB_SYNC_JOBS - 1;
+    while wait_jobs > 0 {
+        match recv_sync_thread.recv() {
+            Ok(MessForSyncThread::ApplyFinish()) => wait_jobs -= 1,
+            Ok(_) => thread::sleep(Duration::from_millis(50)),
+            Err(_) => wait_jobs -= 1,
+        }
+    }
+    info!("All sync jobs finish.");
+
+    // Log sync duration
+    debug!("certs_count={}", certs_count);
+    let sync_duration = SystemTime::now().duration_since(sync_start_time).unwrap();
+    println!(
+        "Sync {} blocks in {}.{:03} seconds.",
+        current_blockstamp.id.0 + 1,
+        sync_duration.as_secs(),
+        sync_duration.subsec_millis(),
+    );
+    info!(
+        "Sync {} blocks in {}.{:03} seconds.",
+        current_blockstamp.id.0 + 1,
+        sync_duration.as_secs(),
+        sync_duration.subsec_millis(),
+    );
+}
diff --git a/lib/modules/blockchain/blockchain/ts_parsers.rs b/lib/modules/blockchain/blockchain/ts_parsers.rs
index cf9a5ddd..de303563 100644
--- a/lib/modules/blockchain/blockchain/ts_parsers.rs
+++ b/lib/modules/blockchain/blockchain/ts_parsers.rs
@@ -21,7 +21,6 @@ use dubp_documents::documents::transaction::*;
 use dubp_documents::CurrencyName;
 use dubp_documents::DocumentBuilder;
 use dubp_documents::{BlockHash, BlockId, Blockstamp};
-use duniter_network::documents::{NetworkBlock, NetworkBlockV10};
 use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use std::str::FromStr;
@@ -32,184 +31,6 @@ pub enum MembershipParseError {
     WrongFormat(),
 }
 
-/// Parse a block from duniter-ts database
-pub fn parse_ts_block(row: &[sqlite::Value]) -> NetworkBlock {
-    let current_header = BlockHeader {
-        number: BlockId(row[16].as_integer().expect("Fail to parse block number") as u32),
-        hash: BlockHash(
-            Hash::from_hex(row[0].as_string().expect("Fail to parse block hash"))
-                .expect("Fail to parse block hash (2)"),
-        ),
-        issuer: PubKey::Ed25519(
-            ed25519::PublicKey::from_base58(
-                row[4].as_string().expect("Fail to parse block issuer"),
-            )
-            .expect("Failt to parse block issuer (2)"),
-        ),
-    };
-    let previous_header = if current_header.number.0 > 0 {
-        Some(BlockHeader {
-            number: BlockId(current_header.number.0 - 1),
-            hash: BlockHash(
-                Hash::from_hex(
-                    row[6]
-                        .as_string()
-                        .expect("Fail to parse block previous hash"),
-                )
-                .expect("Fail to parse block previous hash (2)"),
-            ),
-            issuer: PubKey::Ed25519(
-                ed25519::PublicKey::from_base58(
-                    row[7]
-                        .as_string()
-                        .expect("Fail to parse previous block issuer"),
-                )
-                .expect("Fail to parse previous block issuer (2)"),
-            ),
-        })
-    } else {
-        None
-    };
-    let currency = row[3].as_string().expect("Fail to parse currency");
-    let parameters = if let Some(params_str) = row[5].as_string() {
-        if let Ok(params) = BlockV10Parameters::from_str(params_str) {
-            Some(params)
-        } else {
-            None
-        }
-    } else {
-        None
-    };
-    let dividend = match row[12].as_integer() {
-        Some(dividend) => Some(dividend as usize),
-        None => None,
-    };
-    let json_identities: serde_json::Value =
-        serde_json::from_str(row[20].as_string().expect("Fail to parse block identities"))
-            .expect("Fail to parse block identities (2)");
-    let mut identities = Vec::new();
-    for raw_idty in json_identities
-        .as_array()
-        .expect("Fail to parse block identities (3)")
-    {
-        identities
-            .push(parse_compact_identity(&currency, &raw_idty).expect("Fail to parse block idty"));
-    }
-    let json_txs: serde_json::Value =
-        serde_json::from_str(row[18].as_string().expect("Fail to parse block txs"))
-            .expect("Fail to parse block txs (2)");
-    let mut transactions = Vec::new();
-    for json_tx in json_txs.as_array().expect("Fail to parse block txs (3)") {
-        transactions.push(TxDocOrTxHash::TxDoc(Box::new(
-            parse_transaction(currency, &json_tx).expect("Fail to parse block tx"),
-        )));
-    }
-    let previous_hash = match previous_header.clone() {
-        Some(previous_header_) => previous_header_.hash.0,
-        None => Hash::default(),
-    };
-    let previous_issuer = match previous_header {
-        Some(previous_header_) => Some(previous_header_.issuer),
-        None => None,
-    };
-    let excluded: serde_json::Value =
-        serde_json::from_str(row[25].as_string().expect("Fail to parse excluded"))
-            .expect("Fail to parse excluded (2)");
-    let uncompleted_block_doc = BlockDocument {
-        nonce: row[17].as_integer().expect("Fail to parse nonce") as u64,
-        version: row[8].as_integer().expect("Fail to parse version") as u32,
-        number: current_header.number,
-        pow_min: row[15].as_integer().expect("Fail to parse pow_min") as usize,
-        time: row[14].as_integer().expect("Fail to parse time") as u64,
-        median_time: row[11].as_integer().expect("Fail to parse median_time") as u64,
-        members_count: row[9].as_integer().expect("Fail to parse members_count") as usize,
-        monetary_mass: row[10]
-            .as_string()
-            .expect("Fail to parse monetary_mass")
-            .parse()
-            .expect("Fail to parse monetary_mass (2)"),
-        unit_base: row[13].as_integer().expect("Fail to parse unit_base") as usize,
-        issuers_count: row[28].as_integer().expect("Fail to parse issuers_count") as usize,
-        issuers_frame: row[26].as_integer().expect("Fail to parse issuers_frame") as isize,
-        issuers_frame_var: row[27]
-            .as_integer()
-            .expect("Fail to parse issuers_frame_var") as isize,
-        currency: CurrencyName(String::from(currency)),
-        issuers: vec![PubKey::Ed25519(
-            ed25519::PublicKey::from_base58(row[4].as_string().expect("Fail to parse issuer"))
-                .expect("Fail to parse issuer '2)"),
-        )],
-        signatures: vec![Sig::Ed25519(
-            ed25519::Signature::from_base64(row[2].as_string().expect("Fail to parse signature"))
-                .expect("Fail to parse signature (2)"),
-        )],
-        hash: Some(current_header.hash),
-        parameters,
-        previous_hash,
-        previous_issuer,
-        inner_hash: Some(
-            Hash::from_hex(row[1].as_string().expect("Fail to parse block inner_hash"))
-                .expect("Fail to parse block inner_hash (2)"),
-        ),
-        dividend,
-        identities,
-        joiners: parse_memberships(
-            currency,
-            MembershipType::In(),
-            row[21].as_string().expect("Fail to parse joiners"),
-        )
-        .expect("Fail to parse joiners (2)"),
-        actives: parse_memberships(
-            currency,
-            MembershipType::In(),
-            row[22].as_string().expect("Fail to parse actives"),
-        )
-        .expect("Fail to parse actives (2)"),
-        leavers: parse_memberships(
-            currency,
-            MembershipType::In(),
-            row[23].as_string().expect("Fail to parse leavers"),
-        )
-        .expect("Fail to parse leavers (2)"),
-        revoked: Vec::new(),
-        excluded: excluded
-            .as_array()
-            .expect("Fail to parse excluded (3)")
-            .to_vec()
-            .into_iter()
-            .map(|e| {
-                PubKey::Ed25519(
-                    ed25519::PublicKey::from_base58(
-                        e.as_str().expect("Fail to parse excluded (4)"),
-                    )
-                    .expect("Fail to parse excluded (5)"),
-                )
-            })
-            .collect(),
-        certifications: Vec::new(),
-        transactions,
-        inner_hash_and_nonce_str: String::new(),
-    };
-    let revoked: serde_json::Value =
-        serde_json::from_str(row[24].as_string().expect("Fail to parse revoked"))
-            .expect("Fail to parse revoked (2)");
-    let certifications: serde_json::Value =
-        serde_json::from_str(row[19].as_string().expect("Fail to parse certifications"))
-            .expect("Fail to parse certifications (2)");
-    // return NetworkBlock
-    NetworkBlock::V10(Box::new(NetworkBlockV10 {
-        uncompleted_block_doc,
-        revoked: revoked
-            .as_array()
-            .expect("Fail to parse revoked (3)")
-            .to_vec(),
-        certifications: certifications
-            .as_array()
-            .expect("Fail to parse certifications (3)")
-            .to_vec(),
-    }))
-}
-
 /// Parse a compact identity
 pub fn parse_compact_identity(
     currency: &str,
diff --git a/lib/modules/ws2p-v1-legacy/lib.rs b/lib/modules/ws2p-v1-legacy/lib.rs
index c0755d7e..aa000ed9 100644
--- a/lib/modules/ws2p-v1-legacy/lib.rs
+++ b/lib/modules/ws2p-v1-legacy/lib.rs
@@ -759,7 +759,9 @@ impl DursModule<DuRsConf, DursMsg> for WS2PModule {
                                         let mut chunk = Vec::new();
                                         for json_block in response.as_array().unwrap() {
                                             if let Some(block) = parse_json_block(json_block) {
-                                                chunk.push(BlockchainDocument::Block(block));
+                                                chunk.push(BlockchainDocument::Block(Box::new(
+                                                    block,
+                                                )));
                                             } else {
                                                 warn!("WS2PModule: Error : fail to parse one json block !");
                                             }
@@ -935,7 +937,6 @@ mod tests {
     use super::*;
     use dubp_documents::documents::block::BlockDocument;
     use duniter_module::DursModule;
-    use duniter_network::documents::NetworkBlock;
     use dup_crypto::keys::PublicKey;
     use durs_network_documents::network_endpoint::NetworkEndpointApi;
     use std::fs;
@@ -1089,12 +1090,7 @@ mod tests {
             ],
         });
         let mut block: BlockDocument =
-            match parse_json_block(&json_block).expect("Fail to parse test json block !") {
-                NetworkBlock::V10(network_block_v10) => network_block_v10.uncompleted_block_doc,
-                _ => {
-                    panic!("Test block must be a v10 block !");
-                }
-            };
+            parse_json_block(&json_block).expect("Fail to parse test json block !");
         assert_eq!(
             block
                 .inner_hash
diff --git a/lib/modules/ws2p-v1-legacy/parsers/blocks.rs b/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
index 9a8dd79c..18f487b5 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
@@ -4,9 +4,10 @@ use super::transactions::parse_transaction;
 use dubp_documents::documents::block::BlockDocument;
 use dubp_documents::documents::block::{BlockV10Parameters, TxDocOrTxHash};
 use dubp_documents::documents::membership::*;
+use dubp_documents::parsers::certifications::*;
+use dubp_documents::parsers::revoked::*;
 use dubp_documents::CurrencyName;
 use dubp_documents::{BlockHash, BlockId};
-use duniter_network::documents::{NetworkBlock, NetworkBlockV10};
 use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use std::str::FromStr;
@@ -57,7 +58,7 @@ fn parse_memberships(
     Some(memberships)
 }
 
-pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
+pub fn parse_json_block(source: &serde_json::Value) -> Option<BlockDocument> {
     let number = BlockId(source.get("number")?.as_u64()? as u32);
     let currency = source.get("currency")?.as_str()?.to_string();
     let issuer = match ed25519::PublicKey::from_base58(source.get("issuer")?.as_str()?) {
@@ -98,13 +99,25 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
     let joiners = parse_memberships(&currency, MembershipType::In(), source.get("joiners")?)?;
     let actives = parse_memberships(&currency, MembershipType::In(), source.get("actives")?)?;
     let leavers = parse_memberships(&currency, MembershipType::Out(), source.get("actives")?)?;
+    let revoked: Vec<&str> = source
+        .get("revoked")?
+        .as_array()?
+        .iter()
+        .map(|v| v.as_str().unwrap_or(""))
+        .collect();
+    let certifications: Vec<&str> = source
+        .get("certifications")?
+        .as_array()?
+        .iter()
+        .map(|v| v.as_str().unwrap_or(""))
+        .collect();
     let mut transactions = Vec::new();
     for json_tx in source.get("transactions")?.as_array()? {
         transactions.push(TxDocOrTxHash::TxDoc(Box::new(parse_transaction(
             "g1", &json_tx,
         )?)));
     }
-    let block_doc = BlockDocument {
+    Some(BlockDocument {
         nonce: source.get("nonce")?.as_i64()? as u64,
         version: source.get("version")?.as_u64()? as u32,
         number: BlockId(source.get("number")?.as_u64()? as u32),
@@ -130,9 +143,9 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
         joiners,
         actives,
         leavers,
-        revoked: Vec::with_capacity(0),
+        revoked: parse_revocations_into_compact(&revoked),
         excluded: parse_exclusions_from_json_value(&source.get("excluded")?.as_array()?),
-        certifications: Vec::with_capacity(0),
+        certifications: parse_certifications_into_compact(&certifications),
         transactions,
         inner_hash_and_nonce_str: format!(
             "InnerHash: {}\nNonce: {}\n",
@@ -141,10 +154,5 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
                 .to_hex(),
             source.get("nonce")?.as_u64()?
         ),
-    };
-    Some(NetworkBlock::V10(Box::new(NetworkBlockV10 {
-        uncompleted_block_doc: block_doc,
-        revoked: source.get("revoked")?.as_array()?.clone(),
-        certifications: source.get("certifications")?.as_array()?.clone(),
-    })))
+    })
 }
diff --git a/lib/modules/ws2p-v1-legacy/ws2p_connection.rs b/lib/modules/ws2p-v1-legacy/ws2p_connection.rs
index 64a16611..e56098ad 100644
--- a/lib/modules/ws2p-v1-legacy/ws2p_connection.rs
+++ b/lib/modules/ws2p-v1-legacy/ws2p_connection.rs
@@ -485,9 +485,9 @@ impl WS2PConnectionMetaDatas {
                         match s.as_str().unwrap() {
                             "BLOCK" => match body.get("block") {
                                 Some(block) => {
-                                    if let Some(network_block) = parse_json_block(&block) {
+                                    if let Some(block_doc) = parse_json_block(&block) {
                                         return WS2PConnectionMessagePayload::Document(
-                                            BlockchainDocument::Block(network_block),
+                                            BlockchainDocument::Block(Box::new(block_doc)),
                                         );
                                     } else {
                                         info!("WS2PSignal: receive invalid block (wrong format).");
-- 
GitLab


From 2a9c468b6af1a8afb8770a3b82337b4d0a095dfd Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Mon, 21 Jan 2019 23:02:39 +0100
Subject: [PATCH 13/26] [tests] test json block parser + [feat] add tx parser

---
 lib/tools/documents/src/lib.rs                |   6 +-
 lib/tools/documents/src/parsers/blocks.rs     | 296 +++++++++++++-----
 lib/tools/documents/src/parsers/excluded.rs   |  28 --
 lib/tools/documents/src/parsers/mod.rs        | 154 ++++++++-
 .../documents/src/parsers/transactions.rs     | 120 +++++++
 5 files changed, 495 insertions(+), 109 deletions(-)
 delete mode 100644 lib/tools/documents/src/parsers/excluded.rs
 create mode 100644 lib/tools/documents/src/parsers/transactions.rs

diff --git a/lib/tools/documents/src/lib.rs b/lib/tools/documents/src/lib.rs
index 3e6d06fb..6ca76fd2 100644
--- a/lib/tools/documents/src/lib.rs
+++ b/lib/tools/documents/src/lib.rs
@@ -72,14 +72,18 @@ pub trait TextDocumentParser<R: RuleType> {
 }
 
 /// List of possible errors while parsing.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Fail)]
 pub enum TextDocumentParseError {
     /// The given source don't have a valid specific document format (document type).
+    #[fail(display = "TextDocumentParseError: Invalid inner format.")]
     InvalidInnerFormat(&'static str),
     /// Error with pest parser
+    #[fail(display = "TextDocumentParseError: PestError.")]
     PestError(String),
+    #[fail(display = "TextDocumentParseError: UnexpectedVersion.")]
     /// UnexpectedVersion
     UnexpectedVersion(String),
+    #[fail(display = "TextDocumentParseError: UnknownType.")]
     /// Unknown type
     UnknownType,
 }
diff --git a/lib/tools/documents/src/parsers/blocks.rs b/lib/tools/documents/src/parsers/blocks.rs
index 2b0fa0ba..86775952 100644
--- a/lib/tools/documents/src/parsers/blocks.rs
+++ b/lib/tools/documents/src/parsers/blocks.rs
@@ -21,11 +21,10 @@ use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use failure::Error;
 use json_pest_parser::JSONValue;
-use std::collections::HashMap;
 
 pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error> {
     if !json_block.is_object() {
-        return Err(ParseBlockError {
+        return Err(ParseJsonError {
             cause: "Json block must be an object !".to_owned(),
         }
         .into());
@@ -87,9 +86,11 @@ pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error>
         revoked: crate::parsers::revoked::parse_revocations_into_compact(&get_str_array(
             json_block, "revoked",
         )?),
-        excluded: crate::parsers::excluded::parse_excluded(&get_str_array(
-            json_block, "excluded",
-        )?)?,
+        excluded: get_str_array(json_block, "excluded")?
+            .iter()
+            .map(|p| ed25519::PublicKey::from_base58(p))
+            .map(|p| p.map(PubKey::Ed25519))
+            .collect::<Result<Vec<PubKey>, BaseConvertionError>>()?,
         certifications: crate::parsers::certifications::parse_certifications_into_compact(
             &get_str_array(json_block, "certifications")?,
         ),
@@ -98,74 +99,225 @@ pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error>
     })
 }
 
-fn get_optional_usize(
-    json_block: &HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<Option<usize>, ParseBlockError> {
-    Ok(match json_block.get(field) {
-        Some(value) => {
-            if !value.is_null() {
-                Some(
-                    value
-                        .to_number()
-                        .ok_or_else(|| ParseBlockError {
-                            cause: format!("Json block {} field must be a number !", field),
-                        })?
-                        .trunc() as usize,
-                )
-            } else {
-                None
-            }
-        }
-        None => None,
-    })
-}
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::documents::block::TxDocOrTxHash;
 
-fn get_number(json_block: &HashMap<&str, JSONValue>, field: &str) -> Result<f64, ParseBlockError> {
-    Ok(json_block
-        .get(field)
-        .ok_or_else(|| ParseBlockError {
-            cause: format!("Json block must have {} field !", field),
-        })?
-        .to_number()
-        .ok_or_else(|| ParseBlockError {
-            cause: format!("Json block {} field must be a number !", field),
-        })?)
-}
+    #[test]
+    fn parse_empty_json_block() {
+        let block_json_str = r#"{
+   "version": 10,
+   "nonce": 10200000037108,
+   "number": 7,
+   "powMin": 70,
+   "time": 1488987677,
+   "medianTime": 1488987394,
+   "membersCount": 59,
+   "monetaryMass": 59000,
+   "unitbase": 0,
+   "issuersCount": 1,
+   "issuersFrame": 6,
+   "issuersFrameVar": 0,
+   "currency": "g1",
+   "issuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+   "signature": "xaWNjdFeE4yr9+AKckgR6QuAvMzmKUWfY+uIlC3HKjn2apJqG70Gf59A71W+Ucz6E9WPXRzDDF/xOrf6GCGHCA==",
+   "hash": "0000407900D981FC17B5A6FBCF8E8AFA4C00FAD7AFC5BEA9A96FF505E5D105EC",
+   "parameters": "",
+   "previousHash": "0000379BBE6ABC18DCFD6E4733F9F76CB06593D10FAEDF722BE190C277AC16EA",
+   "previousIssuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+   "inner_hash": "CF2701092D5A34A55802E343B5F8D61D9B7E8089F1F13A19721234DF5B2F0F38",
+   "dividend": null,
+   "identities": [],
+   "joiners": [],
+   "actives": [],
+   "leavers": [],
+   "revoked": [],
+   "excluded": [],
+   "certifications": [],
+   "transactions": [],
+   "raw": "Version: 10\nType: Block\nCurrency: g1\nNumber: 7\nPoWMin: 70\nTime: 1488987677\nMedianTime: 1488987394\nUnitBase: 0\nIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nIssuersFrame: 6\nIssuersFrameVar: 0\nDifferentIssuersCount: 1\nPreviousHash: 0000379BBE6ABC18DCFD6E4733F9F76CB06593D10FAEDF722BE190C277AC16EA\nPreviousIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nMembersCount: 59\nIdentities:\nJoiners:\nActives:\nLeavers:\nRevoked:\nExcluded:\nCertifications:\nTransactions:\nInnerHash: CF2701092D5A34A55802E343B5F8D61D9B7E8089F1F13A19721234DF5B2F0F38\nNonce: 10200000037108\n"
+  }"#;
 
-fn get_str<'a>(
-    json_block: &'a HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<&'a str, ParseBlockError> {
-    Ok(json_block
-        .get(field)
-        .ok_or_else(|| ParseBlockError {
-            cause: format!("Json block must have {} field !", field),
-        })?
-        .to_str()
-        .ok_or_else(|| ParseBlockError {
-            cause: format!("Json block {} field must be a string !", field),
-        })?)
-}
+        let block_json_value = json_pest_parser::parse_json_string(block_json_str)
+            .expect("Fail to parse json block !");
+        assert_eq!(
+            BlockDocument {
+                version: 10,
+                nonce: 10200000037108,
+                number: BlockId(7),
+                pow_min: 70,
+                time: 1488987677,
+                median_time: 1488987394,
+                members_count: 59,
+                monetary_mass: 59000,
+                unit_base: 0,
+                issuers_count: 1,
+                issuers_frame: 6,
+                issuers_frame_var: 0,
+                currency: CurrencyName("g1".to_owned()),
+                issuers: vec![PubKey::Ed25519(
+                    ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+                        .expect("Fail to parse issuer !")
+                )],
+                signatures: vec![Sig::Ed25519(
+                    ed25519::Signature::from_base64("xaWNjdFeE4yr9+AKckgR6QuAvMzmKUWfY+uIlC3HKjn2apJqG70Gf59A71W+Ucz6E9WPXRzDDF/xOrf6GCGHCA==").expect("Fail to parse sig !")
+                )],
+                hash: Some(BlockHash(
+                    Hash::from_hex(
+                        "0000407900D981FC17B5A6FBCF8E8AFA4C00FAD7AFC5BEA9A96FF505E5D105EC"
+                    )
+                    .expect("Fail to parse hash !")
+                )),
+                parameters: None,
+                previous_hash: Hash::from_hex(
+                    "0000379BBE6ABC18DCFD6E4733F9F76CB06593D10FAEDF722BE190C277AC16EA"
+                )
+                .expect("Fail to parse previous_hash !"),
+                previous_issuer: Some(PubKey::Ed25519(
+                    ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+                        .expect("Fail to parse previous issuer !")
+                )),
+                inner_hash: Some(
+                    Hash::from_hex(
+                        "CF2701092D5A34A55802E343B5F8D61D9B7E8089F1F13A19721234DF5B2F0F38"
+                    )
+                    .expect("Fail to parse inner hash !")
+                ),
+                dividend: None,
+                identities: vec![],
+                joiners: vec![],
+                actives: vec![],
+                leavers: vec![],
+                revoked: vec![],
+                excluded: vec![],
+                certifications: vec![],
+                transactions: vec![],
+                inner_hash_and_nonce_str: "".to_owned(),
+            },
+            parse_json_block(&block_json_value).expect("Fail to parse block_json_value !")
+        );
+    }
+
+    #[test]
+    fn parse_json_block_with_one_tx() {
+        let block_json_str = r#"{
+   "version": 10,
+   "nonce": 10100000033688,
+   "number": 52,
+   "powMin": 74,
+   "time": 1488990898,
+   "medianTime": 1488990117,
+   "membersCount": 59,
+   "monetaryMass": 59000,
+   "unitbase": 0,
+   "issuersCount": 1,
+   "issuersFrame": 6,
+   "issuersFrameVar": 0,
+   "currency": "g1",
+   "issuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+   "signature": "4/UIwXzWQekbYw7fpD8ueMH4GnDEwCM+DvDaTfquBXOvFXLRYo/S+Vrk5u7so/98gYaZ2O7Myh20xgQvhh5FDQ==",
+   "hash": "000057D4B29AF6DADB16F841F19C54C00EB244CECA9C8F2D4839D54E5F91451C",
+   "parameters": "",
+   "previousHash": "00000FEDA61240DD125A26886FEB2E6995B52A94778C71224CAF8492FF257D47",
+   "previousIssuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+   "inner_hash": "6B27ACDA51F416449E5A61FC69438F8974D11FC27EB7A992410C276FC0B9BA5F",
+   "dividend": null,
+   "identities": [],
+   "joiners": [],
+   "actives": [],
+   "leavers": [],
+   "revoked": [],
+   "excluded": [],
+   "certifications": [],
+   "transactions": [
+    {
+     "version": 10,
+     "currency": "g1",
+     "locktime": 0,
+     "blockstamp": "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+     "blockstampTime": 1488990016,
+     "issuers": [
+      "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ"
+     ],
+     "inputs": [
+      "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1"
+     ],
+     "outputs": [
+      "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+      "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)"
+     ],
+     "unlocks": [
+      "0:SIG(0)"
+     ],
+     "signatures": [
+      "fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw=="
+     ],
+     "comment": "TEST",
+     "block_number": 0,
+     "time": 0
+    }
+   ],
+   "raw": "Version: 10\nType: Block\nCurrency: g1\nNumber: 52\nPoWMin: 74\nTime: 1488990898\nMedianTime: 1488990117\nUnitBase: 0\nIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nIssuersFrame: 6\nIssuersFrameVar: 0\nDifferentIssuersCount: 1\nPreviousHash: 00000FEDA61240DD125A26886FEB2E6995B52A94778C71224CAF8492FF257D47\nPreviousIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nMembersCount: 59\nIdentities:\nJoiners:\nActives:\nLeavers:\nRevoked:\nExcluded:\nCertifications:\nTransactions:\nTX:10:1:1:1:2:1:0\n50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7\n2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\n1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1\n0:SIG(0)\n1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)\n999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)\nTEST\nfAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw==\nInnerHash: 6B27ACDA51F416449E5A61FC69438F8974D11FC27EB7A992410C276FC0B9BA5F\nNonce: 10100000033688\n"
+  }"#;
 
-fn get_str_array<'a>(
-    json_block: &'a HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<Vec<&'a str>, ParseBlockError> {
-    json_block
-        .get(field)
-        .ok_or_else(|| ParseBlockError {
-            cause: format!("Json block must have {} field !", field),
-        })?
-        .to_array()
-        .ok_or_else(|| ParseBlockError {
-            cause: format!("Json block {} field must be an array !", field),
-        })?
-        .iter()
-        .map(|v| {
-            v.to_str().ok_or_else(|| ParseBlockError {
-                cause: format!("Json block {} field must be an array of string !", field),
-            })
-        })
-        .collect()
+        let block_json_value = json_pest_parser::parse_json_string(block_json_str)
+            .expect("Fail to parse json block !");
+        assert_eq!(
+            BlockDocument {
+                version: 10,
+                nonce: 10100000033688,
+                number: BlockId(52),
+                pow_min: 74,
+                time: 1488990898,
+                median_time: 1488990117,
+                members_count: 59,
+                monetary_mass: 59000,
+                unit_base: 0,
+                issuers_count: 1,
+                issuers_frame: 6,
+                issuers_frame_var: 0,
+                currency: CurrencyName("g1".to_owned()),
+                issuers: vec![PubKey::Ed25519(
+                    ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+                        .expect("Fail to parse issuer !")
+                )],
+                signatures: vec![Sig::Ed25519(
+                    ed25519::Signature::from_base64("4/UIwXzWQekbYw7fpD8ueMH4GnDEwCM+DvDaTfquBXOvFXLRYo/S+Vrk5u7so/98gYaZ2O7Myh20xgQvhh5FDQ==").expect("Fail to parse sig !")
+                )],
+                hash: Some(BlockHash(
+                    Hash::from_hex(
+                        "000057D4B29AF6DADB16F841F19C54C00EB244CECA9C8F2D4839D54E5F91451C"
+                    )
+                    .expect("Fail to parse hash !")
+                )),
+                parameters: None,
+                previous_hash: Hash::from_hex(
+                    "00000FEDA61240DD125A26886FEB2E6995B52A94778C71224CAF8492FF257D47"
+                )
+                .expect("Fail to parse previous_hash !"),
+                previous_issuer: Some(PubKey::Ed25519(
+                    ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+                        .expect("Fail to parse previous issuer !")
+                )),
+                inner_hash: Some(
+                    Hash::from_hex(
+                        "6B27ACDA51F416449E5A61FC69438F8974D11FC27EB7A992410C276FC0B9BA5F"
+                    )
+                    .expect("Fail to parse inner hash !")
+                ),
+                dividend: None,
+                identities: vec![],
+                joiners: vec![],
+                actives: vec![],
+                leavers: vec![],
+                revoked: vec![],
+                excluded: vec![],
+                certifications: vec![],
+                transactions: vec![TxDocOrTxHash::TxDoc(Box::new(crate::parsers::tests::first_g1_tx_doc()))],
+                inner_hash_and_nonce_str: "".to_owned(),
+            },
+            parse_json_block(&block_json_value).expect("Fail to parse block_json_value !")
+        );
+    }
 }
diff --git a/lib/tools/documents/src/parsers/excluded.rs b/lib/tools/documents/src/parsers/excluded.rs
deleted file mode 100644
index 7c139b59..00000000
--- a/lib/tools/documents/src/parsers/excluded.rs
+++ /dev/null
@@ -1,28 +0,0 @@
-//  Copyright (C) 2018  The Durs Project Developers.
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as
-// published by the Free Software Foundation, either version 3 of the
-// License, or (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-// GNU Affero General Public License for more details.
-//
-// You should have received a copy of the GNU Affero General Public License
-// along with this program.  If not, see <https://www.gnu.org/licenses/>.
-
-use dup_crypto::keys::*;
-use failure::Error;
-
-/// Parse array of revocations json documents into vector of `CompactRevocationDocument`
-pub fn parse_excluded(str_pubkeys: &[&str]) -> Result<Vec<PubKey>, Error> {
-    let mut excluded: Vec<PubKey> = Vec::with_capacity(str_pubkeys.len());
-    for str_pubkey in str_pubkeys {
-        excluded.push(PubKey::Ed25519(ed25519::PublicKey::from_base58(
-            str_pubkey,
-        )?))
-    }
-    Ok(excluded)
-}
diff --git a/lib/tools/documents/src/parsers/mod.rs b/lib/tools/documents/src/parsers/mod.rs
index 127f6eda..746ac63d 100644
--- a/lib/tools/documents/src/parsers/mod.rs
+++ b/lib/tools/documents/src/parsers/mod.rs
@@ -19,9 +19,6 @@ pub mod blocks;
 /// Parsers for certifications
 pub mod certifications;
 
-/// Parsers for exclusions
-pub mod excluded;
-
 /// Parsers for identities
 pub mod identities;
 
@@ -31,18 +28,159 @@ pub mod memberships;
 /// Parsers for revocations
 pub mod revoked;
 
+/// Parsers for transactions
+pub mod transactions;
+
 use crate::*;
+use json_pest_parser::JSONValue;
+use std::collections::HashMap;
 
 #[derive(Debug, Fail)]
-#[fail(display = "Fail to parse JSON Block : {:?}", cause)]
-pub struct ParseBlockError {
+#[fail(display = "Fail to parse JSON value : {:?}", cause)]
+pub struct ParseJsonError {
     pub cause: String,
 }
 
-impl From<BaseConvertionError> for ParseBlockError {
-    fn from(_: BaseConvertionError) -> ParseBlockError {
-        ParseBlockError {
+impl From<BaseConvertionError> for ParseJsonError {
+    fn from(_: BaseConvertionError) -> ParseJsonError {
+        ParseJsonError {
             cause: "base conversion error".to_owned(),
         }
     }
 }
+
+fn get_optional_usize(
+    json_block: &HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<Option<usize>, ParseJsonError> {
+    Ok(match json_block.get(field) {
+        Some(value) => {
+            if !value.is_null() {
+                Some(
+                    value
+                        .to_number()
+                        .ok_or_else(|| ParseJsonError {
+                            cause: format!("Json block {} field must be a number !", field),
+                        })?
+                        .trunc() as usize,
+                )
+            } else {
+                None
+            }
+        }
+        None => None,
+    })
+}
+
+fn get_optional_str<'a>(
+    json_block: &'a HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<Option<&'a str>, ParseJsonError> {
+    Ok(match json_block.get(field) {
+        Some(value) => {
+            if !value.is_null() {
+                Some(value.to_str().ok_or_else(|| ParseJsonError {
+                    cause: format!("Json block {} field must be a string !", field),
+                })?)
+            } else {
+                None
+            }
+        }
+        None => None,
+    })
+}
+
+fn get_number(json_block: &HashMap<&str, JSONValue>, field: &str) -> Result<f64, ParseJsonError> {
+    Ok(json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Json block must have {} field !", field),
+        })?
+        .to_number()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Json block {} field must be a number !", field),
+        })?)
+}
+
+fn get_str<'a>(
+    json_block: &'a HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<&'a str, ParseJsonError> {
+    Ok(json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Json block must have {} field !", field),
+        })?
+        .to_str()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Json block {} field must be a string !", field),
+        })?)
+}
+
+fn get_str_array<'a>(
+    json_block: &'a HashMap<&str, JSONValue>,
+    field: &str,
+) -> Result<Vec<&'a str>, ParseJsonError> {
+    json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Json block must have {} field !", field),
+        })?
+        .to_array()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Json block {} field must be an array !", field),
+        })?
+        .iter()
+        .map(|v| {
+            v.to_str().ok_or_else(|| ParseJsonError {
+                cause: format!("Json block {} field must be an array of string !", field),
+            })
+        })
+        .collect()
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+    use crate::blockstamp::Blockstamp;
+    use crate::documents::transaction::*;
+    use std::str::FromStr;
+
+    pub fn first_g1_tx_doc() -> TransactionDocument {
+        let expected_tx_builder = TransactionDocumentBuilder {
+            currency: &"g1",
+            blockstamp: &Blockstamp::from_string(
+                "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+            )
+            .expect("Fail to parse blockstamp"),
+            locktime: &0,
+            issuers: &vec![PubKey::Ed25519(
+                ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+                    .expect("Fail to parse issuer !"),
+            )],
+            inputs: &vec![TransactionInput::from_str(
+                "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1",
+            )
+            .expect("Fail to parse inputs")],
+            unlocks: &vec![
+                TransactionInputUnlocks::from_str("0:SIG(0)").expect("Fail to parse unlocks")
+            ],
+            outputs: &vec![
+                TransactionOutput::from_str(
+                    "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+                )
+                .expect("Fail to parse outputs"),
+                TransactionOutput::from_str(
+                    "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)",
+                )
+                .expect("Fail to parse outputs"),
+            ],
+            comment: "TEST",
+            hash: None,
+        };
+
+        expected_tx_builder.build_with_signature(vec![Sig::Ed25519(
+                ed25519::Signature::from_base64("fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw==").expect("Fail to parse sig !")
+            )])
+    }
+}
diff --git a/lib/tools/documents/src/parsers/transactions.rs b/lib/tools/documents/src/parsers/transactions.rs
new file mode 100644
index 00000000..c0c8916a
--- /dev/null
+++ b/lib/tools/documents/src/parsers/transactions.rs
@@ -0,0 +1,120 @@
+//  Copyright (C) 2018  The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program.  If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::transaction::*;
+use crate::parsers::*;
+use crate::DocumentBuilder;
+use dup_crypto::keys::*;
+use failure::Error;
+use std::str::FromStr;
+
+#[derive(Debug, Fail, Copy, Clone)]
+pub enum ParseTxError {
+    #[fail(display = "Fail to parse transaction : wrong format !")]
+    WrongFormat,
+}
+
+/// Parse transaction from json value
+pub fn parse_json_transaction(json_tx: &JSONValue) -> Result<TransactionDocument, Error> {
+    if !json_tx.is_object() {
+        return Err(ParseJsonError {
+            cause: "Json transaction must be an object !".to_owned(),
+        }
+        .into());
+    }
+
+    let json_tx = json_tx.to_object().expect("safe unwrap");
+
+    let tx_doc_builder = TransactionDocumentBuilder {
+        currency: get_str(json_tx, "currency")?,
+        blockstamp: &Blockstamp::from_string(get_str(json_tx, "blockstamp")?)?,
+        locktime: &(get_number(json_tx, "locktime")?.trunc() as u64),
+        issuers: &get_str_array(json_tx, "issuers")?
+            .iter()
+            .map(|p| ed25519::PublicKey::from_base58(p))
+            .map(|p| p.map(PubKey::Ed25519))
+            .collect::<Result<Vec<PubKey>, BaseConvertionError>>()?,
+        inputs: &get_str_array(json_tx, "inputs")?
+            .iter()
+            .map(|i| TransactionInput::from_str(i))
+            .collect::<Result<Vec<TransactionInput>, TextDocumentParseError>>()?,
+        unlocks: &get_str_array(json_tx, "unlocks")?
+            .iter()
+            .map(|i| TransactionInputUnlocks::from_str(i))
+            .collect::<Result<Vec<TransactionInputUnlocks>, TextDocumentParseError>>()?,
+        outputs: &get_str_array(json_tx, "outputs")?
+            .iter()
+            .map(|i| TransactionOutput::from_str(i))
+            .collect::<Result<Vec<TransactionOutput>, TextDocumentParseError>>()?,
+        comment: get_str(json_tx, "comment")?,
+        hash: if let Some(hash_str) = get_optional_str(json_tx, "hash")? {
+            Some(Hash::from_hex(hash_str)?)
+        } else {
+            None
+        },
+    };
+
+    Ok(tx_doc_builder.build_with_signature(
+        get_str_array(json_tx, "signatures")?
+            .iter()
+            .map(|p| ed25519::Signature::from_base64(p))
+            .map(|p| p.map(Sig::Ed25519))
+            .collect::<Result<Vec<Sig>, BaseConvertionError>>()?,
+    ))
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    fn parse_json_g1_tx() {
+        let tx_json_str = r#"{
+     "version": 10,
+     "currency": "g1",
+     "locktime": 0,
+     "blockstamp": "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+     "blockstampTime": 1488990016,
+     "issuers": [
+      "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ"
+     ],
+     "inputs": [
+      "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1"
+     ],
+     "outputs": [
+      "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+      "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)"
+     ],
+     "unlocks": [
+      "0:SIG(0)"
+     ],
+     "signatures": [
+      "fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw=="
+     ],
+     "comment": "TEST",
+     "block_number": 0,
+     "time": 0
+    }"#;
+
+        let tx_json_value =
+            json_pest_parser::parse_json_string(tx_json_str).expect("Fail to parse json tx !");
+
+        assert_eq!(
+            crate::parsers::tests::first_g1_tx_doc(),
+            parse_json_transaction(&tx_json_value).expect("Fail to parse tx_json_value !")
+        );
+    }
+
+}
-- 
GitLab


From d146c31640209b2d58cd6e4400d1381ddc44db87 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Mon, 21 Jan 2019 23:15:16 +0100
Subject: [PATCH 14/26] [ref] network: remove useless import

---
 lib/core/network/documents.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/lib/core/network/documents.rs b/lib/core/network/documents.rs
index cda76e59..4189d117 100644
--- a/lib/core/network/documents.rs
+++ b/lib/core/network/documents.rs
@@ -21,7 +21,6 @@ use dubp_documents::documents::identity::IdentityDocument;
 use dubp_documents::documents::membership::MembershipDocument;
 use dubp_documents::documents::revocation::RevocationDocument;
 use dubp_documents::documents::transaction::TransactionDocument;
-use serde_json;
 
 #[derive(Debug, Clone)]
 /// Network Document
-- 
GitLab


From 756dac4d76f208ca67c9447e8d320b0856ffba2b Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 26 Jan 2019 17:17:36 +0100
Subject: [PATCH 15/26] [ref] documents: move json parse tools in crate
 json-pest-parser

---
 lib/tools/documents/src/parsers/blocks.rs     |   3 +-
 lib/tools/documents/src/parsers/identities.rs |   2 +-
 .../documents/src/parsers/memberships.rs      |   2 +-
 lib/tools/documents/src/parsers/mod.rs        | 110 +-----------------
 .../documents/src/parsers/transactions.rs     |   6 +-
 lib/tools/json-pest-parser/src/lib.rs         | 100 ++++++++++++++++
 6 files changed, 108 insertions(+), 115 deletions(-)

diff --git a/lib/tools/documents/src/parsers/blocks.rs b/lib/tools/documents/src/parsers/blocks.rs
index 86775952..65ba771f 100644
--- a/lib/tools/documents/src/parsers/blocks.rs
+++ b/lib/tools/documents/src/parsers/blocks.rs
@@ -15,12 +15,11 @@
 
 use crate::documents::block::BlockDocument;
 use crate::documents::membership::MembershipType;
-use crate::parsers::*;
 use crate::*;
 use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use failure::Error;
-use json_pest_parser::JSONValue;
+use json_pest_parser::*;
 
 pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error> {
     if !json_block.is_object() {
diff --git a/lib/tools/documents/src/parsers/identities.rs b/lib/tools/documents/src/parsers/identities.rs
index 0c66a678..ab583f85 100644
--- a/lib/tools/documents/src/parsers/identities.rs
+++ b/lib/tools/documents/src/parsers/identities.rs
@@ -14,7 +14,7 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::documents::identity::*;
-use crate::parsers::*;
+use crate::Blockstamp;
 use crate::DocumentBuilder;
 use dup_crypto::keys::*;
 
diff --git a/lib/tools/documents/src/parsers/memberships.rs b/lib/tools/documents/src/parsers/memberships.rs
index b1687807..7225812b 100644
--- a/lib/tools/documents/src/parsers/memberships.rs
+++ b/lib/tools/documents/src/parsers/memberships.rs
@@ -14,7 +14,7 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::documents::membership::*;
-use crate::parsers::*;
+use crate::Blockstamp;
 use crate::DocumentBuilder;
 use dup_crypto::keys::*;
 use failure::Error;
diff --git a/lib/tools/documents/src/parsers/mod.rs b/lib/tools/documents/src/parsers/mod.rs
index 746ac63d..39c41056 100644
--- a/lib/tools/documents/src/parsers/mod.rs
+++ b/lib/tools/documents/src/parsers/mod.rs
@@ -31,119 +31,11 @@ pub mod revoked;
 /// Parsers for transactions
 pub mod transactions;
 
-use crate::*;
-use json_pest_parser::JSONValue;
-use std::collections::HashMap;
-
-#[derive(Debug, Fail)]
-#[fail(display = "Fail to parse JSON value : {:?}", cause)]
-pub struct ParseJsonError {
-    pub cause: String,
-}
-
-impl From<BaseConvertionError> for ParseJsonError {
-    fn from(_: BaseConvertionError) -> ParseJsonError {
-        ParseJsonError {
-            cause: "base conversion error".to_owned(),
-        }
-    }
-}
-
-fn get_optional_usize(
-    json_block: &HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<Option<usize>, ParseJsonError> {
-    Ok(match json_block.get(field) {
-        Some(value) => {
-            if !value.is_null() {
-                Some(
-                    value
-                        .to_number()
-                        .ok_or_else(|| ParseJsonError {
-                            cause: format!("Json block {} field must be a number !", field),
-                        })?
-                        .trunc() as usize,
-                )
-            } else {
-                None
-            }
-        }
-        None => None,
-    })
-}
-
-fn get_optional_str<'a>(
-    json_block: &'a HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<Option<&'a str>, ParseJsonError> {
-    Ok(match json_block.get(field) {
-        Some(value) => {
-            if !value.is_null() {
-                Some(value.to_str().ok_or_else(|| ParseJsonError {
-                    cause: format!("Json block {} field must be a string !", field),
-                })?)
-            } else {
-                None
-            }
-        }
-        None => None,
-    })
-}
-
-fn get_number(json_block: &HashMap<&str, JSONValue>, field: &str) -> Result<f64, ParseJsonError> {
-    Ok(json_block
-        .get(field)
-        .ok_or_else(|| ParseJsonError {
-            cause: format!("Json block must have {} field !", field),
-        })?
-        .to_number()
-        .ok_or_else(|| ParseJsonError {
-            cause: format!("Json block {} field must be a number !", field),
-        })?)
-}
-
-fn get_str<'a>(
-    json_block: &'a HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<&'a str, ParseJsonError> {
-    Ok(json_block
-        .get(field)
-        .ok_or_else(|| ParseJsonError {
-            cause: format!("Json block must have {} field !", field),
-        })?
-        .to_str()
-        .ok_or_else(|| ParseJsonError {
-            cause: format!("Json block {} field must be a string !", field),
-        })?)
-}
-
-fn get_str_array<'a>(
-    json_block: &'a HashMap<&str, JSONValue>,
-    field: &str,
-) -> Result<Vec<&'a str>, ParseJsonError> {
-    json_block
-        .get(field)
-        .ok_or_else(|| ParseJsonError {
-            cause: format!("Json block must have {} field !", field),
-        })?
-        .to_array()
-        .ok_or_else(|| ParseJsonError {
-            cause: format!("Json block {} field must be an array !", field),
-        })?
-        .iter()
-        .map(|v| {
-            v.to_str().ok_or_else(|| ParseJsonError {
-                cause: format!("Json block {} field must be an array of string !", field),
-            })
-        })
-        .collect()
-}
-
 #[cfg(test)]
 mod tests {
-    use super::*;
     use crate::blockstamp::Blockstamp;
     use crate::documents::transaction::*;
+    use crate::*;
     use std::str::FromStr;
 
     pub fn first_g1_tx_doc() -> TransactionDocument {
diff --git a/lib/tools/documents/src/parsers/transactions.rs b/lib/tools/documents/src/parsers/transactions.rs
index c0c8916a..c70d79eb 100644
--- a/lib/tools/documents/src/parsers/transactions.rs
+++ b/lib/tools/documents/src/parsers/transactions.rs
@@ -14,10 +14,12 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::documents::transaction::*;
-use crate::parsers::*;
-use crate::DocumentBuilder;
+use crate::TextDocumentParseError;
+use crate::*;
+use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use failure::Error;
+use json_pest_parser::*;
 use std::str::FromStr;
 
 #[derive(Debug, Fail, Copy, Clone)]
diff --git a/lib/tools/json-pest-parser/src/lib.rs b/lib/tools/json-pest-parser/src/lib.rs
index 53689544..fcadaea4 100644
--- a/lib/tools/json-pest-parser/src/lib.rs
+++ b/lib/tools/json-pest-parser/src/lib.rs
@@ -38,6 +38,7 @@ extern crate maplit;
 #[macro_use]
 extern crate pretty_assertions;
 
+use failure::Error;
 use pest::iterators::Pair;
 use pest::Parser;
 use std::collections::HashMap;
@@ -216,6 +217,105 @@ fn parse_value(pair: Pair<Rule>) -> JSONValue {
     }
 }
 
+pub fn get_optional_usize<S: std::hash::BuildHasher>(
+    json_block: &HashMap<&str, JSONValue, S>,
+    field: &str,
+) -> Result<Option<usize>, Error> {
+    Ok(match json_block.get(field) {
+        Some(value) => {
+            if !value.is_null() {
+                Some(
+                    value
+                        .to_number()
+                        .ok_or_else(|| ParseJsonError {
+                            cause: format!(
+                                "Fail to parse json : field '{}' must be a number !",
+                                field
+                            ),
+                        })?
+                        .trunc() as usize,
+                )
+            } else {
+                None
+            }
+        }
+        None => None,
+    })
+}
+
+pub fn get_optional_str<'a, S: std::hash::BuildHasher>(
+    json_block: &'a HashMap<&str, JSONValue, S>,
+    field: &str,
+) -> Result<Option<&'a str>, Error> {
+    Ok(match json_block.get(field) {
+        Some(value) => {
+            if !value.is_null() {
+                Some(value.to_str().ok_or_else(|| ParseJsonError {
+                    cause: format!("Fail to parse json : field '{}' must be a string !", field),
+                })?)
+            } else {
+                None
+            }
+        }
+        None => None,
+    })
+}
+
+pub fn get_number<S: std::hash::BuildHasher>(
+    json_block: &HashMap<&str, JSONValue, S>,
+    field: &str,
+) -> Result<f64, Error> {
+    Ok(json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must exist !", field),
+        })?
+        .to_number()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must be a number !", field),
+        })?)
+}
+
+pub fn get_str<'a, S: std::hash::BuildHasher>(
+    json_block: &'a HashMap<&str, JSONValue, S>,
+    field: &str,
+) -> Result<&'a str, Error> {
+    Ok(json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must exist !", field),
+        })?
+        .to_str()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must be a string !", field),
+        })?)
+}
+
+pub fn get_str_array<'a, S: std::hash::BuildHasher>(
+    json_block: &'a HashMap<&str, JSONValue, S>,
+    field: &str,
+) -> Result<Vec<&'a str>, ParseJsonError> {
+    json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must exist !", field),
+        })?
+        .to_array()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must be an array !", field),
+        })?
+        .iter()
+        .map(|v| {
+            v.to_str().ok_or_else(|| ParseJsonError {
+                cause: format!(
+                    "Fail to parse json : field '{}' must be an array of string !",
+                    field
+                ),
+            })
+        })
+        .collect()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
-- 
GitLab


From cf8c1410e3feaf253d90d228ea541a87805ae91b Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 19:46:18 +0100
Subject: [PATCH 16/26] [ref] json-pest-parser: use generic hasher

---
 lib/tools/json-pest-parser/Cargo.toml |  3 +-
 lib/tools/json-pest-parser/src/lib.rs | 56 ++++++++++++++-------------
 2 files changed, 30 insertions(+), 29 deletions(-)

diff --git a/lib/tools/json-pest-parser/Cargo.toml b/lib/tools/json-pest-parser/Cargo.toml
index 5dc20657..317f0289 100644
--- a/lib/tools/json-pest-parser/Cargo.toml
+++ b/lib/tools/json-pest-parser/Cargo.toml
@@ -18,5 +18,4 @@ pest = "2.1.0"
 pest_derive = "2.1.0"
 
 [dev-dependencies]
-pretty_assertions = "0.5.1"
-maplit = "1.0.1"
\ No newline at end of file
+pretty_assertions = "0.5.1"
\ No newline at end of file
diff --git a/lib/tools/json-pest-parser/src/lib.rs b/lib/tools/json-pest-parser/src/lib.rs
index fcadaea4..ab83b778 100644
--- a/lib/tools/json-pest-parser/src/lib.rs
+++ b/lib/tools/json-pest-parser/src/lib.rs
@@ -30,10 +30,6 @@ extern crate failure;
 #[macro_use]
 extern crate pest_derive;
 
-#[cfg(test)]
-#[macro_use]
-extern crate maplit;
-
 #[cfg(test)]
 #[macro_use]
 extern crate pretty_assertions;
@@ -48,16 +44,18 @@ use std::collections::HashMap;
 struct JSONParser;
 
 #[derive(Debug, PartialEq)]
-pub enum JSONValue<'a> {
-    Object(HashMap<&'a str, JSONValue<'a>>),
-    Array(Vec<JSONValue<'a>>),
+pub enum JSONValue<'a, S: std::hash::BuildHasher> {
+    Object(HashMap<&'a str, JSONValue<'a, S>, S>),
+    Array(Vec<JSONValue<'a, S>>),
     String(&'a str),
     Number(f64),
     Boolean(bool),
     Null,
 }
 
-impl<'a> JSONValue<'a> {
+type JsonObject<'a, S> = HashMap<&'a str, JSONValue<'a, S>, S>;
+
+impl<'a, S: std::hash::BuildHasher> JSONValue<'a, S> {
     pub fn is_object(&self) -> bool {
         if let JSONValue::Object(_) = self {
             true
@@ -66,7 +64,7 @@ impl<'a> JSONValue<'a> {
         }
     }
 
-    pub fn to_object(&self) -> Option<&HashMap<&'a str, JSONValue<'a>>> {
+    pub fn to_object(&self) -> Option<&HashMap<&'a str, JSONValue<'a, S>, S>> {
         if let JSONValue::Object(object) = self {
             Some(object)
         } else {
@@ -82,7 +80,7 @@ impl<'a> JSONValue<'a> {
         }
     }
 
-    pub fn to_array(&self) -> Option<&Vec<JSONValue<'a>>> {
+    pub fn to_array(&self) -> Option<&Vec<JSONValue<'a, S>>> {
         if let JSONValue::Array(array) = self {
             Some(array)
         } else {
@@ -147,7 +145,7 @@ impl<'a> JSONValue<'a> {
     }
 }
 
-impl<'a> ToString for JSONValue<'a> {
+impl<'a, S: std::hash::BuildHasher> ToString for JSONValue<'a, S> {
     fn to_string(&self) -> String {
         match self {
             JSONValue::Object(o) => {
@@ -175,7 +173,20 @@ pub struct ParseJsonError {
     pub cause: String,
 }
 
-pub fn parse_json_string(source: &str) -> Result<JSONValue, ParseJsonError> {
+pub fn parse_json_string<'a>(
+    source: &'a str,
+) -> Result<
+    JSONValue<'a, std::hash::BuildHasherDefault<std::collections::hash_map::DefaultHasher>>,
+    ParseJsonError,
+> {
+    parse_json_string_with_specific_hasher::<
+        std::hash::BuildHasherDefault<std::collections::hash_map::DefaultHasher>,
+    >(source)
+}
+
+pub fn parse_json_string_with_specific_hasher<S: std::hash::BuildHasher + Default>(
+    source: &str,
+) -> Result<JSONValue<S>, ParseJsonError> {
     match JSONParser::parse(Rule::json, source) {
         Ok(mut pair) => Ok(parse_value(pair.next().unwrap())),
         Err(pest_error) => Err(ParseJsonError {
@@ -184,7 +195,7 @@ pub fn parse_json_string(source: &str) -> Result<JSONValue, ParseJsonError> {
     }
 }
 
-fn parse_value(pair: Pair<Rule>) -> JSONValue {
+fn parse_value<S: std::hash::BuildHasher + Default>(pair: Pair<Rule>) -> JSONValue<S> {
     match pair.as_rule() {
         Rule::object => JSONValue::Object(
             pair.into_inner()
@@ -218,7 +229,7 @@ fn parse_value(pair: Pair<Rule>) -> JSONValue {
 }
 
 pub fn get_optional_usize<S: std::hash::BuildHasher>(
-    json_block: &HashMap<&str, JSONValue, S>,
+    json_block: &HashMap<&str, JSONValue<S>, S>,
     field: &str,
 ) -> Result<Option<usize>, Error> {
     Ok(match json_block.get(field) {
@@ -244,7 +255,7 @@ pub fn get_optional_usize<S: std::hash::BuildHasher>(
 }
 
 pub fn get_optional_str<'a, S: std::hash::BuildHasher>(
-    json_block: &'a HashMap<&str, JSONValue, S>,
+    json_block: &'a HashMap<&str, JSONValue<S>, S>,
     field: &str,
 ) -> Result<Option<&'a str>, Error> {
     Ok(match json_block.get(field) {
@@ -262,7 +273,7 @@ pub fn get_optional_str<'a, S: std::hash::BuildHasher>(
 }
 
 pub fn get_number<S: std::hash::BuildHasher>(
-    json_block: &HashMap<&str, JSONValue, S>,
+    json_block: &HashMap<&str, JSONValue<S>, S>,
     field: &str,
 ) -> Result<f64, Error> {
     Ok(json_block
@@ -277,7 +288,7 @@ pub fn get_number<S: std::hash::BuildHasher>(
 }
 
 pub fn get_str<'a, S: std::hash::BuildHasher>(
-    json_block: &'a HashMap<&str, JSONValue, S>,
+    json_block: &'a HashMap<&str, JSONValue<S>, S>,
     field: &str,
 ) -> Result<&'a str, Error> {
     Ok(json_block
@@ -292,7 +303,7 @@ pub fn get_str<'a, S: std::hash::BuildHasher>(
 }
 
 pub fn get_str_array<'a, S: std::hash::BuildHasher>(
-    json_block: &'a HashMap<&str, JSONValue, S>,
+    json_block: &'a HashMap<&str, JSONValue<S>, S>,
     field: &str,
 ) -> Result<Vec<&'a str>, ParseJsonError> {
     json_block
@@ -333,15 +344,6 @@ mod tests {
 
         let json_value = parse_json_string(json_string).expect("Fail to parse json string !");
 
-        assert_eq!(
-            JSONValue::Object(hashmap![
-                "name" => JSONValue::String("toto"),
-                "age" => JSONValue::Number(25f64),
-                "friends" => JSONValue::Array(vec![JSONValue::String("titi"), JSONValue::String("tata"),])
-            ]),
-            json_value
-        );
-
         assert!(json_value.is_object());
 
         let json_object = json_value.to_object().expect("safe unwrap");
-- 
GitLab


From e90531d0a6195a7efc0a9166b4a55786b7983cc6 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 19:47:17 +0100
Subject: [PATCH 17/26] [feat] json-pest-parser: add tools methods

---
 lib/tools/json-pest-parser/src/lib.rs | 38 +++++++++++++++++++++++++++
 1 file changed, 38 insertions(+)

diff --git a/lib/tools/json-pest-parser/src/lib.rs b/lib/tools/json-pest-parser/src/lib.rs
index ab83b778..0b364451 100644
--- a/lib/tools/json-pest-parser/src/lib.rs
+++ b/lib/tools/json-pest-parser/src/lib.rs
@@ -254,6 +254,22 @@ pub fn get_optional_usize<S: std::hash::BuildHasher>(
     })
 }
 
+pub fn get_optional_str_not_empty<'a, S: std::hash::BuildHasher>(
+    json_block: &'a HashMap<&str, JSONValue<S>, S>,
+    field: &str,
+) -> Result<Option<&'a str>, Error> {
+    let result = get_optional_str(json_block, field);
+    if let Ok(Some(value)) = result {
+        if !value.is_empty() {
+            Ok(Some(value))
+        } else {
+            Ok(None)
+        }
+    } else {
+        result
+    }
+}
+
 pub fn get_optional_str<'a, S: std::hash::BuildHasher>(
     json_block: &'a HashMap<&str, JSONValue<S>, S>,
     field: &str,
@@ -327,6 +343,28 @@ pub fn get_str_array<'a, S: std::hash::BuildHasher>(
         .collect()
 }
 
+pub fn get_object_array<'a, S: std::hash::BuildHasher>(
+    json_block: &'a JsonObject<'a, S>,
+    field: &str,
+) -> Result<Vec<&'a JsonObject<'a, S>>, ParseJsonError> {
+    json_block
+        .get(field)
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must exist !", field),
+        })?
+        .to_array()
+        .ok_or_else(|| ParseJsonError {
+            cause: format!("Fail to parse json : field '{}' must be an array !", field),
+        })?
+        .iter()
+        .map(|v| {
+            v.to_object().ok_or_else(|| ParseJsonError {
+                cause: format!("Fail to parse json : field '{}' must be an object !", field),
+            })
+        })
+        .collect()
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
-- 
GitLab


From 45c1eb3b8e3c9d06dd23d4ea736561569bd26d28 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 19:48:03 +0100
Subject: [PATCH 18/26] [feat] common-tools: add unescape_str

---
 lib/tools/common-tools/src/lib.rs | 30 ++++++++++++++++++++++++++++++
 1 file changed, 30 insertions(+)

diff --git a/lib/tools/common-tools/src/lib.rs b/lib/tools/common-tools/src/lib.rs
index fb446fda..3d894aa3 100644
--- a/lib/tools/common-tools/src/lib.rs
+++ b/lib/tools/common-tools/src/lib.rs
@@ -38,3 +38,33 @@ pub fn fatal_error(msg: &str) {
         panic!(format!("Fatal Error : {}", msg));
     }
 }
+
+/// Unescape backslash
+pub fn unescape_str(source: &str) -> String {
+    let mut previous_char = None;
+    let mut str_result = String::with_capacity(source.len());
+
+    for current_char in source.chars() {
+        if previous_char.is_some() && previous_char.unwrap() == '\\' {
+            match current_char {
+                '\\' => {} // Do nothing
+                _ => str_result.push(current_char),
+            }
+        } else {
+            str_result.push(current_char);
+        }
+        previous_char = Some(current_char);
+    }
+
+    str_result
+}
+
+#[cfg(test)]
+mod tests {
+    use super::*;
+
+    #[test]
+    pub fn test_unescape_str() {
+        assert_eq!("\\".to_owned(), unescape_str("\\\\"));
+    }
+}
-- 
GitLab


From c0906c8dc6c005796fa05ede1be68dbd79dbb252 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 19:52:49 +0100
Subject: [PATCH 19/26] [fix] documents : generate compact text for genesis
 block

---
 lib/tools/documents/src/documents/block.rs | 83 +++++++++++++++++-----
 1 file changed, 65 insertions(+), 18 deletions(-)

diff --git a/lib/tools/documents/src/documents/block.rs b/lib/tools/documents/src/documents/block.rs
index cad636cf..0bef7e10 100644
--- a/lib/tools/documents/src/documents/block.rs
+++ b/lib/tools/documents/src/documents/block.rs
@@ -28,12 +28,14 @@ use crate::documents::transaction::TransactionDocument;
 use crate::documents::*;
 use crate::text_document_traits::*;
 
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Fail)]
 /// Store error in block parameters parsing
 pub enum ParseParamsError {
     /// ParseIntError
+    #[fail(display = "Fail to parse params :ParseIntError !")]
     ParseIntError(::std::num::ParseIntError),
     /// ParseFloatError
+    #[fail(display = "Fail to parse params :ParseFloatError !")]
     ParseFloatError(::std::num::ParseFloatError),
 }
 
@@ -50,7 +52,7 @@ impl From<::std::num::ParseFloatError> for ParseParamsError {
 }
 
 /// Currency parameters
-#[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)]
+#[derive(Debug, Copy, Clone, Deserialize, Serialize, PartialEq)]
 pub struct BlockV10Parameters {
     /// UD target growth rate (see Relative Theorie of Money)
     pub c: f64,
@@ -153,8 +155,38 @@ impl ::std::str::FromStr for BlockV10Parameters {
     }
 }
 
+impl ToString for BlockV10Parameters {
+    fn to_string(&self) -> String {
+        format!(
+            "{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}",
+            self.c,
+            self.dt,
+            self.ud0,
+            self.sig_period,
+            self.sig_stock,
+            self.sig_window,
+            self.sig_validity,
+            self.sig_qty,
+            self.idty_window,
+            self.ms_window,
+            self.x_percent,
+            self.ms_validity,
+            self.step_max,
+            self.median_time_blocks,
+            self.avg_gen_time,
+            self.dt_diff_eval,
+            self.percent_rot,
+            self.ud_time0,
+            self.ud_reeval_time0,
+            self.dt_reeval,
+        )
+    }
+}
+
+impl Eq for BlockV10Parameters {}
+
 /// Store a transaction document or just its hash.
-#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
 pub enum TxDocOrTxHash {
     /// Transaction document
     TxDoc(Box<TransactionDocument>),
@@ -190,7 +222,7 @@ impl TxDocOrTxHash {
 /// Wrap a Block document.
 ///
 /// Must be created by parsing a text document or using a builder.
-#[derive(Clone, Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
 pub struct BlockDocument {
     /// Version
     pub version: u32,
@@ -255,14 +287,6 @@ pub struct BlockDocument {
     pub inner_hash_and_nonce_str: String,
 }
 
-impl PartialEq for BlockDocument {
-    fn eq(&self, other: &BlockDocument) -> bool {
-        self.hash == other.hash
-    }
-}
-
-impl Eq for BlockDocument {}
-
 impl BlockDocument {
     /// Return previous blockstamp
     pub fn previous_blockstamp(&self) -> Blockstamp {
@@ -409,8 +433,31 @@ impl BlockDocument {
                 dividend_str.push_str("\n");
             }
         }
+        let mut parameters_str = String::from("");
+        if let Some(params) = self.parameters {
+            parameters_str.push_str("Parameters: ");
+            parameters_str.push_str(&params.to_string());
+            parameters_str.push_str("\n");
+        }
+        let mut previous_hash_str = String::from("");
+        if self.number.0 > 0 {
+            previous_hash_str.push_str("PreviousHash: ");
+            previous_hash_str.push_str(&self.previous_hash.to_string());
+            previous_hash_str.push_str("\n");
+        }
+        let mut previous_issuer_str = String::from("");
+        if self.number.0 > 0 {
+            previous_issuer_str.push_str("PreviousIssuer: ");
+            previous_issuer_str.push_str(
+                &self
+                    .previous_issuer
+                    .expect("No genesis block must have previous issuer")
+                    .to_string(),
+            );
+            previous_issuer_str.push_str("\n");
+        }
         format!(
-            "Version: 10
+            "Version: {version}
 Type: Block
 Currency: {currency}
 Number: {block_number}
@@ -422,9 +469,7 @@ Issuer: {issuer}
 IssuersFrame: {issuers_frame}
 IssuersFrameVar: {issuers_frame_var}
 DifferentIssuersCount: {issuers_count}
-PreviousHash: {previous_hash}
-PreviousIssuer: {previous_issuer}
-MembersCount: {members_count}
+{parameters}{previous_hash}{previous_issuer}MembersCount: {members_count}
 Identities:{identities}
 Joiners:{joiners}
 Actives:{actives}
@@ -434,6 +479,7 @@ Excluded:{excluded}
 Certifications:{certifications}
 Transactions:{transactions}
 ",
+            version = self.version,
             currency = self.currency,
             block_number = self.number,
             pow_min = self.pow_min,
@@ -445,8 +491,9 @@ Transactions:{transactions}
             issuers_frame = self.issuers_frame,
             issuers_frame_var = self.issuers_frame_var,
             issuers_count = self.issuers_count,
-            previous_hash = self.previous_hash,
-            previous_issuer = self.previous_issuer.unwrap(),
+            parameters = parameters_str,
+            previous_hash = previous_hash_str,
+            previous_issuer = previous_issuer_str,
             members_count = self.members_count,
             identities = identities_str,
             joiners = joiners_str,
-- 
GitLab


From caef3870424a60850f41c2073199d8051b887474 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 19:57:37 +0100
Subject: [PATCH 20/26] [fix] documents: json block parser: parse tx & params +
 impl Eq to Block

---
 lib/tools/documents/Cargo.toml                |  1 +
 lib/tools/documents/src/documents/mod.rs      |  2 +-
 lib/tools/documents/src/lib.rs                |  2 +-
 lib/tools/documents/src/parsers/blocks.rs     | 59 ++++++++++++++-----
 lib/tools/documents/src/parsers/mod.rs        |  2 +
 .../documents/src/parsers/transactions.rs     |  7 ++-
 .../documents/src/text_document_traits.rs     |  2 +-
 7 files changed, 56 insertions(+), 19 deletions(-)

diff --git a/lib/tools/documents/Cargo.toml b/lib/tools/documents/Cargo.toml
index f695d275..e2369aaa 100644
--- a/lib/tools/documents/Cargo.toml
+++ b/lib/tools/documents/Cargo.toml
@@ -16,6 +16,7 @@ path = "src/lib.rs"
 base58 = "0.1.*"
 base64 = "0.9.*"
 byteorder = "1.2.3"
+durs-common-tools = { path = "../common-tools" }
 dup-crypto = { path = "../crypto" }
 failure = "0.1.5"
 json-pest-parser = { path = "../json-pest-parser" }
diff --git a/lib/tools/documents/src/documents/mod.rs b/lib/tools/documents/src/documents/mod.rs
index 2e151097..109393d8 100644
--- a/lib/tools/documents/src/documents/mod.rs
+++ b/lib/tools/documents/src/documents/mod.rs
@@ -160,7 +160,7 @@ mod tests {
     use dup_crypto::keys::*;
 
     // simple text document for signature testing
-    #[derive(Debug, Clone)]
+    #[derive(Debug, Clone, PartialEq, Eq)]
     struct PlainTextDocument {
         pub text: &'static str,
         pub issuers: Vec<PubKey>,
diff --git a/lib/tools/documents/src/lib.rs b/lib/tools/documents/src/lib.rs
index 6ca76fd2..909aceb0 100644
--- a/lib/tools/documents/src/lib.rs
+++ b/lib/tools/documents/src/lib.rs
@@ -185,7 +185,7 @@ impl Debug for BlockHash {
 ///
 /// Allow only ed25519 for protocol 10 and many differents
 /// schemes for protocol 11 through a proxy type.
-pub trait Document: Debug + Clone {
+pub trait Document: Debug + Clone + PartialEq + Eq {
     /// Type of the `PublicKey` used by the document.
     type PublicKey: PublicKey;
     /// Data type of the currency code used by the document.
diff --git a/lib/tools/documents/src/parsers/blocks.rs b/lib/tools/documents/src/parsers/blocks.rs
index 65ba771f..b788f38e 100644
--- a/lib/tools/documents/src/parsers/blocks.rs
+++ b/lib/tools/documents/src/parsers/blocks.rs
@@ -13,15 +13,17 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
-use crate::documents::block::BlockDocument;
+use crate::documents::block::{BlockDocument, BlockV10Parameters, TxDocOrTxHash};
 use crate::documents::membership::MembershipType;
+use crate::parsers::DefaultHasher;
 use crate::*;
 use dup_crypto::hashs::Hash;
 use dup_crypto::keys::*;
 use failure::Error;
 use json_pest_parser::*;
+use std::str::FromStr;
 
-pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error> {
+pub fn parse_json_block(json_block: &JSONValue<DefaultHasher>) -> Result<BlockDocument, Error> {
     if !json_block.is_object() {
         return Err(ParseJsonError {
             cause: "Json block must be an object !".to_owned(),
@@ -33,10 +35,12 @@ pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error>
 
     let currency = get_str(json_block, "currency")?;
 
+    let block_number = get_number(json_block, "number")?.trunc() as u32;
+
     Ok(BlockDocument {
         version: get_number(json_block, "version")?.trunc() as u32,
         nonce: get_number(json_block, "nonce")?.trunc() as u64,
-        number: BlockId(get_number(json_block, "number")?.trunc() as u32),
+        number: BlockId(block_number),
         pow_min: get_number(json_block, "powMin")?.trunc() as usize,
         time: get_number(json_block, "time")?.trunc() as u64,
         median_time: get_number(json_block, "medianTime")?.trunc() as u64,
@@ -55,12 +59,24 @@ pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error>
             "signature",
         )?)?)],
         hash: Some(BlockHash(Hash::from_hex(get_str(json_block, "hash")?)?)),
-        parameters: None,
-        previous_hash: Hash::from_hex(get_str(json_block, "previousHash")?)?,
-        previous_issuer: Some(PubKey::Ed25519(ed25519::PublicKey::from_base58(get_str(
-            json_block,
-            "previousIssuer",
-        )?)?)),
+        parameters: if let Some(params) = get_optional_str_not_empty(json_block, "parameters")? {
+            Some(BlockV10Parameters::from_str(params)?)
+        } else {
+            None
+        },
+        previous_hash: if block_number == 0 {
+            Hash::default()
+        } else {
+            Hash::from_hex(get_str(json_block, "previousHash")?)?
+        },
+        previous_issuer: if block_number == 0 {
+            None
+        } else {
+            Some(PubKey::Ed25519(ed25519::PublicKey::from_base58(get_str(
+                json_block,
+                "previousIssuer",
+            )?)?))
+        },
         inner_hash: Some(Hash::from_hex(get_str(json_block, "inner_hash")?)?),
         dividend: get_optional_usize(json_block, "dividend")?,
         identities: crate::parsers::identities::parse_compact_identities(
@@ -93,7 +109,20 @@ pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error>
         certifications: crate::parsers::certifications::parse_certifications_into_compact(
             &get_str_array(json_block, "certifications")?,
         ),
-        transactions: vec![],
+        transactions: json_block
+            .get("transactions")
+            .ok_or_else(|| ParseJsonError {
+                cause: "Fail to parse json block : field 'transactions' must exist !".to_owned(),
+            })?
+            .to_array()
+            .ok_or_else(|| ParseJsonError {
+                cause: "Fail to parse json block : field 'transactions' must be an array !"
+                    .to_owned(),
+            })?
+            .iter()
+            .map(|tx| crate::parsers::transactions::parse_json_transaction(tx))
+            .map(|tx_result| tx_result.map(|tx_doc| TxDocOrTxHash::TxDoc(Box::new(tx_doc))))
+            .collect::<Result<Vec<TxDocOrTxHash>, Error>>()?,
         inner_hash_and_nonce_str: "".to_owned(),
     })
 }
@@ -101,7 +130,6 @@ pub fn parse_json_block(json_block: &JSONValue) -> Result<BlockDocument, Error>
 #[cfg(test)]
 mod tests {
     use super::*;
-    use crate::documents::block::TxDocOrTxHash;
 
     #[test]
     fn parse_empty_json_block() {
@@ -262,8 +290,8 @@ mod tests {
 
         let block_json_value = json_pest_parser::parse_json_string(block_json_str)
             .expect("Fail to parse json block !");
-        assert_eq!(
-            BlockDocument {
+
+        let expected_block = BlockDocument {
                 version: 10,
                 nonce: 10100000033688,
                 number: BlockId(52),
@@ -315,8 +343,11 @@ mod tests {
                 certifications: vec![],
                 transactions: vec![TxDocOrTxHash::TxDoc(Box::new(crate::parsers::tests::first_g1_tx_doc()))],
                 inner_hash_and_nonce_str: "".to_owned(),
-            },
+            };
+        assert_eq!(
+            expected_block,
             parse_json_block(&block_json_value).expect("Fail to parse block_json_value !")
         );
+        assert!(expected_block.verify_inner_hash());
     }
 }
diff --git a/lib/tools/documents/src/parsers/mod.rs b/lib/tools/documents/src/parsers/mod.rs
index 39c41056..6a85ecb9 100644
--- a/lib/tools/documents/src/parsers/mod.rs
+++ b/lib/tools/documents/src/parsers/mod.rs
@@ -31,6 +31,8 @@ pub mod revoked;
 /// Parsers for transactions
 pub mod transactions;
 
+type DefaultHasher = std::hash::BuildHasherDefault<std::collections::hash_map::DefaultHasher>;
+
 #[cfg(test)]
 mod tests {
     use crate::blockstamp::Blockstamp;
diff --git a/lib/tools/documents/src/parsers/transactions.rs b/lib/tools/documents/src/parsers/transactions.rs
index c70d79eb..5ce78570 100644
--- a/lib/tools/documents/src/parsers/transactions.rs
+++ b/lib/tools/documents/src/parsers/transactions.rs
@@ -14,6 +14,7 @@
 // along with this program.  If not, see <https://www.gnu.org/licenses/>.
 
 use crate::documents::transaction::*;
+use crate::parsers::DefaultHasher;
 use crate::TextDocumentParseError;
 use crate::*;
 use dup_crypto::hashs::Hash;
@@ -29,7 +30,9 @@ pub enum ParseTxError {
 }
 
 /// Parse transaction from json value
-pub fn parse_json_transaction(json_tx: &JSONValue) -> Result<TransactionDocument, Error> {
+pub fn parse_json_transaction(
+    json_tx: &JSONValue<DefaultHasher>,
+) -> Result<TransactionDocument, Error> {
     if !json_tx.is_object() {
         return Err(ParseJsonError {
             cause: "Json transaction must be an object !".to_owned(),
@@ -60,7 +63,7 @@ pub fn parse_json_transaction(json_tx: &JSONValue) -> Result<TransactionDocument
             .iter()
             .map(|i| TransactionOutput::from_str(i))
             .collect::<Result<Vec<TransactionOutput>, TextDocumentParseError>>()?,
-        comment: get_str(json_tx, "comment")?,
+        comment: &durs_common_tools::unescape_str(get_str(json_tx, "comment")?),
         hash: if let Some(hash_str) = get_optional_str(json_tx, "hash")? {
             Some(Hash::from_hex(hash_str)?)
         } else {
diff --git a/lib/tools/documents/src/text_document_traits.rs b/lib/tools/documents/src/text_document_traits.rs
index 2829905b..0c4ce914 100644
--- a/lib/tools/documents/src/text_document_traits.rs
+++ b/lib/tools/documents/src/text_document_traits.rs
@@ -18,7 +18,7 @@
 use crate::*;
 use dup_crypto::keys::*;
 
-#[derive(Clone, Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
 /// Contains a document in full or compact format
 pub enum TextDocumentFormat<D: TextDocument> {
     /// Complete format (Allows to check the validity of the signature)
-- 
GitLab


From 0948c988ad3ab1984ebb091eea3863eb11773346 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 20:02:14 +0100
Subject: [PATCH 21/26] [feat] blockchain-dal: more debug log

---
 lib/modules/blockchain/blockchain-dal/writers/dividend.rs    | 4 ++++
 lib/modules/blockchain/blockchain-dal/writers/transaction.rs | 1 +
 2 files changed, 5 insertions(+)

diff --git a/lib/modules/blockchain/blockchain-dal/writers/dividend.rs b/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
index 46ef576b..be5e9c4e 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
@@ -29,6 +29,10 @@ pub fn create_du(
     members: &[PubKey],
     revert: bool,
 ) -> Result<(), DALError> {
+    debug!(
+        "create_du(amount, block_id, members, revert)=({:?}, {}, {:?}, {})",
+        du_amount, du_block_id.0, members, revert
+    );
     // Insert/Remove UD sources in UDsV10DB
     du_db.write(|db| {
         for pubkey in members {
diff --git a/lib/modules/blockchain/blockchain-dal/writers/transaction.rs b/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
index a9e0b7c5..bb7994ab 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
@@ -269,6 +269,7 @@ pub fn apply_and_write_tx(
                 if let SourceIndexV10::UTXO(utxo_index) = source_index {
                     // Get utxo
                     let utxo = db.get(&utxo_index).unwrap_or_else(|| {
+                        debug!("apply_tx=\"{:#?}\"", tx_doc);
                         panic!(
                             "ApplyBLockError : unknow UTXO in inputs : {:?} !",
                             utxo_index
-- 
GitLab


From 789f615add31646aefecb63806e80712277cc7b8 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 20:05:03 +0100
Subject: [PATCH 22/26] [fix] conf: allow AlreadyExist err when create
 blockchain dir

---
 lib/core/conf/Cargo.toml | 3 ++-
 lib/core/conf/lib.rs     | 7 ++++++-
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/lib/core/conf/Cargo.toml b/lib/core/conf/Cargo.toml
index 6dabfbe2..a700e1c4 100644
--- a/lib/core/conf/Cargo.toml
+++ b/lib/core/conf/Cargo.toml
@@ -17,8 +17,9 @@ serde_json = "1.0.*"
 dirs = "1.0.2"
 dup-crypto = { path = "../../tools/crypto" }
 dubp-documents= { path = "../../tools/documents" }
-rpassword = "1.0.0"
 duniter-module = { path = "../module" }
+durs-common-tools = { path = "../../tools/common-tools" }
+rpassword = "1.0.0"
 
 [features]
 # Treat warnings as a build error.
diff --git a/lib/core/conf/lib.rs b/lib/core/conf/lib.rs
index 4df1a506..4463a29d 100644
--- a/lib/core/conf/lib.rs
+++ b/lib/core/conf/lib.rs
@@ -38,6 +38,7 @@ pub mod keys;
 use dubp_documents::CurrencyName;
 use duniter_module::{DuniterConf, ModuleName, RequiredKeys, RequiredKeysContent};
 use dup_crypto::keys::*;
+use durs_common_tools::fatal_error;
 use rand::Rng;
 use serde::ser::{Serialize, SerializeStruct, Serializer};
 use std::collections::HashSet;
@@ -477,7 +478,11 @@ pub fn get_blockchain_db_path(profile: &str, currency: &CurrencyName) -> PathBuf
     let mut db_path = datas_path(profile, &currency);
     db_path.push("blockchain/");
     if !db_path.as_path().exists() {
-        fs::create_dir(db_path.as_path()).expect("Impossible to create blockchain dir !");
+        if let Err(io_error) = fs::create_dir(db_path.as_path()) {
+            if io_error.kind() != std::io::ErrorKind::AlreadyExists {
+                fatal_error("Impossible to create blockchain dir !");
+            }
+        }
     }
     db_path
 }
-- 
GitLab


From 88a6213a68252484539c9c3cd3e5530c1bc6d726 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 20:05:54 +0100
Subject: [PATCH 23/26] [fix] network: sync cli: remove duplicate -c option &
 hide unsafe option

---
 lib/core/network/cli/sync.rs | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/lib/core/network/cli/sync.rs b/lib/core/network/cli/sync.rs
index bc8f983a..ef49e0fd 100644
--- a/lib/core/network/cli/sync.rs
+++ b/lib/core/network/cli/sync.rs
@@ -36,10 +36,10 @@ pub struct SyncOpt {
     #[structopt(short = "e", long = "end")]
     pub end: Option<u32>,
     /// cautious mode (check all protocol rules, very slow)
-    #[structopt(short = "c", long = "cautious")]
+    #[structopt(long = "cautious")]
     pub cautious_mode: bool,
     /// unsafe mode (not check blocks inner hashs, very dangerous)
-    #[structopt(short = "u", long = "unsafe")]
+    #[structopt(short = "u", long = "unsafe", hidden = true)]
     pub unsafe_mode: bool,
 }
 
-- 
GitLab


From 06a4ce5d5cca5af923e14b8090da0f535d081429 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 20:11:55 +0100
Subject: [PATCH 24/26] [fix] blockchain: sync with end opt must stop at
 number entered

---
 .../sync/download/json_reader_worker.rs          |  8 ++++++--
 lib/modules/blockchain/blockchain/sync/mod.rs    | 16 +++++++++++++---
 2 files changed, 19 insertions(+), 5 deletions(-)

diff --git a/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
index 8ee40951..901cce88 100644
--- a/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
+++ b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
@@ -82,7 +82,9 @@ pub fn json_reader_worker(
             fatal_error("Last chunk is empty !");
         }
 
-        let last_block = &last_chunk_blocks[last_chunk_blocks.len() - 1];
+        let last_block = last_chunk_blocks
+            .get(max_block_id as usize % *crate::constants::CHUNK_SIZE)
+            .expect("safe unwrap because not empty");
 
         // Send TargetBlockcstamp
         sender_sync_thread
@@ -134,7 +136,9 @@ pub fn json_reader_worker(
                 for block in blocks {
                     // Verify if the block number is within the expected interval
                     let block_id = block.blockstamp().id;
-                    if block_id > current_blockstamp.id && block_id.0 <= max_block_id {
+                    if (block_id > current_blockstamp.id && block_id.0 <= max_block_id)
+                        || (block_id.0 == 0 && current_blockstamp == Blockstamp::default())
+                    {
                         // Send block document
                         sender_sync_thread
                             .send(MessForSyncThread::BlockDocument(Box::new(block)))
diff --git a/lib/modules/blockchain/blockchain/sync/mod.rs b/lib/modules/blockchain/blockchain/sync/mod.rs
index 049084fd..18c1a685 100644
--- a/lib/modules/blockchain/blockchain/sync/mod.rs
+++ b/lib/modules/blockchain/blockchain/sync/mod.rs
@@ -172,13 +172,23 @@ pub fn sync<DC: DuniterConf>(
     // Start sync
     let sync_start_time = SystemTime::now();
 
-    // Createprogess bar
+    // Count number of blocks and chunks
     let count_blocks = target_blockstamp.id.0 + 1 - current_blockstamp.id.0;
     let count_chunks = if count_blocks % 250 > 0 {
         (count_blocks / 250) + 1
     } else {
         count_blocks / 250
     };
+    println!(
+        "Sync from #{} to #{} :",
+        current_blockstamp.id.0, target_blockstamp.id.0
+    );
+    info!(
+        "Sync from #{} to #{} :",
+        current_blockstamp.id.0, target_blockstamp.id.0
+    );
+
+    // Create progress bar
     let mut apply_pb = ProgressBar::new(count_chunks.into());
     apply_pb.format("╢▌▌░╟");
 
@@ -410,13 +420,13 @@ pub fn sync<DC: DuniterConf>(
     let sync_duration = SystemTime::now().duration_since(sync_start_time).unwrap();
     println!(
         "Sync {} blocks in {}.{:03} seconds.",
-        current_blockstamp.id.0 + 1,
+        count_blocks,
         sync_duration.as_secs(),
         sync_duration.subsec_millis(),
     );
     info!(
         "Sync {} blocks in {}.{:03} seconds.",
-        current_blockstamp.id.0 + 1,
+        count_blocks,
         sync_duration.as_secs(),
         sync_duration.subsec_millis(),
     );
-- 
GitLab


From bd898a0edc0d284e2516f8df537f1525427d01b9 Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 20:13:29 +0100
Subject: [PATCH 25/26] [ref] blockchain: improve code quality (reduce clone)

---
 .../blockchain/apply_valid_block.rs           | 26 ++---
 .../blockchain/check_and_apply_block.rs       | 99 +++++++------------
 lib/modules/blockchain/blockchain/lib.rs      | 75 +++++++++-----
 .../sync/download/json_reader_worker.rs       |  9 +-
 lib/modules/blockchain/blockchain/sync/mod.rs | 33 +++----
 5 files changed, 115 insertions(+), 127 deletions(-)

diff --git a/lib/modules/blockchain/blockchain/apply_valid_block.rs b/lib/modules/blockchain/blockchain/apply_valid_block.rs
index 5242e879..a4e4683b 100644
--- a/lib/modules/blockchain/blockchain/apply_valid_block.rs
+++ b/lib/modules/blockchain/blockchain/apply_valid_block.rs
@@ -42,7 +42,7 @@ pub enum ApplyValidBlockError {
 }
 
 pub fn apply_valid_block<W: WebOfTrust>(
-    block: &BlockDocument,
+    mut block: BlockDocument,
     wot_index: &mut HashMap<PubKey, NodeId>,
     wot_db: &BinDB<W>,
     expire_certs: &HashMap<(NodeId, NodeId), BlockId>,
@@ -50,17 +50,17 @@ pub fn apply_valid_block<W: WebOfTrust>(
 ) -> Result<ValidBlockApplyReqs, ApplyValidBlockError> {
     debug!(
         "BlockchainModule : apply_valid_block({})",
-        block.blockstamp()
+        block.blockstamp(),
     );
     let mut wot_dbs_requests = Vec::new();
     let mut currency_dbs_requests = Vec::new();
     let current_blockstamp = block.blockstamp();
     let mut identities = HashMap::with_capacity(block.identities.len());
-    for identity in block.identities.clone() {
+    for identity in &block.identities {
         identities.insert(identity.issuers()[0], identity);
     }
-    for joiner in block.joiners.clone() {
-        let pubkey = joiner.clone().issuers()[0];
+    for joiner in &block.joiners {
+        let pubkey = joiner.issuers()[0];
         if let Some(idty_doc) = identities.get(&pubkey) {
             // Newcomer
             let wot_id = NodeId(
@@ -78,7 +78,7 @@ pub fn apply_valid_block<W: WebOfTrust>(
                 wot_id,
                 current_blockstamp,
                 block.median_time,
-                Box::new(idty_doc.clone()),
+                Box::new((*idty_doc).clone()),
                 joiner.blockstamp().id,
             ));
         } else {
@@ -97,7 +97,7 @@ pub fn apply_valid_block<W: WebOfTrust>(
             ));
         }
     }
-    for active in block.actives.clone() {
+    for active in &block.actives {
         let pubkey = active.issuers()[0];
         if !identities.contains_key(&pubkey) {
             let wot_id = wot_index[&pubkey];
@@ -114,7 +114,7 @@ pub fn apply_valid_block<W: WebOfTrust>(
             ));
         }
     }
-    for exclusion in block.excluded.clone() {
+    for exclusion in &block.excluded {
         let wot_id = if let Some(wot_id) = wot_index.get(&exclusion) {
             wot_id
         } else {
@@ -126,11 +126,11 @@ pub fn apply_valid_block<W: WebOfTrust>(
             })
             .expect("Fail to write in WotDB");
         wot_dbs_requests.push(WotsDBsWriteQuery::ExcludeIdentity(
-            exclusion,
+            *exclusion,
             block.blockstamp(),
         ));
     }
-    for revocation in block.revoked.clone() {
+    for revocation in &block.revoked {
         let compact_revoc = revocation.to_compact_document();
         let wot_id = if let Some(wot_id) = wot_index.get(&compact_revoc.issuer) {
             wot_id
@@ -148,7 +148,7 @@ pub fn apply_valid_block<W: WebOfTrust>(
             true,
         ));
     }
-    for certification in block.certifications.clone() {
+    for certification in &block.certifications {
         trace!("stack_up_valid_block: apply cert...");
         let compact_cert = certification.to_compact_document();
         let wot_node_from = wot_index[&compact_cert.issuer];
@@ -210,7 +210,8 @@ pub fn apply_valid_block<W: WebOfTrust>(
             ));
         }
     }
-    for tx in block.transactions.clone() {
+
+    for tx in &block.transactions {
         currency_dbs_requests.push(CurrencyDBsWriteQuery::WriteTx(Box::new(tx.unwrap_doc())));
     }
 
@@ -263,7 +264,6 @@ pub fn apply_valid_block<W: WebOfTrust>(
         );
     }*/
     // Create DALBlock
-    let mut block = block.clone();
     let previous_blockcstamp = block.previous_blockstamp();
     let block_hash = block
         .hash
diff --git a/lib/modules/blockchain/blockchain/check_and_apply_block.rs b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
index 459ef535..93226ab2 100644
--- a/lib/modules/blockchain/blockchain/check_and_apply_block.rs
+++ b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
@@ -55,27 +55,28 @@ impl From<ApplyValidBlockError> for BlockError {
 pub fn check_and_apply_block<W: WebOfTrust>(
     blocks_databases: &BlocksV10DBs,
     certs_db: &BinDB<CertsExpirV10Datas>,
-    block: &Block,
+    block: Block,
     current_blockstamp: &Blockstamp,
     wot_index: &mut HashMap<PubKey, NodeId>,
     wot_db: &BinDB<W>,
     forks_states: &[ForkStatus],
 ) -> Result<ValidBlockApplyReqs, BlockError> {
+    let block_from_network = block.is_from_network();
+    let block_doc: BlockDocument = block.into_doc();
+
     // Get BlockDocument && check if already have block
-    let (block_doc, already_have_block) = match *block {
-        Block::NetworkBlock(block_doc) => {
-            let already_have_block = DALBlock::already_have_block(
-                &blocks_databases.blockchain_db,
-                &blocks_databases.forks_blocks_db,
-                block_doc.blockstamp(),
-            )?;
-            (block_doc, already_have_block)
-        }
-        Block::LocalBlock(block_doc) => (block_doc, true),
+    let already_have_block = if block_from_network {
+        DALBlock::already_have_block(
+            &blocks_databases.blockchain_db,
+            &blocks_databases.forks_blocks_db,
+            block_doc.blockstamp(),
+        )?
+    } else {
+        false
     };
 
     // Verify block hashs
-    verify_block_hashs(block_doc)?;
+    verify_block_hashs(&block_doc)?;
 
     // Check block chainability
     if (block_doc.number.0 == current_blockstamp.id.0 + 1
@@ -92,16 +93,13 @@ pub fn check_and_apply_block<W: WebOfTrust>(
             durs_blockchain_dal::certs::find_expire_certs(certs_db, blocks_expiring)?;
 
         // Try stack up block
-        let mut old_fork_id = None;
-        let block_doc: &BlockDocument = match *block {
-            Block::NetworkBlock(block_doc) => block_doc,
-            Block::LocalBlock(block_doc) => {
-                old_fork_id = durs_blockchain_dal::block::get_fork_id_of_blockstamp(
-                    &blocks_databases.forks_blocks_db,
-                    &block_doc.blockstamp(),
-                )?;
-                block_doc
-            }
+        let old_fork_id = if block_from_network {
+            durs_blockchain_dal::block::get_fork_id_of_blockstamp(
+                &blocks_databases.forks_blocks_db,
+                &block_doc.blockstamp(),
+            )?
+        } else {
+            None
         };
 
         // Verify block validity (check all protocol rule, very long !)
@@ -114,7 +112,7 @@ pub fn check_and_apply_block<W: WebOfTrust>(
         )?;
 
         return Ok(apply_valid_block(
-            &block_doc,
+            block_doc,
             wot_index,
             wot_db,
             &expire_certs,
@@ -151,45 +149,24 @@ pub fn check_and_apply_block<W: WebOfTrust>(
                 }
                 _ => {}
             }
-            match *block {
-                Block::NetworkBlock(block_doc) => {
-                    let dal_block = DALBlock {
-                        fork_id,
-                        isolate,
-                        block: block_doc.clone(),
-                        expire_certs: None,
-                    };
-                    durs_blockchain_dal::writers::block::write(
-                        &blocks_databases.blockchain_db,
-                        &blocks_databases.forks_db,
-                        &blocks_databases.forks_blocks_db,
-                        &dal_block,
-                        None,
-                        false,
-                        false,
-                    )
-                    .expect("durs_blockchain_dal::writers::block::write() : DALError")
-                }
-                Block::LocalBlock(block_doc) => {
-                    let old_fork_id = None;
-                    let dal_block = DALBlock {
-                        fork_id,
-                        isolate,
-                        block: block_doc.clone(),
-                        expire_certs: None,
-                    };
-                    durs_blockchain_dal::writers::block::write(
-                        &blocks_databases.blockchain_db,
-                        &blocks_databases.forks_db,
-                        &blocks_databases.forks_blocks_db,
-                        &dal_block,
-                        old_fork_id,
-                        false,
-                        false,
-                    )
-                    .expect("durs_blockchain_dal::writers::block::write() : DALError")
-                }
+
+            let dal_block = DALBlock {
+                fork_id,
+                isolate,
+                block: block_doc.clone(),
+                expire_certs: None,
             };
+
+            durs_blockchain_dal::writers::block::write(
+                &blocks_databases.blockchain_db,
+                &blocks_databases.forks_db,
+                &blocks_databases.forks_blocks_db,
+                &dal_block,
+                None,
+                false,
+                false,
+            )
+            .expect("durs_blockchain_dal::writers::block::write() : DALError")
         } else {
             return Err(BlockError::NoForkAvailable());
         }
diff --git a/lib/modules/blockchain/blockchain/lib.rs b/lib/modules/blockchain/blockchain/lib.rs
index 53a5a505..65bdad2c 100644
--- a/lib/modules/blockchain/blockchain/lib.rs
+++ b/lib/modules/blockchain/blockchain/lib.rs
@@ -16,7 +16,7 @@
 //! Module managing the Duniter blockchain.
 
 #![cfg_attr(feature = "strict", deny(warnings))]
-//#![cfg_attr(feature = "cargo-clippy", allow(duration_subsec))]
+#![allow(clippy::large_enum_variant)]
 #![deny(
     missing_docs,
     missing_debug_implementations,
@@ -113,14 +113,28 @@ pub struct BlockchainModule {
 
 #[derive(Debug, Clone)]
 /// Block
-pub enum Block<'a> {
+pub enum Block {
     /// Block coming from Network
-    NetworkBlock(&'a BlockDocument),
+    NetworkBlock(BlockDocument),
     /// Block coming from local database
-    LocalBlock(&'a BlockDocument),
+    LocalBlock(BlockDocument),
 }
 
-impl<'a> Block<'a> {
+impl Block {
+    /// Into block document
+    pub fn into_doc(self) -> BlockDocument {
+        match self {
+            Block::NetworkBlock(block) => block,
+            Block::LocalBlock(block) => block,
+        }
+    }
+    /// Get block document ref
+    pub fn get_doc_ref(&self) -> &BlockDocument {
+        match *self {
+            Block::NetworkBlock(ref block) => block,
+            Block::LocalBlock(ref block) => block,
+        }
+    }
     /// Return blockstamp
     pub fn blockstamp(&self) -> Blockstamp {
         match *self {
@@ -128,6 +142,13 @@ impl<'a> Block<'a> {
             Block::LocalBlock(ref block) => block.blockstamp(),
         }
     }
+    /// Is from network ?
+    pub fn is_from_network(&self) -> bool {
+        match *self {
+            Block::NetworkBlock(_) => true,
+            _ => false,
+        }
+    }
 }
 
 #[derive(Debug, Copy, Clone, PartialEq, Eq)]
@@ -233,7 +254,7 @@ impl BlockchainModule {
         if !json_chunks_path.as_path().exists() {
             panic!("Fatal error : duniter json chunks folder don't exist !");
         }
-        sync::sync(
+        sync::local_sync(
             profile,
             conf,
             json_chunks_path,
@@ -346,7 +367,7 @@ impl BlockchainModule {
                 match check_and_apply_block::<W>(
                     &self.blocks_databases,
                     &self.wot_databases.certs_db,
-                    &Block::NetworkBlock(block_doc),
+                    Block::NetworkBlock(block_doc.clone()),
                     &current_blockstamp,
                     wot_index,
                     wot_db,
@@ -426,30 +447,30 @@ impl BlockchainModule {
 
     fn receive_blocks<W: WebOfTrust>(
         &mut self,
-        blocks_in_box: &[Box<Block>],
+        blocks: Vec<Block>,
         current_blockstamp: &Blockstamp,
         wot_index: &mut HashMap<PubKey, NodeId>,
         wot: &BinDB<W>,
     ) -> Blockstamp {
         debug!("BlockchainModule : receive_blocks()");
-        let blocks: Vec<&Block> = blocks_in_box.iter().map(|b| b.deref()).collect();
         let mut current_blockstamp = *current_blockstamp;
         let mut save_blocks_dbs = false;
         let mut save_wots_dbs = false;
         let mut save_currency_dbs = false;
-        for block in blocks {
+        for block in blocks.into_iter() {
+            let blockstamp = block.blockstamp();
             if let Ok(ValidBlockApplyReqs(bc_db_query, wot_dbs_queries, tx_dbs_queries)) =
                 check_and_apply_block::<W>(
                     &self.blocks_databases,
                     &self.wot_databases.certs_db,
-                    &block,
+                    block,
                     &current_blockstamp,
                     wot_index,
                     wot,
                     &self.forks_states,
                 )
             {
-                current_blockstamp = block.blockstamp();
+                current_blockstamp = blockstamp;
                 // Update forks states
                 self.forks_states = durs_blockchain_dal::block::get_forks(
                     &self.blocks_databases.forks_db,
@@ -636,7 +657,7 @@ impl BlockchainModule {
                             DursEvent::MemPoolEvent(ref mempool_event) => {
                                 if let MemPoolEvent::FindNextBlock(next_block_box) = mempool_event {
                                     let new_current_blockstamp = self.receive_blocks(
-                                        &[Box::new(Block::LocalBlock(next_block_box.deref()))],
+                                        vec![Block::LocalBlock(next_block_box.deref().clone())],
                                         &current_blockstamp,
                                         &mut wot_index,
                                         &wot_db,
@@ -698,15 +719,13 @@ impl BlockchainModule {
                                             if let NetworkResponse::Chunk(_, _, ref blocks) =
                                                 *network_response.deref()
                                             {
-                                                let blocks: Vec<Box<Block>> = blocks
+                                                let blocks: Vec<Block> = blocks
                                                     .iter()
-                                                    .map(|b| {
-                                                        Box::new(Block::NetworkBlock(b.deref()))
-                                                    })
+                                                    .map(|b| Block::NetworkBlock(b.deref().clone()))
                                                     .collect();
 
                                                 let new_current_blockstamp = self.receive_blocks(
-                                                    &blocks,
+                                                    blocks,
                                                     &current_blockstamp,
                                                     &mut wot_index,
                                                     &wot_db,
@@ -761,6 +780,10 @@ impl BlockchainModule {
                         let mut find_valid_block = false;
                         for stackable_block in stackable_blocks {
                             debug!("stackable_block({})", stackable_block.block.number);
+
+                            let stackable_block_number = stackable_block.block.number;
+                            let stackable_block_blockstamp = stackable_block.block.blockstamp();
+
                             if let Ok(ValidBlockApplyReqs(
                                 bc_db_query,
                                 wot_dbs_queries,
@@ -768,7 +791,7 @@ impl BlockchainModule {
                             )) = check_and_apply_block(
                                 &self.blocks_databases,
                                 &self.wot_databases.certs_db,
-                                &Block::LocalBlock(&stackable_block.block),
+                                Block::LocalBlock(stackable_block.block),
                                 &current_blockstamp,
                                 &mut wot_index,
                                 &wot_db,
@@ -798,16 +821,13 @@ impl BlockchainModule {
                                 if !tx_dbs_queries.is_empty() {
                                     self.currency_databases.save_dbs(true, true);
                                 }
-                                debug!(
-                                    "success to stackable_block({})",
-                                    stackable_block.block.number
-                                );
+                                debug!("success to stackable_block({})", stackable_block_number);
 
-                                current_blockstamp = stackable_block.block.blockstamp();
+                                current_blockstamp = stackable_block_blockstamp;
                                 find_valid_block = true;
                                 break;
                             } else {
-                                warn!("fail to stackable_block({})", stackable_block.block.number);
+                                warn!("fail to stackable_block({})", stackable_block_number);
                                 // Delete this fork
                                 DALBlock::delete_fork(
                                     &self.blocks_databases.forks_db,
@@ -857,8 +877,9 @@ pub fn verify_block_hashs(block_doc: &BlockDocument) -> Result<(), VerifyBlockHa
         }
     } else {
         warn!("BlockchainModule : Refuse Bloc : invalid inner hash !");
-        debug!(
-            "BlockInnerFormat={}",
+        warn!("BlockDocument=\"{:?}\"", block_doc);
+        warn!(
+            "BlockInnerFormat=\"{}\"",
             block_doc.generate_compact_inner_text()
         );
         Err(VerifyBlockHashsError::InvalidInnerHash())
diff --git a/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
index 901cce88..4d44db56 100644
--- a/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
+++ b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
@@ -28,13 +28,12 @@ use threadpool::ThreadPool;
 /// Json reader worker
 pub fn json_reader_worker(
     pool: &ThreadPool,
-    profile: &str,
+    profile: String,
     sender_sync_thread: mpsc::Sender<MessForSyncThread>,
     json_chunks_path: PathBuf,
     end: Option<u32>,
 ) {
     // Lauch json reader thread
-    let profile_copy = String::from(profile);
     pool.execute(move || {
         let ts_job_begin = SystemTime::now();
 
@@ -96,13 +95,13 @@ pub fn json_reader_worker(
 
         // Get current local blockstamp
         debug!("Get local current blockstamp...");
-        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &last_block.currency);
+        let db_path = duniter_conf::get_blockchain_db_path(&profile, &last_block.currency);
         let blocks_databases = BlocksV10DBs::open(Some(&db_path));
         let current_blockstamp: Blockstamp =
             durs_blockchain_dal::block::get_current_blockstamp(&blocks_databases)
                 .expect("ForksV10DB : RustBreakError !")
                 .unwrap_or_default();
-        debug!("Success to get local current blockstamp.");
+        info!("Local current blockstamp = {}", current_blockstamp);
 
         // Get first chunk number
         let first_chunk_number: usize =
@@ -141,7 +140,7 @@ pub fn json_reader_worker(
                     {
                         // Send block document
                         sender_sync_thread
-                            .send(MessForSyncThread::BlockDocument(Box::new(block)))
+                            .send(MessForSyncThread::BlockDocument(block))
                             .expect("Fatal error : sync_thread unrechable !");
                     }
                 }
diff --git a/lib/modules/blockchain/blockchain/sync/mod.rs b/lib/modules/blockchain/blockchain/sync/mod.rs
index 18c1a685..46641d81 100644
--- a/lib/modules/blockchain/blockchain/sync/mod.rs
+++ b/lib/modules/blockchain/blockchain/sync/mod.rs
@@ -35,13 +35,6 @@ use threadpool::ThreadPool;
 /// Number of sync jobs
 pub static NB_SYNC_JOBS: &'static usize = &4;
 
-/*#[derive(Debug)]
-/// Sync source
-enum SyncSource {
-    Network(String),
-    LocalJsonFiles(PathBuf),
-}*/
-
 #[derive(Debug, Clone, PartialEq, Eq)]
 /// Block header
 pub struct BlockHeader {
@@ -54,7 +47,7 @@ pub struct BlockHeader {
 /// Message for main sync thread
 pub enum MessForSyncThread {
     Target(CurrencyName, Blockstamp),
-    BlockDocument(Box<BlockDocument>),
+    BlockDocument(BlockDocument),
     DownloadFinish(),
     ApplyFinish(),
 }
@@ -68,11 +61,10 @@ pub enum SyncJobsMess {
     End(),
 }
 
-/// Sync
-pub fn sync<DC: DuniterConf>(
+/// Sync from local json files
+pub fn local_sync<DC: DuniterConf>(
     profile: &str,
     conf: &DC,
-    //source: SyncSource,
     json_files_path: PathBuf,
     end: Option<u32>,
     cautious: bool,
@@ -109,17 +101,14 @@ pub fn sync<DC: DuniterConf>(
         panic!("json_files_path must be a directory");
     }
 
-    // Lauch json reader thread
+    // Launch json reader worker
     download::json_reader_worker::json_reader_worker(
         &pool,
-        profile,
+        profile.to_owned(),
         sender_sync_thread.clone(),
         json_files_path,
         end,
     );
-    //}
-    //SyncSource::Network(url) => unimplemented!(),
-    //}
 
     // Get target blockstamp
     let (currency, target_blockstamp) =
@@ -154,13 +143,13 @@ pub fn sync<DC: DuniterConf>(
     debug!("Get local current blockstamp...");
     let mut current_blockstamp: Blockstamp =
         durs_blockchain_dal::block::get_current_blockstamp(&databases)
-            .expect("ForksV10DB : RustBreakError !")
+            .expect("DALError : fail to get current blockstamp !")
             .unwrap_or_default();
     debug!("Success to get local current blockstamp.");
 
     // Node is already synchronized ?
     if target_blockstamp.id.0 < current_blockstamp.id.0 {
-        println!("Your duniter-rs node is already synchronized.");
+        println!("Your durs node is already synchronized.");
         return;
     }
 
@@ -246,7 +235,7 @@ pub fn sync<DC: DuniterConf>(
     let mut all_apply_valid_block_duration = Duration::from_millis(0);
     while let Ok(MessForSyncThread::BlockDocument(block_doc)) = recv_sync_thread.recv() {
         all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
-        let block_doc = block_doc.deref();
+
         // Verify block hashs
         let verif_block_hashs_begin = SystemTime::now();
         if verif_inner_hash {
@@ -289,11 +278,13 @@ pub fn sync<DC: DuniterConf>(
         let expire_certs =
             durs_blockchain_dal::certs::find_expire_certs(&certs_db, blocks_expiring)
                 .expect("find_expire_certs() : DALError");
+        // Get block blockstamp
+        let blockstamp = block_doc.blockstamp();
         // Apply block
         let apply_valid_block_begin = SystemTime::now();
         if let Ok(ValidBlockApplyReqs(block_req, wot_db_reqs, currency_db_reqs)) =
             apply_valid_block::<RustyWebOfTrust>(
-                &block_doc,
+                block_doc,
                 &mut wot_index,
                 &wot_db,
                 &expire_certs,
@@ -303,7 +294,7 @@ pub fn sync<DC: DuniterConf>(
             all_apply_valid_block_duration += SystemTime::now()
                 .duration_since(apply_valid_block_begin)
                 .unwrap();
-            current_blockstamp = block_doc.blockstamp();
+            current_blockstamp = blockstamp;
             debug!("Apply db requests...");
             // Send block request to blocks worker thread
             sender_blocks_thread
-- 
GitLab


From cfbcd843397cfec8b564b9cf4ecbfad14d9a3fde Mon Sep 17 00:00:00 2001
From: librelois <elois@ifee.fr>
Date: Sat, 2 Feb 2019 20:18:25 +0100
Subject: [PATCH 26/26] [build] update Cargo.lock

---
 Cargo.lock | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/Cargo.lock b/Cargo.lock
index 7f4aacbf..978cdfbf 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -249,6 +249,7 @@ dependencies = [
  "base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "dup-crypto 0.5.0",
+ "durs-common-tools 0.1.0",
  "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
  "json-pest-parser 0.1.0",
  "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -267,6 +268,7 @@ dependencies = [
  "dubp-documents 0.11.0",
  "duniter-module 0.1.0-a0.1",
  "dup-crypto 0.5.0",
+ "durs-common-tools 0.1.0",
  "rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
  "rpassword 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -657,7 +659,6 @@ name = "json-pest-parser"
 version = "0.1.0"
 dependencies = [
  "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
-- 
GitLab