diff --git a/blockchain/apply_valid_block.rs b/blockchain/apply_valid_block.rs
index c395d2fcf598f12fbfefc1782dcf6b896ffb23cd..f3b8c713dc3fd2e0eb7867beee976aff9d05aae2 100644
--- a/blockchain/apply_valid_block.rs
+++ b/blockchain/apply_valid_block.rs
@@ -22,10 +22,10 @@ use duniter_documents::blockchain::v10::documents::transaction::{TxAmount, TxBas
 use duniter_documents::blockchain::v10::documents::BlockDocument;
 use duniter_documents::blockchain::Document;
 use duniter_documents::BlockId;
-use duniter_wotb::data::NewLinkResult;
+use duniter_wotb::data::{NewLinkResult, RemLinkResult};
 use duniter_wotb::{NodeId, WebOfTrust};
 use rustbreak::backend::Backend;
-use std::collections::HashMap;
+use std::collections::{HashMap, HashSet};
 use std::fmt::Debug;
 
 #[derive(Debug)]
@@ -166,7 +166,7 @@ pub fn apply_valid_block<W: WebOfTrust, B: Backend + Debug>(
                     ),
                 }
             })
-            .expect("Fail to read WotDB");
+            .expect("Fail to write in WotDB");
         wot_dbs_requests.push(WotsDBsWriteQuery::CreateCert(
             compact_cert.issuer,
             wotb_node_from,
@@ -176,12 +176,23 @@ pub fn apply_valid_block<W: WebOfTrust, B: Backend + Debug>(
         ));
         trace!("stack_up_valid_block: apply cert...success.");
     }
-    for ((source, target), created_block_id) in expire_certs {
-        wot_dbs_requests.push(WotsDBsWriteQuery::ExpireCert(
-            *source,
-            *target,
-            *created_block_id,
-        ));
+    if !expire_certs.is_empty() {
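+        // Push only one ExpireCerts query per created_block_id : all certifications
+        // stored under the same created_block_id expire together.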
+        let mut blocks_already_expire = HashSet::new();
+        for ((source, target), created_block_id) in expire_certs {
+            if !blocks_already_expire.contains(created_block_id) {
+                wot_dbs_requests.push(WotsDBsWriteQuery::ExpireCerts(*created_block_id));
+                blocks_already_expire.insert(*created_block_id);
+            }
+            wot_db
+                .write(|db| {
+                    let result = db.rem_link(*source, *target);
+                    match result {
+                        RemLinkResult::Removed(_) => {}
+                        _ => panic!("Fail to rem_link {}->{} : {:?}", source.0, target.0, result),
+                    }
+                })
+                .expect("Fail to write in WotDB");
+        }
     }
     if let Some(du_amount) = block.dividend {
         if du_amount > 0 {
diff --git a/blockchain/check_and_apply_block.rs b/blockchain/check_and_apply_block.rs
index 2981adefdd099d1a85ca3f6e4c961a17a5310a89..620aad46cabefa51a3549a1e218bbe86bfb0af77 100644
--- a/blockchain/check_and_apply_block.rs
+++ b/blockchain/check_and_apply_block.rs
@@ -92,7 +92,7 @@ pub fn check_and_apply_block<W: WebOfTrust, B: Backend + Debug>(
         // Try stack up block
         let mut old_fork_id = None;
         let block_doc = match *block {
-            Block::NetworkBlock(network_block) => complete_network_block(network_block)?,
+            Block::NetworkBlock(network_block) => complete_network_block(network_block, true)?,
             Block::LocalBlock(block_doc) => {
                 old_fork_id = duniter_dal::block::get_fork_id_of_blockstamp(
                     &blocks_databases.forks_blocks_db,
@@ -142,7 +142,7 @@ pub fn check_and_apply_block<W: WebOfTrust, B: Backend + Debug>(
             match *block {
                 Block::NetworkBlock(network_block) => {
                     // Completed network block
-                    let block_doc = complete_network_block(network_block)?;
+                    let block_doc = complete_network_block(network_block, true)?;
                     let dal_block = DALBlock {
                         fork_id,
                         isolate,
diff --git a/blockchain/dbex.rs b/blockchain/dbex.rs
index 6da2fa3096de5961b992ee7b513c6f573610698d..f7937787d48066042ab78a2dc932b659aadf1139 100644
--- a/blockchain/dbex.rs
+++ b/blockchain/dbex.rs
@@ -90,8 +90,7 @@ pub fn dbex_tx(conf: &DuniterConf, query: &DBExTxQuery) {
                 println!("This address doesn't exist !");
                 return;
             };
-            let address =
-                TransactionOutputConditionGroup::Single(TransactionOutputCondition::Sig(pubkey));
+            let address = UTXOConditionsGroup::Single(TransactionOutputCondition::Sig(pubkey));
             let address_balance = duniter_dal::balance::get_address_balance(
                 &currency_databases.balances_db,
                 &address,
diff --git a/blockchain/lib.rs b/blockchain/lib.rs
index 7fbd4fbab84237134e5908a68efb3cdc21a9412a..3365f3890e93ee3ddae55bfd4041bb1323a8dcd7 100644
--- a/blockchain/lib.rs
+++ b/blockchain/lib.rs
@@ -136,7 +136,7 @@ pub enum CompletedBlockError {
     /// Invalid block inner hash
     InvalidInnerHash(),
     /// Invalid block hash
-    InvalidHash(),
+    InvalidHash(BlockId, Option<BlockHash>, Option<BlockHash>),
     /// Invalid block version
     InvalidVersion(),
 }
@@ -196,7 +196,7 @@ impl BlockchainModule {
         dbex::dbex(conf, req);
     }
     /// Synchronize blockchain from a duniter-ts database
-    pub fn sync_ts(conf: &DuniterConf, ts_profile: &str, cautious: bool) {
+    pub fn sync_ts(conf: &DuniterConf, ts_profile: &str, cautious: bool, verif_inner_hash: bool) {
         // get db_ts_path
         let mut db_ts_path = match env::home_dir() {
             Some(path) => path,
@@ -208,7 +208,7 @@ impl BlockchainModule {
         if !db_ts_path.as_path().exists() {
             panic!("Fatal error : duniter-ts database don't exist !");
         }
-        sync::sync_ts(conf, db_ts_path, cautious);
+        sync::sync_ts(conf, db_ts_path, cautious, verif_inner_hash);
     }
     /// Request chunk from network (chunk = group of blocks)
     fn request_chunk(&self, req_id: &ModuleReqId, from: u32) -> (ModuleReqId, NetworkRequest) {
@@ -816,6 +816,7 @@ impl BlockchainModule {
 /// Complete Network Block
 pub fn complete_network_block(
     network_block: &NetworkBlock,
+    verif_inner_hash: bool,
 ) -> Result<BlockDocument, CompletedBlockError> {
     if let NetworkBlock::V10(ref network_block_v10) = *network_block {
         let mut block_doc = network_block_v10.uncompleted_block_doc.clone();
@@ -832,7 +833,7 @@ pub fn complete_network_block(
         let inner_hash = block_doc.inner_hash.expect(
             "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
         );
-        if block_doc.number.0 > 0 {
+        if verif_inner_hash && block_doc.number.0 > 0 {
             block_doc.compute_inner_hash();
         }
         let hash = block_doc.hash;
@@ -841,14 +842,17 @@ pub fn complete_network_block(
             "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
         ) == inner_hash
         {
-            let nonce = block_doc.nonce;
-            block_doc.change_nonce(nonce);
-            if block_doc.hash == hash {
+            block_doc.fill_inner_hash_and_nonce_str(None);
+            if !verif_inner_hash || block_doc.hash == hash {
                 trace!("Succes to complete_network_block #{}", block_doc.number.0);
                 Ok(block_doc)
             } else {
                 warn!("BlockchainModule : Refuse Bloc : invalid hash !");
-                Err(CompletedBlockError::InvalidHash())
+                Err(CompletedBlockError::InvalidHash(
+                    block_doc.number,
+                    block_doc.hash,
+                    hash,
+                ))
             }
         } else {
             warn!("BlockchainModule : Refuse Bloc : invalid inner hash !");
diff --git a/blockchain/revert_block.rs b/blockchain/revert_block.rs
index a349ba7fad4b0252500f72c045dc18b39e0ec1d4..d4298de416109b3ca7921c4340ceb28715223524 100644
--- a/blockchain/revert_block.rs
+++ b/blockchain/revert_block.rs
@@ -22,7 +22,7 @@ use duniter_dal::{BinDB, ForkId, TxV10Datas};
 use duniter_documents::blockchain::v10::documents::block::TxDocOrTxHash;
 use duniter_documents::blockchain::v10::documents::transaction::{TxAmount, TxBase};
 use duniter_documents::blockchain::Document;
-use duniter_wotb::data::NewLinkResult;
+use duniter_wotb::data::{NewLinkResult, RemLinkResult};
 use duniter_wotb::{NodeId, WebOfTrust};
 use rustbreak::backend::Backend;
 use std::collections::HashMap;
@@ -102,12 +102,23 @@ pub fn revert_block<W: WebOfTrust, B: Backend + Debug>(
     // REVERT WOT EVENTS
     let mut wot_dbs_requests = Vec::new();
     // Revert expire_certs
-    for ((source, target), created_block_id) in expire_certs {
-        wot_dbs_requests.push(WotsDBsWriteQuery::RevertExpireCert(
-            source,
-            target,
-            created_block_id,
-        ));
+    if !expire_certs.is_empty() {
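+        // Re-add each expired link in the wot, then push the corresponding RevertExpireCert query.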
+        for ((source, target), created_block_id) in expire_certs {
+            wot_db
+                .write(|db| {
+                    let result = db.add_link(source, target);
+                    match result {
+                        NewLinkResult::Ok(_) => {}
+                        _ => panic!("Fail to add_link {}->{} : {:?}", source.0, target.0, result),
+                    }
+                })
+                .expect("Fail to write in WotDB");
+            wot_dbs_requests.push(WotsDBsWriteQuery::RevertExpireCert(
+                source,
+                target,
+                created_block_id,
+            ));
+        }
     }
     // Revert certifications
     for certification in block.certifications.clone() {
@@ -117,16 +128,16 @@ pub fn revert_block<W: WebOfTrust, B: Backend + Debug>(
         let wotb_node_to = wot_index[&compact_cert.target];
         wot_db
             .write(|db| {
-                let result = db.add_link(wotb_node_from, wotb_node_to);
+                let result = db.rem_link(wotb_node_from, wotb_node_to);
                 match result {
-                    NewLinkResult::Ok(_) => {}
+                    RemLinkResult::Removed(_) => {}
                     _ => panic!(
-                        "Fail to add_link {}->{} : {:?}",
+                        "Fail to rem_link {}->{} : {:?}",
                         wotb_node_from.0, wotb_node_to.0, result
                     ),
                 }
             })
-            .expect("Fail to read WotDB");
+            .expect("Fail to write in WotDB");
         wot_dbs_requests.push(WotsDBsWriteQuery::RevertCert(
             compact_cert,
             wotb_node_from,
diff --git a/blockchain/sync.rs b/blockchain/sync.rs
index 6636be2a604fcfd55bc907bd2b880aab40b75c44..b0766079ae4906c5486f05c56c0797506b3bb614 100644
--- a/blockchain/sync.rs
+++ b/blockchain/sync.rs
@@ -51,7 +51,7 @@ pub struct BlockHeader {
 #[derive(Debug)]
 /// Message for main sync thread
 enum MessForSyncThread {
-    TargetBlockstamp(Blockstamp),
+    Target(Currency, Blockstamp),
     NetworkBlock(NetworkBlock),
     DownloadFinish(),
     ApplyFinish(),
@@ -67,27 +67,7 @@ enum SyncJobsMess {
 }
 
 /// Sync from a duniter-ts database
-pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool) {
-    // get profile and currency and current_blockstamp
-    let profile = &conf.profile();
-    let currency = &conf.currency();
-
-    // Get databases path
-    let db_path = duniter_conf::get_blockchain_db_path(&profile, &currency);
-
-    // Open wot db
-    let wot_db = open_wot_db::<RustyWebOfTrust>(&db_path).expect("Fail to open WotDB !");
-
-    // Open blocks databases
-    let databases = BlocksV10DBs::open(&db_path, false);
-
-    // Get local current blockstamp
-    debug!("Get local current blockstamp...");
-    let mut current_blockstamp: Blockstamp = duniter_dal::block::get_current_blockstamp(&databases)
-        .expect("ForksV10DB : RustBreakError !")
-        .unwrap_or_default();
-    debug!("Success to get local current blockstamp.");
-
+pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool, verif_inner_hash: bool) {
     // Get verification level
     let _verif_level = if cautious {
         println!("Start cautious sync...");
@@ -112,10 +92,12 @@ pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool) {
     let pool = ThreadPool::new(nb_workers);
 
     // Determine db_ts_copy_path
-    let mut db_ts_copy_path = duniter_conf::datas_path(&profile.clone(), currency);
+    let mut db_ts_copy_path =
+        duniter_conf::datas_path(&conf.profile().clone(), &conf.currency().clone());
     db_ts_copy_path.push("tmp_db_ts_copy.db");
 
     // Lauch ts thread
+    let profile_copy = conf.profile().clone();
     let sender_sync_thread_clone = sender_sync_thread.clone();
     pool.execute(move || {
         let ts_job_begin = SystemTime::now();
@@ -130,37 +112,59 @@ pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool) {
         // Get ts current blockstamp
         debug!("Get ts-db current blockstamp...");
         let mut cursor: sqlite::Cursor = ts_db
-            .prepare("SELECT hash, number FROM block WHERE fork=? ORDER BY number DESC LIMIT 1;")
+            .prepare("SELECT hash, number, currency FROM block WHERE fork=? ORDER BY number DESC LIMIT 1;")
             .expect("Request SQL get_ts_current_block is wrong !")
             .cursor();
         cursor
             .bind(&[sqlite::Value::Integer(0)])
             .expect("Fail to get ts current block !");
-        let current_ts_blockstamp = if let Some(row) = cursor.next().expect("cursor error") {
-            let block_id = BlockId(
-                row[1]
-                    .as_integer()
-                    .expect("Fail to parse current ts blockstamp !") as u32,
-            );
-            let block_hash = BlockHash(
-                Hash::from_hex(
-                    row[0]
-                        .as_string()
-                        .expect("Fail to parse current ts blockstamp !"),
-                ).expect("Fail to parse current ts blockstamp !"),
-            );
-            Blockstamp {
-                id: block_id,
-                hash: block_hash,
-            }
-        } else {
-            panic!("Fail to get current ts blockstamp !");
-        };
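+        // Read the currency name and the current blockstamp from the ts database in a single query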
+        let (currency, current_ts_blockstamp) =
+            if let Some(row) = cursor.next().expect("cursor error") {
+                let block_id = BlockId(
+                    row[1]
+                        .as_integer()
+                        .expect("Fail to parse current ts blockstamp !") as u32,
+                );
+                let block_hash = BlockHash(
+                    Hash::from_hex(
+                        row[0]
+                            .as_string()
+                            .expect("Fail to parse current ts blockstamp !"),
+                    ).expect("Fail to parse current ts blockstamp !"),
+                );
+                (
+                    Currency::Str(String::from(
+                        row[2]
+                            .as_string()
+                            .expect("Fatal error :Fail to get currency !"),
+                    )),
+                    Blockstamp {
+                        id: block_id,
+                        hash: block_hash,
+                    },
+                )
+            } else {
+                panic!("Fail to get current ts blockstamp !");
+            };
+
         debug!("Success to ts-db current blockstamp.");
 
+        // Get current local blockstamp
+        debug!("Get local current blockstamp...");
+        let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency);
+        let blocks_databases = BlocksV10DBs::open(&db_path, false);
+        let current_blockstamp: Blockstamp = duniter_dal::block::get_current_blockstamp(
+            &blocks_databases,
+        ).expect("ForksV10DB : RustBreakError !")
+            .unwrap_or_default();
+        debug!("Success to get local current blockstamp.");
+
         // Send ts current blockstamp
         sender_sync_thread_clone
-            .send(MessForSyncThread::TargetBlockstamp(current_ts_blockstamp))
+            .send(MessForSyncThread::Target(
+                currency.clone(),
+                current_ts_blockstamp,
+            ))
             .expect("Fatal error : sync_thread unrechable !");
 
         // Get genesis block
@@ -229,18 +233,41 @@ pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool) {
         );
     });
 
-    // Get target blockstamp
-    let target_blockstamp = if let Ok(MessForSyncThread::TargetBlockstamp(target_blockstamp)) =
-        recv_sync_thread.recv()
-    {
-        target_blockstamp
-    } else {
-        panic!("Fatal error : no TargetBlockstamp !")
-    };
+    // Get currency and target blockstamp
+    let (currency, target_blockstamp) =
+        if let Ok(MessForSyncThread::Target(currency, target_blockstamp)) = recv_sync_thread.recv()
+        {
+            (currency, target_blockstamp)
+        } else {
+            panic!("Fatal error : no TargetBlockstamp !")
+        };
+
+    // Update DuniterConf
+    let mut conf = conf.clone();
+    conf.set_currency(currency.clone());
+
+    // Get databases path
+    let db_path = duniter_conf::get_blockchain_db_path(&conf.profile(), &currency);
+
+    // Write new conf
+    duniter_conf::write_conf_file(&conf).expect("Fail to write new conf !");
+
+    // Open wot db
+    let wot_db = open_wot_db::<RustyWebOfTrust>(&db_path).expect("Fail to open WotDB !");
+
+    // Open blocks databases
+    let databases = BlocksV10DBs::open(&db_path, false);
+
+    // Get local current blockstamp
+    debug!("Get local current blockstamp...");
+    let mut current_blockstamp: Blockstamp = duniter_dal::block::get_current_blockstamp(&databases)
+        .expect("ForksV10DB : RustBreakError !")
+        .unwrap_or_default();
+    debug!("Success to get local current blockstamp.");
 
     // Instanciate blockchain module
     let blockchain_module =
-        BlockchainModule::load_blockchain_conf(conf, RequiredKeysContent::None());
+        BlockchainModule::load_blockchain_conf(&conf, RequiredKeysContent::None());
 
     // Node is already synchronized ?
     if target_blockstamp.id.0 < current_blockstamp.id.0 {
@@ -361,7 +388,7 @@ pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool) {
     });
 
     // / Launch wot_worker thread
-    let profile_copy2 = profile.clone();
+    let profile_copy2 = conf.profile().clone();
     let currency_copy2 = currency.clone();
     let sender_sync_thread_clone2 = sender_sync_thread.clone();
 
@@ -462,7 +489,7 @@ pub fn sync_ts(conf: &DuniterConf, db_ts_path: PathBuf, cautious: bool) {
         all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
         // Complete block
         let complete_block_begin = SystemTime::now();
-        let block_doc = complete_network_block(&network_block)
+        let block_doc = complete_network_block(&network_block, verif_inner_hash)
             .expect("Receive wrong block, please reset data and resync !");
         all_complete_block_duration += SystemTime::now()
             .duration_since(complete_block_begin)
diff --git a/blockchain/ts_parsers.rs b/blockchain/ts_parsers.rs
index 883179693a036ac9959785dd5e41e6c194113ca3..2915f8def24c23e9da48f6097cb73707aea15e98 100644
--- a/blockchain/ts_parsers.rs
+++ b/blockchain/ts_parsers.rs
@@ -341,12 +341,13 @@ pub fn parse_transaction(
     let outputs_array = source.get("outputs")?.as_array()?;
     let mut outputs = Vec::with_capacity(outputs_array.len());
     for output in outputs_array {
-        match TransactionOutput::parse_from_str(output.as_str()?) {
-            Ok(output) => outputs.push(output),
-            Err(_) => {
-                return None;
-            }
-        }
+        outputs.push(
+            TransactionOutput::parse_from_str(
+                output
+                    .as_str()
+                    .expect(&format!("Fail to parse output : {:?}", output)),
+            ).expect(&format!("Fail to parse output : {:?}", output)),
+        );
     }
     let signatures_array = source.get("signatures")?.as_array()?;
     let mut signatures = Vec::with_capacity(signatures_array.len());
@@ -369,6 +370,7 @@ pub fn parse_transaction(
         unlocks: &unlocks,
         outputs: &outputs,
         comment,
+        hash: Some(Hash::from_hex(source.get("hash")?.as_str()?).expect("Fail to parse tx hash")),
     };
     Some(tx_doc_builder.build_with_signature(signatures))
 }
diff --git a/conf/lib.rs b/conf/lib.rs
index 01dc55d1d5144191d31872139ceb6b9edf3f1dab..fe82e0af9cc144e5914b2c8a6919a961cc11709f 100644
--- a/conf/lib.rs
+++ b/conf/lib.rs
@@ -137,21 +137,18 @@ pub fn get_user_datas_folder() -> &'static str {
     USER_DATAS_FOLDER
 }
 
-/// Returns the path to the folder containing the user data of the running profile
+/// Returns the path to the folder containing the currency data of the running profile
 pub fn datas_path(profile: &str, currency: &Currency) -> PathBuf {
-    let mut datas_path = match env::home_dir() {
-        Some(path) => path,
-        None => panic!("Impossible to get your home dir!"),
-    };
-    datas_path.push(".config/");
-    datas_path.push(USER_DATAS_FOLDER);
-    datas_path.push(profile);
+    let mut datas_path = get_profile_path(profile);
     datas_path.push(currency.to_string());
+    if !datas_path.as_path().exists() {
+        fs::create_dir(datas_path.as_path()).expect("Impossible to create currency dir !");
+    }
     datas_path
 }
 
-/// Load configuration.
-pub fn load_conf(profile: &str) -> (DuniterConf, DuniterKeyPairs) {
+/// Returns the path to the folder containing the user data of the running profile
+pub fn get_profile_path(profile: &str) -> PathBuf {
     // Define and create datas directory if not exist
     let mut profile_path = match env::home_dir() {
         Some(path) => path,
@@ -172,6 +169,12 @@ pub fn load_conf(profile: &str) -> (DuniterConf, DuniterKeyPairs) {
     if !profile_path.as_path().exists() {
         fs::create_dir(profile_path.as_path()).expect("Impossible to create your profile dir !");
     }
+    profile_path
+}
+
+/// Load configuration.
+pub fn load_conf(profile: &str) -> (DuniterConf, DuniterKeyPairs) {
+    let mut profile_path = get_profile_path(profile);
 
     // Load conf
     let (conf, keypairs) = load_conf_at_path(profile, &profile_path);
@@ -280,7 +283,7 @@ pub fn load_conf_at_path(profile: &str, profile_path: &PathBuf) -> (DuniterConf,
         }
     } else {
         // Create conf file with default conf
-        write_conf_file(&conf_path, &DuniterConf::V1(conf.clone()))
+        write_conf_file(&DuniterConf::V1(conf.clone()))
             .expect("Fatal error : fail to write default conf file !");
     }
 
@@ -306,10 +309,12 @@ pub fn write_keypairs_file(
 }
 
 /// Save configuration in profile folder
-pub fn write_conf_file(file_path: &PathBuf, conf: &DuniterConf) -> Result<(), std::io::Error> {
+pub fn write_conf_file(conf: &DuniterConf) -> Result<(), std::io::Error> {
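+    // The conf file path is derived from the profile stored in the conf itself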
+    let mut conf_path = get_profile_path(&conf.profile());
+    conf_path.push("conf.json");
     match *conf {
         DuniterConf::V1(ref conf_v1) => {
-            let mut f = try!(File::create(file_path.as_path()));
+            let mut f = try!(File::create(conf_path.as_path()));
             try!(
                 f.write_all(
                     serde_json::to_string_pretty(conf_v1)
diff --git a/core/cli/en.yml b/core/cli/en.yml
index f295780739b4181d8856aa3825f0d7a4d27f1478..f48ad39c470a09614e91eb64a45e998c9a675193 100644
--- a/core/cli/en.yml
+++ b/core/cli/en.yml
@@ -39,6 +39,10 @@ subcommands:
                 short: c
                 long: cautious
                 help: cautious mode (check all protocol rules, very slow)
+            - unsafe:
+                short: u
+                long: unsafe
+                help: unsafe mode (do not check block inner hashes, very dangerous)
     - dbex:
         about: durs databases explorer
         version: "0.1.0"
diff --git a/core/lib.rs b/core/lib.rs
index 574bd7811818aeddfae0d2530a685a2865648fa0..f7d815b167fa84c8b8fab2de6ce9410eb8fa1918 100644
--- a/core/lib.rs
+++ b/core/lib.rs
@@ -122,7 +122,12 @@ impl DuniterCore {
             ))
         } else if let Some(matches) = cli_args.subcommand_matches("sync_ts") {
             let ts_profile = matches.value_of("TS_PROFILE").unwrap_or("duniter_default");
-            sync_ts(&conf, ts_profile, matches.is_present("cautious"));
+            sync_ts(
+                &conf,
+                ts_profile,
+                matches.is_present("cautious"),
+                !matches.is_present("unsafe"),
+            );
             None
         } else if let Some(matches) = cli_args.subcommand_matches("dbex") {
             let csv = matches.is_present("csv");
@@ -403,9 +408,9 @@ pub fn start(
 }
 
 /// Launch synchronisation from a duniter-ts database
-pub fn sync_ts(conf: &DuniterConf, ts_profile: &str, cautious: bool) {
+pub fn sync_ts(conf: &DuniterConf, ts_profile: &str, cautious: bool, verif_inner_hash: bool) {
     // Launch sync-ts
-    BlockchainModule::sync_ts(conf, ts_profile, cautious);
+    BlockchainModule::sync_ts(conf, ts_profile, cautious, verif_inner_hash);
 }
 
 /// Launch databases explorer
diff --git a/dal/balance.rs b/dal/balance.rs
index 848b480195cf88897d6170324f7b39e6c6adfafa..4e751dfc1ea2680d9bc90bba4c39821a9ec41db6 100644
--- a/dal/balance.rs
+++ b/dal/balance.rs
@@ -18,7 +18,7 @@ use *;
 
 pub fn get_address_balance(
     balances_db: &BinFileDB<BalancesV10Datas>,
-    address: &TransactionOutputConditionGroup,
+    address: &UTXOConditionsGroup,
 ) -> Result<Option<SourceAmount>, DALError> {
     Ok(balances_db.read(|db| {
         if let Some(balance_and_utxos) = db.get(address) {
diff --git a/dal/lib.rs b/dal/lib.rs
index d6e4196a56aca24151cecd0ee9e8775a4fa2c8d9..2996d71959d74baad762e2ae4ab3d8f345d37ae2 100644
--- a/dal/lib.rs
+++ b/dal/lib.rs
@@ -84,8 +84,7 @@ pub type CertsExpirV10Datas = HashMap<BlockId, HashSet<(NodeId, NodeId)>>;
 pub type TxV10Datas = HashMap<Hash, DALTxV10>;
 pub type UTXOsV10Datas = HashMap<UTXOIndexV10, UTXOContentV10>;
 pub type DUsV10Datas = HashMap<PubKey, HashSet<BlockId>>;
-pub type BalancesV10Datas =
-    HashMap<TransactionOutputConditionGroup, (SourceAmount, HashSet<UTXOIndexV10>)>;
+pub type BalancesV10Datas = HashMap<UTXOConditionsGroup, (SourceAmount, HashSet<UTXOIndexV10>)>;
 
 pub type BinDB<D, B> = Database<D, B, Bincode>;
 pub type BinFileDB<D> = FileDatabase<D, Bincode>;
diff --git a/dal/sources.rs b/dal/sources.rs
index bfe6c54b6dfbf98512f5cd285756af5c0c19d94d..4a42ef2b3a8a3673d96cc758ef41005c2493c98b 100644
--- a/dal/sources.rs
+++ b/dal/sources.rs
@@ -72,8 +72,8 @@ pub type UTXOContentV10 = TransactionOutput;
 pub struct UTXOV10(pub UTXOIndexV10, pub UTXOContentV10);
 
 impl UTXOV10 {
-    pub fn get_conditions(&self) -> TransactionOutputConditionGroup {
-        self.1.conditions.clone()
+    pub fn get_conditions(&self) -> UTXOConditionsGroup {
+        self.1.conditions.conditions.clone()
     }
     pub fn get_amount(&self) -> SourceAmount {
         SourceAmount(self.1.amount, self.1.base)
@@ -87,7 +87,7 @@ pub enum UTXO {
 }
 
 impl UTXO {
-    pub fn get_conditions(&self) -> TransactionOutputConditionGroup {
+    pub fn get_conditions(&self) -> UTXOConditionsGroup {
         match *self {
             UTXO::V10(ref utxo_v10) => utxo_v10.get_conditions(),
             _ => panic!("UTXO version not supported !"),
diff --git a/dal/writers/certification.rs b/dal/writers/certification.rs
index 5f593fcbb8b7bfed507f61d7e1ff806400250f60..fd2d7341e599e4fe2b2e32601a1ddce33e99b122 100644
--- a/dal/writers/certification.rs
+++ b/dal/writers/certification.rs
@@ -82,3 +82,29 @@ pub fn revert_write_cert(
     })?;
     Ok(())
 }
+
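+/// Revert a certification expiry event : reinsert the (source, target) pair
+/// in the CertsExpirV10Datas entry of created_block_id.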
+pub fn revert_expire_cert(
+    certs_db: &BinFileDB<CertsExpirV10Datas>,
+    source: NodeId,
+    target: NodeId,
+    created_block_id: BlockId,
+) -> Result<(), DALError> {
+    // Reinsert CertsExpirV10Datas entry
+    certs_db.write(|db| {
+        let mut certs = db.get(&created_block_id).cloned().unwrap_or_default();
+        certs.insert((source, target));
+        db.insert(created_block_id, certs);
+    })?;
+    Ok(())
+}
+
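+/// Expire all certifications created in block created_block_id :
+/// remove the corresponding CertsExpirV10Datas entry.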
+pub fn expire_certs(
+    certs_db: &BinFileDB<CertsExpirV10Datas>,
+    created_block_id: BlockId,
+) -> Result<(), DALError> {
+    // Remove CertsExpirV10Datas entries
+    certs_db.write(|db| {
+        db.remove(&created_block_id);
+    })?;
+    Ok(())
+}
diff --git a/dal/writers/dividend.rs b/dal/writers/dividend.rs
index 3071e6c675942754143d36be262b2d254d8a1817..ec8e74e21d96cb8d3867987d2da61ab848aaa33f 100644
--- a/dal/writers/dividend.rs
+++ b/dal/writers/dividend.rs
@@ -49,7 +49,7 @@ pub fn create_du<B: Backend + Debug>(
             for pubkey in members {
                 members_balances.insert(
                     *pubkey,
-                    db.get(&TransactionOutputConditionGroup::Single(
+                    db.get(&UTXOConditionsGroup::Single(
                         TransactionOutputCondition::Sig(*pubkey),
                     )).cloned()
                         .unwrap_or_default(),
@@ -73,7 +73,7 @@ pub fn create_du<B: Backend + Debug>(
     balances_db.write(|db| {
         for (pubkey, (balance, utxos_indexs)) in members_balances {
             db.insert(
-                TransactionOutputConditionGroup::Single(TransactionOutputCondition::Sig(pubkey)),
+                UTXOConditionsGroup::Single(TransactionOutputCondition::Sig(pubkey)),
                 (balance, utxos_indexs),
             );
         }
diff --git a/dal/writers/requests.rs b/dal/writers/requests.rs
index 0b42b7d24f9d0a3682c94640e6b57827b11cd417..865aaef94e1f3ef62e104bf2db9a01a94b8e7574 100644
--- a/dal/writers/requests.rs
+++ b/dal/writers/requests.rs
@@ -96,7 +96,7 @@ pub enum WotsDBsWriteQuery {
     /// Revert certification (source_pubkey, source, target, created_block_id, median_time)
     RevertCert(CompactCertificationDocument, NodeId, NodeId),
-    /// Certification expiry (source, target, created_block_id)
+    /// Expire all certifications created in a given block (created_block_id)
-    ExpireCert(NodeId, NodeId, BlockId),
+    ExpireCerts(BlockId),
     /// Revert certification expiry event (source, target, created_block_id)
     RevertExpireCert(NodeId, NodeId, BlockId),
 }
@@ -228,27 +228,16 @@ impl WotsDBsWriteQuery {
                 )?;
                 trace!("WotsDBsWriteQuery::CreateCert...finish");
             }
-            WotsDBsWriteQuery::ExpireCert(ref _source, ref _target, ref _created_block_id) => {
-                /*super::certification::expire_cert(
-                    &databases.certs_db,
-                    *source,
-                    *target,
-                    *created_block_id,
-                    false,
-                )?;*/
+            WotsDBsWriteQuery::ExpireCerts(ref created_block_id) => {
+                super::certification::expire_certs(&databases.certs_db, *created_block_id)?;
             }
-            WotsDBsWriteQuery::RevertExpireCert(
-                ref _source,
-                ref _target,
-                ref _created_block_id,
-            ) => {
-                /*super::certification::expire_cert(
+            WotsDBsWriteQuery::RevertExpireCert(ref source, ref target, ref created_block_id) => {
+                super::certification::revert_expire_cert(
                     &databases.certs_db,
                     *source,
                     *target,
                     *created_block_id,
-                    true,
-                )?;*/
+                )?;
             }
         }
         Ok(())
diff --git a/dal/writers/transaction.rs b/dal/writers/transaction.rs
index 9aeafafa92f91c95b310435f67d40e82018e4882..d2021bb969978139c2219495a2e1bf5dabe561da 100644
--- a/dal/writers/transaction.rs
+++ b/dal/writers/transaction.rs
@@ -55,7 +55,7 @@ pub fn revert_tx<B: Backend + Debug>(
     // Recalculate balance of consumed adress
     let new_balances_consumed_adress = dbs.balances_db.read(|db| {
         let mut new_balances_consumed_adress: HashMap<
-            TransactionOutputConditionGroup,
+            UTXOConditionsGroup,
             (SourceAmount, HashSet<UTXOIndexV10>),
         > = HashMap::new();
         for source in &consumed_utxos {
@@ -96,36 +96,36 @@ pub fn revert_tx<B: Backend + Debug>(
         }
     })?;
     // Complete sources_destroyed
-    let sources_destroyed: HashMap<
-        TransactionOutputConditionGroup,
-        Vec<(UTXOIndexV10, SourceAmount)>,
-    > = if !sources_destroyed.is_empty() {
-        dbs.tx_db.read(|db| {
-            let mut sources_destroyed_completed = HashMap::new();
-            for s_index in sources_destroyed {
-                let tx_output = db
-                    .get(&s_index.0)
-                    .expect("Not find tx")
-                    .tx_doc
-                    .get_outputs()[(s_index.1).0]
-                    .clone();
-                let mut sources_destroyed_for_same_address: Vec<(
-                    UTXOIndexV10,
-                    SourceAmount,
-                )> = sources_destroyed_completed
-                    .get(&tx_output.conditions)
-                    .cloned()
-                    .unwrap_or_default();
-                sources_destroyed_for_same_address
-                    .push((*s_index, SourceAmount(tx_output.amount, tx_output.base)));
+    let sources_destroyed: HashMap<UTXOConditionsGroup, Vec<(UTXOIndexV10, SourceAmount)>> =
+        if !sources_destroyed.is_empty() {
+            dbs.tx_db.read(|db| {
+                let mut sources_destroyed_completed = HashMap::new();
+                for s_index in sources_destroyed {
+                    let tx_output = db
+                        .get(&s_index.0)
+                        .expect("Not find tx")
+                        .tx_doc
+                        .get_outputs()[(s_index.1).0]
+                        .clone();
+                    let mut sources_destroyed_for_same_address: Vec<(
+                        UTXOIndexV10,
+                        SourceAmount,
+                    )> = sources_destroyed_completed
+                        .get(&tx_output.conditions.conditions)
+                        .cloned()
+                        .unwrap_or_default();
+                    sources_destroyed_for_same_address
+                        .push((*s_index, SourceAmount(tx_output.amount, tx_output.base)));
+                    sources_destroyed_completed.insert(
+                        tx_output.conditions.conditions,
+                        sources_destroyed_for_same_address,
+                    );
+                }
                 sources_destroyed_completed
-                    .insert(tx_output.conditions, sources_destroyed_for_same_address);
-            }
-            sources_destroyed_completed
-        })?
-    } else {
-        HashMap::with_capacity(0)
-    };
+            })?
+        } else {
+            HashMap::with_capacity(0)
+        };
     // Index recreated sources
     let recreated_sources: HashMap<SourceIndexV10, SourceAmount> = tx_doc
         .get_inputs()
@@ -142,43 +142,40 @@ pub fn revert_tx<B: Backend + Debug>(
         })
         .collect();
     // Find adress of recreated sources
-    let recreated_adress: HashMap<
-        TransactionOutputConditionGroup,
-        (SourceAmount, HashSet<UTXOIndexV10>),
-    > = dbs.utxos_db.read(|db| {
-        let mut recreated_adress: HashMap<
-            TransactionOutputConditionGroup,
-            (SourceAmount, HashSet<UTXOIndexV10>),
-        > = HashMap::new();
-        for (source_index, source_amount) in &recreated_sources {
-            if let SourceIndexV10::UTXO(utxo_index) = source_index {
-                // Get utxo
-                let utxo = db
-                    .get(&utxo_index)
-                    .expect("ApplyBLockError : unknow UTXO in inputs !");
-                // Get utxo conditions(=address)
-                let conditions = &utxo.conditions;
-                // Calculate new balances datas for "conditions" address
-                let (mut balance, mut utxos_index) = recreated_adress
-                    .get(conditions)
-                    .cloned()
-                    .unwrap_or_default();
-                balance = balance + *source_amount;
-                utxos_index.insert(*utxo_index);
-                // Write new balances datas for "conditions" address
-                recreated_adress.insert(conditions.clone(), (balance, utxos_index));
-            } else if let SourceIndexV10::DU(pubkey, _block_id) = source_index {
-                let address = TransactionOutputConditionGroup::Single(
-                    TransactionOutputCondition::Sig(*pubkey),
-                );
-                let (mut balance, utxos_index) =
-                    recreated_adress.get(&address).cloned().unwrap_or_default();
-                balance = balance + *source_amount;
-                recreated_adress.insert(address, (balance, utxos_index));
+    let recreated_adress: HashMap<UTXOConditionsGroup, (SourceAmount, HashSet<UTXOIndexV10>)> =
+        dbs.utxos_db.read(|db| {
+            let mut recreated_adress: HashMap<
+                UTXOConditionsGroup,
+                (SourceAmount, HashSet<UTXOIndexV10>),
+            > = HashMap::new();
+            for (source_index, source_amount) in &recreated_sources {
+                if let SourceIndexV10::UTXO(utxo_index) = source_index {
+                    // Get utxo
+                    let utxo = db
+                        .get(&utxo_index)
+                        .expect("ApplyBLockError : unknow UTXO in inputs !");
+                    // Get utxo conditions(=address)
+                    let conditions = &utxo.conditions.conditions;
+                    // Calculate new balances datas for "conditions" address
+                    let (mut balance, mut utxos_index) = recreated_adress
+                        .get(conditions)
+                        .cloned()
+                        .unwrap_or_default();
+                    balance = balance + *source_amount;
+                    utxos_index.insert(*utxo_index);
+                    // Write new balances datas for "conditions" address
+                    recreated_adress.insert(conditions.clone(), (balance, utxos_index));
+                } else if let SourceIndexV10::DU(pubkey, _block_id) = source_index {
+                    let address =
+                        UTXOConditionsGroup::Single(TransactionOutputCondition::Sig(*pubkey));
+                    let (mut balance, utxos_index) =
+                        recreated_adress.get(&address).cloned().unwrap_or_default();
+                    balance = balance + *source_amount;
+                    recreated_adress.insert(address, (balance, utxos_index));
+                }
             }
-        }
-        recreated_adress
-    })?;
+            recreated_adress
+        })?;
     // Recalculate balance of recreated adress
     let new_balances_recreated_adress = dbs.balances_db.read(|db| {
         let mut new_balances_recreated_adress = Vec::new();
@@ -260,41 +257,39 @@ pub fn apply_and_write_tx<B: Backend + Debug>(
         })
         .collect();
     // Find adress of consumed sources
-    let consumed_adress: HashMap<
-        TransactionOutputConditionGroup,
-        (SourceAmount, HashSet<UTXOIndexV10>),
-    > = dbs.utxos_db.read(|db| {
-        let mut consumed_adress: HashMap<
-            TransactionOutputConditionGroup,
-            (SourceAmount, HashSet<UTXOIndexV10>),
-        > = HashMap::new();
-        for (source_index, source_amount) in &consumed_sources {
-            if let SourceIndexV10::UTXO(utxo_index) = source_index {
-                // Get utxo
-                let utxo = db
-                    .get(&utxo_index)
-                    .expect("ApplyBLockError : unknow UTXO in inputs !");
-                // Get utxo conditions(=address)
-                let conditions = &utxo.conditions;
-                // Calculate new balances datas for "conditions" address
-                let (mut balance, mut utxos_index) =
-                    consumed_adress.get(conditions).cloned().unwrap_or_default();
-                balance = balance + *source_amount;
-                utxos_index.insert(*utxo_index);
-                // Write new balances datas for "conditions" address
-                consumed_adress.insert(conditions.clone(), (balance, utxos_index));
-            } else if let SourceIndexV10::DU(pubkey, _block_id) = source_index {
-                let address = TransactionOutputConditionGroup::Single(
-                    TransactionOutputCondition::Sig(*pubkey),
-                );
-                let (mut balance, utxos_index) =
-                    consumed_adress.get(&address).cloned().unwrap_or_default();
-                balance = balance + *source_amount;
-                consumed_adress.insert(address, (balance, utxos_index));
+    let consumed_adress: HashMap<UTXOConditionsGroup, (SourceAmount, HashSet<UTXOIndexV10>)> =
+        dbs.utxos_db.read(|db| {
+            let mut consumed_adress: HashMap<
+                UTXOConditionsGroup,
+                (SourceAmount, HashSet<UTXOIndexV10>),
+            > = HashMap::new();
+            for (source_index, source_amount) in &consumed_sources {
+                if let SourceIndexV10::UTXO(utxo_index) = source_index {
+                    // Get utxo
+                    let utxo = db.get(&utxo_index).expect(&format!(
+                        "ApplyBLockError : unknow UTXO in inputs : {:?} !",
+                        utxo_index
+                    ));
+                    // Get utxo conditions (= address)
+                    let conditions = &utxo.conditions.conditions;
+                    // Calculate new balances datas for "conditions" address
+                    let (mut balance, mut utxos_index) =
+                        consumed_adress.get(conditions).cloned().unwrap_or_default();
+                    balance = balance + *source_amount;
+                    utxos_index.insert(*utxo_index);
+                    // Write new balances datas for "conditions" address
+                    consumed_adress.insert(conditions.clone(), (balance, utxos_index));
+                } else if let SourceIndexV10::DU(pubkey, _block_id) = source_index {
+                    let address =
+                        UTXOConditionsGroup::Single(TransactionOutputCondition::Sig(*pubkey));
+                    let (mut balance, utxos_index) =
+                        consumed_adress.get(&address).cloned().unwrap_or_default();
+                    balance = balance + *source_amount;
+                    consumed_adress.insert(address, (balance, utxos_index));
+                }
             }
-        }
-        consumed_adress
-    })?;
+            consumed_adress
+        })?;
     // Recalculate balance of consumed adress
     let new_balances_consumed_adress = dbs.balances_db.read(|db| {
         let mut new_balances_consumed_adress = Vec::new();
@@ -358,7 +353,7 @@ pub fn apply_and_write_tx<B: Backend + Debug>(
     // Recalculate balance of supplied adress
     let new_balances_supplied_adress = dbs.balances_db.read(|db| {
         let mut new_balances_supplied_adress: HashMap<
-            TransactionOutputConditionGroup,
+            UTXOConditionsGroup,
             (SourceAmount, HashSet<UTXOIndexV10>),
         > = HashMap::new();
         for source in &created_utxos {
@@ -449,6 +444,7 @@ mod tests {
                 ).expect("fail to parse output !"),
             ],
             comment: "TEST",
+            hash: None,
         };
         builder.build_with_signature(vec![sig])
     }
@@ -478,7 +474,7 @@ mod tests {
         let cgeek_new_balance = currency_dbs
             .balances_db
             .read(|db| {
-                db.get(&TransactionOutputConditionGroup::Single(
+                db.get(&UTXOConditionsGroup::Single(
                     TransactionOutputCondition::Sig(tx_doc.issuers()[0]),
                 )).cloned()
             })
@@ -488,7 +484,7 @@ mod tests {
         let tortue_new_balance = currency_dbs
             .balances_db
             .read(|db| {
-                db.get(&TransactionOutputConditionGroup::Single(
+                db.get(&UTXOConditionsGroup::Single(
                     TransactionOutputCondition::Sig(tortue_pubkey),
                 )).cloned()
             })
@@ -504,7 +500,7 @@ mod tests {
         let cgeek_new_balance = currency_dbs
             .balances_db
             .read(|db| {
-                db.get(&TransactionOutputConditionGroup::Single(
+                db.get(&UTXOConditionsGroup::Single(
                     TransactionOutputCondition::Sig(tx_doc.issuers()[0]),
                 )).cloned()
             })
@@ -516,7 +512,7 @@ mod tests {
         let receiver_new_balance = currency_dbs
             .balances_db
             .read(|db| {
-                db.get(&TransactionOutputConditionGroup::Single(
+                db.get(&UTXOConditionsGroup::Single(
                     TransactionOutputCondition::Sig(tortue_pubkey),
                 )).cloned()
             })
@@ -540,7 +536,7 @@ mod tests {
         let cgeek_new_balance = currency_dbs
             .balances_db
             .read(|db| {
-                db.get(&TransactionOutputConditionGroup::Single(
+                db.get(&UTXOConditionsGroup::Single(
                     TransactionOutputCondition::Sig(tx_doc.issuers()[0]),
                 )).cloned()
             })
@@ -552,7 +548,7 @@ mod tests {
         let receiver_new_balance = currency_dbs
             .balances_db
             .read(|db| {
-                db.get(&TransactionOutputConditionGroup::Single(
+                db.get(&UTXOConditionsGroup::Single(
                     TransactionOutputCondition::Sig(tortue_pubkey),
                 )).cloned()
             })
diff --git a/documents/blockchain/v10/documents/block.rs b/documents/blockchain/v10/documents/block.rs
index 1968890f733498c6208d04b2c1fcc42fb7789fe2..786a5439dc77f7a326ff42327f877b4d163df21c 100644
--- a/documents/blockchain/v10/documents/block.rs
+++ b/documents/blockchain/v10/documents/block.rs
@@ -286,9 +286,11 @@ impl BlockDocument {
         sha256.input_str(&inner_text);
         self.inner_hash = Some(Hash::from_hex(&sha256.result_str()).unwrap());
     }
-    /// Change nonce
-    pub fn change_nonce(&mut self, new_nonce: u64) {
-        self.nonce = new_nonce;
+    /// Fill inner_hash_and_nonce_str (and replace the nonce if new_nonce is Some)
+    pub fn fill_inner_hash_and_nonce_str(&mut self, new_nonce: Option<u64>) {
+        if let Some(new_nonce) = new_nonce {
+            self.nonce = new_nonce;
+        }
         self.inner_hash_and_nonce_str = format!(
             "InnerHash: {}\nNonce: {}\n",
             self.inner_hash
@@ -299,6 +301,7 @@ impl BlockDocument {
     }
     /// Sign block
     pub fn sign(&mut self, privkey: PrivKey) {
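+        // Ensure inner_hash_and_nonce_str is up to date before signing it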
+        self.fill_inner_hash_and_nonce_str(None);
         self.signatures = vec![privkey.sign(self.inner_hash_and_nonce_str.as_bytes())];
     }
     /// Compute hash
@@ -511,24 +514,24 @@ mod tests {
     #[test]
     fn generate_and_verify_empty_block() {
         let mut block = BlockDocument {
-            nonce: 10_500_000_089_933,
-            number: BlockId(107_777),
-            pow_min: 89,
-            time: 1_522_624_657,
-            median_time: 1_522_616_790,
-            members_count: 894,
-            monetary_mass: 139_571_973,
+            nonce: 100_010_200_000_006_940,
+            number: BlockId(174_260),
+            pow_min: 68,
+            time: 1_525_296_873,
+            median_time: 1_525_292_577,
+            members_count: 33,
+            monetary_mass: 15_633_687,
             unit_base: 0,
-            issuers_count: 41,
-            issuers_frame: 201,
-            issuers_frame_var: 5,
-            currency: CurrencyName(String::from("g1")),
-            issuers: vec![PubKey::Ed25519(ed25519::PublicKey::from_base58("2sZF6j2PkxBDNAqUde7Dgo5x3crkerZpQ4rBqqJGn8QT").unwrap())],
-            signatures: vec![Sig::Ed25519(ed25519::Signature::from_base64("FsRxB+NOiL+8zTr2d3B2j2KBItDuCa0KjFMF6hXmdQzfqXAs9g3m7DlGgYLcqzqe6JXjx/Lyzqze1HBR4cS0Aw==").unwrap())],
+            issuers_count: 8,
+            issuers_frame: 41,
+            issuers_frame_var: 0,
+            currency: CurrencyName(String::from("g1-test")),
+            issuers: vec![PubKey::Ed25519(ed25519::PublicKey::from_base58("39Fnossy1GrndwCnAXGDw3K5UYXhNXAFQe7yhYZp8ELP").unwrap())],
+            signatures: vec![Sig::Ed25519(ed25519::Signature::from_base64("lqXrNOopjM39oM7hgB7Vq13uIohdCuLlhh/q8RVVEZ5UVASphow/GXikCdhbWID19Bn0XrXzTbt/R7akbE9xAg==").unwrap())],
             hash: None,
             parameters: None,
-            previous_hash: Hash::from_hex("0000001F8AACF6764135F3E5D0D4E8358A3CBE537A4BF71152A00CC442EFD136").expect("fail to parse previous_hash"),
-            previous_issuer: Some(PubKey::Ed25519(ed25519::PublicKey::from_base58("38MEAZN68Pz1DTvT3tqgxx4yQP6snJCQhPqEFxbDk4aE").unwrap())),
+            previous_hash: Hash::from_hex("0000A7D4361B9EBF4CE974A521149A73E8A5DE9B73907AB3BC918726AED7D40A").expect("fail to parse previous_hash"),
+            previous_issuer: Some(PubKey::Ed25519(ed25519::PublicKey::from_base58("EPKuZA1Ek5y8S1AjAmAPtGrVCMFqUGzUEAa7Ei62CY2L").unwrap())),
             inner_hash: None,
             dividend: None,
             identities: Vec::new(),
@@ -549,39 +552,10 @@ mod tests {
                 .inner_hash
                 .expect("Try to get inner_hash of an uncompleted or reduce block !")
                 .to_hex(),
-            "95948AC4D45E46DA07CE0713EDE1CE0295C227EE4CA5557F73F56B7DD46FE89C"
-        );
-        // test generate_compact_text()
-        assert_eq!(
-            block.generate_compact_text(),
-            "Version: 10
-Type: Block
-Currency: g1
-Number: 107777
-PoWMin: 89
-Time: 1522624657
-MedianTime: 1522616790
-UnitBase: 0
-Issuer: 2sZF6j2PkxBDNAqUde7Dgo5x3crkerZpQ4rBqqJGn8QT
-IssuersFrame: 201
-IssuersFrameVar: 5
-DifferentIssuersCount: 41
-PreviousHash: 0000001F8AACF6764135F3E5D0D4E8358A3CBE537A4BF71152A00CC442EFD136
-PreviousIssuer: 38MEAZN68Pz1DTvT3tqgxx4yQP6snJCQhPqEFxbDk4aE
-MembersCount: 894
-Identities:
-Joiners:
-Actives:
-Leavers:
-Revoked:
-Excluded:
-Certifications:
-Transactions:
-InnerHash: 95948AC4D45E46DA07CE0713EDE1CE0295C227EE4CA5557F73F56B7DD46FE89C
-Nonce: "
+            "58E4865A47A46E0DF1449AABC449B5406A12047C413D61B5E17F86BE6641E7B0"
         );
         // Test signature validity
-        block.change_nonce(10_500_000_089_933);
+        block.fill_inner_hash_and_nonce_str(Some(100_010_200_000_006_940));
         assert_eq!(block.verify_signatures(), VerificationResult::Valid());
         // Test hash computation
         block.compute_hash();
@@ -591,7 +565,7 @@ Nonce: "
                 .expect("Try to get hash of an uncompleted or reduce block !")
                 .0
                 .to_hex(),
-            "000002D3296A2D257D01F6FEE8AEC5C3E5779D04EA43F08901F41998FA97D9A1"
+            "00002EE584F36C15D3EB21AAC78E0896C75EF9070E73B4EC33BFA2C3D561EEB2"
         );
     }
 
@@ -740,7 +714,7 @@ InnerHash: C8AB69E33ECE2612EADC7AB30D069B1F1A3D8C95EBBFD50DE583AC8E3666CCA1
 Nonce: "
         );
         // Test signature validity
-        block.change_nonce(10_300_000_018_323);
+        block.fill_inner_hash_and_nonce_str(Some(10_300_000_018_323));
         assert_eq!(block.verify_signatures(), VerificationResult::Valid());
         // Test hash computation
         block.compute_hash();
diff --git a/documents/blockchain/v10/documents/transaction.rs b/documents/blockchain/v10/documents/transaction.rs
index 12388f01f17ab4a91a555ce5ca211471d7285260..0ed63eed529dca9448b6cd56368c5442755f70ef 100644
--- a/documents/blockchain/v10/documents/transaction.rs
+++ b/documents/blockchain/v10/documents/transaction.rs
@@ -15,16 +15,14 @@
 
 //! Wrappers around Transaction documents.
 
-use std::ops::{Add, Deref, Sub};
-
+use blockchain::v10::documents::*;
+use blockchain::{BlockchainProtocol, Document, DocumentBuilder, IntoSpecializedDocument};
 use crypto::digest::Digest;
 use crypto::sha2::Sha256;
-use duniter_crypto::keys::*;
 use regex::Regex;
 use regex::RegexBuilder;
-
-use blockchain::v10::documents::*;
-use blockchain::{BlockchainProtocol, Document, DocumentBuilder, IntoSpecializedDocument};
+use std::ops::{Add, Deref, Sub};
+use std::str::FromStr;
 use {BlockId, Blockstamp, Hash};
 
 lazy_static! {
@@ -44,10 +42,10 @@ lazy_static! {
     static ref UNLOCK_SIG_REGEX: Regex =
         Regex::new(r"^SIG\((?P<index>[0-9]+)\)$").unwrap();
     static ref UNLOCK_XHX_REGEX: Regex = Regex::new(r"^XHX\((?P<code>\w+)\)$").unwrap();
-    static ref OUTPUT_COND_SIG_REGEX: Regex = Regex::new(r"^SIG\((?P<pubkey>[1-9A-Za-z]{43,44})\)$").unwrap();
-    static ref OUTPUT_COND_XHX_REGEX: Regex = Regex::new(r"^XHX\((?P<hash>[0-9A-F]{64})\)$").unwrap();
-    static ref OUTPUT_COND_CLTV_REGEX: Regex = Regex::new(r"^CLTV\((?P<timestamp>[0-9]+)\)$").unwrap();
-    static ref OUTPUT_COND_CSV_REGEX: Regex = Regex::new(r"^CSV\((?P<timestamp>[0-9]+)\)$").unwrap();
+    static ref OUTPUT_COND_SIG_REGEX: Regex = Regex::new(r"^SIG\((?P<pubkey>[1-9A-Za-z]{43,44})\)*$").unwrap();
+    static ref OUTPUT_COND_XHX_REGEX: Regex = Regex::new(r"^XHX\((?P<hash>[0-9A-F]{64})\)*$").unwrap();
+    static ref OUTPUT_COND_CLTV_REGEX: Regex = Regex::new(r"^CLTV\((?P<timestamp>[0-9]+)\)*$").unwrap();
+    static ref OUTPUT_COND_CSV_REGEX: Regex = Regex::new(r"^CSV\((?P<duration>[0-9]+)\)*$").unwrap();
     static ref OUPUT_CONDS_BRAKETS: Regex = Regex::new(r"^\((?P<conditions>[0-9A-Za-z()&| ]+)\)$").unwrap();
     static ref OUPUT_CONDS_AND: Regex = Regex::new(r"^(?P<conditions_group_1>[0-9A-Za-z()&| ]+) && (?P<conditions_group_2>[0-9A-Za-z()&| ]+)$").unwrap();
     static ref OUPUT_CONDS_OR: Regex = Regex::new(r"^(?P<conditions_group_1>[0-9A-Za-z()&| ]+) \|\| (?P<conditions_group_2>[0-9A-Za-z()&| ]+)$").unwrap();
@@ -56,7 +54,7 @@ lazy_static! {
     ).unwrap();
 }
 
-/// Wrap a transaction amout
+/// Wrap a transaction amount
 #[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Hash, Serialize)]
 pub struct TxAmount(pub isize);
 
@@ -295,77 +293,107 @@ impl TransactionOutputCondition {
     }
 }
 
+/// Wrap UTXO conditions
+#[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
+pub struct UTXOConditions {
+    /// We must allow storing the original text (instead of the self-generated text),
+    /// because the original text may contain errors that duniter-ts unfortunately accepts.
+    pub origin_str: Option<String>,
+    /// Store script conditions
+    pub conditions: UTXOConditionsGroup,
+}
+
+impl UTXOConditions {
+    /// Lightens the UTXOConditions (for example to store it while minimizing the space required)
+    pub fn reduce(&mut self) {
+        if self.origin_str.is_some()
+            && self.origin_str.clone().expect("safe unwrap") == self.conditions.to_string()
+        {
+            self.origin_str = None;
+        }
+    }
+    /// Check validity of this UTXOConditions
+    pub fn check(&self) -> bool {
+        !(self.origin_str.is_some()
+            && self.origin_str.clone().expect("safe unwrap") != self.conditions.to_string())
+    }
+}
+
+impl ToString for UTXOConditions {
+    fn to_string(&self) -> String {
+        if let Some(ref origin_str) = self.origin_str {
+            origin_str.to_string()
+        } else {
+            self.conditions.to_string()
+        }
+    }
+}
+
+impl ::std::str::FromStr for UTXOConditions {
+    type Err = V10DocumentParsingError;
+
+    fn from_str(source: &str) -> Result<Self, Self::Err> {
+        Ok(UTXOConditions {
+            origin_str: Some(String::from(source)),
+            conditions: UTXOConditionsGroup::from_str(source)?,
+        })
+    }
+}
+
 /// Wrap a transaction ouput condition group
 #[derive(Debug, Clone, PartialEq, Eq, Hash, Deserialize, Serialize)]
-pub enum TransactionOutputConditionGroup {
+pub enum UTXOConditionsGroup {
     /// Single
     Single(TransactionOutputCondition),
     /// Brackets
-    Brackets(Box<TransactionOutputConditionGroup>),
+    Brackets(Box<UTXOConditionsGroup>),
     /// And operator
-    And(
-        Box<TransactionOutputConditionGroup>,
-        Box<TransactionOutputConditionGroup>,
-    ),
+    And(Box<UTXOConditionsGroup>, Box<UTXOConditionsGroup>),
     /// Or operator
-    Or(
-        Box<TransactionOutputConditionGroup>,
-        Box<TransactionOutputConditionGroup>,
-    ),
+    Or(Box<UTXOConditionsGroup>, Box<UTXOConditionsGroup>),
 }
 
-impl ToString for TransactionOutputConditionGroup {
+impl ToString for UTXOConditionsGroup {
     fn to_string(&self) -> String {
         match *self {
-            TransactionOutputConditionGroup::Single(ref condition) => condition.to_string(),
-            TransactionOutputConditionGroup::Brackets(ref condition_group) => {
+            UTXOConditionsGroup::Single(ref condition) => condition.to_string(),
+            UTXOConditionsGroup::Brackets(ref condition_group) => {
                 format!("({})", condition_group.deref().to_string())
             }
-            TransactionOutputConditionGroup::And(ref condition_group_1, ref condition_group_2) => {
-                format!(
-                    "{} && {}",
-                    condition_group_1.deref().to_string(),
-                    condition_group_2.deref().to_string()
-                )
-            }
-            TransactionOutputConditionGroup::Or(ref condition_group_1, ref condition_group_2) => {
-                format!(
-                    "{} || {}",
-                    condition_group_1.deref().to_string(),
-                    condition_group_2.deref().to_string()
-                )
-            }
+            UTXOConditionsGroup::And(ref condition_group_1, ref condition_group_2) => format!(
+                "{} && {}",
+                condition_group_1.deref().to_string(),
+                condition_group_2.deref().to_string()
+            ),
+            UTXOConditionsGroup::Or(ref condition_group_1, ref condition_group_2) => format!(
+                "{} || {}",
+                condition_group_1.deref().to_string(),
+                condition_group_2.deref().to_string()
+            ),
         }
     }
 }
 
-impl TransactionOutputConditionGroup {
-    fn parse_from_str(
-        conditions: &str,
-    ) -> Result<TransactionOutputConditionGroup, V10DocumentParsingError> {
+impl ::std::str::FromStr for UTXOConditionsGroup {
+    type Err = V10DocumentParsingError;
+
+    fn from_str(conditions: &str) -> Result<Self, Self::Err> {
         if let Ok(single_condition) = TransactionOutputCondition::parse_from_str(conditions) {
-            Ok(TransactionOutputConditionGroup::Single(single_condition))
+            Ok(UTXOConditionsGroup::Single(single_condition))
         } else if let Some(caps) = OUPUT_CONDS_BRAKETS.captures(conditions) {
-            let inner_conditions =
-                TransactionOutputConditionGroup::parse_from_str(&caps["conditions"])?;
-            Ok(TransactionOutputConditionGroup::Brackets(Box::new(
-                inner_conditions,
-            )))
+            let inner_conditions = UTXOConditionsGroup::from_str(&caps["conditions"])?;
+            Ok(UTXOConditionsGroup::Brackets(Box::new(inner_conditions)))
         } else if let Some(caps) = OUPUT_CONDS_AND.captures(conditions) {
-            let conditions_group_1 =
-                TransactionOutputConditionGroup::parse_from_str(&caps["conditions_group_1"])?;
-            let conditions_group_2 =
-                TransactionOutputConditionGroup::parse_from_str(&caps["conditions_group_2"])?;
-            Ok(TransactionOutputConditionGroup::And(
+            let conditions_group_1 = UTXOConditionsGroup::from_str(&caps["conditions_group_1"])?;
+            let conditions_group_2 = UTXOConditionsGroup::from_str(&caps["conditions_group_2"])?;
+            Ok(UTXOConditionsGroup::And(
                 Box::new(conditions_group_1),
                 Box::new(conditions_group_2),
             ))
         } else if let Some(caps) = OUPUT_CONDS_OR.captures(conditions) {
-            let conditions_group_1 =
-                TransactionOutputConditionGroup::parse_from_str(&caps["conditions_group_1"])?;
-            let conditions_group_2 =
-                TransactionOutputConditionGroup::parse_from_str(&caps["conditions_group_2"])?;
-            Ok(TransactionOutputConditionGroup::Or(
+            let conditions_group_1 = UTXOConditionsGroup::from_str(&caps["conditions_group_1"])?;
+            let conditions_group_2 = UTXOConditionsGroup::from_str(&caps["conditions_group_2"])?;
+            Ok(UTXOConditionsGroup::Or(
                 Box::new(conditions_group_1),
                 Box::new(conditions_group_2),
             ))
@@ -386,7 +414,18 @@ pub struct TransactionOutput {
     /// Base
     pub base: TxBase,
 /// List of conditions for consuming this output
-    pub conditions: TransactionOutputConditionGroup,
+    pub conditions: UTXOConditions,
+}
+
+impl TransactionOutput {
+    /// Lightens the TransactionOutput (for example, to minimize the space required when storing it)
+    fn reduce(&mut self) {
+        self.conditions.reduce()
+    }
+    /// Check validity of this output
+    pub fn check(&self) -> bool {
+        self.conditions.check()
+    }
 }
 
 impl ToString for TransactionOutput {
@@ -406,7 +445,7 @@ impl TransactionOutput {
         if let Some(caps) = OUTPUT_REGEX.captures(source) {
             let amount = TxAmount(caps["amount"].parse().expect("fail to parse output amount"));
             let base = TxBase(caps["base"].parse().expect("fail to parse base amount"));
-            let conditions = TransactionOutputConditionGroup::parse_from_str(&caps["conditions"])?;
+            let conditions = UTXOConditions::from_str(&caps["conditions"])?;
             Ok(TransactionOutput {
                 conditions,
                 amount,
@@ -489,6 +528,7 @@ impl TransactionDocument {
     pub fn reduce(&mut self) {
         self.text = None;
         self.hash = None;
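+        // Also reduce each output so the stored document stays as small as possible.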
+        self.outputs.iter_mut().for_each(|o| o.reduce());
     }
 }
 
@@ -615,6 +655,8 @@ pub struct TransactionDocumentBuilder<'a> {
     pub outputs: &'a Vec<TransactionOutput>,
     /// Transaction comment
     pub comment: &'a str,
+    /// Transaction hash, if already known
+    pub hash: Option<Hash>,
 }
 
 impl<'a> TransactionDocumentBuilder<'a> {
@@ -630,7 +672,7 @@ impl<'a> TransactionDocumentBuilder<'a> {
             outputs: self.outputs.clone(),
             comment: String::from(self.comment),
             signatures,
-            hash: None,
+            hash: self.hash,
         }
     }
 }
@@ -812,6 +854,7 @@ mod tests {
                 ).expect("fail to parse output !"),
             ],
             comment: "test",
+            hash: None,
         };
         println!(
             "Signature = {:?}",
@@ -865,8 +908,10 @@ mod tests {
                 ).expect("fail to parse output !"),
             ],
             comment: "Pour cesium merci",
+            hash: None,
         };
         let mut tx_doc = builder.build_with_signature(vec![sig]);
+        tx_doc.hash = None;
         assert_eq!(tx_doc.verify_signatures(), VerificationResult::Valid());
         assert_eq!(
             tx_doc.get_hash(),
diff --git a/module/lib.rs b/module/lib.rs
index a75136830777356f024887772dff63f1fff6e54c..05e74ea3a70b93df07fa550fdb16671c44cfad52 100644
--- a/module/lib.rs
+++ b/module/lib.rs
@@ -172,6 +172,13 @@ impl DuniterConf {
             _ => panic!("Fail to load duniter conf : conf version not supported !"),
         }
     }
+    /// Set currency
+    pub fn set_currency(&mut self, new_currency: Currency) {
+        match *self {
+            DuniterConf::V1(ref mut conf_v1) => conf_v1.currency = new_currency,
+            _ => panic!("Fail to set currency : conf version not supported !"),
+        }
+    }
     /// Get node id
     pub fn my_node_id(&self) -> u32 {
         match *self {
diff --git a/ws2p/parsers/mod.rs b/ws2p/parsers/mod.rs
index 74de739010c0008f445306527df55b761c30744d..4865af2852d5b8bc7d4e1040da072e78313533a0 100644
--- a/ws2p/parsers/mod.rs
+++ b/ws2p/parsers/mod.rs
@@ -76,11 +76,13 @@ mod tests {
             ],
             unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
             comment: "Merci pour la calligraphie ;) de Liam",
+            hash: None,
         };
-
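+        // parse_transaction now sets the transaction hash, so compute it on the expected document too.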
+        let mut tx_doc = tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw==").unwrap())]);
+        tx_doc.compute_hash();
         assert_eq!(
             parse_transaction("g1", &tx_json).expect("Fail to parse transaction !"),
-            tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw==").unwrap())])
+            tx_doc
         );
     }
 
@@ -137,11 +139,13 @@ mod tests {
             ],
             unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
             comment: "Un petit cafe ;-)",
+            hash: None,
         };
-
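+        // Likewise, compute the expected document's hash before the comparison.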
+        let mut tx_doc = tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("VWbvsiybM4L2X5+o+6lIiuKNw5KrD1yGZqmV+lHtA28XoRUFzochSIgfoUqBsTAaYEHY45vSX917LDXudTEzBg==").unwrap())]);
+        tx_doc.compute_hash();
         assert_eq!(
             parse_transaction("g1", &tx_json).expect("Fail to parse transaction !"),
-            tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("VWbvsiybM4L2X5+o+6lIiuKNw5KrD1yGZqmV+lHtA28XoRUFzochSIgfoUqBsTAaYEHY45vSX917LDXudTEzBg==").unwrap())])
+            tx_doc,
         );
     }
 }
diff --git a/ws2p/parsers/transactions.rs b/ws2p/parsers/transactions.rs
index e4b21fa4b0b46bd9d23acd91d391745569f3e111..e0440879359b8d2562075ea96af189a76769bd29 100644
--- a/ws2p/parsers/transactions.rs
+++ b/ws2p/parsers/transactions.rs
@@ -7,107 +7,7 @@ use duniter_documents::blockchain::v10::documents::transaction::{
     TransactionOutput,
 };
 use duniter_documents::blockchain::DocumentBuilder;
-use duniter_documents::Blockstamp;
-
-pub fn parse_compact_transactions(
-    currency: &str,
-    json_datas: &str,
-) -> Option<Vec<TransactionDocument>> {
-    let raw_transactions: serde_json::Value =
-        serde_json::from_str(json_datas).expect("Fatal error : fail to jsonifie tx from DB !");
-
-    if raw_transactions.is_array() {
-        let mut transactions = Vec::new();
-        for transaction in raw_transactions.as_array().unwrap() {
-            let transaction_lines: Vec<&str> = transaction
-                .as_str()
-                .expect("Fail to parse tx from DB !")
-                .split('$')
-                .collect();
-            let tx_headers: Vec<&str> = transaction_lines[0].split(':').collect();
-            let issuers_count = tx_headers[2]
-                .parse()
-                .expect("Fail to parse tx header NB_ISSUERS !");
-            let inputs_count = tx_headers[3]
-                .parse()
-                .expect("Fail to parse tx header NB_INPUTS !");
-            let unlocks_count = tx_headers[4]
-                .parse()
-                .expect("Fail to parse tx header NB_UNLOCKS !");
-            let outputs_count = tx_headers[5]
-                .parse()
-                .expect("Fail to parse tx header NB_OUTPUTS !");
-            let has_comment: usize = tx_headers[6]
-                .parse()
-                .expect("Fail to parse tx header HAS_COMMENT !");
-            let locktime = tx_headers[7]
-                .parse()
-                .expect("Fail to parse tx header LOCKTIME !");
-            let blockstamp = Blockstamp::from_string(transaction_lines[1])
-                .expect("Fail to parse tx BLOCKSTAMP !");
-            let mut line = 2;
-            let mut issuers = Vec::new();
-            for _ in 0..issuers_count {
-                issuers.push(PubKey::Ed25519(
-                    ed25519::PublicKey::from_base58(transaction_lines[line])
-                        .expect("Fail to parse tx issuer !"),
-                ));
-                line += 1;
-            }
-            let mut inputs = Vec::new();
-            for _ in 0..inputs_count {
-                inputs.push(
-                    TransactionInput::parse_from_str(transaction_lines[line])
-                        .expect("Fail to parse tx issuer !"),
-                );
-                line += 1;
-            }
-            let mut unlocks = Vec::new();
-            for _ in 0..unlocks_count {
-                unlocks.push(
-                    TransactionInputUnlocks::parse_from_str(transaction_lines[line])
-                        .expect("Fail to parse tx issuer !"),
-                );
-                line += 1;
-            }
-            let mut outputs = Vec::new();
-            for _ in 0..outputs_count {
-                outputs.push(
-                    TransactionOutput::parse_from_str(transaction_lines[line])
-                        .expect("Fail to parse tx issuer !"),
-                );
-                line += 1;
-            }
-            let mut comment = "";
-            if has_comment == 1 {
-                comment = transaction_lines[line];
-                line += 1;
-            }
-            let mut signatures = Vec::new();
-            for _ in 0..issuers_count {
-                signatures.push(Sig::Ed25519(
-                    ed25519::Signature::from_base64(transaction_lines[line])
-                        .expect("Fail to parse tx signature !"),
-                ));
-                line += 1;
-            }
-            let tx_doc_builder = TransactionDocumentBuilder {
-                currency,
-                blockstamp: &blockstamp,
-                locktime: &locktime,
-                issuers: &issuers,
-                inputs: &inputs,
-                unlocks: &unlocks,
-                outputs: &outputs,
-                comment,
-            };
-            transactions.push(tx_doc_builder.build_with_signature(signatures));
-        }
-        Some(transactions)
-    } else {
-        None
-    }
-}
+use duniter_documents::{Blockstamp, Hash};
 
 pub fn parse_transaction(
     currency: &str,
@@ -173,6 +73,12 @@ pub fn parse_transaction(
         }
     }
     let comment = source.get("comment")?.as_str()?;
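+    // The hash field is now mandatory: return None if it is missing or not valid hex.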
+    let hash = Hash::from_hex(source.get("hash")?.as_str()?).ok()?;
 
     let tx_doc_builder = TransactionDocumentBuilder {
         currency,
@@ -183,6 +89,7 @@ pub fn parse_transaction(
         unlocks: &unlocks,
         outputs: &outputs,
         comment,
+        hash: Some(hash),
     };
     Some(tx_doc_builder.build_with_signature(signatures))
 }
@@ -193,7 +100,7 @@ mod tests {
 
     #[test]
     fn parse_compact_tx() {
-        let compact_txs = "[\"TX:10:1:1:1:1:1:0$\
+        let _compact_txs = "[\"TX:10:1:1:1:1:1:0$\
 112533-000002150F2E805E604D9B31212D079570AAD8D3A4D8BB75F2C15A94A345B6B1$\
 51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2$\
 1000:0:D:51EFVNZwpfmTXU7BSLpeh3PZFgfdmm5hq5MzCDopdH2:46496$\
@@ -202,7 +109,7 @@ mod tests {
 Merci pour la calligraphie ;) de Liam$\
 5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw==\"]";
 
-        let tx_builder = TransactionDocumentBuilder {
+        let _tx_builder = TransactionDocumentBuilder {
             currency: "g1",
             blockstamp: &Blockstamp::from_string(
                 "112533-000002150F2E805E604D9B31212D079570AAD8D3A4D8BB75F2C15A94A345B6B1",
@@ -224,11 +131,7 @@ Merci pour la calligraphie ;) de Liam$\
             ],
             unlocks: &vec![TransactionInputUnlocks::parse_from_str("0:SIG(0)").unwrap()],
             comment: "Merci pour la calligraphie ;) de Liam",
+            hash: None,
         };
-
-        assert_eq!(
-            parse_compact_transactions("g1", compact_txs).expect("Fail to parse compact transactions !"),
-            vec![tx_builder.build_with_signature(vec![Sig::Ed25519(ed25519::Signature::from_base64("5olrjFylTCsVq8I5Yr7FpXeviynICyvIwe1yG5N0RJF+VZb+bCFBnLAMpmMCU2qzUvK7z41UXOrMRybXiLa2Dw==").unwrap())])]
-        );
     }
 }