diff --git a/Cargo.lock b/Cargo.lock
index 28e891852a01779621e66a00bb60a00c7e1fdd57..eedfb2d1698ebd073d76b70725c5896c96f3065c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -282,6 +282,7 @@ dependencies = [
  "duniter-module 0.1.0-a0.1",
  "dup-crypto 0.6.0",
  "durs-common-tools 0.1.0",
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "rpassword 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.86 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -423,7 +424,6 @@ dependencies = [
 name = "durs-common-tools"
 version = "0.1.0"
 dependencies = [
- "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
diff --git a/lib/core/conf/Cargo.toml b/lib/core/conf/Cargo.toml
index 40937fe821101f95315f9b4a4a62690e30296c55..36d4b14ba6549a90c4f286ae89a83808e28ebfcc 100644
--- a/lib/core/conf/Cargo.toml
+++ b/lib/core/conf/Cargo.toml
@@ -10,15 +10,16 @@ edition = "2018"
 path = "lib.rs"
 
 [dependencies]
-rand = "0.4.*"
-serde = "1.0.*"
-serde_derive = "1.0.*"
-serde_json = "1.0.*"
 dirs = "1.0.2"
 dup-crypto = { path = "../../tools/crypto" }
 dubp-documents= { path = "../../tools/documents" }
 duniter-module = { path = "../module" }
 durs-common-tools = { path = "../../tools/common-tools" }
+log = "0.4.*"
+rand = "0.4.*"
+serde = "1.0.*"
+serde_derive = "1.0.*"
+serde_json = "1.0.*"
 rpassword = "1.0.0"
 
 [features]
diff --git a/lib/core/conf/lib.rs b/lib/core/conf/lib.rs
index 74ae80c269561cad678f4a531d9a76f86f5cb83f..e8dca5bf7bcd7f8d26d501f4995352443e25058e 100644
--- a/lib/core/conf/lib.rs
+++ b/lib/core/conf/lib.rs
@@ -27,6 +27,8 @@
     unused_qualifications
 )]
 
+#[macro_use]
+extern crate log;
 #[macro_use]
 extern crate serde_derive;
 #[macro_use]
@@ -479,7 +481,7 @@ pub fn get_blockchain_db_path(profile: &str, currency: &CurrencyName) -> PathBuf
     if !db_path.as_path().exists() {
         if let Err(io_error) = fs::create_dir(db_path.as_path()) {
             if io_error.kind() != std::io::ErrorKind::AlreadyExists {
-                fatal_error("Impossible to create blockchain dir !");
+                fatal_error!("Impossible to create blockchain dir !");
             }
         }
     }
diff --git a/lib/core/core/lib.rs b/lib/core/core/lib.rs
index 62e07b5eb6abf149e8a97777188be2def9f9a6ed..d67987319d6bd6abc45b9f18132e6b9a1adbd95f 100644
--- a/lib/core/core/lib.rs
+++ b/lib/core/core/lib.rs
@@ -687,6 +687,7 @@ pub fn dbex<DC: DuniterConf>(profile: &str, conf: &DC, csv: bool, query: &DBExQu
 }
 
 /// Initialize logger
+/// Warning: this function cannot use the `fatal_error!` macro because the logger is not yet initialized, so it must call `panic!` directly.
 pub fn init_logger(profile: &str, soft_name: &'static str, cli_args: &ArgMatches) {
     // Get datas folder path
     let mut log_file_path = match dirs::config_dir() {
diff --git a/lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs b/lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs
index 3859f51118657b08a4596492df6ba1c202f8a141..51d918024d92f61e6e5ca8b2ea41ba2d19812aec 100644
--- a/lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs
+++ b/lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs
@@ -171,8 +171,7 @@ impl ForkTree {
         if let Some(Some(ref node)) = self.nodes.get(id.0) {
             node
         } else {
-            durs_common_tools::fatal_error("Dev error: fork tree : get unexist or removed node !");
-            panic!() // for compiler
+            durs_common_tools::fatal_error!("Dev error: fork tree : get unexist or removed node !");
         }
     }
     /// Get mutable reference to a specific tree node
@@ -181,8 +180,7 @@ impl ForkTree {
         if let Some(Some(ref mut node)) = self.nodes.get_mut(id.0) {
             node
         } else {
-            durs_common_tools::fatal_error("Dev error: fork tree : get unexist or removed node !");
-            panic!() // for compiler
+            durs_common_tools::fatal_error!("Dev error: fork tree : get unexist or removed node !");
         }
     }
     /// Get free identifier
@@ -360,7 +358,7 @@ impl ForkTree {
         } else if self.root.is_none() {
             self.root = Some(new_node_id);
         } else {
-            durs_common_tools::fatal_error("Dev error: Insert root node in not empty tree !")
+            durs_common_tools::fatal_error!("Dev error: Insert root node in not empty tree !")
         }
 
         self.removed_blockstamps.clear();
diff --git a/lib/modules/blockchain/blockchain/src/dubp/check/hashs.rs b/lib/modules/blockchain/blockchain/src/dubp/check/hashs.rs
index bb61e3f286dd27d2b2932a91c3114e350e52586f..bee2034936d8f4b70c7c4f1c65018e2c0663c5df 100644
--- a/lib/modules/blockchain/blockchain/src/dubp/check/hashs.rs
+++ b/lib/modules/blockchain/blockchain/src/dubp/check/hashs.rs
@@ -24,7 +24,7 @@ pub fn verify_block_hashs(block_doc: &BlockDocument) -> Result<(), VerifyBlockHa
     trace!("complete_block #{}...", block_doc.number);
 
     if block_doc.inner_hash.is_none() {
-        fatal_error(
+        fatal_error!(
             "BlockchainModule : verify_block_hashs() : fatal error : block.inner_hash = None",
         );
     }
diff --git a/lib/modules/blockchain/blockchain/src/fork/revert_block.rs b/lib/modules/blockchain/blockchain/src/fork/revert_block.rs
index af0cf2e8d58f39006d10e414315a82f05b40ed60..febdd0fd4860c158f8aa72a73a257c0436bc9d44 100644
--- a/lib/modules/blockchain/blockchain/src/fork/revert_block.rs
+++ b/lib/modules/blockchain/blockchain/src/fork/revert_block.rs
@@ -73,8 +73,7 @@ pub fn revert_block<W: WebOfTrust>(
                 if let Ok(Some(tx)) = txs_db.read(|db| db.get(tx_hash).cloned()) {
                     tx
                 } else {
-                    fatal_error(&format!("revert_block(): tx {} not found !", tx_hash));
-                    panic!() // to compile
+                    fatal_error!("revert_block(): tx {} not found !", tx_hash);
                 }
             }
             TxDocOrTxHash::TxDoc(ref _dal_tx) => panic!("Try to revert not reduce block !"),
diff --git a/lib/modules/blockchain/blockchain/src/fork/rollback.rs b/lib/modules/blockchain/blockchain/src/fork/rollback.rs
index dd9797c6da58daaf0acd838f7ad24c0e11282083..bda0681c314a9c6a6ac836a1e1dad97126195a69 100644
--- a/lib/modules/blockchain/blockchain/src/fork/rollback.rs
+++ b/lib/modules/blockchain/blockchain/src/fork/rollback.rs
@@ -33,8 +33,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
             .fork_blocks_db
             .read(|db| db.get(&bc.current_blockstamp).cloned())
             .unwrap_or_else(|_| {
-                fatal_error(&format!("revert block {} fail !", bc.current_blockstamp));
-                panic!()
+                fatal_error!("revert block {} fail !", bc.current_blockstamp);
             })
         {
             let ValidBlockRevertReqs(bc_db_query, wot_dbs_queries, tx_dbs_queries) =
@@ -45,8 +44,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
                     &bc.currency_databases.tx_db,
                 )
                 .unwrap_or_else(|_| {
-                    fatal_error(&format!("revert block {} fail !", bc.current_blockstamp));
-                    panic!()
+                    fatal_error!("revert block {} fail !", bc.current_blockstamp);
                 });
             // Apply db requests
             bc_db_query
@@ -63,7 +61,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
                     .expect("Fatal error : Fail to apply CurrencyDBsWriteRequest !");
             }
         } else {
-            fatal_error("apply_rollback(): Not found current block in forks blocks DB !");
+            fatal_error!("apply_rollback(): Not found current block in forks blocks DB !");
         }
     }
 
@@ -102,10 +100,10 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
                 break;
             }
         } else {
-            fatal_error(&format!(
+            fatal_error!(
                 "apply_rollback(): Fail to get block {} on new branch in forks blocks DB !",
                 blockstamp
-            ));
+            );
         }
     }
 
@@ -116,7 +114,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
             old_current_blockstamp,
             bc.current_blockstamp,
         ) {
-            fatal_error(&format!("DALError: ForksDB: {:?}", err));
+            fatal_error!("DALError: ForksDB: {:?}", err);
         }
 
         // save dbs
diff --git a/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs b/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs
index 8b27d39cfb17607098ba379eb5d3e6334cd8807c..69b31b983bce25976343844c66bd3bd1fb4ae225 100644
--- a/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs
+++ b/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs
@@ -43,7 +43,7 @@ pub fn json_reader_worker(
         // Get list of json chunk files
         let chunks_set = get_chunks_set(&json_chunks_path);
         if chunks_set.is_empty() {
-            fatal_error("json_files_path directory is empty !");
+            fatal_error!("json_files_path directory is empty !");
         }
 
         // Get max chunk number and max block id
@@ -58,13 +58,13 @@ pub fn json_reader_worker(
 
         // Verify if max chunk exist
         if chunks_set.get(&max_chunk_number).is_none() {
-            fatal_error(&format!("Missing chunk file n°{}", max_chunk_number));
+            fatal_error!("Missing chunk file n°{}", max_chunk_number);
         };
 
         // Open chunk file
         let chunk_file_content_result = open_json_chunk_file(&json_chunks_path, max_chunk_number);
         if chunk_file_content_result.is_err() {
-            fatal_error(&format!("Fail to open chunk file n°{}", max_chunk_number));
+            fatal_error!("Fail to open chunk file n°{}", max_chunk_number);
         }
 
         // Parse chunk file content
@@ -72,16 +72,12 @@ pub fn json_reader_worker(
         let last_chunk_blocks = match blocks_result {
             Ok(blocks) => blocks,
             Err(e) => {
-                fatal_error(&format!(
-                    "Fail to parse chunk file n°{} : {}",
-                    max_chunk_number, e,
-                ));
-                unreachable!();
+                fatal_error!("Fail to parse chunk file n°{} : {}", max_chunk_number, e);
             }
         };
 
         if last_chunk_blocks.is_empty() {
-            fatal_error("Last chunk is empty !");
+            fatal_error!("Last chunk is empty !");
         }
 
         let last_block = last_chunk_blocks
@@ -168,7 +164,7 @@ fn treat_once_json_chunk(
     // Open chunk file
     let chunk_file_content_result = open_json_chunk_file(json_chunks_path, chunk_number);
     if chunk_file_content_result.is_err() {
-        fatal_error(&format!("Fail to open chunk file n°{}", chunk_number));
+        fatal_error!("Fail to open chunk file n°{}", chunk_number);
     }
 
     // Parse chunk file content
@@ -176,11 +172,7 @@ fn treat_once_json_chunk(
     let blocks = match blocks_result {
         Ok(blocks) => blocks,
         Err(e) => {
-            fatal_error(&format!(
-                "Fail to parse chunk file n°{} : {}",
-                chunk_number, e,
-            ));
-            panic!(); // for compilator
+            fatal_error!("Fail to parse chunk file n°{} : {}", chunk_number, e);
         }
     };
     (chunk_number, blocks)
@@ -198,13 +190,13 @@ fn parse_json_chunk(json_chunk_content: &str) -> Result<Vec<BlockDocument>, Erro
                     block_doc_vec.push(parse_json_block(json_block)?);
                 }
             } else {
-                fatal_error("Fail to parse json chunk : field \"blocks\" must be an array !");
+                fatal_error!("Fail to parse json chunk : field \"blocks\" must be an array !");
             }
         } else {
-            fatal_error("Fail to parse json chunk : field \"blocks\" don't exist !");
+            fatal_error!("Fail to parse json chunk : field \"blocks\" don't exist !");
         }
     } else {
-        fatal_error("Fail to parse json chunk : json root node must be an object !");
+        fatal_error!("Fail to parse json chunk : json root node must be an object !");
     }
 
     Ok(block_doc_vec)
diff --git a/lib/modules/blockchain/blockchain/src/sync/mod.rs b/lib/modules/blockchain/blockchain/src/sync/mod.rs
index 757f851c76a6756b0fa82441c53506897e822a24..87b4998bf7ed5399bc550a5d8e38c0c77731fa02 100644
--- a/lib/modules/blockchain/blockchain/src/sync/mod.rs
+++ b/lib/modules/blockchain/blockchain/src/sync/mod.rs
@@ -142,8 +142,7 @@ pub fn local_sync<DC: DuniterConf>(profile: &str, conf: &DC, sync_opts: SyncOpt)
         {
             (currency, target_blockstamp)
         } else {
-            fatal_error("Fatal error : no target blockstamp !");
-            panic!(); // for compilator
+            fatal_error!("Fatal error : no target blockstamp !");
         };
 
     // Update DuniterConf
diff --git a/lib/tools/common-tools/Cargo.toml b/lib/tools/common-tools/Cargo.toml
index 12bce29186a85db1f7ccf71b959975ab3a46ef70..715eb0ff7e2fe162d232e0173792eea0ca325feb 100644
--- a/lib/tools/common-tools/Cargo.toml
+++ b/lib/tools/common-tools/Cargo.toml
@@ -13,7 +13,6 @@ edition = "2018"
 path = "src/lib.rs"
 
 [dependencies]
-log = "0.4.*"
 
 [dev-dependencies]
 pretty_assertions = "0.5.1"
diff --git a/lib/tools/common-tools/src/lib.rs b/lib/tools/common-tools/src/lib.rs
index 3d894aa32db699a586cf2a3866f19f1962f0a036..f726482e81abc33714cdd6568f1ed7c61dcef69a 100644
--- a/lib/tools/common-tools/src/lib.rs
+++ b/lib/tools/common-tools/src/lib.rs
@@ -26,19 +26,33 @@
     unused_import_braces
 )]
 
-#[macro_use]
-extern crate log;
-
 /// Interrupts the program and log error message
-pub fn fatal_error(msg: &str) {
-    if cfg!(feature = "log_panics") {
-        panic!(format!("Fatal Error : {}", msg));
-    } else {
-        error!("{}", &format!("Fatal Error : {}", msg));
-        panic!(format!("Fatal Error : {}", msg));
-    }
+/// WARNING: this macro must not be called before the logger is initialized !
+#[macro_export]
+macro_rules! fatal_error {
+    ($msg:expr) => ({
+        error!("Fatal Error : {}", $msg);
+        panic!("Fatal Error : {}", $msg);
+    });
+    ($msg:expr,) => ({
+        error!("Fatal Error : {}", $msg);
+        panic!("Fatal Error : {}", $msg);
+    });
+    ($fmt:expr, $($arg:tt)+) => ({
+        error!("Fatal Error : {}", format!($fmt, $($arg)+));
+        panic!("Fatal Error : {}", format!($fmt, $($arg)+));
+    });
+}
 
+/*macro_rules! error {
+    (target: $target:expr, $($arg:tt)+) => (
+        log!(target: $target, $crate::Level::Error, $($arg)+);
+    );
+    ($($arg:tt)+) => (
+        log!($crate::Level::Error, $($arg)+);
+    )
+}*/
+
 /// Unescape backslash
 pub fn unescape_str(source: &str) -> String {
     let mut previous_char = None;