Commit 1fcf497a authored by Éloïs

[fix] common-tools: #130 allow fatal_error to handle several arguments

parent 36a6fc96
1 merge request: !127 Elois/fixs
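The change below converts fatal_error from a function taking a single pre-formatted &str into an exported fatal_error! macro that accepts format arguments and always diverges. A minimal sketch (not the project code; names here are illustrative) of why this removes the dead `panic!() // for compiler` lines seen throughout the diff:

// A sketch of a diverging error macro: it prints, then panics, so its
// expansion has type `!` and coerces to any expected type.
macro_rules! fatal_error_sketch {
    ($($arg:tt)+) => ({
        eprintln!($($arg)+); // stand-in for log's error! sink
        panic!("fatal error");
    });
}

fn pick(v: &[u32], i: usize) -> u32 {
    match v.get(i) {
        Some(x) => *x,
        // Before this commit such a branch needed a dead panic!() after the
        // fatal_error(&format!(...)) call; the macro alone now type-checks.
        None => fatal_error_sketch!("pick(): index {} not found !", i),
    }
}

fn main() {
    println!("{}", pick(&[1, 2, 3], 1)); // prints 2
}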
@@ -282,6 +282,7 @@ dependencies = [
  "duniter-module 0.1.0-a0.1",
  "dup-crypto 0.6.0",
  "durs-common-tools 0.1.0",
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "rpassword 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.86 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -423,7 +424,6 @@ dependencies = [
 name = "durs-common-tools"
 version = "0.1.0"
 dependencies = [
- "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
...
@@ -10,15 +10,16 @@ edition = "2018"
 path = "lib.rs"

 [dependencies]
-rand = "0.4.*"
-serde = "1.0.*"
-serde_derive = "1.0.*"
-serde_json = "1.0.*"
 dirs = "1.0.2"
 dup-crypto = { path = "../../tools/crypto" }
 dubp-documents= { path = "../../tools/documents" }
 duniter-module = { path = "../module" }
 durs-common-tools = { path = "../../tools/common-tools" }
+log = "0.4.*"
+rand = "0.4.*"
+serde = "1.0.*"
+serde_derive = "1.0.*"
+serde_json = "1.0.*"
 rpassword = "1.0.0"

 [features]
@@ -27,6 +27,8 @@
     unused_qualifications
 )]

+#[macro_use]
+extern crate log;
 #[macro_use]
 extern crate serde_derive;
 #[macro_use]
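This `#[macro_use] extern crate log;` addition, together with the dependency moves in the Cargo files above, follows from how exported macros resolve: a #[macro_export] macro_rules! macro expands at the call site, so the error! invocation inside fatal_error! must find log's macros in the calling crate, not in durs-common-tools. A minimal sketch of a caller crate root under that assumption (the exact imports in each durs crate may differ):

// Hypothetical caller crate root: fatal_error! expands here, so log's
// error! macro must be in scope at every call site.
#[macro_use]
extern crate log; // provides error!, warn!, info!, ... crate-wide
#[macro_use]
extern crate durs_common_tools; // provides the exported fatal_error!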
@@ -479,7 +481,7 @@ pub fn get_blockchain_db_path(profile: &str, currency: &CurrencyName) -> PathBuf
     if !db_path.as_path().exists() {
         if let Err(io_error) = fs::create_dir(db_path.as_path()) {
             if io_error.kind() != std::io::ErrorKind::AlreadyExists {
-                fatal_error("Impossible to create blockchain dir !");
+                fatal_error!("Impossible to create blockchain dir !");
             }
         }
     }
...
@@ -687,6 +687,7 @@ pub fn dbex<DC: DuniterConf>(profile: &str, conf: &DC, csv: bool, query: &DBExQu
 }

 /// Initialize logger
+/// Warning: This function cannot use the macro fatal_error! because the logger is not yet initialized, so it must use panic !
 pub fn init_logger(profile: &str, soft_name: &'static str, cli_args: &ArgMatches) {
     // Get datas folder path
     let mut log_file_path = match dirs::config_dir() {
...
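The new doc comment is load-bearing: log's macros silently discard records until a logger implementation is installed, so a fatal_error! raised inside init_logger would panic without ever logging. A minimal sketch of the ordering constraint (env_logger is an illustration only, not the project's logger):

use log::error;

fn main() {
    error!("dropped: no logger is installed yet");
    env_logger::init(); // requires the env_logger crate (assumption)
    error!("delivered: a logger is now installed");
}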
@@ -171,8 +171,7 @@ impl ForkTree {
         if let Some(Some(ref node)) = self.nodes.get(id.0) {
             node
         } else {
-            durs_common_tools::fatal_error("Dev error: fork tree : get unexist or removed node !");
-            panic!() // for compiler
+            durs_common_tools::fatal_error!("Dev error: fork tree : get unexist or removed node !");
         }
     }
     /// Get mutable reference to a specific tree node
@@ -181,8 +180,7 @@ impl ForkTree {
         if let Some(Some(ref mut node)) = self.nodes.get_mut(id.0) {
             node
         } else {
-            durs_common_tools::fatal_error("Dev error: fork tree : get unexist or removed node !");
-            panic!() // for compiler
+            durs_common_tools::fatal_error!("Dev error: fork tree : get unexist or removed node !");
         }
     }
     /// Get free identifier
@@ -360,7 +358,7 @@ impl ForkTree {
         } else if self.root.is_none() {
             self.root = Some(new_node_id);
         } else {
-            durs_common_tools::fatal_error("Dev error: Insert root node in not empty tree !")
+            durs_common_tools::fatal_error!("Dev error: Insert root node in not empty tree !")
         }
         self.removed_blockstamps.clear();
...
@@ -24,7 +24,7 @@ pub fn verify_block_hashs(block_doc: &BlockDocument) -> Result<(), VerifyBlockHa
     trace!("complete_block #{}...", block_doc.number);
     if block_doc.inner_hash.is_none() {
-        fatal_error(
+        fatal_error!(
             "BlockchainModule : verify_block_hashs() : fatal error : block.inner_hash = None",
         );
     }
...
@@ -73,8 +73,7 @@ pub fn revert_block<W: WebOfTrust>(
             if let Ok(Some(tx)) = txs_db.read(|db| db.get(tx_hash).cloned()) {
                 tx
             } else {
-                fatal_error(&format!("revert_block(): tx {} not found !", tx_hash));
-                panic!() // to compile
+                fatal_error!("revert_block(): tx {} not found !", tx_hash);
             }
         }
         TxDocOrTxHash::TxDoc(ref _dal_tx) => panic!("Try to revert not reduce block !"),
...
@@ -33,8 +33,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
         .fork_blocks_db
         .read(|db| db.get(&bc.current_blockstamp).cloned())
         .unwrap_or_else(|_| {
-            fatal_error(&format!("revert block {} fail !", bc.current_blockstamp));
-            panic!()
+            fatal_error!("revert block {} fail !", bc.current_blockstamp);
         })
     {
         let ValidBlockRevertReqs(bc_db_query, wot_dbs_queries, tx_dbs_queries) =
@@ -45,8 +44,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
             &bc.currency_databases.tx_db,
         )
         .unwrap_or_else(|_| {
-            fatal_error(&format!("revert block {} fail !", bc.current_blockstamp));
-            panic!()
+            fatal_error!("revert block {} fail !", bc.current_blockstamp);
         });
         // Apply db requests
         bc_db_query
@@ -63,7 +61,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
                 .expect("Fatal error : Fail to apply CurrencyDBsWriteRequest !");
         }
     } else {
-        fatal_error("apply_rollback(): Not found current block in forks blocks DB !");
+        fatal_error!("apply_rollback(): Not found current block in forks blocks DB !");
     }
 }
@@ -102,10 +100,10 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
                 break;
             }
         } else {
-            fatal_error(&format!(
+            fatal_error!(
                 "apply_rollback(): Fail to get block {} on new branch in forks blocks DB !",
                 blockstamp
-            ));
+            );
         }
     }
@@ -116,7 +114,7 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>)
         old_current_blockstamp,
         bc.current_blockstamp,
     ) {
-        fatal_error(&format!("DALError: ForksDB: {:?}", err));
+        fatal_error!("DALError: ForksDB: {:?}", err);
     }
     // save dbs
...
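The unwrap_or_else call sites above also depend on divergence: the closure must produce the Ok value's type, and a body that ends in panic! (as fatal_error! now does) has type ! and coerces to whatever is required. A minimal sketch with plain panic! standing in for the macro, and assumed names:

// Sketch: a diverging closure body satisfies unwrap_or_else's return type.
fn parse_or_die(s: &str) -> i64 {
    s.parse().unwrap_or_else(|e| {
        panic!("Fail to parse {:?} : {}", s, e); // type !: no dummy i64 needed
    })
}

fn main() {
    println!("{}", parse_or_die("42")); // prints 42
}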
@@ -43,7 +43,7 @@ pub fn json_reader_worker(
     // Get list of json chunk files
     let chunks_set = get_chunks_set(&json_chunks_path);
     if chunks_set.is_empty() {
-        fatal_error("json_files_path directory is empty !");
+        fatal_error!("json_files_path directory is empty !");
     }
     // Get max chunk number and max block id
@@ -58,13 +58,13 @@ pub fn json_reader_worker(
     // Verify if max chunk exist
     if chunks_set.get(&max_chunk_number).is_none() {
-        fatal_error(&format!("Missing chunk file n°{}", max_chunk_number));
+        fatal_error!("Missing chunk file n°{}", max_chunk_number);
     };
     // Open chunk file
     let chunk_file_content_result = open_json_chunk_file(&json_chunks_path, max_chunk_number);
     if chunk_file_content_result.is_err() {
-        fatal_error(&format!("Fail to open chunk file n°{}", max_chunk_number));
+        fatal_error!("Fail to open chunk file n°{}", max_chunk_number);
     }
     // Parse chunk file content
@@ -72,16 +72,12 @@ pub fn json_reader_worker(
     let last_chunk_blocks = match blocks_result {
         Ok(blocks) => blocks,
         Err(e) => {
-            fatal_error(&format!(
-                "Fail to parse chunk file n°{} : {}",
-                max_chunk_number, e,
-            ));
-            unreachable!();
+            fatal_error!("Fail to parse chunk file n°{} : {}", max_chunk_number, e);
         }
     };
     if last_chunk_blocks.is_empty() {
-        fatal_error("Last chunk is empty !");
+        fatal_error!("Last chunk is empty !");
     }
     let last_block = last_chunk_blocks
@@ -168,7 +164,7 @@ fn treat_once_json_chunk(
     // Open chunk file
     let chunk_file_content_result = open_json_chunk_file(json_chunks_path, chunk_number);
     if chunk_file_content_result.is_err() {
-        fatal_error(&format!("Fail to open chunk file n°{}", chunk_number));
+        fatal_error!("Fail to open chunk file n°{}", chunk_number);
     }
     // Parse chunk file content
@@ -176,11 +172,7 @@ fn treat_once_json_chunk(
     let blocks = match blocks_result {
         Ok(blocks) => blocks,
         Err(e) => {
-            fatal_error(&format!(
-                "Fail to parse chunk file n°{} : {}",
-                chunk_number, e,
-            ));
-            panic!(); // for compilator
+            fatal_error!("Fail to parse chunk file n°{} : {}", chunk_number, e);
         }
     };
     (chunk_number, blocks)
@@ -198,13 +190,13 @@ fn parse_json_chunk(json_chunk_content: &str) -> Result<Vec<BlockDocument>, Erro
                 block_doc_vec.push(parse_json_block(json_block)?);
             }
         } else {
-            fatal_error("Fail to parse json chunk : field \"blocks\" must be an array !");
+            fatal_error!("Fail to parse json chunk : field \"blocks\" must be an array !");
         }
     } else {
-        fatal_error("Fail to parse json chunk : field \"blocks\" don't exist !");
+        fatal_error!("Fail to parse json chunk : field \"blocks\" don't exist !");
     }
 } else {
-    fatal_error("Fail to parse json chunk : json root node must be an object !");
+    fatal_error!("Fail to parse json chunk : json root node must be an object !");
 }
 Ok(block_doc_vec)
...
@@ -142,8 +142,7 @@ pub fn local_sync<DC: DuniterConf>(profile: &str, conf: &DC, sync_opts: SyncOpt)
     {
         (currency, target_blockstamp)
     } else {
-        fatal_error("Fatal error : no target blockstamp !");
-        panic!(); // for compilator
+        fatal_error!("Fatal error : no target blockstamp !");
     };
     // Update DuniterConf
...
@@ -13,7 +13,6 @@ edition = "2018"
 path = "src/lib.rs"

 [dependencies]
-log = "0.4.*"

 [dev-dependencies]
 pretty_assertions = "0.5.1"
@@ -26,19 +26,33 @@
     unused_import_braces
 )]

-#[macro_use]
-extern crate log;
-
 /// Interrupts the program and log error message
-pub fn fatal_error(msg: &str) {
-    if cfg!(feature = "log_panics") {
-        panic!(format!("Fatal Error : {}", msg));
-    } else {
-        error!("{}", &format!("Fatal Error : {}", msg));
-        panic!(format!("Fatal Error : {}", msg));
-    }
+/// WARNING: this macro must not be called before the logger is initialized !
+#[macro_export]
+macro_rules! fatal_error {
+    ($msg:expr) => ({
+        error!("{}", &dbg!($msg));
+        panic!($msg);
+    });
+    ($msg:expr,) => ({
+        error!("{}", &dbg!($msg));
+        panic!($msg);
+    });
+    ($fmt:expr, $($arg:tt)+) => ({
+        error!("{}", dbg!(format!($fmt, $($arg)+)));
+        panic!($fmt, $($arg)+);
+    });
 }
+
+/*macro_rules! error {
+    (target: $target:expr, $($arg:tt)+) => (
+        log!(target: $target, $crate::Level::Error, $($arg)+);
+    );
+    ($($arg:tt)+) => (
+        log!($crate::Level::Error, $($arg)+);
+    )
+}*/

 /// Unescape backslash
 pub fn unescape_str(source: &str) -> String {
     let mut previous_char = None;
...
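The three arms of the new macro cover a bare message, a bare message with a trailing comma, and a format string with arguments; each logs via error! and then panics, which is what lets every call site above drop its compiler-appeasing panic!(). Calls matching each arm, drawn from the call sites in this commit (alternatives, not a sequence: each call aborts the program, and a logger must already be installed):

fatal_error!("Last chunk is empty !");                      // arm 1: bare message
fatal_error!("Last chunk is empty !",);                     // arm 2: trailing comma
fatal_error!("Missing chunk file n°{}", max_chunk_number);  // arm 3: format string + args

One side effect of the dbg! wrapping is that each fatal message is also echoed to stderr by dbg!, in addition to going through the log sink.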