diff --git a/Cargo.lock b/Cargo.lock
index bfacaf7142e6b95df26aa71cd7b6bae83ec65857..978cdfbfa045cf66ee6862654cb73122ca7c545d 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -249,8 +249,11 @@ dependencies = [
"base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"dup-crypto 0.5.0",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "durs-common-tools 0.1.0",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "json-pest-parser 0.1.0",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -265,6 +268,7 @@ dependencies = [
"dubp-documents 0.11.0",
"duniter-module 0.1.0-a0.1",
"dup-crypto 0.5.0",
+ "durs-common-tools 0.1.0",
"rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"rpassword 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -328,6 +332,7 @@ dependencies = [
"base58 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"base64 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)",
"bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.5.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rust-crypto-wasm 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -356,9 +361,12 @@ dependencies = [
"duniter-network 0.1.0-a0.1",
"dup-crypto 0.5.0",
"durs-blockchain-dal 0.1.0-a0.1",
+ "durs-common-tools 0.1.0",
"durs-message 0.1.0-a0.1",
"durs-network-documents 0.3.0",
"durs-wot 0.8.0-a0.9",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "json-pest-parser 0.1.0",
"log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.8.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -384,6 +392,14 @@ dependencies = [
"serde_json 1.0.33 (registry+https://github.com/rust-lang/crates.io-index)",
]
+[[package]]
+name = "durs-common-tools"
+version = "0.1.0"
+dependencies = [
+ "log 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "durs-message"
version = "0.1.0-a0.1"
@@ -407,8 +423,8 @@ dependencies = [
"dubp-documents 0.11.0",
"dup-crypto 0.5.0",
"hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -528,16 +544,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "failure"
-version = "0.1.3"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"backtrace 0.3.9 (registry+https://github.com/rust-lang/crates.io-index)",
- "failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "failure_derive"
-version = "0.1.3"
+version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -638,6 +654,16 @@ name = "itoa"
version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
+[[package]]
+name = "json-pest-parser"
+version = "0.1.0"
+dependencies = [
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)",
+]
+
[[package]]
name = "kernel32-sys"
version = "0.2.2"
@@ -823,7 +849,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
[[package]]
name = "pest"
-version = "2.0.2"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"ucd-trie 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -831,32 +857,32 @@ dependencies = [
[[package]]
name = "pest_derive"
-version = "2.0.1"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_generator 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "pest_generator"
-version = "2.0.0"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest_meta 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
"quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)",
+ "syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
name = "pest_meta"
-version = "2.0.3"
+version = "2.1.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"maplit 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
+ "pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"sha-1 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -975,7 +1001,7 @@ version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"argon2rs 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)",
- "failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"rand 0.4.3 (registry+https://github.com/rust-lang/crates.io-index)",
"redox_syscall 0.1.43 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1016,7 +1042,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"base64 0.9.3 (registry+https://github.com/rust-lang/crates.io-index)",
"bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)",
- "failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
+ "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.80 (registry+https://github.com/rust-lang/crates.io-index)",
]
@@ -1153,16 +1179,6 @@ dependencies = [
"syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)",
]
-[[package]]
-name = "syn"
-version = "0.14.9"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-dependencies = [
- "proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "quote 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
- "unicode-xid 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
-]
-
[[package]]
name = "syn"
version = "0.15.22"
@@ -1377,8 +1393,8 @@ dependencies = [
"checksum digest 0.7.6 (registry+https://github.com/rust-lang/crates.io-index)" = "03b072242a8cbaf9c145665af9d250c59af3b958f83ed6824e13533cf76d5b90"
"checksum dirs 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "88972de891f6118092b643d85a0b28e0678e0f948d7f879aa32f2d5aafe97d2a"
"checksum either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3be565ca5c557d7f59e7cfcf1844f9e3033650c929c6566f511e8005f205c1d0"
-"checksum failure 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6dd377bcc1b1b7ce911967e3ec24fa19c3224394ec05b54aa7b083d498341ac7"
-"checksum failure_derive 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "64c2d913fe8ed3b6c6518eedf4538255b989945c14c2a7d5cbff62a5e2120596"
+"checksum failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "795bd83d3abeb9220f257e597aa0080a508b27533824adf336529648f6abf7e2"
+"checksum failure_derive 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "ea1063915fd7ef4309e222a5a07cf9c319fb9c7836b1f89b85458672dbb127e1"
"checksum fake-simd 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "e88a8acf291dafb59c2d96e8f59828f3838bb1a70398823ade51a84de6a6deed"
"checksum foreign-types 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1"
"checksum foreign-types-shared 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"
@@ -1415,10 +1431,10 @@ dependencies = [
"checksum openssl-sys 0.9.39 (registry+https://github.com/rust-lang/crates.io-index)" = "278c1ad40a89aa1e741a1eed089a2f60b18fab8089c3139b542140fc7d674106"
"checksum pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "deb73390ab68d81992bd994d145f697451bb0b54fd39738e72eef32458ad6907"
"checksum percent-encoding 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "31010dd2e1ac33d5b46a5b413495239882813e0369f8ed8a5e266f173602f831"
-"checksum pest 2.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "a677051ad923732bb5c70f2d45f8985a96e3eee2e2bff86697e3b11b0c3fcfde"
-"checksum pest_derive 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "b76f477146419bc539a63f4ef40e902166cb43b3e51cecc71d9136fd12c567e7"
-"checksum pest_generator 2.0.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3ebee4e9680be4fd162e6f3394ae4192a6b60b1e4d17d845e631f0c68d1a3386"
-"checksum pest_meta 2.0.3 (registry+https://github.com/rust-lang/crates.io-index)" = "1f6d5f6f0e6082578c86af197d780dc38328e3f768cec06aac9bc46d714e8221"
+"checksum pest 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "54f0c72a98d8ab3c99560bfd16df8059cc10e1f9a8e83e6e3b97718dd766e9c3"
+"checksum pest_derive 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "833d1ae558dc601e9a60366421196a8d94bc0ac980476d0b67e1d0988d72b2d0"
+"checksum pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646"
+"checksum pest_meta 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "f5a3492a4ed208ffc247adcdcc7ba2a95be3104f58877d0d02f0df39bf3efb5e"
"checksum pkg-config 0.3.14 (registry+https://github.com/rust-lang/crates.io-index)" = "676e8eb2b1b4c9043511a9b7bea0915320d7e502b0a079fb03f9635a5252b18c"
"checksum pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a029430f0d744bc3d15dd474d591bed2402b645d024583082b9f63bb936dac6"
"checksum proc-macro2 0.4.24 (registry+https://github.com/rust-lang/crates.io-index)" = "77619697826f31a02ae974457af0b29b723e5619e113e9397b8b82c6bd253f09"
@@ -1455,7 +1471,6 @@ dependencies = [
"checksum strsim 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "bb4f380125926a99e52bc279241539c018323fab05ad6368b56f93d9369ff550"
"checksum structopt 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "41c4a2479a078509940d82773d90ff824a8c89533ab3b59cd3ce8b0c0e369c02"
"checksum structopt-derive 0.2.13 (registry+https://github.com/rust-lang/crates.io-index)" = "5352090cfae7a2c85e1a31146268b53396106c88ca5d6ccee2e3fae83b6e35c2"
-"checksum syn 0.14.9 (registry+https://github.com/rust-lang/crates.io-index)" = "261ae9ecaa397c42b960649561949d69311f08eeaea86a65696e6e46517cf741"
"checksum syn 0.15.22 (registry+https://github.com/rust-lang/crates.io-index)" = "ae8b29eb5210bc5cf63ed6149cbf9adfc82ac0be023d8735c176ee74a2db4da7"
"checksum synstructure 0.10.1 (registry+https://github.com/rust-lang/crates.io-index)" = "73687139bf99285483c96ac0add482c3776528beac1d97d444f6e91f203a2015"
"checksum term 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "5e6b677dd1e8214ea1ef4297f85dbcbed8e8cdddb561040cc998ca2551c37561"
diff --git a/Cargo.toml b/Cargo.toml
index 400aeae37f2c89eaea90ce62fbf7bede035ebcfd..85a45af60dcf82f939ed8eff7ab4237adb35788c 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -14,7 +14,9 @@ members = [
"lib/modules/ws2p/ws2p",
"lib/modules/ws2p/ws2p-messages",
"lib/tools/crypto",
+ "lib/tools/common-tools",
"lib/tools/documents",
+ "lib/tools/json-pest-parser",
"lib/tools/network-documents",
"lib/tools/wot",
]
diff --git a/doc/fr/conventions-git.md b/doc/fr/conventions-git.md
index 33d1e4e746b7f938c22a90099e87f4624991dd47..37bec85bbc7ae93afbd00fb86b6d66b440a89a4a 100644
--- a/doc/fr/conventions-git.md
+++ b/doc/fr/conventions-git.md
@@ -43,6 +43,7 @@ Exemple, renomage d'un trait `Toto` en `Titi` dans la crate `durs-bidule` :
* `build` : Modification des script de build, de packaging ou/et de publication des livrables.
* `ci` : Modification de la chaine d'intégration continue.
+* `deps` : Modification des dépendances sans modification du code : ce peut être pour mettre à jour des dépendances tierces ou pour supprimer des dépendances tierces qui ne sont plus utilisées.
* `docs` : Modification de la documentation (y compris traduction et création de nouveau contenu).
* `feat` : Développement d'une nouvelle fonctionnalitée.
* `fix` : Correction d'un bug
diff --git a/lib/core/conf/Cargo.toml b/lib/core/conf/Cargo.toml
index 6dabfbe281be94526c8e60bfad21f03381938634..a700e1c4919fdbc454a81ac6af4181daddd40197 100644
--- a/lib/core/conf/Cargo.toml
+++ b/lib/core/conf/Cargo.toml
@@ -17,8 +17,9 @@ serde_json = "1.0.*"
dirs = "1.0.2"
dup-crypto = { path = "../../tools/crypto" }
dubp-documents= { path = "../../tools/documents" }
-rpassword = "1.0.0"
duniter-module = { path = "../module" }
+durs-common-tools = { path = "../../tools/common-tools" }
+rpassword = "1.0.0"
[features]
# Treat warnings as a build error.
diff --git a/lib/core/conf/lib.rs b/lib/core/conf/lib.rs
index 4df1a506e45aaed2e14a7258b2564cc3e5f6c21c..4463a29dbf449524bdd5bef998ba634241f7a31f 100644
--- a/lib/core/conf/lib.rs
+++ b/lib/core/conf/lib.rs
@@ -38,6 +38,7 @@ pub mod keys;
use dubp_documents::CurrencyName;
use duniter_module::{DuniterConf, ModuleName, RequiredKeys, RequiredKeysContent};
use dup_crypto::keys::*;
+use durs_common_tools::fatal_error;
use rand::Rng;
use serde::ser::{Serialize, SerializeStruct, Serializer};
use std::collections::HashSet;
@@ -477,7 +478,11 @@ pub fn get_blockchain_db_path(profile: &str, currency: &CurrencyName) -> PathBuf
let mut db_path = datas_path(profile, &currency);
db_path.push("blockchain/");
if !db_path.as_path().exists() {
- fs::create_dir(db_path.as_path()).expect("Impossible to create blockchain dir !");
+ if let Err(io_error) = fs::create_dir(db_path.as_path()) {
+ if io_error.kind() != std::io::ErrorKind::AlreadyExists {
+ fatal_error("Impossible to create blockchain dir !");
+ }
+ }
}
db_path
}
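The error path above now goes through `durs_common_tools::fatal_error` instead of `expect`. As a rough sketch only (the real helper lives in the new `lib/tools/common-tools` crate and may log through the `log` crate or panic instead of exiting), such a helper boils down to:

```rust
/// Minimal sketch of a `fatal_error`-style helper; the actual
/// `durs_common_tools::fatal_error` implementation may differ.
pub fn fatal_error(msg: &str) -> ! {
    // Record the cause before aborting the whole process.
    eprintln!("Fatal error : {}", msg);
    std::process::exit(1)
}
```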
diff --git a/lib/core/core/lib.rs b/lib/core/core/lib.rs
index 5465108f288398f28c31a94a33e2c0716f183063..22eab0c2725ab97f91b05f0c9023703079e2bf70 100644
--- a/lib/core/core/lib.rs
+++ b/lib/core/core/lib.rs
@@ -241,7 +241,9 @@ impl<'a, 'b: 'a> DuniterCore<'b, 'a, DuRsConf> {
init_logger(profile.as_str(), self.soft_meta_datas.soft_name, &cli_args);
// Print panic! in logs
- log_panics::init();
+ if cfg!(feature = "log_panics") {
+ log_panics::init();
+ }
// Load global conf
let (conf, keypairs) = duniter_conf::load_conf(profile.as_str());
@@ -296,10 +298,9 @@ impl<'a, 'b: 'a> DuniterCore<'b, 'a, DuRsConf> {
let opts = SyncOpt::from_clap(matches);
match opts.source_type {
SyncSourceType::Network => unimplemented!(),
- SyncSourceType::TsSqlDb => {
+ SyncSourceType::LocalDuniter => {
sync_ts(profile.as_str(), &conf, &opts);
}
- SyncSourceType::JsonFiles => unimplemented!(),
}
false
diff --git a/lib/core/message/events.rs b/lib/core/message/events.rs
index fad3f21a457c8379375d7480bcd2379f32e7681f..9bb86786e7551723b69d8b560e8f06d2114e1211 100644
--- a/lib/core/message/events.rs
+++ b/lib/core/message/events.rs
@@ -14,7 +14,8 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::*;
-use dubp_documents::v10::block::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::DUBPDocument;
use dubp_documents::*;
use duniter_network::events::NetworkEvent;
@@ -24,7 +25,9 @@ pub enum DursEvent {
/// Arbitrary datas.
ArbitraryDatas(ArbitraryDatas),
/// Blockchain event
- BlockchainEvent(BlockchainEvent),
+ BlockchainEvent(Box<BlockchainEvent>),
+ /// MemPool event (local node finds the next block)
+ MemPoolEvent(MemPoolEvent),
/// Network event
NetworkEvent(Box<NetworkEvent>),
/// Client API event
@@ -32,7 +35,16 @@ pub enum DursEvent {
}
#[derive(Debug, Clone)]
-/// Event to be transmitted to the other modules
+/// MemPool module events
+pub enum MemPoolEvent {
+ /// FindNextBlock (local node finds the next block)
+ FindNextBlock(Box<BlockDocument>),
+ /// Store a new blockchain document in the pool
+ StoreNewDocInPool(Box<DUBPDocument>),
+}
+
+#[derive(Debug, Clone)]
+/// Blockchain module events
pub enum BlockchainEvent {
/// Stack up new valid block in local blockchain
StackUpValidBlock(Box<BlockDocument>, Blockstamp),
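Illustrative only (the handler below is not part of the patch): with `BlockchainEvent` now boxed and the new `MemPoolEvent` variant, a module consuming `DursEvent` dispatches roughly like this; the `durs_message::events` import path is assumed from this file's location.

```rust
use durs_message::events::*;

// Hypothetical event handler; the variant names come from the enums above.
fn handle_event(event: &DursEvent) {
    match event {
        DursEvent::BlockchainEvent(bc_event) => match bc_event.as_ref() {
            BlockchainEvent::StackUpValidBlock(_block, blockstamp) => {
                println!("new valid block stacked at {}", blockstamp)
            }
            _ => {}
        },
        DursEvent::MemPoolEvent(MemPoolEvent::FindNextBlock(_block)) => {
            println!("local node found the next block")
        }
        _ => {}
    }
}
```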
diff --git a/lib/core/message/responses.rs b/lib/core/message/responses.rs
index 5447928139a5a0eab1e00f8200ba257cebb5953a..6449ef54c445f9a1c6131888de3b83917b02079f 100644
--- a/lib/core/message/responses.rs
+++ b/lib/core/message/responses.rs
@@ -13,11 +13,11 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
-use dubp_documents::v10::membership::MembershipDocument;
-use dubp_documents::v10::revocation::RevocationDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
+use dubp_documents::documents::membership::MembershipDocument;
+use dubp_documents::documents::revocation::RevocationDocument;
use dubp_documents::BlockId;
use dubp_documents::Blockstamp;
use duniter_module::ModuleReqId;
diff --git a/lib/core/network/cli/sync.rs b/lib/core/network/cli/sync.rs
index c39158ea2351d822ac064e02cff39278e23591d3..ef49e0fd53b45a11207fdda487f91a032c1e96bc 100644
--- a/lib/core/network/cli/sync.rs
+++ b/lib/core/network/cli/sync.rs
@@ -24,19 +24,22 @@ use std::str::FromStr;
)]
/// Synchronization from network
pub struct SyncOpt {
- /// The source of datas (url of the node from which to synchronize OR path to local file)
+ /// The source of data (URL of the node from which to synchronize OR path to a local folder)
pub source: Option<String>,
/// The source type
#[structopt(short = "t", long = "type", default_value = "ts")]
pub source_type: SyncSourceType,
+ /// Currency
+ #[structopt(short = "c", long = "currency")]
+ pub currency: Option<String>,
/// End block
#[structopt(short = "e", long = "end")]
pub end: Option<u32>,
/// cautious mode (check all protocol rules, very slow)
- #[structopt(short = "c", long = "cautious")]
+ #[structopt(long = "cautious")]
pub cautious_mode: bool,
/// unsafe mode (not check blocks inner hashs, very dangerous)
- #[structopt(short = "u", long = "unsafe")]
+ #[structopt(short = "u", long = "unsafe", hidden = true)]
pub unsafe_mode: bool,
}
@@ -45,10 +48,8 @@ pub struct SyncOpt {
pub enum SyncSourceType {
/// Sync from network
Network,
- /// Sync from Duniter-ts sqlite bdd
- TsSqlDb,
- /// Sync from json blocks in files
- JsonFiles,
+ /// Sync from local Duniter JSON block files
+ LocalDuniter,
}
impl FromStr for SyncSourceType {
@@ -57,8 +58,7 @@ impl FromStr for SyncSourceType {
fn from_str(source: &str) -> Result<Self, Self::Err> {
match source {
"n" | "network" => Ok(SyncSourceType::Network),
- "ts" | "ts-sql" => Ok(SyncSourceType::TsSqlDb),
- "json" => Ok(SyncSourceType::JsonFiles),
+ "ts" | "duniter" => Ok(SyncSourceType::LocalDuniter),
&_ => Err("Unknown source type".to_owned()),
}
}
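A quick illustrative check of the accepted aliases after this change (this test is not part of the patch): `"ts"` is kept as a legacy alias and now maps, like `"duniter"`, to the single `LocalDuniter` source type.

```rust
use std::str::FromStr;

#[test]
fn parse_sync_source_type() {
    for alias in &["ts", "duniter"] {
        match SyncSourceType::from_str(alias) {
            Ok(SyncSourceType::LocalDuniter) => {}
            _ => panic!("unexpected parse result for '{}'", alias),
        }
    }
    // Anything else is rejected with an error message.
    assert!(SyncSourceType::from_str("sqlite").is_err());
}
```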
diff --git a/lib/core/network/documents.rs b/lib/core/network/documents.rs
index 05d351e2b5dc20ada464e7421bdab4ba09f672ab..4189d1178733b72cbde1b133f61a71cda9ea91a6 100644
--- a/lib/core/network/documents.rs
+++ b/lib/core/network/documents.rs
@@ -15,78 +15,18 @@
//! Defined all network documents
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
-use dubp_documents::v10::membership::MembershipDocument;
-use dubp_documents::v10::revocation::RevocationDocument;
-use dubp_documents::v10::transaction::TransactionDocument;
-use dubp_documents::Document;
-use dubp_documents::{BlockHash, BlockId, Blockstamp};
-use serde_json;
-use std::ops::Deref;
-
-#[derive(Debug, Clone)]
-/// Block v10 in network format (Some events require a blockchain access to reconstitute the corresponding document)
-pub struct NetworkBlockV10 {
- /// Uncompleted block document
- pub uncompleted_block_doc: BlockDocument,
- /// revoked
- pub revoked: Vec,
- /// certifications
- pub certifications: Vec,
-}
-
-#[derive(Debug, Clone)]
-/// Block in network format (Some events require a blockchain access to reconstitute the corresponding document)
-pub enum NetworkBlock {
- /// Block V1
- V10(Box),
- /// Block V11
- V11(),
-}
-
-impl NetworkBlock {
- /// Return uncompleted block document
- pub fn uncompleted_block_doc(&self) -> BlockDocument {
- match *self {
- NetworkBlock::V10(ref network_block_v10) => {
- network_block_v10.deref().uncompleted_block_doc.clone()
- }
- _ => panic!("Block version not supported !"),
- }
- }
- /// Return blockstamp
- pub fn blockstamp(&self) -> Blockstamp {
- match *self {
- NetworkBlock::V10(ref network_block_v10) => {
- network_block_v10.deref().uncompleted_block_doc.blockstamp()
- }
- _ => panic!("Block version not supported !"),
- }
- }
- /// Return previous blockstamp
- pub fn previous_blockstamp(&self) -> Blockstamp {
- match *self {
- NetworkBlock::V10(ref network_block_v10) => Blockstamp {
- id: BlockId(network_block_v10.deref().uncompleted_block_doc.number.0 - 1),
- hash: BlockHash(
- network_block_v10
- .deref()
- .uncompleted_block_doc
- .previous_hash,
- ),
- },
- _ => panic!("Block version not supported !"),
- }
- }
-}
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
+use dubp_documents::documents::membership::MembershipDocument;
+use dubp_documents::documents::revocation::RevocationDocument;
+use dubp_documents::documents::transaction::TransactionDocument;
#[derive(Debug, Clone)]
/// Network Document
pub enum BlockchainDocument {
/// Network Block
- Block(NetworkBlock),
+ Block(Box<BlockDocument>),
/// Identity Document
Identity(Box<IdentityDocument>),
/// Membership Document
diff --git a/lib/core/network/requests.rs b/lib/core/network/requests.rs
index 2f02a3fd8b47e3f24e994b834173dd8d8e133074..6d0033a2601506907d199d345ebcb27dc9a7518f 100644
--- a/lib/core/network/requests.rs
+++ b/lib/core/network/requests.rs
@@ -17,6 +17,7 @@
use crate::documents::*;
use crate::*;
+use dubp_documents::documents::block::BlockDocument;
use dubp_documents::Blockstamp;
#[derive(Debug, Copy, Clone)]
@@ -72,11 +73,11 @@ pub enum OldNetworkRequestError {
/// Type containing the response to a network request
pub enum NetworkResponse {
/// CurrentBlock
- CurrentBlock(ModuleReqFullId, NodeFullId, Box<NetworkBlock>),
+ CurrentBlock(ModuleReqFullId, NodeFullId, Box<BlockDocument>),
/// Block
- Block(ModuleReqFullId, NodeFullId, Box<NetworkBlock>),
+ Block(ModuleReqFullId, NodeFullId, Box<BlockDocument>),
/// Chunk
- Chunk(ModuleReqFullId, NodeFullId, Vec<Box<NetworkBlock>>),
+ Chunk(ModuleReqFullId, NodeFullId, Vec<Box<BlockDocument>>),
/// PendingDocuments
PendingDocuments(ModuleReqFullId, Vec),
/// Consensus
diff --git a/lib/modules/blockchain/blockchain-dal/block.rs b/lib/modules/blockchain/blockchain-dal/block.rs
index 60ccb8a2e218dea4532978cdc1d213fe7c85d07d..c6e95e05bdfd16e805fbbb7e5bee789d969b95d8 100644
--- a/lib/modules/blockchain/blockchain-dal/block.rs
+++ b/lib/modules/blockchain/blockchain-dal/block.rs
@@ -15,7 +15,7 @@
use super::constants::MAX_FORKS;
use crate::*;
-use dubp_documents::v10::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
use dubp_documents::Document;
use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
use dup_crypto::keys::*;
diff --git a/lib/modules/blockchain/blockchain-dal/currency_params.rs b/lib/modules/blockchain/blockchain-dal/currency_params.rs
index a695f05f854379face2cae50a09e57919f2a4e3e..8e017048030a44348120ffe8b4ae3bc6a95f3697 100644
--- a/lib/modules/blockchain/blockchain-dal/currency_params.rs
+++ b/lib/modules/blockchain/blockchain-dal/currency_params.rs
@@ -15,7 +15,7 @@
use crate::constants::*;
use crate::*;
-use dubp_documents::v10::block::BlockV10Parameters;
+use dubp_documents::documents::block::BlockV10Parameters;
use dubp_documents::CurrencyName;
#[derive(Debug, Copy, Clone)]
diff --git a/lib/modules/blockchain/blockchain-dal/identity.rs b/lib/modules/blockchain/blockchain-dal/identity.rs
index eec031d785f49dcd63e4c83920a55851c0bacc6a..7cdb0132d28fac3fd1cc4ddb22a797b5e98b1252 100644
--- a/lib/modules/blockchain/blockchain-dal/identity.rs
+++ b/lib/modules/blockchain/blockchain-dal/identity.rs
@@ -15,7 +15,7 @@
use crate::currency_params::CurrencyParameters;
use crate::{BinDB, DALError, IdentitiesV10Datas, MsExpirV10Datas};
-use dubp_documents::v10::identity::IdentityDocument;
+use dubp_documents::documents::identity::IdentityDocument;
use dubp_documents::{BlockId, Blockstamp};
use dup_crypto::keys::*;
use durs_wot::NodeId;
diff --git a/lib/modules/blockchain/blockchain-dal/lib.rs b/lib/modules/blockchain/blockchain-dal/lib.rs
index 78468397d39f37a493ded69afce1704c9c09ccf7..66cf116f0f9de50f1ce5329cf59c8366c3cce3bf 100644
--- a/lib/modules/blockchain/blockchain-dal/lib.rs
+++ b/lib/modules/blockchain/blockchain-dal/lib.rs
@@ -53,9 +53,6 @@ pub mod currency_params;
/// Identity operations
pub mod identity;
-/// Parsers
-pub mod parsers;
-
/// Define currency sources types
pub mod sources;
@@ -65,8 +62,8 @@ pub mod tools;
/// Contains all write databases functions
pub mod writers;
-use dubp_documents::v10::block::BlockV10Parameters;
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::block::BlockV10Parameters;
+use dubp_documents::documents::transaction::*;
use dubp_documents::CurrencyName;
use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
use dup_crypto::hashs::Hash;
diff --git a/lib/modules/blockchain/blockchain-dal/sources.rs b/lib/modules/blockchain/blockchain-dal/sources.rs
index 3055e9e87fce6914257a79525f26c4ea871323e1..64e5bc688b348f39f8db9c8d282238ef9f39f69a 100644
--- a/lib/modules/blockchain/blockchain-dal/sources.rs
+++ b/lib/modules/blockchain/blockchain-dal/sources.rs
@@ -13,7 +13,7 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
use dubp_documents::BlockId;
use dup_crypto::hashs::Hash;
use dup_crypto::keys::PubKey;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/certification.rs b/lib/modules/blockchain/blockchain-dal/writers/certification.rs
index de0f052b7bc14d2b00b65764dfa5f5e16dd23502..555ee97bc122071fda07e5dbec2f234e8412ce88 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/certification.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/certification.rs
@@ -15,7 +15,7 @@
use crate::currency_params::CurrencyParameters;
use crate::{BinDB, CertsExpirV10Datas, DALError, IdentitiesV10Datas};
-use dubp_documents::v10::certification::CompactCertificationDocument;
+use dubp_documents::documents::certification::CompactCertificationDocument;
use dubp_documents::BlockId;
use dup_crypto::keys::*;
use durs_wot::NodeId;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/dividend.rs b/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
index 94ca2645478571c55aba2bda26915fc006e78919..be5e9c4efe29be08552891ff05817be1efef6807 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/dividend.rs
@@ -15,7 +15,7 @@
use crate::sources::SourceAmount;
use crate::*;
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
use dubp_documents::BlockId;
use dup_crypto::keys::PubKey;
use std::collections::{HashMap, HashSet};
@@ -29,6 +29,10 @@ pub fn create_du(
members: &[PubKey],
revert: bool,
) -> Result<(), DALError> {
+ debug!(
+ "create_du(amount, block_id, members, revert)=({:?}, {}, {:?}, {})",
+ du_amount, du_block_id.0, members, revert
+ );
// Insert/Remove UD sources in UDsV10DB
du_db.write(|db| {
for pubkey in members {
diff --git a/lib/modules/blockchain/blockchain-dal/writers/identity.rs b/lib/modules/blockchain/blockchain-dal/writers/identity.rs
index fe9d99837d6f86ce50e69a247b0713299b034fb3..09d04757c2298536b7c1a1d520a4653106bb594a 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/identity.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/identity.rs
@@ -16,7 +16,7 @@
use crate::currency_params::CurrencyParameters;
use crate::identity::{DALIdentity, DALIdentityState};
use crate::{BinDB, DALError, IdentitiesV10Datas, MsExpirV10Datas};
-use dubp_documents::v10::identity::IdentityDocument;
+use dubp_documents::documents::identity::IdentityDocument;
use dubp_documents::Document;
use dubp_documents::{BlockId, Blockstamp};
use dup_crypto::keys::PubKey;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/requests.rs b/lib/modules/blockchain/blockchain-dal/writers/requests.rs
index 2ee202dc001eede25223521c7c22286537d05f38..09be3a2f24eab7992f3640504bf7fdf7e2db3b5f 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/requests.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/requests.rs
@@ -19,8 +19,8 @@ use crate::identity::DALIdentity;
use crate::sources::SourceAmount;
use crate::writers::transaction::DALTxV10;
use crate::*;
-use dubp_documents::v10::certification::CompactCertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
+use dubp_documents::documents::certification::CompactCertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
use dubp_documents::Blockstamp;
use dup_crypto::keys::PubKey;
use durs_wot::NodeId;
diff --git a/lib/modules/blockchain/blockchain-dal/writers/transaction.rs b/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
index 338f0579de56273d8f986c103c3e1289b5685f19..bb7994ab09398f01029f1e59dc99029df1c089a3 100644
--- a/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
+++ b/lib/modules/blockchain/blockchain-dal/writers/transaction.rs
@@ -13,7 +13,7 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
use crate::sources::{SourceAmount, SourceIndexV10, UTXOIndexV10, UTXOV10};
use crate::*;
@@ -269,6 +269,7 @@ pub fn apply_and_write_tx(
if let SourceIndexV10::UTXO(utxo_index) = source_index {
// Get utxo
let utxo = db.get(&utxo_index).unwrap_or_else(|| {
+ debug!("apply_tx=\"{:#?}\"", tx_doc);
panic!(
"ApplyBLockError : unknow UTXO in inputs : {:?} !",
utxo_index
diff --git a/lib/modules/blockchain/blockchain/Cargo.toml b/lib/modules/blockchain/blockchain/Cargo.toml
index c8bf0556cf33e509b70c9ac80966fc0ca8101a72..611971f2a3ce7149a2a4a75f88465b35564d673a 100644
--- a/lib/modules/blockchain/blockchain/Cargo.toml
+++ b/lib/modules/blockchain/blockchain/Cargo.toml
@@ -15,11 +15,14 @@ duniter-conf = { path = "../../../core/conf" }
dup-crypto = { path = "../../../tools/crypto" }
durs-blockchain-dal = { path = "../blockchain-dal" }
dubp-documents= { path = "../../../tools/documents" }
+durs-common-tools = { path = "../../../tools/common-tools" }
durs-network-documents = { path = "../../../tools/network-documents" }
durs-message = { path = "../../../core/message" }
duniter-module = { path = "../../../core/module" }
duniter-network = { path = "../../../core/network" }
durs-wot = { path = "../../../tools/wot" }
+failure = "0.1.5"
+json-pest-parser = { path = "../../../tools/json-pest-parser" }
log = "0.4.*"
num_cpus = "1.8.*"
pbr = "1.0.*"
diff --git a/lib/modules/blockchain/blockchain/apply_valid_block.rs b/lib/modules/blockchain/blockchain/apply_valid_block.rs
index bd443de1e36310296a0e9b7bef1317219cd41013..a4e4683b88c417fdf9db9701103dcf1c0e6c2db3 100644
--- a/lib/modules/blockchain/blockchain/apply_valid_block.rs
+++ b/lib/modules/blockchain/blockchain/apply_valid_block.rs
@@ -13,8 +13,8 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::transaction::{TxAmount, TxBase};
-use dubp_documents::v10::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::transaction::{TxAmount, TxBase};
use dubp_documents::BlockId;
use dubp_documents::Document;
use dup_crypto::keys::*;
@@ -42,7 +42,7 @@ pub enum ApplyValidBlockError {
}
pub fn apply_valid_block(
- block: &BlockDocument,
+ mut block: BlockDocument,
wot_index: &mut HashMap<PubKey, NodeId>,
wot_db: &BinDB,
expire_certs: &HashMap<(NodeId, NodeId), BlockId>,
@@ -50,17 +50,17 @@ pub fn apply_valid_block(
) -> Result {
debug!(
"BlockchainModule : apply_valid_block({})",
- block.blockstamp()
+ block.blockstamp(),
);
let mut wot_dbs_requests = Vec::new();
let mut currency_dbs_requests = Vec::new();
let current_blockstamp = block.blockstamp();
let mut identities = HashMap::with_capacity(block.identities.len());
- for identity in block.identities.clone() {
+ for identity in &block.identities {
identities.insert(identity.issuers()[0], identity);
}
- for joiner in block.joiners.clone() {
- let pubkey = joiner.clone().issuers()[0];
+ for joiner in &block.joiners {
+ let pubkey = joiner.issuers()[0];
if let Some(idty_doc) = identities.get(&pubkey) {
// Newcomer
let wot_id = NodeId(
@@ -78,7 +78,7 @@ pub fn apply_valid_block(
wot_id,
current_blockstamp,
block.median_time,
- Box::new(idty_doc.clone()),
+ Box::new((*idty_doc).clone()),
joiner.blockstamp().id,
));
} else {
@@ -97,7 +97,7 @@ pub fn apply_valid_block(
));
}
}
- for active in block.actives.clone() {
+ for active in &block.actives {
let pubkey = active.issuers()[0];
if !identities.contains_key(&pubkey) {
let wot_id = wot_index[&pubkey];
@@ -114,7 +114,7 @@ pub fn apply_valid_block(
));
}
}
- for exclusion in block.excluded.clone() {
+ for exclusion in &block.excluded {
let wot_id = if let Some(wot_id) = wot_index.get(&exclusion) {
wot_id
} else {
@@ -126,11 +126,11 @@ pub fn apply_valid_block(
})
.expect("Fail to write in WotDB");
wot_dbs_requests.push(WotsDBsWriteQuery::ExcludeIdentity(
- exclusion,
+ *exclusion,
block.blockstamp(),
));
}
- for revocation in block.revoked.clone() {
+ for revocation in &block.revoked {
let compact_revoc = revocation.to_compact_document();
let wot_id = if let Some(wot_id) = wot_index.get(&compact_revoc.issuer) {
wot_id
@@ -148,7 +148,7 @@ pub fn apply_valid_block(
true,
));
}
- for certification in block.certifications.clone() {
+ for certification in &block.certifications {
trace!("stack_up_valid_block: apply cert...");
let compact_cert = certification.to_compact_document();
let wot_node_from = wot_index[&compact_cert.issuer];
@@ -210,7 +210,8 @@ pub fn apply_valid_block(
));
}
}
- for tx in block.transactions.clone() {
+
+ for tx in &block.transactions {
currency_dbs_requests.push(CurrencyDBsWriteQuery::WriteTx(Box::new(tx.unwrap_doc())));
}
@@ -263,7 +264,6 @@ pub fn apply_valid_block(
);
}*/
// Create DALBlock
- let mut block = block.clone();
let previous_blockcstamp = block.previous_blockstamp();
let block_hash = block
.hash
diff --git a/lib/modules/blockchain/blockchain/check_and_apply_block.rs b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
index af4ee09e9397c4b6f47f1f3fe0685ec745fd726c..93226ab22644194a1400579238b4abf3bb4fb46d 100644
--- a/lib/modules/blockchain/blockchain/check_and_apply_block.rs
+++ b/lib/modules/blockchain/blockchain/check_and_apply_block.rs
@@ -16,28 +16,27 @@
use std::collections::HashMap;
use crate::apply_valid_block::*;
+use crate::verify_block::*;
use crate::*;
use dubp_documents::Document;
use dubp_documents::{BlockHash, BlockId, Blockstamp, PreviousBlockstamp};
-use duniter_network::documents::NetworkBlock;
use dup_crypto::keys::*;
use durs_blockchain_dal::block::DALBlock;
use durs_blockchain_dal::*;
#[derive(Debug, Copy, Clone)]
pub enum BlockError {
- BlockVersionNotSupported(),
- CompletedBlockError(CompletedBlockError),
+ VerifyBlockHashsError(VerifyBlockHashsError),
DALError(DALError),
- //CheckBlockError(),
+ InvalidBlock(InvalidBlockError),
ApplyValidBlockError(ApplyValidBlockError),
NoForkAvailable(),
UnknowError(),
}
-impl From<CompletedBlockError> for BlockError {
- fn from(err: CompletedBlockError) -> Self {
- BlockError::CompletedBlockError(err)
+impl From<VerifyBlockHashsError> for BlockError {
+ fn from(err: VerifyBlockHashsError) -> Self {
+ BlockError::VerifyBlockHashsError(err)
}
}
@@ -56,26 +55,30 @@ impl From for BlockError {
pub fn check_and_apply_block(
blocks_databases: &BlocksV10DBs,
certs_db: &BinDB,
- block: &Block,
+ block: Block,
current_blockstamp: &Blockstamp,
wot_index: &mut HashMap<PubKey, NodeId>,
wot_db: &BinDB,
forks_states: &[ForkStatus],
) -> Result<ValidBlockApplyReqs, BlockError> {
- let (block_doc, already_have_block) = match *block {
- Block::NetworkBlock(network_block) => match *network_block {
- NetworkBlock::V10(ref network_block_v10) => {
- let already_have_block = DALBlock::already_have_block(
- &blocks_databases.blockchain_db,
- &blocks_databases.forks_blocks_db,
- network_block_v10.uncompleted_block_doc.blockstamp(),
- )?;
- (&network_block_v10.uncompleted_block_doc, already_have_block)
- }
- _ => return Err(BlockError::BlockVersionNotSupported()),
- },
- Block::LocalBlock(block_doc) => (block_doc, true),
+ let block_from_network = block.is_from_network();
+ let block_doc: BlockDocument = block.into_doc();
+
+ // Get BlockDocument && check if already have block
+ let already_have_block = if block_from_network {
+ DALBlock::already_have_block(
+ &blocks_databases.blockchain_db,
+ &blocks_databases.forks_blocks_db,
+ block_doc.blockstamp(),
+ )?
+ } else {
+ false
};
+
+ // Verify block hashes
+ verify_block_hashs(&block_doc)?;
+
+ // Check block chainability
if (block_doc.number.0 == current_blockstamp.id.0 + 1
&& block_doc.previous_hash.to_string() == current_blockstamp.hash.0.to_string())
|| (block_doc.number.0 == 0 && *current_blockstamp == Blockstamp::default())
@@ -88,20 +91,28 @@ pub fn check_and_apply_block(
let blocks_expiring = Vec::with_capacity(0);
let expire_certs =
durs_blockchain_dal::certs::find_expire_certs(certs_db, blocks_expiring)?;
+
// Try stack up block
- let mut old_fork_id = None;
- let block_doc = match *block {
- Block::NetworkBlock(network_block) => complete_network_block(network_block, true)?,
- Block::LocalBlock(block_doc) => {
- old_fork_id = durs_blockchain_dal::block::get_fork_id_of_blockstamp(
- &blocks_databases.forks_blocks_db,
- &block_doc.blockstamp(),
- )?;
- block_doc.clone()
- }
+ let old_fork_id = if block_from_network {
+ durs_blockchain_dal::block::get_fork_id_of_blockstamp(
+ &blocks_databases.forks_blocks_db,
+ &block_doc.blockstamp(),
+ )?
+ } else {
+ None
};
- return Ok(apply_valid_block(
+
+ // Verify block validity (checks all protocol rules, very slow)
+ verify_block_validity(
&block_doc,
+ &blocks_databases.blockchain_db,
+ certs_db,
+ wot_index,
+ wot_db,
+ )?;
+
+ return Ok(apply_valid_block(
+ block_doc,
wot_index,
wot_db,
&expire_certs,
@@ -138,53 +149,30 @@ pub fn check_and_apply_block(
}
_ => {}
}
- match *block {
- Block::NetworkBlock(network_block) => {
- // Completed network block
- let block_doc = complete_network_block(network_block, true)?;
- let dal_block = DALBlock {
- fork_id,
- isolate,
- block: block_doc,
- expire_certs: None,
- };
- durs_blockchain_dal::writers::block::write(
- &blocks_databases.blockchain_db,
- &blocks_databases.forks_db,
- &blocks_databases.forks_blocks_db,
- &dal_block,
- None,
- false,
- false,
- )
- .expect("durs_blockchain_dal::writers::block::write() : DALError")
- }
- Block::LocalBlock(block_doc) => {
- let old_fork_id = None;
- let dal_block = DALBlock {
- fork_id,
- isolate,
- block: block_doc.clone(),
- expire_certs: None,
- };
- durs_blockchain_dal::writers::block::write(
- &blocks_databases.blockchain_db,
- &blocks_databases.forks_db,
- &blocks_databases.forks_blocks_db,
- &dal_block,
- old_fork_id,
- false,
- false,
- )
- .expect("durs_blockchain_dal::writers::block::write() : DALError")
- }
+
+ let dal_block = DALBlock {
+ fork_id,
+ isolate,
+ block: block_doc.clone(),
+ expire_certs: None,
};
+
+ durs_blockchain_dal::writers::block::write(
+ &blocks_databases.blockchain_db,
+ &blocks_databases.forks_db,
+ &blocks_databases.forks_blocks_db,
+ &dal_block,
+ None,
+ false,
+ false,
+ )
+ .expect("durs_blockchain_dal::writers::block::write() : DALError")
} else {
return Err(BlockError::NoForkAvailable());
}
} else {
debug!(
- "stackable_block : block {} not chainable and already stored !",
+ "stackable_block : block {} not chainable and already stored or out of forkWindowSize !",
block_doc.blockstamp()
);
}
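A small aside on the error plumbing above (not part of the patch): the new `From<VerifyBlockHashsError> for BlockError` impl is what lets `check_and_apply_block` call `verify_block_hashs(&block_doc)?` directly, the conversion into `BlockError` being inserted by the `?` operator. The helper below is hypothetical and only shows that conversion in isolation.

```rust
// Hypothetical helper, not part of the patch.
fn check_hashs_only(block_doc: &BlockDocument) -> Result<(), BlockError> {
    verify_block_hashs(block_doc)?; // VerifyBlockHashsError -> BlockError via From
    Ok(())
}
```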
diff --git a/lib/modules/blockchain/blockchain/constants.rs b/lib/modules/blockchain/blockchain/constants.rs
new file mode 100644
index 0000000000000000000000000000000000000000..6f575d144cff5384b0ed3e59473d100540b879f9
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/constants.rs
@@ -0,0 +1,26 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+/// Default currency
+pub static DEFAULT_CURRENCY: &'static str = "g1";
+
+/// Chunk size (in blocks)
+pub static CHUNK_SIZE: &'static usize = &250;
+
+/// Chunk file name begin
+pub static CHUNK_FILE_NAME_BEGIN: &'static str = "chunk_";
+
+/// Chunk file name end
+pub static CHUNK_FILE_NAME_END: &'static str = "-250.json";
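Presumably (the consuming sync code is not shown in this hunk), these constants are concatenated into the Duniter chunk file names, so chunk number 42 of the configured currency would be read from `chunk_42-250.json`. The helper below is illustrative only.

```rust
// Illustrative helper, not part of the patch.
fn chunk_file_name(chunk_number: usize) -> String {
    format!(
        "{}{}{}",
        CHUNK_FILE_NAME_BEGIN, chunk_number, CHUNK_FILE_NAME_END
    )
}

#[test]
fn chunk_file_name_format() {
    assert_eq!(chunk_file_name(42), "chunk_42-250.json");
}
```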
diff --git a/lib/modules/blockchain/blockchain/dbex.rs b/lib/modules/blockchain/blockchain/dbex.rs
index 1a388f6c33cecf2fe7eac4bb4cc16d7691546f8f..97fdb82aa03af89d023a36a382ce3d195744693a 100644
--- a/lib/modules/blockchain/blockchain/dbex.rs
+++ b/lib/modules/blockchain/blockchain/dbex.rs
@@ -14,7 +14,7 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::*;
-use dubp_documents::v10::transaction::*;
+use dubp_documents::documents::transaction::*;
use duniter_module::DuniterConf;
use dup_crypto::keys::*;
use durs_blockchain_dal::identity::DALIdentity;
diff --git a/lib/modules/blockchain/blockchain/lib.rs b/lib/modules/blockchain/blockchain/lib.rs
index cad6c5734e1e1f5171e38195880714bcafc01fe8..65bdad2c910346d8360609552459e366e8be08d4 100644
--- a/lib/modules/blockchain/blockchain/lib.rs
+++ b/lib/modules/blockchain/blockchain/lib.rs
@@ -16,7 +16,7 @@
//! Module managing the Duniter blockchain.
#![cfg_attr(feature = "strict", deny(warnings))]
-//#![cfg_attr(feature = "cargo-clippy", allow(duration_subsec))]
+#![allow(clippy::large_enum_variant)]
#![deny(
missing_docs,
missing_debug_implementations,
@@ -29,15 +29,18 @@
unused_qualifications
)]
+//#[macro_use]
+//extern crate failure;
#[macro_use]
extern crate log;
mod apply_valid_block;
mod check_and_apply_block;
+mod constants;
mod dbex;
mod revert_block;
mod sync;
-mod ts_parsers;
+mod verify_block;
use std::collections::HashMap;
use std::ops::Deref;
@@ -48,14 +51,15 @@ use std::time::{Duration, SystemTime, UNIX_EPOCH};
use crate::apply_valid_block::*;
use crate::check_and_apply_block::*;
+use crate::constants::*;
pub use crate::dbex::{DBExQuery, DBExTxQuery, DBExWotQuery};
-use dubp_documents::v10::{BlockDocument, V10Document};
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::DUBPDocument;
use dubp_documents::*;
-use dubp_documents::{DUBPDocument, Document};
use duniter_module::*;
use duniter_network::{
cli::sync::SyncOpt,
- documents::{BlockchainDocument, NetworkBlock},
+ documents::BlockchainDocument,
events::NetworkEvent,
requests::{NetworkResponse, OldNetworkRequest},
};
@@ -65,6 +69,7 @@ use durs_blockchain_dal::currency_params::CurrencyParameters;
use durs_blockchain_dal::identity::DALIdentity;
use durs_blockchain_dal::writers::requests::BlocksDBsWriteQuery;
use durs_blockchain_dal::*;
+use durs_common_tools::fatal_error;
use durs_message::events::*;
use durs_message::requests::*;
use durs_message::responses::*;
@@ -108,11 +113,42 @@ pub struct BlockchainModule {
#[derive(Debug, Clone)]
/// Block
-pub enum Block<'a> {
+pub enum Block {
/// Block coming from Network
- NetworkBlock(&'a NetworkBlock),
+ NetworkBlock(BlockDocument),
/// Block coming from local database
- LocalBlock(&'a BlockDocument),
+ LocalBlock(BlockDocument),
+}
+
+impl Block {
+ /// Into block document
+ pub fn into_doc(self) -> BlockDocument {
+ match self {
+ Block::NetworkBlock(block) => block,
+ Block::LocalBlock(block) => block,
+ }
+ }
+ /// Get block document ref
+ pub fn get_doc_ref(&self) -> &BlockDocument {
+ match *self {
+ Block::NetworkBlock(ref block) => block,
+ Block::LocalBlock(ref block) => block,
+ }
+ }
+ /// Return blockstamp
+ pub fn blockstamp(&self) -> Blockstamp {
+ match *self {
+ Block::NetworkBlock(ref block) => block.blockstamp(),
+ Block::LocalBlock(ref block) => block.blockstamp(),
+ }
+ }
+ /// Is from network ?
+ pub fn is_from_network(&self) -> bool {
+ match *self {
+ Block::NetworkBlock(_) => true,
+ _ => false,
+ }
+ }
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
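Illustrative usage of the new owned `Block` wrapper (this snippet is not part of the patch): a block received from the network and a locally stored one end up as the same `BlockDocument` once `into_doc()` is called, which is what lets `check_and_apply_block` take `Block` by value.

```rust
// Hypothetical caller-side sketch.
fn describe(block: Block) {
    let from_network = block.is_from_network();
    let blockstamp = block.blockstamp();
    // into_doc() consumes the wrapper and yields the inner BlockDocument.
    let _doc: BlockDocument = block.into_doc();
    info!(
        "block {} received from {}",
        blockstamp,
        if from_network { "the network" } else { "the local DB" }
    );
}
```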
@@ -126,12 +162,12 @@ pub enum SyncVerificationLevel {
}
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
-/// Error returned by function complete_network_block()
-pub enum CompletedBlockError {
+/// Error returned by function verify_block_hashs()
+pub enum VerifyBlockHashsError {
/// Invalid block inner hash
InvalidInnerHash(),
/// Invalid block hash
- InvalidHash(BlockId, Option<BlockHash>, Option<BlockHash>),
+ InvalidHash(BlockId, Option<BlockHash>),
/// Invalid block version
InvalidVersion(),
}
@@ -193,28 +229,35 @@ impl BlockchainModule {
pub fn dbex<DC: DuniterConf>(profile: &str, conf: &DC, csv: bool, req: &DBExQuery) {
dbex::dbex(profile, conf, csv, req);
}
- /// Synchronize blockchain from a duniter-ts database
+ /// Synchronize blockchain from local duniter json files
pub fn sync_ts<DC: DuniterConf>(profile: &str, conf: &DC, sync_opts: &SyncOpt) {
- // get db_ts_path
- let db_ts_path = if let Some(ref ts_path) = sync_opts.source {
- PathBuf::from(ts_path)
+ // get json_chunks_path
+ let json_chunks_path = if let Some(ref path) = sync_opts.source {
+ PathBuf::from(path)
} else {
- let mut db_ts_path = match dirs::config_dir() {
+ let mut json_chunks_path = match dirs::config_dir() {
Some(path) => path,
None => panic!("Impossible to get user config directory !"),
};
- db_ts_path.push("duniter/");
- db_ts_path.push("duniter_default");
- db_ts_path.push("duniter.db");
- db_ts_path
+ json_chunks_path.push("duniter/");
+ json_chunks_path.push("duniter_default");
+
+ let currency = if let Some(currency) = &sync_opts.currency {
+ currency
+ } else {
+ DEFAULT_CURRENCY
+ };
+
+ json_chunks_path.push(currency);
+ json_chunks_path
};
- if !db_ts_path.as_path().exists() {
- panic!("Fatal error : duniter-ts database don't exist !");
+ if !json_chunks_path.as_path().exists() {
+ panic!("Fatal error : duniter json chunks folder don't exist !");
}
- sync::sync_ts(
+ sync::local_sync(
profile,
conf,
- db_ts_path,
+ json_chunks_path,
sync_opts.end,
sync_opts.cautious_mode,
!sync_opts.unsafe_mode,
@@ -290,7 +333,7 @@ impl BlockchainModule {
self.router_sender
.send(RouterThreadMessage::ModuleMessage(DursMsg::Event {
event_type: module_event,
- event_content: DursEvent::BlockchainEvent(event.clone()),
+ event_content: DursEvent::BlockchainEvent(Box::new(event.clone())),
}))
.unwrap_or_else(|_| panic!("Fail to send BlockchainEvent to router"));
}
@@ -316,160 +359,118 @@ impl BlockchainModule {
wot_index: &mut HashMap<PubKey, NodeId>,
wot_db: &BinDB,
) -> Blockstamp {
- let mut blockchain_documents = Vec::new();
let mut current_blockstamp = *current_blockstamp;
- let mut save_blocks_dbs = false;
- let mut save_wots_dbs = false;
- let mut save_currency_dbs = false;
+
for network_document in network_documents {
- match *network_document {
- BlockchainDocument::Block(ref network_block) => {
- match check_and_apply_block::(
- &self.blocks_databases,
- &self.wot_databases.certs_db,
- &Block::NetworkBlock(network_block),
- &current_blockstamp,
- wot_index,
- wot_db,
- &self.forks_states,
- ) {
- Ok(ValidBlockApplyReqs(block_req, wot_dbs_reqs, currency_dbs_reqs)) => {
- let block_doc = network_block.uncompleted_block_doc().clone();
- // Apply wot dbs requests
- for req in &wot_dbs_reqs {
- req.apply(&self.wot_databases, &self.currency_params)
- .expect(
+ if let BlockchainDocument::Block(ref block_doc) = network_document {
+ let block_doc = block_doc.deref();
+ match check_and_apply_block::(
+ &self.blocks_databases,
+ &self.wot_databases.certs_db,
+ Block::NetworkBlock(block_doc.clone()),
+ &current_blockstamp,
+ wot_index,
+ wot_db,
+ &self.forks_states,
+ ) {
+ Ok(ValidBlockApplyReqs(block_req, wot_dbs_reqs, currency_dbs_reqs)) => {
+ let block_doc = block_doc.clone();
+ let mut save_wots_dbs = false;
+ let mut save_currency_dbs = false;
+
+ // Apply wot dbs requests
+ for req in &wot_dbs_reqs {
+ req.apply(&self.wot_databases, &self.currency_params)
+ .expect(
"Fatal error : fail to apply WotsDBsWriteQuery : DALError !",
);
- }
- // Apply currency dbs requests
- for req in currency_dbs_reqs {
- req.apply(&self.currency_databases).expect(
- "Fatal error : fail to apply CurrencyDBsWriteQuery : DALError !",
- );
- }
- // Write block
- block_req.apply(&self.blocks_databases, false).expect(
- "Fatal error : fail to write block in BlocksDBs : DALError !",
- );
- if let BlocksDBsWriteQuery::WriteBlock(_, _, _, block_hash) = block_req
- {
- info!("StackUpValidBlock({})", block_doc.number.0);
- self.send_event(&BlockchainEvent::StackUpValidBlock(
- Box::new(block_doc.clone()),
- Blockstamp {
- id: block_doc.number,
- hash: block_hash,
- },
- ));
- }
- current_blockstamp = network_block.blockstamp();
- // Update forks states
- self.forks_states = durs_blockchain_dal::block::get_forks(
- &self.blocks_databases.forks_db,
- current_blockstamp,
- )
- .expect("get_forks() : DALError");
- save_blocks_dbs = true;
- if !wot_dbs_reqs.is_empty() {
- save_wots_dbs = true;
- }
- if !block_doc.transactions.is_empty()
- || (block_doc.dividend.is_some()
- && block_doc.dividend.expect("safe unwrap") > 0)
- {
- save_currency_dbs = true;
- }
}
- Err(_) => {
- warn!(
- "RefusedBlock({})",
- network_block.uncompleted_block_doc().number.0
+ // Apply currency dbs requests
+ for req in currency_dbs_reqs {
+ req.apply(&self.currency_databases).expect(
+ "Fatal error : fail to apply CurrencyDBsWriteQuery : DALError !",
);
- self.send_event(&BlockchainEvent::RefusedPendingDoc(
- DUBPDocument::V10(Box::new(V10Document::Block(Box::new(
- network_block.uncompleted_block_doc().clone(),
- )))),
+ }
+ // Write block
+ block_req
+ .apply(&self.blocks_databases, false)
+ .expect("Fatal error : fail to write block in BlocksDBs : DALError !");
+ if let BlocksDBsWriteQuery::WriteBlock(_, _, _, block_hash) = block_req {
+ info!("StackUpValidBlock({})", block_doc.number.0);
+ self.send_event(&BlockchainEvent::StackUpValidBlock(
+ Box::new(block_doc.clone()),
+ Blockstamp {
+ id: block_doc.number,
+ hash: block_hash,
+ },
));
}
+ current_blockstamp = block_doc.blockstamp();
+ // Update forks states
+ self.forks_states = durs_blockchain_dal::block::get_forks(
+ &self.blocks_databases.forks_db,
+ current_blockstamp,
+ )
+ .expect("get_forks() : DALError");
+
+ if !wot_dbs_reqs.is_empty() {
+ save_wots_dbs = true;
+ }
+ if !block_doc.transactions.is_empty()
+ || (block_doc.dividend.is_some()
+ && block_doc.dividend.expect("safe unwrap") > 0)
+ {
+ save_currency_dbs = true;
+ }
+
+ // Save databases
+ self.blocks_databases.save_dbs();
+ if save_wots_dbs {
+ self.wot_databases.save_dbs();
+ }
+ if save_currency_dbs {
+ self.currency_databases.save_dbs(true, true);
+ }
+ }
+ Err(_) => {
+ warn!("RefusedBlock({})", block_doc.number.0);
+ self.send_event(&BlockchainEvent::RefusedPendingDoc(DUBPDocument::Block(
+ Box::new(block_doc.clone()),
+ )));
}
- }
- BlockchainDocument::Identity(ref doc) => blockchain_documents.push(
- DUBPDocument::V10(Box::new(V10Document::Identity(doc.deref().clone()))),
- ),
- BlockchainDocument::Membership(ref doc) => blockchain_documents.push(
- DUBPDocument::V10(Box::new(V10Document::Membership(doc.deref().clone()))),
- ),
- BlockchainDocument::Certification(ref doc) => {
- blockchain_documents.push(DUBPDocument::V10(Box::new(
- V10Document::Certification(Box::new(doc.deref().clone())),
- )))
- }
- BlockchainDocument::Revocation(ref doc) => {
- blockchain_documents.push(DUBPDocument::V10(Box::new(V10Document::Revocation(
- Box::new(doc.deref().clone()),
- ))))
- }
- BlockchainDocument::Transaction(ref doc) => {
- blockchain_documents.push(DUBPDocument::V10(Box::new(
- V10Document::Transaction(Box::new(doc.deref().clone())),
- )))
}
}
}
- if !blockchain_documents.is_empty() {
- self.receive_documents(&blockchain_documents);
- }
- // Save databases
- if save_blocks_dbs {
- self.blocks_databases.save_dbs();
- }
- if save_wots_dbs {
- self.wot_databases.save_dbs();
- }
- if save_currency_dbs {
- self.currency_databases.save_dbs(true, true);
- }
+
current_blockstamp
}
- fn receive_documents(&self, documents: &[DUBPDocument]) {
- debug!("BlockchainModule : receive_documents()");
- for document in documents {
- trace!("BlockchainModule : Treat one document.");
- match *document {
- DUBPDocument::V10(ref doc_v10) => match doc_v10.deref() {
- _ => {}
- },
- _ => self.send_event(&BlockchainEvent::RefusedPendingDoc(document.clone())),
- }
- }
- }
+
fn receive_blocks(
&mut self,
- blocks_in_box: &[Box<NetworkBlock>],
+ blocks: Vec<Block>,
current_blockstamp: &Blockstamp,
wot_index: &mut HashMap<PubKey, NodeId>,
wot: &BinDB,
) -> Blockstamp {
debug!("BlockchainModule : receive_blocks()");
- let blocks: Vec<&NetworkBlock> = blocks_in_box.iter().map(|b| b.deref()).collect();
let mut current_blockstamp = *current_blockstamp;
let mut save_blocks_dbs = false;
let mut save_wots_dbs = false;
let mut save_currency_dbs = false;
- for block in blocks {
+ for block in blocks.into_iter() {
+ let blockstamp = block.blockstamp();
if let Ok(ValidBlockApplyReqs(bc_db_query, wot_dbs_queries, tx_dbs_queries)) =
check_and_apply_block::(
&self.blocks_databases,
&self.wot_databases.certs_db,
- &Block::NetworkBlock(block),
+ block,
&current_blockstamp,
wot_index,
wot,
&self.forks_states,
)
{
- current_blockstamp = block.blockstamp();
+ current_blockstamp = blockstamp;
// Update forks states
self.forks_states = durs_blockchain_dal::block::get_forks(
&self.blocks_databases.forks_db,
@@ -653,8 +654,16 @@ impl BlockchainModule {
_ => {}
}
}
- DursEvent::ReceiveValidDocsFromClient(ref docs) => {
- self.receive_documents(docs);
+ DursEvent::MemPoolEvent(ref mempool_event) => {
+ if let MemPoolEvent::FindNextBlock(next_block_box) = mempool_event {
+ let new_current_blockstamp = self.receive_blocks(
+ vec![Block::LocalBlock(next_block_box.deref().clone())],
+ &current_blockstamp,
+ &mut wot_index,
+ &wot_db,
+ );
+ current_blockstamp = new_current_blockstamp;
+ }
}
_ => {} // Others modules events
},
@@ -710,6 +719,11 @@ impl BlockchainModule {
if let NetworkResponse::Chunk(_, _, ref blocks) =
*network_response.deref()
{
+ let blocks: Vec<Block> = blocks
+ .iter()
+ .map(|b| Block::NetworkBlock(b.deref().clone()))
+ .collect();
+
let new_current_blockstamp = self.receive_blocks(
blocks,
&current_blockstamp,
@@ -766,6 +780,10 @@ impl BlockchainModule {
let mut find_valid_block = false;
for stackable_block in stackable_blocks {
debug!("stackable_block({})", stackable_block.block.number);
+
+ let stackable_block_number = stackable_block.block.number;
+ let stackable_block_blockstamp = stackable_block.block.blockstamp();
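+ // Captured here because stackable_block.block is moved into check_and_apply_block() below.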
+
if let Ok(ValidBlockApplyReqs(
bc_db_query,
wot_dbs_queries,
@@ -773,7 +791,7 @@ impl BlockchainModule {
)) = check_and_apply_block(
&self.blocks_databases,
&self.wot_databases.certs_db,
- &Block::LocalBlock(&stackable_block.block),
+ Block::LocalBlock(stackable_block.block),
&current_blockstamp,
&mut wot_index,
&wot_db,
@@ -803,16 +821,13 @@ impl BlockchainModule {
if !tx_dbs_queries.is_empty() {
self.currency_databases.save_dbs(true, true);
}
- debug!(
- "success to stackable_block({})",
- stackable_block.block.number
- );
+ debug!("success to stackable_block({})", stackable_block_number);
- current_blockstamp = stackable_block.block.blockstamp();
+ current_blockstamp = stackable_block_blockstamp;
find_valid_block = true;
break;
} else {
- warn!("fail to stackable_block({})", stackable_block.block.number);
+ warn!("fail to stackable_block({})", stackable_block_number);
// Delete this fork
DALBlock::delete_fork(
&self.blocks_databases.forks_db,
@@ -839,56 +854,34 @@ impl BlockchainModule {
}
}
-/// Complete Network Block
-pub fn complete_network_block(
- network_block: &NetworkBlock,
- verif_inner_hash: bool,
-) -> Result {
- if let NetworkBlock::V10(ref network_block_v10) = *network_block {
- let mut block_doc = network_block_v10.uncompleted_block_doc.clone();
- trace!("complete_network_block #{}...", block_doc.number);
- block_doc.certifications =
- durs_blockchain_dal::parsers::certifications::parse_certifications_into_compact(
- &network_block_v10.certifications,
- );
- trace!("Success to complete certs.");
- block_doc.revoked = durs_blockchain_dal::parsers::revoked::parse_revocations_into_compact(
- &network_block_v10.revoked,
- );
- trace!("Success to complete certs & revocations.");
- let inner_hash = block_doc.inner_hash.expect(
- "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
+/// Verify block hashes
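+///
+/// Checks the block's inner hash first, then its hash; returns a
+/// `VerifyBlockHashsError` describing the first check that fails.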
+pub fn verify_block_hashs(block_doc: &BlockDocument) -> Result<(), VerifyBlockHashsError> {
+ trace!("verify_block_hashs #{}...", block_doc.number);
+
+ if block_doc.inner_hash.is_none() {
+ fatal_error(
+ "BlockchainModule : verify_block_hashs() : fatal error : block.inner_hash = None",
);
- if verif_inner_hash && block_doc.number.0 > 0 {
- block_doc.compute_inner_hash();
- }
- let hash = block_doc.hash;
- block_doc.compute_hash();
- if block_doc.inner_hash.expect(
- "BlockchainModule : complete_network_block() : fatal error : block.inner_hash = None",
- ) == inner_hash
- {
- block_doc.fill_inner_hash_and_nonce_str(None);
- if !verif_inner_hash || block_doc.hash == hash {
- trace!("Succes to complete_network_block #{}", block_doc.number.0);
- Ok(block_doc)
- } else {
- warn!("BlockchainModule : Refuse Bloc : invalid hash !");
- Err(CompletedBlockError::InvalidHash(
- block_doc.number,
- block_doc.hash,
- hash,
- ))
- }
+ }
+
+ if block_doc.verify_inner_hash() {
+ if block_doc.verify_hash() {
+ trace!("Success to verify_block_hashs #{}", block_doc.number.0);
+ Ok(())
} else {
- warn!("BlockchainModule : Refuse Bloc : invalid inner hash !");
- debug!(
- "BlockInnerFormat={}",
- block_doc.generate_compact_inner_text()
- );
- Err(CompletedBlockError::InvalidInnerHash())
+ warn!("BlockchainModule : Refuse Block : invalid hash !");
+ Err(VerifyBlockHashsError::InvalidHash(
+ block_doc.number,
+ block_doc.hash,
+ ))
}
} else {
- Err(CompletedBlockError::InvalidVersion())
+ warn!("BlockchainModule : Refuse Block : invalid inner hash !");
+ warn!("BlockDocument=\"{:?}\"", block_doc);
+ warn!(
+ "BlockInnerFormat=\"{}\"",
+ block_doc.generate_compact_inner_text()
+ );
+ Err(VerifyBlockHashsError::InvalidInnerHash())
}
}
diff --git a/lib/modules/blockchain/blockchain/revert_block.rs b/lib/modules/blockchain/blockchain/revert_block.rs
index b1d7f7e8a86b9d4f8aa28d2713a2a9fd262978c9..4721eda00ffa058025664240073a820d37e0d313 100644
--- a/lib/modules/blockchain/blockchain/revert_block.rs
+++ b/lib/modules/blockchain/blockchain/revert_block.rs
@@ -13,8 +13,8 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::block::TxDocOrTxHash;
-use dubp_documents::v10::transaction::{TxAmount, TxBase};
+use dubp_documents::documents::block::TxDocOrTxHash;
+use dubp_documents::documents::transaction::{TxAmount, TxBase};
use dubp_documents::Document;
use dup_crypto::keys::*;
use durs_blockchain_dal::block::DALBlock;
diff --git a/lib/modules/blockchain/blockchain/sync.rs b/lib/modules/blockchain/blockchain/sync.rs
deleted file mode 100644
index 1ab46712921e33c4b51bfa0c99b736e153207e3f..0000000000000000000000000000000000000000
--- a/lib/modules/blockchain/blockchain/sync.rs
+++ /dev/null
@@ -1,684 +0,0 @@
-// Copyright (C) 2018 The Duniter Project Developers.
-//
-// This program is free software: you can redistribute it and/or modify
-// it under the terms of the GNU Affero General Public License as
-// published by the Free Software Foundation, either version 3 of the
-// License, or (at your option) any later version.
-//
-// This program is distributed in the hope that it will be useful,
-// but WITHOUT ANY WARRANTY; without even the implied warranty of
-// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
-// GNU Affero General Public License for more details.
-//
-// You should have received a copy of the GNU Affero General Public License
-// along with this program. If not, see <https://www.gnu.org/licenses/>.
-
-use crate::ts_parsers::*;
-use crate::*;
-use dubp_documents::{BlockHash, BlockId};
-use duniter_network::documents::NetworkBlock;
-use dup_crypto::hashs::Hash;
-use dup_crypto::keys::*;
-use durs_blockchain_dal::currency_params::CurrencyParameters;
-use durs_blockchain_dal::writers::requests::*;
-use durs_blockchain_dal::ForkId;
-use durs_wot::NodeId;
-use pbr::ProgressBar;
-use std::collections::{HashMap, VecDeque};
-use std::fs;
-use std::ops::Deref;
-use std::sync::mpsc;
-use std::thread;
-use std::time::SystemTime;
-use threadpool::ThreadPool;
-
-/// Number of sync jobs
-pub static NB_SYNC_JOBS: &'static usize = &4;
-
-#[derive(Debug, Clone, PartialEq, Eq)]
-/// Block header
-pub struct BlockHeader {
- pub number: BlockId,
- pub hash: BlockHash,
- pub issuer: PubKey,
-}
-
-#[derive(Debug)]
-/// Message for main sync thread
-enum MessForSyncThread {
- Target(CurrencyName, Blockstamp),
- NetworkBlock(NetworkBlock),
- DownloadFinish(),
- ApplyFinish(),
-}
-
-#[derive(Debug)]
-/// Message for a job thread
-enum SyncJobsMess {
- BlocksDBsWriteQuery(BlocksDBsWriteQuery),
- WotsDBsWriteQuery(WotsDBsWriteQuery, Box),
- CurrencyDBsWriteQuery(CurrencyDBsWriteQuery),
- End(),
-}
-
-/// Sync from a duniter-ts database
-pub fn sync_ts(
- profile: &str,
- conf: &DC,
- db_ts_path: PathBuf,
- end: Option,
- cautious: bool,
- verif_inner_hash: bool,
-) {
- // Get verification level
- let _verif_level = if cautious {
- println!("Start cautious sync...");
- info!("Start cautious sync...");
- SyncVerificationLevel::Cautious()
- } else {
- println!("Start fast sync...");
- info!("Start fast sync...");
- SyncVerificationLevel::FastSync()
- };
-
- // Create sync_thread channels
- let (sender_sync_thread, recv_sync_thread) = mpsc::channel();
-
- // Create ThreadPool
- let nb_cpus = num_cpus::get();
- let nb_workers = if nb_cpus < *NB_SYNC_JOBS {
- nb_cpus
- } else {
- *NB_SYNC_JOBS
- };
- let pool = ThreadPool::new(nb_workers);
-
- // Determine db_ts_copy_path
- let mut db_ts_copy_path = duniter_conf::datas_path(profile, &conf.currency().clone());
- db_ts_copy_path.push("tmp_db_ts_copy.db");
-
- // Lauch ts thread
- let profile_copy = String::from(profile);
- let sender_sync_thread_clone = sender_sync_thread.clone();
- pool.execute(move || {
- let ts_job_begin = SystemTime::now();
- // copy db_ts
- fs::copy(db_ts_path.as_path(), db_ts_copy_path.as_path())
- .expect("Fatal error : fail to copy duniter-ts database !");
- // open copy of db_ts
- let ts_db = sqlite::open(db_ts_copy_path.as_path())
- .expect("Fatal error : fail to open copy of duniter-ts database !");
- info!("sync_ts : Success to open duniter-ts database.");
-
- // Get ts target blockstamp
- debug!("Get ts-db current blockstamp...");
- let mut cursor: sqlite::Cursor = if let Some(end) = end {
- let mut cursor = ts_db
- .prepare("SELECT hash, number, currency FROM block WHERE fork=? AND number=? LIMIT 1;")
- .expect("Request SQL get_ts_current_block is wrong !")
- .cursor();
- cursor.bind(&[sqlite::Value::Integer(0), sqlite::Value::Integer(i64::from(end))]).expect("Fail to get ts target block !");
- cursor
- } else {
- let mut cursor = ts_db
- .prepare("SELECT hash, number, currency FROM block WHERE fork=? ORDER BY number DESC LIMIT 1;")
- .expect("Request SQL get_ts_current_block is wrong !")
- .cursor();
- cursor.bind(&[sqlite::Value::Integer(0)]).expect("Fail to get ts current block !");
- cursor
- };
-
- let (currency, current_ts_blockstamp) =
- if let Some(row) = cursor.next().expect("cursor error") {
- let block_id = BlockId(
- row[1]
- .as_integer()
- .expect("Fail to parse current ts blockstamp !") as u32,
- );
- let block_hash = BlockHash(
- Hash::from_hex(
- row[0]
- .as_string()
- .expect("Fail to parse current ts blockstamp !"),
- ).expect("Fail to parse current ts blockstamp !"),
- );
- (
- CurrencyName(String::from(
- row[2]
- .as_string()
- .expect("Fatal error :Fail to get currency !"),
- )),
- Blockstamp {
- id: block_id,
- hash: block_hash,
- },
- )
- } else {
- panic!("Fail to get current ts blockstamp !");
- };
-
- debug!("Success to ts-db current blockstamp.");
-
- // Get current local blockstamp
- debug!("Get local current blockstamp...");
- let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency);
- let blocks_databases = BlocksV10DBs::open(Some(&db_path));
- let current_blockstamp: Blockstamp = durs_blockchain_dal::block::get_current_blockstamp(
- &blocks_databases,
- ).expect("ForksV10DB : RustBreakError !")
- .unwrap_or_default();
- debug!("Success to get local current blockstamp.");
-
- // Send ts current blockstamp
- sender_sync_thread_clone
- .send(MessForSyncThread::Target(
- currency.clone(),
- current_ts_blockstamp,
- ))
- .expect("Fatal error : sync_thread unrechable !");
-
- // Get genesis block
- if current_blockstamp == Blockstamp::default() {
- let mut cursor: sqlite::Cursor = ts_db
- .prepare(
- "SELECT hash, inner_hash, signature, currency, issuer, parameters, previousHash,
- previousIssuer, version, membersCount, monetaryMass, medianTime, dividend, unitbase,
- time, powMin, number, nonce, transactions, certifications, identities, joiners,
- actives, leavers, revoked, excluded, issuersFrame, issuersFrameVar, issuersCount
- FROM block WHERE fork=0 AND number=? LIMIT 1;",
- )
- .expect("Request SQL get_ts_blocks is wrong !")
- .cursor();
- cursor
- .bind(&[sqlite::Value::Integer(0)])
- .expect("Fail to get genesis block !");
- if let Some(row) = cursor.next().expect("cursor error") {
- sender_sync_thread_clone
- .send(MessForSyncThread::NetworkBlock(parse_ts_block(row)))
- .expect("Fatal error : sync_thread unrechable !");
- }
- }
-
- // Request ts blocks
- let mut cursor: sqlite::Cursor = ts_db
- .prepare(
- "SELECT hash, inner_hash, signature, currency, issuer, parameters, previousHash,
- previousIssuer, version, membersCount, monetaryMass, medianTime, dividend, unitbase,
- time, powMin, number, nonce, transactions, certifications, identities, joiners,
- actives, leavers, revoked, excluded, issuersFrame, issuersFrameVar, issuersCount
- FROM block WHERE fork=? AND number > ? AND number <= ? ORDER BY number ASC;",
- )
- .expect("Request SQL get_ts_blocks is wrong !")
- .cursor();
- cursor
- .bind(&[
- sqlite::Value::Integer(0),
- sqlite::Value::Integer(i64::from(current_blockstamp.id.0)),
- sqlite::Value::Integer(i64::from(current_ts_blockstamp.id.0)),
- ])
- .expect("0");
-
- // Parse ts blocks
- //let mut ts_blocks = Vec::with_capacity(current_ts_blockstamp.id.0 + 1);
- //let pool = ThreadPool::new(4);
- while let Some(row) = cursor.next().expect("cursor error") {
- //let sender_sync_thread_clone = sender_sync_thread.clone();
- //pool.execute(move || {
- sender_sync_thread_clone
- .send(MessForSyncThread::NetworkBlock(parse_ts_block(row)))
- .expect("Fatal error : sync_thread unrechable !");
- //});
- }
- fs::remove_file(db_ts_copy_path.as_path())
- .expect("Fatal error : fail to remove db_ts_copy !");
- sender_sync_thread_clone
- .send(MessForSyncThread::DownloadFinish())
- .expect("Fatal error : sync_thread unrechable !");
- let ts_job_duration = SystemTime::now()
- .duration_since(ts_job_begin)
- .expect("duration_since error");
- info!(
- "ts_job_duration={},{:03} seconds.",
- ts_job_duration.as_secs(),
- ts_job_duration.subsec_millis()
- );
- });
-
- // Get currency and target blockstamp
- let (currency, target_blockstamp) =
- if let Ok(MessForSyncThread::Target(currency, target_blockstamp)) = recv_sync_thread.recv()
- {
- (currency, target_blockstamp)
- } else {
- panic!("Fatal error : no TargetBlockstamp !")
- };
-
- // Update DuniterConf
- let mut conf = conf.clone();
- conf.set_currency(currency.clone());
-
- // Get databases path
- let db_path = duniter_conf::get_blockchain_db_path(profile, &currency);
-
- // Write nex conf
- duniter_conf::write_conf_file(profile, &conf).expect("Fail to write new conf !");
-
- // Open wot db
- let wot_db = open_wot_db::(Some(&db_path)).expect("Fail to open WotDB !");
-
- // Open blocks databases
- let databases = BlocksV10DBs::open(Some(&db_path));
-
- // Open wot databases
- let wot_databases = WotsV10DBs::open(Some(&db_path));
-
- // Get local current blockstamp
- debug!("Get local current blockstamp...");
- let mut current_blockstamp: Blockstamp =
- durs_blockchain_dal::block::get_current_blockstamp(&databases)
- .expect("ForksV10DB : RustBreakError !")
- .unwrap_or_default();
- debug!("Success to get local current blockstamp.");
-
- // Node is already synchronized ?
- if target_blockstamp.id.0 < current_blockstamp.id.0 {
- println!("Your duniter-rs node is already synchronized.");
- return;
- }
-
- // Get wot index
- let mut wot_index: HashMap =
- DALIdentity::get_wot_index(&wot_databases.identities_db)
- .expect("Fatal eror : get_wot_index : Fail to read blockchain databases");
-
- // Start sync
- let sync_start_time = SystemTime::now();
- info!(
- "Sync from #{} to #{}...",
- current_blockstamp.id.0, target_blockstamp.id.0
- );
- println!(
- "Sync from #{} to #{}...",
- current_blockstamp.id.0, target_blockstamp.id.0
- );
-
- // Createprogess bar
- let count_blocks = target_blockstamp.id.0 + 1 - current_blockstamp.id.0;
- let count_chunks = if count_blocks % 250 > 0 {
- (count_blocks / 250) + 1
- } else {
- count_blocks / 250
- };
- let mut apply_pb = ProgressBar::new(count_chunks.into());
- apply_pb.format("╢▌▌░╟");
- // Create workers threads channels
- let (sender_blocks_thread, recv_blocks_thread) = mpsc::channel();
- let (sender_tx_thread, recv_tx_thread) = mpsc::channel();
- let (sender_wot_thread, recv_wot_thread) = mpsc::channel();
-
- // Launch blocks_worker thread
- let sender_sync_thread_clone = sender_sync_thread.clone();
- pool.execute(move || {
- let blocks_job_begin = SystemTime::now();
-
- // Listen db requets
- let mut chunk_index = 0;
- let mut blockchain_meta_datas = HashMap::new();
- let mut all_wait_duration = Duration::from_millis(0);
- let mut wait_begin = SystemTime::now();
- while let Ok(SyncJobsMess::BlocksDBsWriteQuery(req)) = recv_blocks_thread.recv() {
- all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
- // Apply db request
- req.apply(&databases, true)
- .expect("Fatal error : Fail to apply DBWriteRequest !");
- if let BlocksDBsWriteQuery::WriteBlock(
- ref _dal_block,
- ref _old_fork_id,
- ref previous_blockstamp,
- ref previous_hash,
- ) = req
- {
- blockchain_meta_datas.insert(*previous_blockstamp, *previous_hash);
- chunk_index += 1;
- if chunk_index == 250 {
- chunk_index = 0;
- apply_pb.inc();
- }
- }
- wait_begin = SystemTime::now();
- }
-
- // Indexing blockchain meta datas
- info!("Indexing blockchain meta datas...");
- /*let blockchain_meta_datas: HashMap = databases
- .blockchain_db
- .read(|db| {
- let mut blockchain_meta_datas: HashMap<
- PreviousBlockstamp,
- BlockHash,
- > = HashMap::new();
- for dal_block in db.values() {
- let block_previous_hash = if dal_block.block.number.0 == 0 {
- PreviousBlockstamp::default()
- } else {
- PreviousBlockstamp {
- id: BlockId(dal_block.block.number.0 - 1),
- hash: BlockHash(dal_block.block.previous_hash),
- }
- };
- blockchain_meta_datas
- .insert(block_previous_hash, dal_block.block.expect("Try to get hash of an uncompleted or reduce block !"));
- }
- blockchain_meta_datas
- })
- .expect("Indexing blockchain meta datas : DALError");*/
- databases
- .forks_db
- .write(|db| {
- db.insert(ForkId(0), blockchain_meta_datas);
- })
- .expect("Indexing blockchain meta datas : DALError");
-
- // Increment progress bar (last chunk)
- apply_pb.inc();
- // Save blockchain, and fork databases
- println!();
- println!("Write indexs in files...");
- info!("Save blockchain and forks databases in files...");
- databases.save_dbs();
-
- // Send finish signal
- sender_sync_thread_clone
- .send(MessForSyncThread::ApplyFinish())
- .expect("Fatal error : sync_thread unrechable !");
- let blocks_job_duration =
- SystemTime::now().duration_since(blocks_job_begin).unwrap() - all_wait_duration;
- info!(
- "blocks_job_duration={},{:03} seconds.",
- blocks_job_duration.as_secs(),
- blocks_job_duration.subsec_millis()
- );
- });
-
- // / Launch wot_worker thread
- let profile_copy2 = String::from(profile);
- let currency_copy2 = currency.clone();
- let sender_sync_thread_clone2 = sender_sync_thread.clone();
-
- pool.execute(move || {
- let wot_job_begin = SystemTime::now();
- // Open databases
- let db_path = duniter_conf::get_blockchain_db_path(&profile_copy2, &currency_copy2);
- let databases = WotsV10DBs::open(Some(&db_path));
-
- // Listen db requets
- let mut all_wait_duration = Duration::from_millis(0);
- let mut wait_begin = SystemTime::now();
- while let Ok(mess) = recv_wot_thread.recv() {
- all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
- match mess {
- SyncJobsMess::WotsDBsWriteQuery(req, currency_params) => req
- .apply(&databases, &currency_params.deref())
- .expect("Fatal error : Fail to apply DBWriteRequest !"),
- SyncJobsMess::End() => break,
- _ => {}
- }
- wait_begin = SystemTime::now();
- }
- // Save wots databases
- info!("Save wots databases in files...");
- databases.save_dbs();
-
- // Send finish signal
- sender_sync_thread_clone2
- .send(MessForSyncThread::ApplyFinish())
- .expect("Fatal error : sync_thread unrechable !");
- let wot_job_duration =
- SystemTime::now().duration_since(wot_job_begin).unwrap() - all_wait_duration;
- info!(
- "wot_job_duration={},{:03} seconds.",
- wot_job_duration.as_secs(),
- wot_job_duration.subsec_millis()
- );
- });
-
- // Launch tx_worker thread
- let profile_copy = String::from(profile);
- let currency_copy = conf.currency().clone();
- let sender_sync_thread_clone = sender_sync_thread.clone();
- pool.execute(move || {
- let tx_job_begin = SystemTime::now();
- // Open databases
- let db_path = duniter_conf::get_blockchain_db_path(&profile_copy, &currency_copy);
- let databases = CurrencyV10DBs::open(Some(&db_path));
-
- // Listen db requets
- let mut all_wait_duration = Duration::from_millis(0);
- let mut wait_begin = SystemTime::now();
- while let Ok(SyncJobsMess::CurrencyDBsWriteQuery(req)) = recv_tx_thread.recv() {
- all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
- // Apply db request
- req.apply(&databases)
- .expect("Fatal error : Fail to apply DBWriteRequest !");
- wait_begin = SystemTime::now();
- }
- // Save tx, utxo, du and balances databases
- info!("Save tx and sources database in file...");
- databases.save_dbs(true, true);
-
- // Send finish signal
- sender_sync_thread_clone
- .send(MessForSyncThread::ApplyFinish())
- .expect("Fatal error : sync_thread unrechable !");
- let tx_job_duration =
- SystemTime::now().duration_since(tx_job_begin).unwrap() - all_wait_duration;
- info!(
- "tx_job_duration={},{:03} seconds.",
- tx_job_duration.as_secs(),
- tx_job_duration.subsec_millis()
- );
- });
- let main_job_begin = SystemTime::now();
-
- // Open currency_params_db
- let dbs_path = duniter_conf::get_blockchain_db_path(profile, &conf.currency());
- let currency_params_db = open_file_db::(&dbs_path, "params.db")
- .expect("Fail to open params db");
-
- // Apply blocks
- let mut blocks_not_expiring = VecDeque::with_capacity(200_000);
- let mut last_block_expiring: isize = -1;
- let certs_db =
- BinDB::Mem(open_memory_db::().expect("Fail to create memory certs_db"));
- let mut currency_params = CurrencyParameters::default();
- let mut get_currency_params = false;
- let mut certs_count = 0;
-
- let mut all_wait_duration = Duration::from_millis(0);
- let mut wait_begin = SystemTime::now();
- let mut all_complete_block_duration = Duration::from_millis(0);
- let mut all_apply_valid_block_duration = Duration::from_millis(0);
- while let Ok(MessForSyncThread::NetworkBlock(network_block)) = recv_sync_thread.recv() {
- all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
- // Complete block
- let complete_block_begin = SystemTime::now();
- let block_doc = complete_network_block(&network_block, verif_inner_hash)
- .expect("Receive wrong block, please reset data and resync !");
- all_complete_block_duration += SystemTime::now()
- .duration_since(complete_block_begin)
- .unwrap();
- // Get currency params
- if !get_currency_params && block_doc.number.0 == 0 {
- if block_doc.parameters.is_some() {
- currency_params_db
- .write(|db| {
- db.0 = block_doc.currency.clone();
- db.1 = block_doc.parameters.unwrap();
- })
- .expect("fail to write in params DB");
- currency_params = CurrencyParameters::from((
- block_doc.currency.clone(),
- block_doc.parameters.unwrap(),
- ));
- get_currency_params = true;
- } else {
- panic!("The genesis block are None parameters !");
- }
- }
- // Push block median_time in blocks_not_expiring
- blocks_not_expiring.push_back(block_doc.median_time);
- // Get blocks_expiring
- let mut blocks_expiring = Vec::new();
- while blocks_not_expiring.front().cloned()
- < Some(block_doc.median_time - currency_params.sig_validity)
- {
- last_block_expiring += 1;
- blocks_expiring.push(BlockId(last_block_expiring as u32));
- blocks_not_expiring.pop_front();
- }
- // Find expire_certs
- let expire_certs =
- durs_blockchain_dal::certs::find_expire_certs(&certs_db, blocks_expiring)
- .expect("find_expire_certs() : DALError");
- // Apply block
- let apply_valid_block_begin = SystemTime::now();
- if let Ok(ValidBlockApplyReqs(block_req, wot_db_reqs, currency_db_reqs)) =
- apply_valid_block::(
- &block_doc,
- &mut wot_index,
- &wot_db,
- &expire_certs,
- None,
- )
- {
- all_apply_valid_block_duration += SystemTime::now()
- .duration_since(apply_valid_block_begin)
- .unwrap();
- current_blockstamp = network_block.blockstamp();
- debug!("Apply db requests...");
- // Send block request to blocks worker thread
- sender_blocks_thread
- .send(SyncJobsMess::BlocksDBsWriteQuery(block_req.clone()))
- .expect(
- "Fail to communicate with blocks worker thread, please reset data & resync !",
- );
- // Send wot requests to wot worker thread
- for req in wot_db_reqs {
- if let WotsDBsWriteQuery::CreateCert(
- ref _source_pubkey,
- ref source,
- ref target,
- ref created_block_id,
- ref _median_time,
- ) = req
- {
- certs_count += 1;
- // Add cert in certs_db
- certs_db
- .write(|db| {
- let mut created_certs =
- db.get(&created_block_id).cloned().unwrap_or_default();
- created_certs.insert((*source, *target));
- db.insert(*created_block_id, created_certs);
- })
- .expect("RustBreakError : please reset data and resync !");
- }
- sender_wot_thread
- .send(SyncJobsMess::WotsDBsWriteQuery(
- req.clone(),
- Box::new(currency_params),
- ))
- .expect(
- "Fail to communicate with tx worker thread, please reset data & resync !",
- )
- }
- // Send blocks and wot requests to wot worker thread
- for req in currency_db_reqs {
- sender_tx_thread
- .send(SyncJobsMess::CurrencyDBsWriteQuery(req.clone()))
- .expect(
- "Fail to communicate with tx worker thread, please reset data & resync !",
- );
- }
- debug!("Success to apply block #{}", current_blockstamp.id.0);
- if current_blockstamp.id.0 >= target_blockstamp.id.0 {
- if current_blockstamp == target_blockstamp {
- // Sync completed
- break;
- } else {
- panic!("Fatal Error : we get a fork, please reset data and sync again !");
- }
- }
- } else {
- panic!(
- "Fatal error : fail to stack up block #{}",
- current_blockstamp.id.0 + 1
- )
- }
- wait_begin = SystemTime::now();
- }
- // Send end signal to workers threads
- sender_blocks_thread
- .send(SyncJobsMess::End())
- .expect("Sync : Fail to send End signal to blocks worker !");
- info!("Sync : send End signal to blocks job.");
- sender_wot_thread
- .send(SyncJobsMess::End())
- .expect("Sync : Fail to send End signal to wot worker !");
- info!("Sync : send End signal to wot job.");
- sender_tx_thread
- .send(SyncJobsMess::End())
- .expect("Sync : Fail to send End signal to writer worker !");
- info!("Sync : send End signal to tx job.");
-
- // Save params db
- currency_params_db.save().expect("Fail to save params db");
-
- // Save wot file
- wot_db.save().expect("Fail to save wot db");
-
- let main_job_duration =
- SystemTime::now().duration_since(main_job_begin).unwrap() - all_wait_duration;
- info!(
- "main_job_duration={},{:03} seconds.",
- main_job_duration.as_secs(),
- main_job_duration.subsec_millis()
- );
- info!(
- "all_complete_block_duration={},{:03} seconds.",
- all_complete_block_duration.as_secs(),
- all_complete_block_duration.subsec_millis()
- );
- info!(
- "all_apply_valid_block_duration={},{:03} seconds.",
- all_apply_valid_block_duration.as_secs(),
- all_apply_valid_block_duration.subsec_millis()
- );
-
- // Wait recv two finish signals
- let mut wait_jobs = *NB_SYNC_JOBS - 1;
- while wait_jobs > 0 {
- match recv_sync_thread.recv() {
- Ok(MessForSyncThread::ApplyFinish()) => wait_jobs -= 1,
- Ok(_) => thread::sleep(Duration::from_millis(50)),
- Err(_) => wait_jobs -= 1,
- }
- }
- info!("All sync jobs finish.");
-
- // Log sync duration
- debug!("certs_count={}", certs_count);
- let sync_duration = SystemTime::now().duration_since(sync_start_time).unwrap();
- println!(
- "Sync {} blocks in {}.{:03} seconds.",
- current_blockstamp.id.0 + 1,
- sync_duration.as_secs(),
- sync_duration.subsec_millis(),
- );
- info!(
- "Sync {} blocks in {}.{:03} seconds.",
- current_blockstamp.id.0 + 1,
- sync_duration.as_secs(),
- sync_duration.subsec_millis(),
- );
-}
diff --git a/lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs b/lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs
new file mode 100644
index 0000000000000000000000000000000000000000..0d8daadab84325da0a2d8b6066df43a4d5767072
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/blocks_worker.rs
@@ -0,0 +1,109 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::sync::*;
+use pbr::ProgressBar;
+use std::sync::mpsc;
+
+pub fn execute(
+ pool: &ThreadPool,
+ sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+ recv: mpsc::Receiver<SyncJobsMess>,
+ databases: BlocksV10DBs,
+ mut apply_pb: ProgressBar,
+) {
+ // Launch blocks_worker thread
+ pool.execute(move || {
+ let blocks_job_begin = SystemTime::now();
+
+ // Listen db requests
+ let mut chunk_index = 0;
+ let mut blockchain_meta_datas = HashMap::new();
+ let mut all_wait_duration = Duration::from_millis(0);
+ let mut wait_begin = SystemTime::now();
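+ // Time spent blocked on the channel is accumulated in all_wait_duration and
+ // subtracted from blocks_job_duration below, so only active work is measured.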
+ while let Ok(SyncJobsMess::BlocksDBsWriteQuery(req)) = recv.recv() {
+ all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+ // Apply db request
+ req.apply(&databases, true)
+ .expect("Fatal error : Fail to apply DBWriteRequest !");
+ if let BlocksDBsWriteQuery::WriteBlock(
+ ref _dal_block,
+ ref _old_fork_id,
+ ref previous_blockstamp,
+ ref previous_hash,
+ ) = req
+ {
+ blockchain_meta_datas.insert(*previous_blockstamp, *previous_hash);
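+ // Tick the progress bar once per chunk of 250 applied blocks.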
+ chunk_index += 1;
+ if chunk_index == 250 {
+ chunk_index = 0;
+ apply_pb.inc();
+ }
+ }
+ wait_begin = SystemTime::now();
+ }
+
+ // Indexing blockchain meta datas
+ info!("Indexing blockchain meta datas...");
+ /*let blockchain_meta_datas: HashMap = databases
+ .blockchain_db
+ .read(|db| {
+ let mut blockchain_meta_datas: HashMap<
+ PreviousBlockstamp,
+ BlockHash,
+ > = HashMap::new();
+ for dal_block in db.values() {
+ let block_previous_hash = if dal_block.block.number.0 == 0 {
+ PreviousBlockstamp::default()
+ } else {
+ PreviousBlockstamp {
+ id: BlockId(dal_block.block.number.0 - 1),
+ hash: BlockHash(dal_block.block.previous_hash),
+ }
+ };
+ blockchain_meta_datas
+ .insert(block_previous_hash, dal_block.block.expect("Try to get hash of an uncompleted or reduce block !"));
+ }
+ blockchain_meta_datas
+ })
+ .expect("Indexing blockchain meta datas : DALError");*/
+ databases
+ .forks_db
+ .write(|db| {
+ db.insert(ForkId(0), blockchain_meta_datas);
+ })
+ .expect("Indexing blockchain meta datas : DALError");
+
+ // Increment progress bar (last chunk)
+ apply_pb.inc();
+ // Save blockchain, and fork databases
+ println!();
+ println!("Write indexes in files...");
+ info!("Save blockchain and forks databases in files...");
+ databases.save_dbs();
+
+ // Send finish signal
+ sender_sync_thread
+ .send(MessForSyncThread::ApplyFinish())
+ .expect("Fatal error : sync_thread unreachable !");
+ let blocks_job_duration =
+ SystemTime::now().duration_since(blocks_job_begin).unwrap() - all_wait_duration;
+ info!(
+ "blocks_job_duration={},{:03} seconds.",
+ blocks_job_duration.as_secs(),
+ blocks_job_duration.subsec_millis()
+ );
+ });
+}
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/mod.rs b/lib/modules/blockchain/blockchain/sync/apply/mod.rs
similarity index 80%
rename from lib/modules/blockchain/blockchain-dal/parsers/mod.rs
rename to lib/modules/blockchain/blockchain/sync/apply/mod.rs
index 8da75d900cba69452d9633aafc01f34f5961a148..2826b034494c08d5e0c590aed69582ffabaaead1 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/mod.rs
+++ b/lib/modules/blockchain/blockchain/sync/apply/mod.rs
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 The Duniter Project Developers.
+// Copyright (C) 2018 The Durs Project Developers.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
@@ -13,8 +13,6 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-/// Parsers for certifications event
-pub mod certifications;
-
-/// Parsers for revoked event
-pub mod revoked;
+pub mod blocks_worker;
+pub mod txs_worker;
+pub mod wot_worker;
diff --git a/lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs b/lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs
new file mode 100644
index 0000000000000000000000000000000000000000..ebd456c995f2af033291a001665fade8e1f78911
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/txs_worker.rs
@@ -0,0 +1,59 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::sync::*;
+use std::sync::mpsc;
+
+pub fn execute(
+ pool: &ThreadPool,
+ profile: String,
+ currency: CurrencyName,
+ sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+ recv: mpsc::Receiver<SyncJobsMess>,
+) {
+ // Launch tx_worker thread
+ pool.execute(move || {
+ let tx_job_begin = SystemTime::now();
+ // Open databases
+ let db_path = duniter_conf::get_blockchain_db_path(&profile, &currency);
+ let databases = CurrencyV10DBs::open(Some(&db_path));
+
+ // Listen db requests
+ let mut all_wait_duration = Duration::from_millis(0);
+ let mut wait_begin = SystemTime::now();
+ while let Ok(SyncJobsMess::CurrencyDBsWriteQuery(req)) = recv.recv() {
+ all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+ // Apply db request
+ req.apply(&databases)
+ .expect("Fatal error : Fail to apply DBWriteRequest !");
+ wait_begin = SystemTime::now();
+ }
+ // Save tx, utxo, du and balances databases
+ info!("Save tx and sources database in file...");
+ databases.save_dbs(true, true);
+
+ // Send finish signal
+ sender_sync_thread
+ .expect("Fatal error : sync_thread unreachable !");
+ .expect("Fatal error : sync_thread unrechable !");
+ let tx_job_duration =
+ SystemTime::now().duration_since(tx_job_begin).unwrap() - all_wait_duration;
+ info!(
+ "tx_job_duration={},{:03} seconds.",
+ tx_job_duration.as_secs(),
+ tx_job_duration.subsec_millis()
+ );
+ });
+}
diff --git a/lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs b/lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs
new file mode 100644
index 0000000000000000000000000000000000000000..33d2620a27622353c14e80e25eaa77290a166b1e
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/apply/wot_worker.rs
@@ -0,0 +1,63 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::sync::*;
+use std::sync::mpsc;
+
+pub fn execute(
+ pool: &ThreadPool,
+ profile: String,
+ currency: CurrencyName,
+ sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+ recv: mpsc::Receiver<SyncJobsMess>,
+) {
+ // Launch wot_worker thread
+ pool.execute(move || {
+ let wot_job_begin = SystemTime::now();
+ // Open databases
+ let db_path = duniter_conf::get_blockchain_db_path(&profile, &currency);
+ let databases = WotsV10DBs::open(Some(&db_path));
+
+ // Listen db requests
+ let mut all_wait_duration = Duration::from_millis(0);
+ let mut wait_begin = SystemTime::now();
+ while let Ok(mess) = recv.recv() {
+ all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+ match mess {
+ SyncJobsMess::WotsDBsWriteQuery(req, currency_params) => req
+ .apply(&databases, &currency_params.deref())
+ .expect("Fatal error : Fail to apply DBWriteRequest !"),
+ SyncJobsMess::End() => break,
+ _ => {}
+ }
+ wait_begin = SystemTime::now();
+ }
+ // Save wots databases
+ info!("Save wots databases in files...");
+ databases.save_dbs();
+
+ // Send finish signal
+ sender_sync_thread
+ .send(MessForSyncThread::ApplyFinish())
+ .expect("Fatal error : sync_thread unreachable !");
+ let wot_job_duration =
+ SystemTime::now().duration_since(wot_job_begin).unwrap() - all_wait_duration;
+ info!(
+ "wot_job_duration={},{:03} seconds.",
+ wot_job_duration.as_secs(),
+ wot_job_duration.subsec_millis()
+ );
+ });
+}
diff --git a/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
new file mode 100644
index 0000000000000000000000000000000000000000..4d44db5694412e91f6c2ac83fdb34ebb27b068db
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/download/json_reader_worker.rs
@@ -0,0 +1,243 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::constants::*;
+use crate::sync::*;
+use dubp_documents::parsers::blocks::parse_json_block;
+use dubp_documents::Blockstamp;
+use durs_common_tools::fatal_error;
+use failure::Error;
+use std::collections::HashSet;
+use std::io::Read;
+use std::path::{Path, PathBuf};
+use std::sync::mpsc;
+use threadpool::ThreadPool;
+
+/// Json reader worker
+pub fn json_reader_worker(
+ pool: &ThreadPool,
+ profile: String,
+ sender_sync_thread: mpsc::Sender<MessForSyncThread>,
+ json_chunks_path: PathBuf,
+ end: Option<u32>,
+) {
+ // Launch json reader thread
+ pool.execute(move || {
+ let ts_job_begin = SystemTime::now();
+
+ // Get list of json chunk files
+ let chunks_set = get_chunks_set(&json_chunks_path);
+ if chunks_set.is_empty() {
+ fatal_error("json_files_path directory is empty !");
+ }
+
+ // Get max chunk number and max block id
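+ // Each chunk file is assumed to hold CHUNK_SIZE blocks, so without an explicit
+ // end the last block id is derived from the number of chunk files found.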
+ let (max_chunk_number, max_block_id): (usize, u32) = if let Some(end) = end {
+ (end as usize / (*crate::constants::CHUNK_SIZE), end)
+ } else {
+ (
+ chunks_set.len() - 1,
+ (chunks_set.len() * (*crate::constants::CHUNK_SIZE) - 1) as u32,
+ )
+ };
+
+ // Verify that the max chunk exists
+ if chunks_set.get(&max_chunk_number).is_none() {
+ fatal_error(&format!("Missing chunk file n°{}", max_chunk_number));
+ };
+
+ // Open chunk file
+ let chunk_file_content_result = open_json_chunk_file(&json_chunks_path, max_chunk_number);
+ if chunk_file_content_result.is_err() {
+ fatal_error(&format!("Fail to open chunk file n°{}", max_chunk_number));
+ }
+
+ // Parse chunk file content
+ let blocks_result = parse_json_chunk(&chunk_file_content_result.expect("safe unwrap"));
+ let last_chunk_blocks = match blocks_result {
+ Ok(blocks) => blocks,
+ Err(e) => {
+ fatal_error(&format!(
+ "Fail to parse chunk file n°{} : {}",
+ max_chunk_number, e,
+ ));
+ unreachable!();
+ }
+ };
+
+ if last_chunk_blocks.is_empty() {
+ fatal_error("Last chunk is empty !");
+ }
+
+ let last_block = last_chunk_blocks
+ .get(max_block_id as usize % *crate::constants::CHUNK_SIZE)
+ .expect("safe unwrap because not empty");
+
+ // Send TargetBlockstamp
+ sender_sync_thread
+ .send(MessForSyncThread::Target(
+ last_block.currency.clone(),
+ last_block.blockstamp(),
+ ))
+ .expect("Fatal error : sync_thread unreachable !");
+
+ // Get current local blockstamp
+ debug!("Get local current blockstamp...");
+ let db_path = duniter_conf::get_blockchain_db_path(&profile, &last_block.currency);
+ let blocks_databases = BlocksV10DBs::open(Some(&db_path));
+ let current_blockstamp: Blockstamp =
+ durs_blockchain_dal::block::get_current_blockstamp(&blocks_databases)
+ .expect("ForksV10DB : RustBreakError !")
+ .unwrap_or_default();
+ info!("Local current blockstamp = {}", current_blockstamp);
+
+ // Get first chunk number
+ let first_chunk_number: usize =
+ current_blockstamp.id.0 as usize / *crate::constants::CHUNK_SIZE;
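+ // Resume from the chunk containing the local current block instead of re-reading from zero.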
+
+ // Parse chunks
+ for chunk_number in first_chunk_number..=max_chunk_number {
+ if chunks_set.get(&chunk_number).is_some() {
+ // Open chunk file
+ let chunk_file_content_result =
+ open_json_chunk_file(&json_chunks_path, chunk_number);
+ if chunk_file_content_result.is_err() {
+ fatal_error(&format!("Fail to open chunk file n°{}", chunk_number));
+ }
+
+ // Parse chunk file content
+ let blocks_result =
+ parse_json_chunk(&chunk_file_content_result.expect("safe unwrap"));
+ let blocks = match blocks_result {
+ Ok(blocks) => blocks,
+ Err(e) => {
+ fatal_error(&format!(
+ "Fail to parse chunk file n°{} : {}",
+ chunk_number, e,
+ ));
+ panic!(); // for the compiler
+ }
+ };
+
+ // Send all blocks of this chunk
+ for block in blocks {
+ // Verify if the block number is within the expected interval
+ let block_id = block.blockstamp().id;
+ if (block_id > current_blockstamp.id && block_id.0 <= max_block_id)
+ || (block_id.0 == 0 && current_blockstamp == Blockstamp::default())
+ {
+ // Send block document
+ sender_sync_thread
+ .send(MessForSyncThread::BlockDocument(block))
+ .expect("Fatal error : sync_thread unreachable !");
+ }
+ }
+ } else {
+ fatal_error(&format!("Missing chunk file n°{}", chunk_number));
+ }
+ }
+
+ sender_sync_thread
+ .send(MessForSyncThread::DownloadFinish())
+ .expect("Fatal error : sync_thread unreachable !");
+ let ts_job_duration = SystemTime::now()
+ .duration_since(ts_job_begin)
+ .expect("duration_since error");
+ info!(
+ "ts_job_duration={},{:03} seconds.",
+ ts_job_duration.as_secs(),
+ ts_job_duration.subsec_millis()
+ );
+ });
+}
+
+/// Parse json chunk into BlockDocument Vector
+fn parse_json_chunk(json_chunk_content: &str) -> Result<Vec<BlockDocument>, Error> {
+ let mut block_doc_vec = Vec::with_capacity(*crate::constants::CHUNK_SIZE);
+
+ let json_value = json_pest_parser::parse_json_string(json_chunk_content)?;
+ if let Some(json_object) = json_value.to_object() {
+ if let Some(blocks) = json_object.get("blocks") {
+ if let Some(blocks_array) = blocks.to_array() {
+ for json_block in blocks_array {
+ block_doc_vec.push(parse_json_block(json_block)?);
+ }
+ } else {
+ fatal_error("Fail to parse json chunk : field \"blocks\" must be an array !");
+ }
+ } else {
+ fatal_error("Fail to parse json chunk : field \"blocks\" doesn't exist !");
+ }
+ } else {
+ fatal_error("Fail to parse json chunk : json root node must be an object !");
+ }
+
+ Ok(block_doc_vec)
+}
+
+fn get_chunks_set(dir: &Path) -> HashSet<usize> {
+ let json_chunk_file_list_result = fs::read_dir(dir);
+ if json_chunk_file_list_result.is_err() {
+ error!("Fail to read dir json_files_path !");
+ panic!("Fail to read dir json_files_path !");
+ }
+
+ let mut chunks_set = HashSet::new();
+
+ for dir_entry in json_chunk_file_list_result.expect("Dev error: err case must be treated before.")
+ {
+ if let Ok(dir_entry) = dir_entry {
+ if let Ok(file_name) = dir_entry.file_name().into_string() {
+ let file_name_len = file_name.len();
+
+ if let Ok(file_type) = dir_entry.file_type() {
+ if file_type.is_file()
+ && file_name[0..CHUNK_FILE_NAME_BEGIN.len()] == *CHUNK_FILE_NAME_BEGIN
+ && file_name[file_name_len - CHUNK_FILE_NAME_END.len()..]
+ == *CHUNK_FILE_NAME_END
+ {
+ let chunk_number_result: Result = file_name
+ [CHUNK_FILE_NAME_BEGIN.len()
+ ..file_name_len - CHUNK_FILE_NAME_END.len()]
+ .parse();
+
+ if let Ok(chunk_number) = chunk_number_result {
+ chunks_set.insert(chunk_number);
+ }
+ }
+ }
+ }
+ }
+ }
+
+ chunks_set
+}
+
+fn open_json_chunk_file(
+ json_chunks_path: &PathBuf,
+ chunk_number: usize,
+) -> std::io::Result<(String)> {
+ let mut chunk_file_path = json_chunks_path.clone();
+ chunk_file_path.push(&format!(
+ "{}{}{}",
+ CHUNK_FILE_NAME_BEGIN, chunk_number, CHUNK_FILE_NAME_END
+ ));
+ let file = std::fs::File::open(chunk_file_path)?;
+ let mut buf_reader = std::io::BufReader::new(file);
+ let mut contents = String::new();
+ buf_reader.read_to_string(&mut contents)?;
+
+ Ok(contents)
+}
diff --git a/lib/modules/blockchain/blockchain/sync/download/mod.rs b/lib/modules/blockchain/blockchain/sync/download/mod.rs
new file mode 100644
index 0000000000000000000000000000000000000000..f1f3bd4abe321aa2d464459fa0748e7307cc0aab
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/download/mod.rs
@@ -0,0 +1,16 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+pub mod json_reader_worker;
diff --git a/lib/modules/blockchain/blockchain/sync/mod.rs b/lib/modules/blockchain/blockchain/sync/mod.rs
new file mode 100644
index 0000000000000000000000000000000000000000..46641d81624d5d110c44e41f171534a936f18099
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/sync/mod.rs
@@ -0,0 +1,424 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+mod apply;
+mod download;
+
+use crate::*;
+use dubp_documents::{BlockHash, BlockId};
+use dup_crypto::keys::*;
+use durs_blockchain_dal::currency_params::CurrencyParameters;
+use durs_blockchain_dal::writers::requests::*;
+use durs_blockchain_dal::ForkId;
+use durs_common_tools::fatal_error;
+use durs_wot::NodeId;
+use pbr::ProgressBar;
+use std::collections::{HashMap, VecDeque};
+use std::fs;
+use std::sync::mpsc;
+use std::thread;
+use std::time::SystemTime;
+use threadpool::ThreadPool;
+
+/// Number of sync jobs
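+/// (one download job plus the blocks, wot and txs apply jobs)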
+pub static NB_SYNC_JOBS: &'static usize = &4;
+
+#[derive(Debug, Clone, PartialEq, Eq)]
+/// Block header
+pub struct BlockHeader {
+ pub number: BlockId,
+ pub hash: BlockHash,
+ pub issuer: PubKey,
+}
+
+#[derive(Debug)]
+/// Message for main sync thread
+pub enum MessForSyncThread {
+ Target(CurrencyName, Blockstamp),
+ BlockDocument(BlockDocument),
+ DownloadFinish(),
+ ApplyFinish(),
+}
+
+#[derive(Debug)]
+/// Message for a job thread
+pub enum SyncJobsMess {
+ BlocksDBsWriteQuery(BlocksDBsWriteQuery),
+ WotsDBsWriteQuery(WotsDBsWriteQuery, Box<CurrencyParameters>),
+ CurrencyDBsWriteQuery(CurrencyDBsWriteQuery),
+ End(),
+}
+
+/// Sync from local json files
+pub fn local_sync<DC: DuniterConf>(
+ profile: &str,
+ conf: &DC,
+ json_files_path: PathBuf,
+ end: Option<u32>,
+ cautious: bool,
+ verif_inner_hash: bool,
+) {
+ // Get verification level
+ let _verif_level = if cautious {
+ println!("Start cautious sync...");
+ info!("Start cautious sync...");
+ SyncVerificationLevel::Cautious()
+ } else {
+ println!("Start fast sync...");
+ info!("Start fast sync...");
+ SyncVerificationLevel::FastSync()
+ };
+
+ // Create sync_thread channels
+ let (sender_sync_thread, recv_sync_thread) = mpsc::channel();
+
+ // Create ThreadPool
+ let nb_cpus = num_cpus::get();
+ let nb_workers = if nb_cpus < *NB_SYNC_JOBS {
+ nb_cpus
+ } else {
+ *NB_SYNC_JOBS
+ };
+ let pool = ThreadPool::new(nb_workers);
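+ // The pool hosts the download worker and the three apply workers; with fewer
+ // cores than NB_SYNC_JOBS the jobs simply share the available threads.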
+
+ //match source {
+ //SyncSource::LocalJsonFiles(json_files_path) => {
+ // json_files_path must be a directory
+ if !json_files_path.is_dir() {
+ error!("json_files_path must be a directory");
+ panic!("json_files_path must be a directory");
+ }
+
+ // Launch json reader worker
+ download::json_reader_worker::json_reader_worker(
+ &pool,
+ profile.to_owned(),
+ sender_sync_thread.clone(),
+ json_files_path,
+ end,
+ );
+
+ // Get target blockstamp
+ let (currency, target_blockstamp) =
+ if let Ok(MessForSyncThread::Target(currency, target_blockstamp)) = recv_sync_thread.recv()
+ {
+ (currency, target_blockstamp)
+ } else {
+ fatal_error("Fatal error : no target blockstamp !");
+ panic!(); // for the compiler
+ };
+
+ // Update DuniterConf
+ let mut conf = conf.clone();
+ conf.set_currency(currency.clone());
+
+ // Get databases path
+ let db_path = duniter_conf::get_blockchain_db_path(profile, &currency);
+
+ // Write new conf
+ duniter_conf::write_conf_file(profile, &conf).expect("Fail to write new conf !");
+
+ // Open wot db
+ let wot_db = open_wot_db::(Some(&db_path)).expect("Fail to open WotDB !");
+
+ // Open blocks databases
+ let databases = BlocksV10DBs::open(Some(&db_path));
+
+ // Open wot databases
+ let wot_databases = WotsV10DBs::open(Some(&db_path));
+
+ // Get local current blockstamp
+ debug!("Get local current blockstamp...");
+ let mut current_blockstamp: Blockstamp =
+ durs_blockchain_dal::block::get_current_blockstamp(&databases)
+ .expect("DALError : fail to get current blockstamp !")
+ .unwrap_or_default();
+ debug!("Success to get local current blockstamp.");
+
+ // Node is already synchronized ?
+ if target_blockstamp.id.0 < current_blockstamp.id.0 {
+ println!("Your durs node is already synchronized.");
+ return;
+ }
+
+ // Get wot index
+ let mut wot_index: HashMap<PubKey, NodeId> =
+ DALIdentity::get_wot_index(&wot_databases.identities_db)
+ .expect("Fatal error : get_wot_index : Fail to read blockchain databases");
+
+ // Start sync
+ let sync_start_time = SystemTime::now();
+
+ // Count number of blocks and chunks
+ let count_blocks = target_blockstamp.id.0 + 1 - current_blockstamp.id.0;
+ let count_chunks = if count_blocks % 250 > 0 {
+ (count_blocks / 250) + 1
+ } else {
+ count_blocks / 250
+ };
+ println!(
+ "Sync from #{} to #{} :",
+ current_blockstamp.id.0, target_blockstamp.id.0
+ );
+ info!(
+ "Sync from #{} to #{} :",
+ current_blockstamp.id.0, target_blockstamp.id.0
+ );
+
+ // Create progress bar
+ let mut apply_pb = ProgressBar::new(count_chunks.into());
+ apply_pb.format("╢▌▌░╟");
+
+ // Create workers threads channels
+ let (sender_blocks_thread, recv_blocks_thread) = mpsc::channel();
+ let (sender_wot_thread, recv_wot_thread) = mpsc::channel();
+ let (sender_tx_thread, recv_tx_thread) = mpsc::channel();
+
+ // Launch blocks_worker thread
+ apply::blocks_worker::execute(
+ &pool,
+ sender_sync_thread.clone(),
+ recv_blocks_thread,
+ databases,
+ apply_pb,
+ );
+
+ // / Launch wot_worker thread
+ apply::wot_worker::execute(
+ &pool,
+ profile.to_owned(),
+ currency.clone(),
+ sender_sync_thread.clone(),
+ recv_wot_thread,
+ );
+
+ // Launch tx_worker thread
+ apply::txs_worker::execute(
+ &pool,
+ profile.to_owned(),
+ currency.clone(),
+ sender_sync_thread.clone(),
+ recv_tx_thread,
+ );
+
+ let main_job_begin = SystemTime::now();
+
+ // Open currency_params_db
+ let dbs_path = duniter_conf::get_blockchain_db_path(profile, &conf.currency());
+ let currency_params_db = open_file_db::(&dbs_path, "params.db")
+ .expect("Fail to open params db");
+
+ // Apply blocks
+ let mut blocks_not_expiring = VecDeque::with_capacity(200_000);
+ let mut last_block_expiring: isize = -1;
+ let certs_db =
+ BinDB::Mem(open_memory_db::().expect("Fail to create memory certs_db"));
+ let mut currency_params = CurrencyParameters::default();
+ let mut get_currency_params = false;
+ let mut certs_count = 0;
+
+ let mut all_wait_duration = Duration::from_millis(0);
+ let mut wait_begin = SystemTime::now();
+ let mut all_verif_block_hashs_duration = Duration::from_millis(0);
+ let mut all_apply_valid_block_duration = Duration::from_millis(0);
+ while let Ok(MessForSyncThread::BlockDocument(block_doc)) = recv_sync_thread.recv() {
+ all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap();
+
+ // Verify block hashes
+ let verif_block_hashs_begin = SystemTime::now();
+ if verif_inner_hash {
+ verify_block_hashs(&block_doc)
+ .expect("Receive wrong block, please reset data and resync !");
+ }
+ all_verif_block_hashs_duration += SystemTime::now()
+ .duration_since(verif_block_hashs_begin)
+ .unwrap();
+ // Get currency params
+ if !get_currency_params && block_doc.number.0 == 0 {
+ if block_doc.parameters.is_some() {
+ currency_params_db
+ .write(|db| {
+ db.0 = block_doc.currency.clone();
+ db.1 = block_doc.parameters.unwrap();
+ })
+ .expect("fail to write in params DB");
+ currency_params = CurrencyParameters::from((
+ block_doc.currency.clone(),
+ block_doc.parameters.unwrap(),
+ ));
+ get_currency_params = true;
+ } else {
+ panic!("The genesis block has no parameters !");
+ }
+ }
+ // Push block median_time in blocks_not_expiring
+ blocks_not_expiring.push_back(block_doc.median_time);
+ // Get blocks_expiring
+ let mut blocks_expiring = Vec::new();
+ while blocks_not_expiring.front().cloned()
+ < Some(block_doc.median_time - currency_params.sig_validity)
+ {
+ last_block_expiring += 1;
+ blocks_expiring.push(BlockId(last_block_expiring as u32));
+ blocks_not_expiring.pop_front();
+ }
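+ // Certifications created in these expired blocks have outlived sig_validity and
+ // are collected below so apply_valid_block() can expire them.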
+ // Find expire_certs
+ let expire_certs =
+ durs_blockchain_dal::certs::find_expire_certs(&certs_db, blocks_expiring)
+ .expect("find_expire_certs() : DALError");
+ // Get block blockstamp
+ let blockstamp = block_doc.blockstamp();
+ // Apply block
+ let apply_valid_block_begin = SystemTime::now();
+ if let Ok(ValidBlockApplyReqs(block_req, wot_db_reqs, currency_db_reqs)) =
+ apply_valid_block::(
+ block_doc,
+ &mut wot_index,
+ &wot_db,
+ &expire_certs,
+ None,
+ )
+ {
+ all_apply_valid_block_duration += SystemTime::now()
+ .duration_since(apply_valid_block_begin)
+ .unwrap();
+ current_blockstamp = blockstamp;
+ debug!("Apply db requests...");
+ // Send block request to blocks worker thread
+ sender_blocks_thread
+ .send(SyncJobsMess::BlocksDBsWriteQuery(block_req.clone()))
+ .expect(
+ "Fail to communicate with blocks worker thread, please reset data & resync !",
+ );
+ // Send wot requests to wot worker thread
+ for req in wot_db_reqs {
+ if let WotsDBsWriteQuery::CreateCert(
+ ref _source_pubkey,
+ ref source,
+ ref target,
+ ref created_block_id,
+ ref _median_time,
+ ) = req
+ {
+ certs_count += 1;
+ // Add cert in certs_db
+ certs_db
+ .write(|db| {
+ let mut created_certs =
+ db.get(&created_block_id).cloned().unwrap_or_default();
+ created_certs.insert((*source, *target));
+ db.insert(*created_block_id, created_certs);
+ })
+ .expect("RustBreakError : please reset data and resync !");
+ }
+ sender_wot_thread
+ .send(SyncJobsMess::WotsDBsWriteQuery(
+ req.clone(),
+ Box::new(currency_params),
+ ))
+ .expect(
+ "Fail to communicate with tx worker thread, please reset data & resync !",
+ )
+ }
+ // Send blocks and wot requests to wot worker thread
+ for req in currency_db_reqs {
+ sender_tx_thread
+ .send(SyncJobsMess::CurrencyDBsWriteQuery(req.clone()))
+ .expect(
+ "Fail to communicate with tx worker thread, please reset data & resync !",
+ );
+ }
+ debug!("Success to apply block #{}", current_blockstamp.id.0);
+ if current_blockstamp.id.0 >= target_blockstamp.id.0 {
+ if current_blockstamp == target_blockstamp {
+ // Sync completed
+ break;
+ } else {
+ panic!("Fatal Error : we get a fork, please reset data and sync again !");
+ }
+ }
+ } else {
+ panic!(
+ "Fatal error : fail to stack up block #{}",
+ current_blockstamp.id.0 + 1
+ )
+ }
+ wait_begin = SystemTime::now();
+ }
+ // Send end signal to workers threads
+ sender_blocks_thread
+ .send(SyncJobsMess::End())
+ .expect("Sync : Fail to send End signal to blocks worker !");
+ info!("Sync : send End signal to blocks job.");
+ sender_wot_thread
+ .send(SyncJobsMess::End())
+ .expect("Sync : Fail to send End signal to wot worker !");
+ info!("Sync : send End signal to wot job.");
+ sender_tx_thread
+ .send(SyncJobsMess::End())
+ .expect("Sync : Fail to send End signal to writer worker !");
+ info!("Sync : send End signal to tx job.");
+
+ // Save params db
+ currency_params_db.save().expect("Fail to save params db");
+
+ // Save wot file
+ wot_db.save().expect("Fail to save wot db");
+
+ let main_job_duration =
+ SystemTime::now().duration_since(main_job_begin).unwrap() - all_wait_duration;
+ info!(
+ "main_job_duration={},{:03} seconds.",
+ main_job_duration.as_secs(),
+ main_job_duration.subsec_millis()
+ );
+ info!(
+ "all_verif_block_hashs_duration={},{:03} seconds.",
+ all_verif_block_hashs_duration.as_secs(),
+ all_verif_block_hashs_duration.subsec_millis()
+ );
+ info!(
+ "all_apply_valid_block_duration={},{:03} seconds.",
+ all_apply_valid_block_duration.as_secs(),
+ all_apply_valid_block_duration.subsec_millis()
+ );
+
+ // Wait for the finish signals of the other sync jobs
+ let mut wait_jobs = *NB_SYNC_JOBS - 1;
+ while wait_jobs > 0 {
+ match recv_sync_thread.recv() {
+ Ok(MessForSyncThread::ApplyFinish()) => wait_jobs -= 1,
+ Ok(_) => thread::sleep(Duration::from_millis(50)),
+ Err(_) => wait_jobs -= 1,
+ }
+ }
+ info!("All sync jobs finish.");
+
+ // Log sync duration
+ debug!("certs_count={}", certs_count);
+ let sync_duration = SystemTime::now().duration_since(sync_start_time).unwrap();
+ println!(
+ "Sync {} blocks in {}.{:03} seconds.",
+ count_blocks,
+ sync_duration.as_secs(),
+ sync_duration.subsec_millis(),
+ );
+ info!(
+ "Sync {} blocks in {}.{:03} seconds.",
+ count_blocks,
+ sync_duration.as_secs(),
+ sync_duration.subsec_millis(),
+ );
+}
diff --git a/lib/modules/blockchain/blockchain/ts_parsers.rs b/lib/modules/blockchain/blockchain/ts_parsers.rs
index 1abf79eae8f6c51916614a6c84d9790e1c833084..de30356325e004c4cd2831bd426c0379b2560cac 100644
--- a/lib/modules/blockchain/blockchain/ts_parsers.rs
+++ b/lib/modules/blockchain/blockchain/ts_parsers.rs
@@ -14,15 +14,13 @@
// along with this program. If not, see <https://www.gnu.org/licenses/>.
use crate::sync::BlockHeader;
-use dubp_documents::v10::block::{BlockV10Parameters, TxDocOrTxHash};
-use dubp_documents::v10::identity::*;
-use dubp_documents::v10::membership::*;
-use dubp_documents::v10::transaction::*;
-use dubp_documents::v10::*;
+use dubp_documents::documents::block::{BlockDocument, BlockV10Parameters, TxDocOrTxHash};
+use dubp_documents::documents::identity::*;
+use dubp_documents::documents::membership::*;
+use dubp_documents::documents::transaction::*;
use dubp_documents::CurrencyName;
use dubp_documents::DocumentBuilder;
use dubp_documents::{BlockHash, BlockId, Blockstamp};
-use duniter_network::documents::{NetworkBlock, NetworkBlockV10};
use dup_crypto::hashs::Hash;
use dup_crypto::keys::*;
use std::str::FromStr;
@@ -33,183 +31,6 @@ pub enum MembershipParseError {
WrongFormat(),
}
-/// Parse a block from duniter-ts database
-pub fn parse_ts_block(row: &[sqlite::Value]) -> NetworkBlock {
- let current_header = BlockHeader {
- number: BlockId(row[16].as_integer().expect("Fail to parse block number") as u32),
- hash: BlockHash(
- Hash::from_hex(row[0].as_string().expect("Fail to parse block hash"))
- .expect("Fail to parse block hash (2)"),
- ),
- issuer: PubKey::Ed25519(
- ed25519::PublicKey::from_base58(
- row[4].as_string().expect("Fail to parse block issuer"),
- )
- .expect("Failt to parse block issuer (2)"),
- ),
- };
- let previous_header = if current_header.number.0 > 0 {
- Some(BlockHeader {
- number: BlockId(current_header.number.0 - 1),
- hash: BlockHash(
- Hash::from_hex(
- row[6]
- .as_string()
- .expect("Fail to parse block previous hash"),
- )
- .expect("Fail to parse block previous hash (2)"),
- ),
- issuer: PubKey::Ed25519(
- ed25519::PublicKey::from_base58(
- row[7]
- .as_string()
- .expect("Fail to parse previous block issuer"),
- )
- .expect("Fail to parse previous block issuer (2)"),
- ),
- })
- } else {
- None
- };
- let currency = row[3].as_string().expect("Fail to parse currency");
- let parameters = if let Some(params_str) = row[5].as_string() {
- if let Ok(params) = BlockV10Parameters::from_str(params_str) {
- Some(params)
- } else {
- None
- }
- } else {
- None
- };
- let dividend = match row[12].as_integer() {
- Some(dividend) => Some(dividend as usize),
- None => None,
- };
- let json_identities: serde_json::Value =
- serde_json::from_str(row[20].as_string().expect("Fail to parse block identities"))
- .expect("Fail to parse block identities (2)");
- let mut identities = Vec::new();
- for raw_idty in json_identities
- .as_array()
- .expect("Fail to parse block identities (3)")
- {
- identities
- .push(parse_compact_identity(&currency, &raw_idty).expect("Fail to parse block idty"));
- }
- let json_txs: serde_json::Value =
- serde_json::from_str(row[18].as_string().expect("Fail to parse block txs"))
- .expect("Fail to parse block txs (2)");
- let mut transactions = Vec::new();
- for json_tx in json_txs.as_array().expect("Fail to parse block txs (3)") {
- transactions.push(TxDocOrTxHash::TxDoc(Box::new(
- parse_transaction(currency, &json_tx).expect("Fail to parse block tx"),
- )));
- }
- let previous_hash = match previous_header.clone() {
- Some(previous_header_) => previous_header_.hash.0,
- None => Hash::default(),
- };
- let previous_issuer = match previous_header {
- Some(previous_header_) => Some(previous_header_.issuer),
- None => None,
- };
- let excluded: serde_json::Value =
- serde_json::from_str(row[25].as_string().expect("Fail to parse excluded"))
- .expect("Fail to parse excluded (2)");
- let uncompleted_block_doc = BlockDocument {
- nonce: row[17].as_integer().expect("Fail to parse nonce") as u64,
- number: current_header.number,
- pow_min: row[15].as_integer().expect("Fail to parse pow_min") as usize,
- time: row[14].as_integer().expect("Fail to parse time") as u64,
- median_time: row[11].as_integer().expect("Fail to parse median_time") as u64,
- members_count: row[9].as_integer().expect("Fail to parse members_count") as usize,
- monetary_mass: row[10]
- .as_string()
- .expect("Fail to parse monetary_mass")
- .parse()
- .expect("Fail to parse monetary_mass (2)"),
- unit_base: row[13].as_integer().expect("Fail to parse unit_base") as usize,
- issuers_count: row[28].as_integer().expect("Fail to parse issuers_count") as usize,
- issuers_frame: row[26].as_integer().expect("Fail to parse issuers_frame") as isize,
- issuers_frame_var: row[27]
- .as_integer()
- .expect("Fail to parse issuers_frame_var") as isize,
- currency: CurrencyName(String::from(currency)),
- issuers: vec![PubKey::Ed25519(
- ed25519::PublicKey::from_base58(row[4].as_string().expect("Fail to parse issuer"))
- .expect("Fail to parse issuer '2)"),
- )],
- signatures: vec![Sig::Ed25519(
- ed25519::Signature::from_base64(row[2].as_string().expect("Fail to parse signature"))
- .expect("Fail to parse signature (2)"),
- )],
- hash: Some(current_header.hash),
- parameters,
- previous_hash,
- previous_issuer,
- inner_hash: Some(
- Hash::from_hex(row[1].as_string().expect("Fail to parse block inner_hash"))
- .expect("Fail to parse block inner_hash (2)"),
- ),
- dividend,
- identities,
- joiners: parse_memberships(
- currency,
- MembershipType::In(),
- row[21].as_string().expect("Fail to parse joiners"),
- )
- .expect("Fail to parse joiners (2)"),
- actives: parse_memberships(
- currency,
- MembershipType::In(),
- row[22].as_string().expect("Fail to parse actives"),
- )
- .expect("Fail to parse actives (2)"),
- leavers: parse_memberships(
- currency,
- MembershipType::In(),
- row[23].as_string().expect("Fail to parse leavers"),
- )
- .expect("Fail to parse leavers (2)"),
- revoked: Vec::new(),
- excluded: excluded
- .as_array()
- .expect("Fail to parse excluded (3)")
- .to_vec()
- .into_iter()
- .map(|e| {
- PubKey::Ed25519(
- ed25519::PublicKey::from_base58(
- e.as_str().expect("Fail to parse excluded (4)"),
- )
- .expect("Fail to parse excluded (5)"),
- )
- })
- .collect(),
- certifications: Vec::new(),
- transactions,
- inner_hash_and_nonce_str: String::new(),
- };
- let revoked: serde_json::Value =
- serde_json::from_str(row[24].as_string().expect("Fail to parse revoked"))
- .expect("Fail to parse revoked (2)");
- let certifications: serde_json::Value =
- serde_json::from_str(row[19].as_string().expect("Fail to parse certifications"))
- .expect("Fail to parse certifications (2)");
- // return NetworkBlock
- NetworkBlock::V10(Box::new(NetworkBlockV10 {
- uncompleted_block_doc,
- revoked: revoked
- .as_array()
- .expect("Fail to parse revoked (3)")
- .to_vec(),
- certifications: certifications
- .as_array()
- .expect("Fail to parse certifications (3)")
- .to_vec(),
- }))
-}
-
/// Parse a compact identity
pub fn parse_compact_identity(
currency: &str,
diff --git a/lib/modules/blockchain/blockchain/verify_block.rs b/lib/modules/blockchain/blockchain/verify_block.rs
new file mode 100644
index 0000000000000000000000000000000000000000..2d67b00ffd0ba8dd2790e313e610225f3cf6c0fe
--- /dev/null
+++ b/lib/modules/blockchain/blockchain/verify_block.rs
@@ -0,0 +1,57 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::check_and_apply_block::BlockError;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::*;
+use dup_crypto::keys::PubKey;
+use durs_blockchain_dal::block::DALBlock;
+use durs_blockchain_dal::*;
+use durs_wot::*;
+use std::collections::HashMap;
+
+#[derive(Debug, Copy, Clone)]
+pub enum InvalidBlockError {
+ NoPreviousBlock,
+ VersionDecrease,
+}
+
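+/// Verify that a block respects the local validity rules checked below:
+/// the previous block must exist locally and the block version must not decrease.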
+pub fn verify_block_validity(
+ block: &BlockDocument,
+ blockchain_db: &BinDB,
+ _certs_db: &BinDB,
+ _wot_index: &mut HashMap,
+ _wot_db: &BinDB,
+) -> Result<(), BlockError> {
+ // Rules that do not apply to the genesis block
+ if block.number.0 > 0 {
+ // Get previous block
+ let previous_block_opt =
+ DALBlock::get_block_in_local_blockchain(blockchain_db, BlockId(block.number.0 - 1))?;
+
+ // Previous block must exist
+ if previous_block_opt.is_none() {
+ return Err(BlockError::InvalidBlock(InvalidBlockError::NoPreviousBlock));
+ }
+ let previous_block = previous_block_opt.expect("safe unwrap");
+
+ // Block version must not decrease
+ if previous_block.version > block.version {
+ return Err(BlockError::InvalidBlock(InvalidBlockError::VersionDecrease));
+ }
+ }
+
+ Ok(())
+}
diff --git a/lib/modules/skeleton/lib.rs b/lib/modules/skeleton/lib.rs
index 52679a7e17acae6dd7b376d8b8bd213899753343..74c68c886de545f2d42680b98a931c77f2800090 100644
--- a/lib/modules/skeleton/lib.rs
+++ b/lib/modules/skeleton/lib.rs
@@ -267,7 +267,7 @@ impl DursModule for SkeletonModule {
ref event_content, ..
} => match *event_content {
DursEvent::BlockchainEvent(ref blockchain_event) => {
- match *blockchain_event {
+ match *blockchain_event.deref() {
BlockchainEvent::StackUpValidBlock(
ref _block,
ref _blockstamp,
diff --git a/lib/modules/tui/lib.rs b/lib/modules/tui/lib.rs
index 57c6e0167f3d5c1b67162c91021dab5155ad3272..ba30d2926d07fef556a9d18960b87c9763e371b6 100644
--- a/lib/modules/tui/lib.rs
+++ b/lib/modules/tui/lib.rs
@@ -526,7 +526,7 @@ impl DursModule for TuiModule {
DursMsg::Event {
ref event_content, ..
} => match *event_content {
- DursEvent::BlockchainEvent(ref dal_event) => match *dal_event {
+ DursEvent::BlockchainEvent(ref dal_event) => match *dal_event.deref() {
BlockchainEvent::StackUpValidBlock(ref _block, ref _blockstamp) => {
}
BlockchainEvent::RevertBlocks(ref _blocks) => {}
diff --git a/lib/modules/ws2p-v1-legacy/lib.rs b/lib/modules/ws2p-v1-legacy/lib.rs
index 9235cc02913141a066070810a5fa743a3a63555e..aa000ed952a1c129a42fca793b90bd69d3300a79 100644
--- a/lib/modules/ws2p-v1-legacy/lib.rs
+++ b/lib/modules/ws2p-v1-legacy/lib.rs
@@ -471,7 +471,7 @@ impl DursModule for WS2PModule {
ref event_content, ..
} => {
if let DursEvent::BlockchainEvent(ref bc_event) = *event_content {
- match *bc_event {
+ match *bc_event.deref() {
BlockchainEvent::StackUpValidBlock(
ref _block,
ref blockstamp,
@@ -759,7 +759,9 @@ impl DursModule for WS2PModule {
let mut chunk = Vec::new();
for json_block in response.as_array().unwrap() {
if let Some(block) = parse_json_block(json_block) {
- chunk.push(BlockchainDocument::Block(block));
+ chunk.push(BlockchainDocument::Block(Box::new(
+ block,
+ )));
} else {
warn!("WS2PModule: Error : fail to parse one json block !");
}
@@ -933,9 +935,8 @@ impl DursModule for WS2PModule {
mod tests {
use super::parsers::blocks::parse_json_block;
use super::*;
- use dubp_documents::v10::BlockDocument;
+ use dubp_documents::documents::block::BlockDocument;
use duniter_module::DursModule;
- use duniter_network::documents::NetworkBlock;
use dup_crypto::keys::PublicKey;
use durs_network_documents::network_endpoint::NetworkEndpointApi;
use std::fs;
@@ -1089,12 +1090,7 @@ mod tests {
],
});
let mut block: BlockDocument =
- match parse_json_block(&json_block).expect("Fail to parse test json block !") {
- NetworkBlock::V10(network_block_v10) => network_block_v10.uncompleted_block_doc,
- _ => {
- panic!("Test block must be a v10 block !");
- }
- };
+ parse_json_block(&json_block).expect("Fail to parse test json block !");
assert_eq!(
block
.inner_hash
diff --git a/lib/modules/ws2p-v1-legacy/parsers/blocks.rs b/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
index dfbe67ac1621623fb49b7e3880dbd3f1873a8896..18f487b50c44d17a8f178116a740d974a3eebdce 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/blocks.rs
@@ -1,12 +1,13 @@
use super::excluded::parse_exclusions_from_json_value;
use super::identities::parse_compact_identity;
use super::transactions::parse_transaction;
-use dubp_documents::v10::block::{BlockV10Parameters, TxDocOrTxHash};
-use dubp_documents::v10::membership::*;
-use dubp_documents::v10::BlockDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::block::{BlockV10Parameters, TxDocOrTxHash};
+use dubp_documents::documents::membership::*;
+use dubp_documents::parsers::certifications::*;
+use dubp_documents::parsers::revoked::*;
use dubp_documents::CurrencyName;
use dubp_documents::{BlockHash, BlockId};
-use duniter_network::documents::{NetworkBlock, NetworkBlockV10};
use dup_crypto::hashs::Hash;
use dup_crypto::keys::*;
use std::str::FromStr;
@@ -57,7 +58,7 @@ fn parse_memberships(
Some(memberships)
}
-pub fn parse_json_block(source: &serde_json::Value) -> Option<NetworkBlock> {
+pub fn parse_json_block(source: &serde_json::Value) -> Option<BlockDocument> {
let number = BlockId(source.get("number")?.as_u64()? as u32);
let currency = source.get("currency")?.as_str()?.to_string();
let issuer = match ed25519::PublicKey::from_base58(source.get("issuer")?.as_str()?) {
@@ -98,14 +99,27 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option {
let joiners = parse_memberships(&currency, MembershipType::In(), source.get("joiners")?)?;
let actives = parse_memberships(&currency, MembershipType::In(), source.get("actives")?)?;
let leavers = parse_memberships(&currency, MembershipType::Out(), source.get("actives")?)?;
+ let revoked: Vec<&str> = source
+ .get("revoked")?
+ .as_array()?
+ .iter()
+ .map(|v| v.as_str().unwrap_or(""))
+ .collect();
+ let certifications: Vec<&str> = source
+ .get("certifications")?
+ .as_array()?
+ .iter()
+ .map(|v| v.as_str().unwrap_or(""))
+ .collect();
let mut transactions = Vec::new();
for json_tx in source.get("transactions")?.as_array()? {
transactions.push(TxDocOrTxHash::TxDoc(Box::new(parse_transaction(
"g1", &json_tx,
)?)));
}
- let block_doc = BlockDocument {
+ Some(BlockDocument {
nonce: source.get("nonce")?.as_i64()? as u64,
+ version: source.get("version")?.as_u64()? as u32,
number: BlockId(source.get("number")?.as_u64()? as u32),
pow_min: source.get("powMin")?.as_u64()? as usize,
time: source.get("time")?.as_u64()?,
@@ -129,9 +143,9 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option {
joiners,
actives,
leavers,
- revoked: Vec::with_capacity(0),
+ revoked: parse_revocations_into_compact(&revoked),
excluded: parse_exclusions_from_json_value(&source.get("excluded")?.as_array()?),
- certifications: Vec::with_capacity(0),
+ certifications: parse_certifications_into_compact(&certifications),
transactions,
inner_hash_and_nonce_str: format!(
"InnerHash: {}\nNonce: {}\n",
@@ -140,10 +154,5 @@ pub fn parse_json_block(source: &serde_json::Value) -> Option {
.to_hex(),
source.get("nonce")?.as_u64()?
),
- };
- Some(NetworkBlock::V10(Box::new(NetworkBlockV10 {
- uncompleted_block_doc: block_doc,
- revoked: source.get("revoked")?.as_array()?.clone(),
- certifications: source.get("certifications")?.as_array()?.clone(),
- })))
+ })
}
diff --git a/lib/modules/ws2p-v1-legacy/parsers/identities.rs b/lib/modules/ws2p-v1-legacy/parsers/identities.rs
index 10f3608a67f94bb365026eb65d94135dde45c649..02aab9a9f82b9c130b5453e69fe3677ec5c3c7b4 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/identities.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/identities.rs
@@ -1,4 +1,4 @@
-use dubp_documents::v10::identity::*;
+use dubp_documents::documents::identity::*;
use dubp_documents::Blockstamp;
use dubp_documents::DocumentBuilder;
use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/memberships.rs b/lib/modules/ws2p-v1-legacy/parsers/memberships.rs
index 43ceddcb7d0d984cc902d1173abf0fdd7d8af960..a04d2d99b24ae6b7951322c8609fbc458b4d21b1 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/memberships.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/memberships.rs
@@ -1,4 +1,4 @@
-use dubp_documents::v10::membership::*;
+use dubp_documents::documents::membership::*;
use dubp_documents::Blockstamp;
use dubp_documents::DocumentBuilder;
use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/mod.rs b/lib/modules/ws2p-v1-legacy/parsers/mod.rs
index 5392408a4460fd51c3b3c3eb6062178aa92005eb..ca9a42a80f33ac97d570c0caeb1eeb1d33b14022 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/mod.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/mod.rs
@@ -22,7 +22,7 @@ pub mod transactions;
#[cfg(test)]
mod tests {
use super::transactions::*;
- use dubp_documents::v10::transaction::*;
+ use dubp_documents::documents::transaction::*;
use dubp_documents::Blockstamp;
use dubp_documents::DocumentBuilder;
use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p-v1-legacy/parsers/transactions.rs b/lib/modules/ws2p-v1-legacy/parsers/transactions.rs
index 0a665a38359b5b3a2262c428356b24bfd8074263..f979aa73d19353710d94f0dd884a3aba18edb951 100644
--- a/lib/modules/ws2p-v1-legacy/parsers/transactions.rs
+++ b/lib/modules/ws2p-v1-legacy/parsers/transactions.rs
@@ -1,4 +1,4 @@
-use dubp_documents::v10::transaction::{
+use dubp_documents::documents::transaction::{
TransactionDocument, TransactionDocumentBuilder, TransactionInput, TransactionInputUnlocks,
TransactionOutput,
};
diff --git a/lib/modules/ws2p-v1-legacy/ws2p_connection.rs b/lib/modules/ws2p-v1-legacy/ws2p_connection.rs
index 64a1661130456dc0a4e823164ec7451f230165cf..e56098ad634f4b0e0b5cf2b937b934f84e0c9c79 100644
--- a/lib/modules/ws2p-v1-legacy/ws2p_connection.rs
+++ b/lib/modules/ws2p-v1-legacy/ws2p_connection.rs
@@ -485,9 +485,9 @@ impl WS2PConnectionMetaDatas {
match s.as_str().unwrap() {
"BLOCK" => match body.get("block") {
Some(block) => {
- if let Some(network_block) = parse_json_block(&block) {
+ if let Some(block_doc) = parse_json_block(&block) {
return WS2PConnectionMessagePayload::Document(
- BlockchainDocument::Block(network_block),
+ BlockchainDocument::Block(Box::new(block_doc)),
);
} else {
info!("WS2PSignal: receive invalid block (wrong format).");
diff --git a/lib/modules/ws2p/ws2p-messages/lib.rs b/lib/modules/ws2p/ws2p-messages/lib.rs
index d853927e275fa2cf9579e03d4966bdb5c351359e..a8920a3ea4774c8b214236b7e0d6a3e0d27e885d 100644
--- a/lib/modules/ws2p/ws2p-messages/lib.rs
+++ b/lib/modules/ws2p/ws2p-messages/lib.rs
@@ -150,7 +150,7 @@ mod tests {
use crate::v2::WS2Pv2Message;
use bincode;
use bincode::{deserialize, serialize};
- use dubp_documents::v10::certification::*;
+ use dubp_documents::documents::certification::*;
use dubp_documents::{Blockstamp, CurrencyName};
use dup_crypto::keys::bin_signable::BinSignable;
use dup_crypto::keys::*;
diff --git a/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs b/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs
index d136b8ea0729fc2f02c60e472de14e53f83d59f4..92bce7660269b8696d9158a1c1d6627cbc50f250 100644
--- a/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs
+++ b/lib/modules/ws2p/ws2p-messages/v2/payload_container.rs
@@ -18,12 +18,12 @@ use super::ok::WS2Pv2OkMsg;
use super::req_responses::WS2Pv2ReqRes;
use super::requests::WS2Pv2Request;
use super::secret_flags::WS2Pv2SecretFlagsMsg;
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CertificationDocument;
-use dubp_documents::v10::identity::IdentityDocument;
-use dubp_documents::v10::membership::MembershipDocument;
-use dubp_documents::v10::revocation::RevocationDocument;
-use dubp_documents::v10::transaction::TransactionDocument;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CertificationDocument;
+use dubp_documents::documents::identity::IdentityDocument;
+use dubp_documents::documents::membership::MembershipDocument;
+use dubp_documents::documents::revocation::RevocationDocument;
+use dubp_documents::documents::transaction::TransactionDocument;
use dup_crypto::hashs::Hash;
use durs_network_documents::network_head_v2::NetworkHeadV2;
use durs_network_documents::network_head_v3::NetworkHeadV3;
diff --git a/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs b/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs
index 4013a2192cdb7d0cf85d0b43bcb74de575ba9411..c90e4614a13d7f6b3b4314303442b5d0a3fa34e9 100644
--- a/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs
+++ b/lib/modules/ws2p/ws2p-messages/v2/req_responses.rs
@@ -13,10 +13,10 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::block::BlockDocument;
-use dubp_documents::v10::certification::CompactCertificationDocument;
-use dubp_documents::v10::identity::CompactIdentityDocument;
-use dubp_documents::v10::membership::CompactPoolMembershipDoc;
+use dubp_documents::documents::block::BlockDocument;
+use dubp_documents::documents::certification::CompactCertificationDocument;
+use dubp_documents::documents::identity::CompactIdentityDocument;
+use dubp_documents::documents::membership::CompactPoolMembershipDoc;
use dubp_documents::Blockstamp;
use dup_crypto::hashs::Hash;
use std::str;
diff --git a/lib/tools/common-tools/Cargo.toml b/lib/tools/common-tools/Cargo.toml
new file mode 100644
index 0000000000000000000000000000000000000000..12bce29186a85db1f7ccf71b959975ab3a46ef70
--- /dev/null
+++ b/lib/tools/common-tools/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "durs-common-tools"
+version = "0.1.0"
+authors = ["elois "]
+description = "Common rust tools for DURS project."
+repository = "https://git.duniter.org/nodes/rust/duniter-rs"
+readme = "README.md"
+keywords = ["durs", "tools"]
+license = "AGPL-3.0"
+edition = "2018"
+
+[lib]
+path = "src/lib.rs"
+
+[dependencies]
+log = "0.4.*"
+
+[dev-dependencies]
+pretty_assertions = "0.5.1"
diff --git a/lib/tools/common-tools/src/lib.rs b/lib/tools/common-tools/src/lib.rs
new file mode 100644
index 0000000000000000000000000000000000000000..3d894aa32db699a586cf2a3866f19f1962f0a036
--- /dev/null
+++ b/lib/tools/common-tools/src/lib.rs
@@ -0,0 +1,70 @@
+// Copyright (C) 2019 Éloïs SANCHEZ
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+//! Common rust tools for DURS project.
+
+#![deny(
+ missing_docs,
+ missing_debug_implementations,
+ missing_copy_implementations,
+ trivial_casts,
+ trivial_numeric_casts,
+ unsafe_code,
+ unstable_features,
+ unused_import_braces
+)]
+
+#[macro_use]
+extern crate log;
+
+/// Interrupts the program and logs the error message.
+pub fn fatal_error(msg: &str) {
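+ // Assumption: with the `log_panics` feature enabled, the panic itself reaches the logger,
+ // so the error is only logged explicitly before panicking in the other branch.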
+ if cfg!(feature = "log_panics") {
+ panic!(format!("Fatal Error : {}", msg));
+ } else {
+ error!("{}", &format!("Fatal Error : {}", msg));
+ panic!(format!("Fatal Error : {}", msg));
+ }
+}
+
+/// Unescape backslashes: collapse each pair of consecutive backslashes into a single
+/// backslash; all other characters are copied unchanged.
+pub fn unescape_str(source: &str) -> String {
+ let mut previous_char = None;
+ let mut str_result = String::with_capacity(source.len());
+
+ for current_char in source.chars() {
+ if previous_char.is_some() && previous_char.unwrap() == '\\' {
+ match current_char {
+ '\\' => {} // Skip the second backslash of the escaped pair
+ _ => str_result.push(current_char),
+ }
+ } else {
+ str_result.push(current_char);
+ }
+ previous_char = Some(current_char);
+ }
+
+ str_result
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ pub fn test_unescape_str() {
+ assert_eq!("\\".to_owned(), unescape_str("\\\\"));
+ }
+}
diff --git a/lib/tools/crypto/Cargo.toml b/lib/tools/crypto/Cargo.toml
index 530dddd82e1342a7a9aeb11aba7e095a2bbc53fc..08f9b2f65312baf27c8313fe92eb274222519da3 100644
--- a/lib/tools/crypto/Cargo.toml
+++ b/lib/tools/crypto/Cargo.toml
@@ -16,6 +16,7 @@ path = "src/lib.rs"
base58 = "0.1.*"
base64 = "0.10.*"
bincode = "1.0.*"
+failure = "0.1.5"
rand = "0.5.*"
rust-crypto-wasm = "0.3.1"
serde = "1.0.*"
diff --git a/lib/tools/crypto/src/keys/mod.rs b/lib/tools/crypto/src/keys/mod.rs
index f0b7f5b20234b31b1dbdb0817abf7fca556205c3..4fabbfedc5a3767f794b13ad15c95fbd813822b7 100644
--- a/lib/tools/crypto/src/keys/mod.rs
+++ b/lib/tools/crypto/src/keys/mod.rs
@@ -74,12 +74,15 @@ pub trait GetKeysAlgo: Clone + Debug + PartialEq + Eq {
}
/// Errors enumeration for Base58/64 strings convertion.
-#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Fail)]
pub enum BaseConvertionError {
+ #[fail(display = "Data have invalid key length.")]
/// Data have invalid key length (found, expected).
InvalidKeyLendth(usize, usize),
+ #[fail(display = "Invalid character.")]
/// Base58 have an invalid character.
InvalidCharacter(char, usize),
+ #[fail(display = "Invalid base converter length.")]
/// Base58 have invalid lendth
InvalidBaseConverterLength(),
}
diff --git a/lib/tools/crypto/src/lib.rs b/lib/tools/crypto/src/lib.rs
index 3a824cc84bc29fdf0fb28509742f9b1f76a0e170..c7d4c63c2790d7997231ef44b362a612ca1a4e9a 100644
--- a/lib/tools/crypto/src/lib.rs
+++ b/lib/tools/crypto/src/lib.rs
@@ -29,6 +29,8 @@
)]
#![allow(non_camel_case_types)]
+#[macro_use]
+extern crate failure;
#[macro_use]
extern crate serde_derive;
diff --git a/lib/tools/documents/Cargo.toml b/lib/tools/documents/Cargo.toml
index 95f2c1ddedc6cfac1515f875a60cba380841f2b6..e2369aaad705f6a48fb4a70731a5ec6dddbbffa2 100644
--- a/lib/tools/documents/Cargo.toml
+++ b/lib/tools/documents/Cargo.toml
@@ -16,9 +16,12 @@ path = "src/lib.rs"
base58 = "0.1.*"
base64 = "0.9.*"
byteorder = "1.2.3"
+durs-common-tools = { path = "../common-tools" }
dup-crypto = { path = "../crypto" }
-pest = "2.0"
-pest_derive = "2.0"
+failure = "0.1.5"
+json-pest-parser = { path = "../json-pest-parser" }
+pest = "2.1.0"
+pest_derive = "2.1.0"
serde = "1.0.*"
serde_derive = "1.0.*"
serde_json = "1.0.*"
diff --git a/lib/tools/documents/src/blockstamp.rs b/lib/tools/documents/src/blockstamp.rs
index f800178eded54e0e9e6a42ae6a715701d92ad660..a042bac94823ab0bf75334096063a9e9fd375ad7 100644
--- a/lib/tools/documents/src/blockstamp.rs
+++ b/lib/tools/documents/src/blockstamp.rs
@@ -20,13 +20,16 @@ use crate::*;
/// Type of errors for [`BlockUId`] parsing.
///
/// [`BlockUId`]: struct.BlockUId.html
-#[derive(Debug, Copy, Clone, PartialEq, Eq)]
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Fail)]
pub enum BlockstampParseError {
/// Given string have invalid format
+ #[fail(display = "Given string have invalid format")]
InvalidFormat(),
/// [`BlockId`](struct.BlockHash.html) part is not a valid number.
+ #[fail(display = "BlockId part is not a valid number.")]
InvalidBlockId(),
/// [`BlockHash`](struct.BlockHash.html) part is not a valid hex number.
+ #[fail(display = "BlockHash part is not a valid hex number.")]
InvalidBlockHash(),
}
diff --git a/lib/tools/documents/src/v10/block.rs b/lib/tools/documents/src/documents/block.rs
similarity index 89%
rename from lib/tools/documents/src/v10/block.rs
rename to lib/tools/documents/src/documents/block.rs
index fcb30e620489131d69feb95500ed7e1cf241aabe..0bef7e104050caf3b981251925a2cec9008efc97 100644
--- a/lib/tools/documents/src/v10/block.rs
+++ b/lib/tools/documents/src/documents/block.rs
@@ -20,20 +20,22 @@ use dup_crypto::keys::*;
use std::ops::Deref;
use crate::blockstamp::Blockstamp;
-use crate::v10::certification::CertificationDocument;
-use crate::v10::identity::IdentityDocument;
-use crate::v10::membership::MembershipDocument;
-use crate::v10::revocation::RevocationDocument;
-use crate::v10::transaction::TransactionDocument;
-use crate::v10::*;
-use crate::*;
-
-#[derive(Debug, Clone)]
+use crate::documents::certification::CertificationDocument;
+use crate::documents::identity::IdentityDocument;
+use crate::documents::membership::MembershipDocument;
+use crate::documents::revocation::RevocationDocument;
+use crate::documents::transaction::TransactionDocument;
+use crate::documents::*;
+use crate::text_document_traits::*;
+
+#[derive(Debug, Clone, Fail)]
/// Store error in block parameters parsing
pub enum ParseParamsError {
/// ParseIntError
+ #[fail(display = "Fail to parse params :ParseIntError !")]
ParseIntError(::std::num::ParseIntError),
/// ParseFloatError
+ #[fail(display = "Fail to parse params :ParseFloatError !")]
ParseFloatError(::std::num::ParseFloatError),
}
@@ -50,7 +52,7 @@ impl From<::std::num::ParseFloatError> for ParseParamsError {
}
/// Currency parameters
-#[derive(Debug, Copy, Clone, PartialEq, Deserialize, Serialize)]
+#[derive(Debug, Copy, Clone, Deserialize, Serialize, PartialEq)]
pub struct BlockV10Parameters {
/// UD target growth rate (see Relative Theorie of Money)
pub c: f64,
@@ -153,8 +155,38 @@ impl ::std::str::FromStr for BlockV10Parameters {
}
}
+impl ToString for BlockV10Parameters {
+ fn to_string(&self) -> String {
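+ // Colon-separated serialization of the 20 currency parameters, in the same field order as
+ // the `FromStr` implementation above, so a genesis `Parameters:` field can be regenerated.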
+ format!(
+ "{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}:{}",
+ self.c,
+ self.dt,
+ self.ud0,
+ self.sig_period,
+ self.sig_stock,
+ self.sig_window,
+ self.sig_validity,
+ self.sig_qty,
+ self.idty_window,
+ self.ms_window,
+ self.x_percent,
+ self.ms_validity,
+ self.step_max,
+ self.median_time_blocks,
+ self.avg_gen_time,
+ self.dt_diff_eval,
+ self.percent_rot,
+ self.ud_time0,
+ self.ud_reeval_time0,
+ self.dt_reeval,
+ )
+ }
+}
+
+impl Eq for BlockV10Parameters {}
+
/// Store a transaction document or just its hash.
-#[derive(Debug, Clone, Deserialize, Serialize, PartialEq)]
+#[derive(Debug, Clone, Deserialize, Serialize, PartialEq, Eq)]
pub enum TxDocOrTxHash {
/// Transaction document
TxDoc(Box<TransactionDocument>),
@@ -190,8 +222,10 @@ impl TxDocOrTxHash {
/// Wrap a Block document.
///
/// Must be created by parsing a text document or using a builder.
-#[derive(Clone, Debug, Deserialize, Serialize)]
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
pub struct BlockDocument {
+ /// Version
+ pub version: u32,
/// Nonce
pub nonce: u64,
/// number
@@ -253,14 +287,6 @@ pub struct BlockDocument {
pub inner_hash_and_nonce_str: String,
}
-impl PartialEq for BlockDocument {
- fn eq(&self, other: &BlockDocument) -> bool {
- self.hash == other.hash
- }
-}
-
-impl Eq for BlockDocument {}
-
impl BlockDocument {
/// Return previous blockstamp
pub fn previous_blockstamp(&self) -> Blockstamp {
@@ -277,18 +303,44 @@ impl BlockDocument {
pub fn compute_inner_hash(&mut self) {
self.inner_hash = Some(Hash::compute_str(&self.generate_compact_inner_text()));
}
- /// Fill inner_hash_and_nonce_str
- pub fn fill_inner_hash_and_nonce_str(&mut self, new_nonce: Option<u64>) {
- if let Some(new_nonce) = new_nonce {
- self.nonce = new_nonce;
+ /// Verify the inner hash
+ pub fn verify_inner_hash(&self) -> bool {
+ match self.inner_hash {
+ Some(inner_hash) => {
+ inner_hash == Hash::compute_str(&self.generate_compact_inner_text())
+ }
+ None => false,
}
- self.inner_hash_and_nonce_str = format!(
+ }
+ // Generate the character string that will be hashed
+ fn generate_will_hashed_string(&self) -> String {
+ format!(
"InnerHash: {}\nNonce: {}\n",
self.inner_hash
.expect("Try to get inner_hash of an uncompleted or reduce block !")
.to_hex(),
self.nonce
- );
+ )
+ }
+ /// Verify block hash
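+ /// (the hash covers the "InnerHash: ...\nNonce: ...\n" trailer followed by the first signature)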
+ pub fn verify_hash(&self) -> bool {
+ match self.hash {
+ Some(hash) => {
+ hash == BlockHash(Hash::compute_str(&format!(
+ "{}{}\n",
+ self.generate_will_hashed_string(),
+ self.signatures[0]
+ )))
+ }
+ None => false,
+ }
+ }
+ /// Fill inner_hash_and_nonce_str
+ pub fn fill_inner_hash_and_nonce_str(&mut self, new_nonce: Option<u64>) {
+ if let Some(new_nonce) = new_nonce {
+ self.nonce = new_nonce;
+ }
+ self.inner_hash_and_nonce_str = self.generate_will_hashed_string();
}
/// Sign block
pub fn sign(&mut self, privkey: PrivKey) {
@@ -298,11 +350,8 @@ impl BlockDocument {
/// Compute hash
pub fn compute_hash(&mut self) {
self.hash = Some(BlockHash(Hash::compute_str(&format!(
- "InnerHash: {}\nNonce: {}\n{}\n",
- self.inner_hash
- .expect("Try to get inner_hash of an uncompleted or reduce block !")
- .to_hex(),
- self.nonce,
+ "{}{}\n",
+ self.generate_will_hashed_string(),
self.signatures[0]
))));
}
@@ -384,8 +433,31 @@ impl BlockDocument {
dividend_str.push_str("\n");
}
}
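+ // Optional header fields: Parameters is only set on the genesis block, while PreviousHash
+ // and PreviousIssuer are omitted from it (they only exist when block number > 0).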
+ let mut parameters_str = String::from("");
+ if let Some(params) = self.parameters {
+ parameters_str.push_str("Parameters: ");
+ parameters_str.push_str(&params.to_string());
+ parameters_str.push_str("\n");
+ }
+ let mut previous_hash_str = String::from("");
+ if self.number.0 > 0 {
+ previous_hash_str.push_str("PreviousHash: ");
+ previous_hash_str.push_str(&self.previous_hash.to_string());
+ previous_hash_str.push_str("\n");
+ }
+ let mut previous_issuer_str = String::from("");
+ if self.number.0 > 0 {
+ previous_issuer_str.push_str("PreviousIssuer: ");
+ previous_issuer_str.push_str(
+ &self
+ .previous_issuer
+ .expect("No genesis block must have previous issuer")
+ .to_string(),
+ );
+ previous_issuer_str.push_str("\n");
+ }
format!(
- "Version: 10
+ "Version: {version}
Type: Block
Currency: {currency}
Number: {block_number}
@@ -397,9 +469,7 @@ Issuer: {issuer}
IssuersFrame: {issuers_frame}
IssuersFrameVar: {issuers_frame_var}
DifferentIssuersCount: {issuers_count}
-PreviousHash: {previous_hash}
-PreviousIssuer: {previous_issuer}
-MembersCount: {members_count}
+{parameters}{previous_hash}{previous_issuer}MembersCount: {members_count}
Identities:{identities}
Joiners:{joiners}
Actives:{actives}
@@ -409,6 +479,7 @@ Excluded:{excluded}
Certifications:{certifications}
Transactions:{transactions}
",
+ version = self.version,
currency = self.currency,
block_number = self.number,
pow_min = self.pow_min,
@@ -420,8 +491,9 @@ Transactions:{transactions}
issuers_frame = self.issuers_frame,
issuers_frame_var = self.issuers_frame_var,
issuers_count = self.issuers_count,
- previous_hash = self.previous_hash,
- previous_issuer = self.previous_issuer.unwrap(),
+ parameters = parameters_str,
+ previous_hash = previous_hash_str,
+ previous_issuer = previous_issuer_str,
members_count = self.members_count,
identities = identities_str,
joiners = joiners_str,
@@ -496,7 +568,7 @@ impl TextDocument for BlockDocument {
impl IntoSpecializedDocument for BlockDocument {
fn into_specialized(self) -> DUBPDocument {
- DUBPDocument::V10(Box::new(V10Document::Block(Box::new(self))))
+ DUBPDocument::Block(Box::new(self))
}
}
@@ -511,6 +583,7 @@ mod tests {
fn generate_and_verify_empty_block() {
let mut block = BlockDocument {
nonce: 100_010_200_000_006_940,
+ version: 10,
number: BlockId(174_260),
pow_min: 68,
time: 1_525_296_873,
@@ -612,6 +685,7 @@ a9PHPuSfw7jW8FRQHXFsGi/bnLjbtDnTYvEVgUC9u0WlR7GVofa+Xb+l5iy6NwuEXiwvueAkf08wPVY8
let mut block = BlockDocument {
nonce: 0,
+ version: 10,
number: BlockId(107_984),
pow_min: 88,
time: 1_522_685_861,
@@ -792,6 +866,7 @@ nxr4exGrt16jteN9ZX3XZPP9l+X0OUbZ1o/QjE1hbWQNtVU3HhH9SJoEvNj2iVl3gCRr9u2OA9uj9vCy
let mut block = BlockDocument {
nonce: 0,
+ version: 10,
number: BlockId(165_647),
pow_min: 90,
time: 1_540_633_175,
diff --git a/lib/tools/documents/src/v10/certification.rs b/lib/tools/documents/src/documents/certification.rs
similarity index 99%
rename from lib/tools/documents/src/v10/certification.rs
rename to lib/tools/documents/src/documents/certification.rs
index de874e0abe27bfe86aa284c55d16d7ff450aee08..a21db71c8e43f86569e3d16b281cc0505c000c70 100644
--- a/lib/tools/documents/src/v10/certification.rs
+++ b/lib/tools/documents/src/documents/certification.rs
@@ -19,8 +19,8 @@ use dup_crypto::keys::*;
use pest::Parser;
use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
#[derive(Copy, Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
/// Wrap an Compact Revocation document (in block content)
@@ -178,7 +178,7 @@ impl TextDocument for CertificationDocument {
impl IntoSpecializedDocument for CertificationDocument {
fn into_specialized(self) -> DUBPDocument {
- DUBPDocument::V10(Box::new(V10Document::Certification(Box::new(self))))
+ DUBPDocument::Certification(Box::new(self))
}
}
diff --git a/lib/tools/documents/src/v10/identity.rs b/lib/tools/documents/src/documents/identity.rs
similarity index 99%
rename from lib/tools/documents/src/v10/identity.rs
rename to lib/tools/documents/src/documents/identity.rs
index 503e44f2329737a84706d668fd78e874c5fe46ce..3b01663f49f27d4bbaf6d16032315193a0695f0c 100644
--- a/lib/tools/documents/src/v10/identity.rs
+++ b/lib/tools/documents/src/documents/identity.rs
@@ -17,9 +17,9 @@
use pest::Parser;
-use crate::v10::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
use crate::Blockstamp;
-use crate::*;
/// Wrap an Identity document.
///
@@ -162,7 +162,7 @@ impl TextDocument for IdentityDocument {
impl IntoSpecializedDocument for IdentityDocument {
fn into_specialized(self) -> DUBPDocument {
- DUBPDocument::V10(Box::new(V10Document::Identity(self)))
+ DUBPDocument::Identity(self)
}
}
diff --git a/lib/tools/documents/src/v10/membership.rs b/lib/tools/documents/src/documents/membership.rs
similarity index 99%
rename from lib/tools/documents/src/v10/membership.rs
rename to lib/tools/documents/src/documents/membership.rs
index 47c4f01799f809d8e12d94e613dacc688722f820..e7b9ada11736b9ec8dcd6b033de17bebd1fd0267 100644
--- a/lib/tools/documents/src/v10/membership.rs
+++ b/lib/tools/documents/src/documents/membership.rs
@@ -19,8 +19,8 @@ use dup_crypto::keys::*;
use pest::Parser;
use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
/// Type of a Membership.
#[derive(Debug, Deserialize, Clone, Copy, Hash, Serialize, PartialEq, Eq)]
@@ -207,7 +207,7 @@ impl TextDocument for MembershipDocument {
impl IntoSpecializedDocument for MembershipDocument {
fn into_specialized(self) -> DUBPDocument {
- DUBPDocument::V10(Box::new(V10Document::Membership(self)))
+ DUBPDocument::Membership(self)
}
}
diff --git a/lib/tools/documents/src/v10/mod.rs b/lib/tools/documents/src/documents/mod.rs
similarity index 62%
rename from lib/tools/documents/src/v10/mod.rs
rename to lib/tools/documents/src/documents/mod.rs
index bf96fd99820f994b472852dea8d5f289c048b674..109393d8e30ae2858a411a7cc81672d12076a2fc 100644
--- a/lib/tools/documents/src/v10/mod.rs
+++ b/lib/tools/documents/src/documents/mod.rs
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 The Duniter Project Developers.
+// Copyright (C) 2018 The Durs Project Developers.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
@@ -13,7 +13,19 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-//! Provide wrappers around Duniter blockchain documents for protocol version 10.
+//! Implements the Durs blockchain Documents.
+
+use crate::documents::block::*;
+use crate::documents::certification::*;
+use crate::documents::identity::*;
+use crate::documents::membership::*;
+use crate::documents::revocation::*;
+use crate::documents::transaction::*;
+use crate::Rule;
+use crate::*;
+
+use pest::iterators::Pair;
+use pest::Parser;
pub mod block;
pub mod certification;
@@ -22,40 +34,9 @@ pub mod membership;
pub mod revocation;
pub mod transaction;
-use dup_crypto::keys::PrivateKey;
-use pest::Parser;
-
-pub use crate::v10::block::BlockDocument;
-use crate::v10::certification::*;
-use crate::v10::identity::*;
-use crate::v10::membership::*;
-use crate::v10::revocation::*;
-use crate::v10::transaction::*;
-use crate::ToStringObject;
-use crate::*;
-
-#[derive(Clone, Debug, Deserialize, Serialize)]
-/// Contains a document in full or compact format
-pub enum TextDocumentFormat {
- /// Complete format (Allows to check the validity of the signature)
- Complete(D),
- /// Format present in the blocks (does not always allow to verify the signature)
- Compact(D::CompactTextDocument_),
-}
-
-impl TextDocumentFormat {
- /// To compact document
- pub fn to_compact_document(&self) -> D::CompactTextDocument_ {
- match *self {
- TextDocumentFormat::Complete(ref doc) => doc.to_compact_document(),
- TextDocumentFormat::Compact(ref compact_doc) => (*compact_doc).clone(),
- }
- }
-}
-
-/// List of wrapped document types.
+/// Document of DUBP (DUniter Blockchain Protocol)
#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum V10Document {
+pub enum DUBPDocument {
/// Block document.
Block(Box<BlockDocument>),
@@ -77,7 +58,7 @@ pub enum V10Document {
/// List of stringified document types.
#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum V10DocumentString {
+pub enum DUBPDocumentStr {
/// Block document (not yet implemented)
Block(),
@@ -97,57 +78,66 @@ pub enum V10DocumentString {
Revocation(Box),
}
-impl ToStringObject for V10Document {
- type StringObject = V10DocumentString;
+impl ToStringObject for DUBPDocument {
+ type StringObject = DUBPDocumentStr;
/// Transforms an object into a json object
fn to_string_object(&self) -> Self::StringObject {
match *self {
- V10Document::Block(_) => unimplemented!(),
- V10Document::Identity(ref doc) => V10DocumentString::Identity(doc.to_string_object()),
- V10Document::Membership(ref doc) => {
- V10DocumentString::Membership(doc.to_string_object())
+ DUBPDocument::Block(_) => unimplemented!(),
+ DUBPDocument::Identity(ref doc) => DUBPDocumentStr::Identity(doc.to_string_object()),
+ DUBPDocument::Membership(ref doc) => {
+ DUBPDocumentStr::Membership(doc.to_string_object())
}
- V10Document::Certification(ref doc) => {
- V10DocumentString::Certification(Box::new(doc.to_string_object()))
+ DUBPDocument::Certification(ref doc) => {
+ DUBPDocumentStr::Certification(Box::new(doc.to_string_object()))
}
- V10Document::Revocation(ref doc) => {
- V10DocumentString::Revocation(Box::new(doc.to_string_object()))
+ DUBPDocument::Revocation(ref doc) => {
+ DUBPDocumentStr::Revocation(Box::new(doc.to_string_object()))
}
- V10Document::Transaction(ref doc) => {
- V10DocumentString::Transaction(Box::new(doc.to_string_object()))
+ DUBPDocument::Transaction(ref doc) => {
+ DUBPDocumentStr::Transaction(Box::new(doc.to_string_object()))
}
}
}
}
-impl TextDocumentParser for V10Document {
- type DocumentType = V10Document;
+impl TextDocumentParser for DUBPDocument {
+ type DocumentType = DUBPDocument;
- fn parse(doc: &str) -> Result {
- match DocumentsParser::parse(Rule::document_v10, doc) {
- Ok(mut document_v10_pairs) => Ok(V10Document::from_pest_pair(
- document_v10_pairs.next().unwrap(),
- )), // get and unwrap the `document_v10` rule; never fails
+ fn parse(doc: &str) -> Result {
+ match DocumentsParser::parse(Rule::document, doc) {
+ Ok(mut doc_pairs) => Ok(DUBPDocument::from_pest_pair(doc_pairs.next().unwrap())), // get and unwrap the `document` rule; never fails
Err(pest_error) => Err(TextDocumentParseError::PestError(format!("{}", pest_error))),
}
}
fn from_pest_pair(pair: Pair) -> Self::DocumentType {
+ let doc_vx_pair = pair.into_inner().next().unwrap(); // get and unwrap the `document_vX` rule; never fails
+
+ match doc_vx_pair.as_rule() {
+ Rule::document_v10 => DUBPDocument::from_pest_pair_v10(doc_vx_pair),
+ _ => panic!("unexpected rule: {:?}", doc_vx_pair.as_rule()), // Grammar ensures that we never reach this line
+ }
+ }
+}
+
+impl DUBPDocument {
+ pub fn from_pest_pair_v10(pair: Pair) -> DUBPDocument {
let doc_type_v10_pair = pair.into_inner().next().unwrap(); // get and unwrap the `{DOC_TYPE}_v10` rule; never fails
match doc_type_v10_pair.as_rule() {
- Rule::idty_v10 => V10Document::Identity(
+ Rule::idty_v10 => DUBPDocument::Identity(
identity::IdentityDocumentParser::from_pest_pair(doc_type_v10_pair),
),
- Rule::membership_v10 => V10Document::Membership(
+ Rule::membership_v10 => DUBPDocument::Membership(
membership::MembershipDocumentParser::from_pest_pair(doc_type_v10_pair),
),
- Rule::cert_v10 => V10Document::Certification(Box::new(
+ Rule::cert_v10 => DUBPDocument::Certification(Box::new(
certification::CertificationDocumentParser::from_pest_pair(doc_type_v10_pair),
)),
- Rule::revoc_v10 => V10Document::Revocation(Box::new(
+ Rule::revoc_v10 => DUBPDocument::Revocation(Box::new(
revocation::RevocationDocumentParser::from_pest_pair(doc_type_v10_pair),
)),
- Rule::tx_v10 => V10Document::Transaction(Box::new(
+ Rule::tx_v10 => DUBPDocument::Transaction(Box::new(
transaction::TransactionDocumentParser::from_pest_pair(doc_type_v10_pair),
)),
_ => panic!("unexpected rule: {:?}", doc_type_v10_pair.as_rule()), // Grammar ensures that we never reach this line
@@ -155,125 +145,22 @@ impl TextDocumentParser for V10Document {
}
}
-/// Trait for a compact V10 document.
-pub trait CompactTextDocument: Sized + Clone {
- /// Generate document compact text.
- /// the compact format is the one used in the blocks.
- ///
- /// - Don't contains leading signatures
- /// - Contains line breaks on all line.
- fn as_compact_text(&self) -> String;
-}
-
-impl CompactTextDocument for TextDocumentFormat {
- fn as_compact_text(&self) -> String {
- match *self {
- TextDocumentFormat::Complete(ref doc) => doc.generate_compact_text(),
- TextDocumentFormat::Compact(ref doc) => doc.as_compact_text(),
- }
- }
-}
-
-/// Trait for a V10 document.
-pub trait TextDocument: Document {
- /// Type of associated compact document.
- type CompactTextDocument_: CompactTextDocument;
-
- /// Return document as text.
- fn as_text(&self) -> &str;
-
- /// Return document as text without signature.
- fn as_text_without_signature(&self) -> &str {
- let text = self.as_text();
- let mut lines: Vec<&str> = self.as_text().split('\n').collect();
- let sigs = self.signatures();
- let mut sigs_str_len = sigs.len() - 1;
- for _ in sigs {
- sigs_str_len += lines.pop().unwrap_or("").len();
- }
- &text[0..(text.len() - sigs_str_len)]
- }
-
- /*/// Return document as text with leading signatures.
- fn as_text_with_signatures(&self) -> String {
- let mut text = self.as_text().to_string();
-
- for sig in self.signatures() {
- text = format!("{}{}\n", text, sig.to_base64());
- }
-
- text
- }*/
-
- /// Generate compact document.
- /// the compact format is the one used in the blocks.
- /// - Don't contains leading signatures
- fn to_compact_document(&self) -> Self::CompactTextDocument_;
-
- /// Generate document compact text.
- /// the compact format is the one used in the blocks.
- ///
- /// - Don't contains leading signatures
- /// - Contains line breaks on all line.
- fn generate_compact_text(&self) -> String {
- self.to_compact_document().as_compact_text()
- }
-}
-
-/// Trait for a V10 document builder.
-pub trait TextDocumentBuilder: DocumentBuilder {
- /// Generate document text.
- ///
- /// - Don't contains leading signatures
- /// - Contains line breaks on all line.
- fn generate_text(&self) -> String;
-
- /// Generate final document with signatures, and also return them in an array.
- ///
- /// Returns :
- ///
- /// - Text without signatures
- /// - Signatures
- fn build_signed_text(&self, private_keys: Vec) -> (String, Vec) {
- let text = self.generate_text();
-
- let signatures: Vec<_> = {
- let text_bytes = text.as_bytes();
- private_keys
- .iter()
- .map(|key| key.sign(text_bytes))
- .collect()
- };
-
- (text, signatures)
- }
-}
-
-/// V10 Documents in separated parts
-#[derive(Debug, Clone)]
-pub struct V10DocumentParts {
- /// Whole document in text
- pub doc: String,
- /// Payload
- pub body: String,
- /// Currency
- pub currency: String,
- /// Signatures
- pub signatures: Vec,
-}
-
#[cfg(test)]
mod tests {
+ use crate::blockstamp::Blockstamp;
+ use crate::*;
+
use super::certification::CertificationDocumentParser;
use super::identity::IdentityDocumentParser;
use super::membership::MembershipDocumentParser;
use super::revocation::RevocationDocumentParser;
use super::transaction::TransactionDocumentParser;
use super::*;
+
use dup_crypto::keys::*;
// simple text document for signature testing
- #[derive(Debug, Clone)]
+ #[derive(Debug, Clone, PartialEq, Eq)]
struct PlainTextDocument {
pub text: &'static str,
pub issuers: Vec,
diff --git a/lib/tools/documents/src/v10/revocation.rs b/lib/tools/documents/src/documents/revocation.rs
similarity index 99%
rename from lib/tools/documents/src/v10/revocation.rs
rename to lib/tools/documents/src/documents/revocation.rs
index cd98ea229a7a201aff608c9b628c408d60a5fdbc..0f3cbca7e2919206a7b47b80b58d4772eca2e541 100644
--- a/lib/tools/documents/src/v10/revocation.rs
+++ b/lib/tools/documents/src/documents/revocation.rs
@@ -19,8 +19,8 @@ use dup_crypto::keys::*;
use pest::Parser;
use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
#[derive(Debug, Copy, Clone, Deserialize, Serialize, PartialEq, Eq)]
/// Wrap an Compact Revocation document (in block content)
@@ -150,7 +150,7 @@ impl TextDocument for RevocationDocument {
impl IntoSpecializedDocument for RevocationDocument {
fn into_specialized(self) -> DUBPDocument {
- DUBPDocument::V10(Box::new(V10Document::Revocation(Box::new(self))))
+ DUBPDocument::Revocation(Box::new(self))
}
}
diff --git a/lib/tools/documents/src/v10/transaction.rs b/lib/tools/documents/src/documents/transaction.rs
similarity index 99%
rename from lib/tools/documents/src/v10/transaction.rs
rename to lib/tools/documents/src/documents/transaction.rs
index d56f57934f7a96b9e00a8f904ee367415ec397fe..610524b41b8f9d8f9b709b2d8e4f4a699c7eba48 100644
--- a/lib/tools/documents/src/v10/transaction.rs
+++ b/lib/tools/documents/src/documents/transaction.rs
@@ -23,8 +23,8 @@ use std::ops::{Add, Deref, Sub};
use std::str::FromStr;
use crate::blockstamp::Blockstamp;
-use crate::v10::*;
-use crate::*;
+use crate::documents::*;
+use crate::text_document_traits::*;
/// Wrap a transaction amount
#[derive(Debug, Copy, Clone, Eq, Ord, PartialEq, PartialOrd, Deserialize, Hash, Serialize)]
@@ -713,7 +713,7 @@ impl TextDocument for TransactionDocument {
impl IntoSpecializedDocument for TransactionDocument {
fn into_specialized(self) -> DUBPDocument {
- DUBPDocument::V10(Box::new(V10Document::Transaction(Box::new(self))))
+ DUBPDocument::Transaction(Box::new(self))
}
}
diff --git a/lib/tools/documents/src/documents_grammar.pest b/lib/tools/documents/src/documents_grammar.pest
index 12f7d49706b9a4e91ffc4ae4f3e6e96c49a273fa..f9a3b960cd87429b8cda5500b2fce97da5468426 100644
--- a/lib/tools/documents/src/documents_grammar.pest
+++ b/lib/tools/documents/src/documents_grammar.pest
@@ -16,7 +16,6 @@ uid = @{ ASCII_ALPHA ~ (ASCII_ALPHANUMERIC | "-" | "_")* }
block_id = @{ u_int }
blockstamp = ${ block_id ~ "-" ~ hash }
ed25519_sig = @{ base64{88} | (base64{87} ~ "=") | (base64{86} ~ "==") }
-//^(?:[A-Za-z0-9+/]{4})*(?:[A-Za-z0-9+/]{2}==|[A-Za-z0-9+/]{3}=)?$
v10 = _{ "Version: 10" }
diff --git a/lib/tools/documents/src/lib.rs b/lib/tools/documents/src/lib.rs
index 20308e47935413df5a4c61babe7858a65e30eb96..909aceb0f310846ef68b5be38e9704e9d625b54f 100644
--- a/lib/tools/documents/src/lib.rs
+++ b/lib/tools/documents/src/lib.rs
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 The Duniter Project Developers.
+// Copyright (C) 2018 The Durs Project Developers.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
@@ -13,7 +13,7 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-//! Implements the Duniter Documents Protocol.
+//! Implements the Durs Documents Protocol.
#![cfg_attr(feature = "strict", deny(warnings))]
#![deny(
@@ -26,6 +26,8 @@
unused_import_braces
)]
+#[macro_use]
+extern crate failure;
#[macro_use]
extern crate pest_derive;
#[cfg(test)]
@@ -36,14 +38,16 @@ extern crate serde_derive;
pub mod blockstamp;
mod currencies_codes;
-pub mod v10;
+pub mod documents;
+pub mod parsers;
+pub mod text_document_traits;
use crate::currencies_codes::*;
use byteorder::{BigEndian, ReadBytesExt, WriteBytesExt};
use dup_crypto::hashs::Hash;
use dup_crypto::keys::*;
use pest::iterators::Pair;
-use pest::{Parser, RuleType};
+use pest::RuleType;
use serde::Serialize;
use std::cmp::Ordering;
use std::fmt::{Debug, Display, Error, Formatter};
@@ -68,48 +72,22 @@ pub trait TextDocumentParser {
}
/// List of possible errors while parsing.
-#[derive(Debug, Clone)]
+#[derive(Debug, Clone, Fail)]
pub enum TextDocumentParseError {
/// The given source don't have a valid specific document format (document type).
+ #[fail(display = "TextDocumentParseError: Invalid inner format.")]
InvalidInnerFormat(&'static str),
/// Error with pest parser
+ #[fail(display = "TextDocumentParseError: PestError.")]
PestError(String),
+ #[fail(display = "TextDocumentParseError: UnexpectedVersion.")]
/// UnexpectedVersion
UnexpectedVersion(String),
+ #[fail(display = "TextDocumentParseError: UnknownType.")]
/// Unknown type
UnknownType,
}
-/// Document of DUBP (DUniter Blockhain Protocol)
-#[derive(Debug, Clone, Serialize, Deserialize)]
-pub enum DUBPDocument {
- /// Version 10.
- V10(Box),
- /// Version 11. (not done yet, but defined for tests)
- V11(),
-}
-
-impl TextDocumentParser for DUBPDocument {
- type DocumentType = DUBPDocument;
-
- fn parse(doc: &str) -> Result {
- match DocumentsParser::parse(Rule::document, doc) {
- Ok(mut doc_pairs) => Ok(DUBPDocument::from_pest_pair(doc_pairs.next().unwrap())), // get and unwrap the `document` rule; never fails
- Err(pest_error) => Err(TextDocumentParseError::PestError(format!("{}", pest_error))),
- }
- }
- fn from_pest_pair(pair: Pair) -> DUBPDocument {
- let doc_vx_pair = pair.into_inner().next().unwrap(); // get and unwrap the `document_vX` rule; never fails
-
- match doc_vx_pair.as_rule() {
- Rule::document_v10 => {
- DUBPDocument::V10(Box::new(v10::V10Document::from_pest_pair(doc_vx_pair)))
- }
- _ => panic!("unexpected rule: {:?}", doc_vx_pair.as_rule()), // Grammar ensures that we never reach this line
- }
- }
-}
-
/// Currency name
#[derive(Debug, Clone, Eq, PartialEq, Deserialize, Serialize, Hash)]
pub struct CurrencyName(pub String);
@@ -207,7 +185,7 @@ impl Debug for BlockHash {
///
/// Allow only ed25519 for protocol 10 and many differents
/// schemes for protocol 11 through a proxy type.
-pub trait Document: Debug + Clone {
+pub trait Document: Debug + Clone + PartialEq + Eq {
/// Type of the `PublicKey` used by the document.
type PublicKey: PublicKey;
/// Data type of the currency code used by the document.
@@ -333,7 +311,7 @@ impl ToJsonObject for T {}
#[cfg(test)]
mod tests {
use super::*;
- //use dup_crypto::keys::*;
+ use crate::documents::DUBPDocument;
#[test]
fn parse_dubp_document() {
diff --git a/lib/tools/documents/src/parsers/blocks.rs b/lib/tools/documents/src/parsers/blocks.rs
new file mode 100644
index 0000000000000000000000000000000000000000..b788f38efe7d94c1befcb585167f11f1633921bd
--- /dev/null
+++ b/lib/tools/documents/src/parsers/blocks.rs
@@ -0,0 +1,353 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::block::{BlockDocument, BlockV10Parameters, TxDocOrTxHash};
+use crate::documents::membership::MembershipType;
+use crate::parsers::DefaultHasher;
+use crate::*;
+use dup_crypto::hashs::Hash;
+use dup_crypto::keys::*;
+use failure::Error;
+use json_pest_parser::*;
+use std::str::FromStr;
+
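+/// Parse a JSON block (as returned by `json_pest_parser::parse_json_string`) into a `BlockDocument`.
+///
+/// Minimal usage sketch:
+///
+/// ```ignore
+/// let json_block = json_pest_parser::parse_json_string(block_json_str)?;
+/// let block_doc = parse_json_block(&json_block)?;
+/// ```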
+pub fn parse_json_block(json_block: &JSONValue<DefaultHasher>) -> Result<BlockDocument, Error> {
+ if !json_block.is_object() {
+ return Err(ParseJsonError {
+ cause: "Json block must be an object !".to_owned(),
+ }
+ .into());
+ }
+
+ let json_block = json_block.to_object().expect("safe unwrap");
+
+ let currency = get_str(json_block, "currency")?;
+
+ let block_number = get_number(json_block, "number")?.trunc() as u32;
+
+ Ok(BlockDocument {
+ version: get_number(json_block, "version")?.trunc() as u32,
+ nonce: get_number(json_block, "nonce")?.trunc() as u64,
+ number: BlockId(block_number),
+ pow_min: get_number(json_block, "powMin")?.trunc() as usize,
+ time: get_number(json_block, "time")?.trunc() as u64,
+ median_time: get_number(json_block, "medianTime")?.trunc() as u64,
+ members_count: get_number(json_block, "membersCount")?.trunc() as usize,
+ monetary_mass: get_number(json_block, "monetaryMass")?.trunc() as usize,
+ unit_base: get_number(json_block, "unitbase")?.trunc() as usize,
+ issuers_count: get_number(json_block, "issuersCount")?.trunc() as usize,
+ issuers_frame: get_number(json_block, "issuersFrame")?.trunc() as isize,
+ issuers_frame_var: get_number(json_block, "issuersFrameVar")?.trunc() as isize,
+ currency: CurrencyName(currency.to_owned()),
+ issuers: vec![PubKey::Ed25519(ed25519::PublicKey::from_base58(get_str(
+ json_block, "issuer",
+ )?)?)],
+ signatures: vec![Sig::Ed25519(ed25519::Signature::from_base64(get_str(
+ json_block,
+ "signature",
+ )?)?)],
+ hash: Some(BlockHash(Hash::from_hex(get_str(json_block, "hash")?)?)),
+ parameters: if let Some(params) = get_optional_str_not_empty(json_block, "parameters")? {
+ Some(BlockV10Parameters::from_str(params)?)
+ } else {
+ None
+ },
+ previous_hash: if block_number == 0 {
+ Hash::default()
+ } else {
+ Hash::from_hex(get_str(json_block, "previousHash")?)?
+ },
+ previous_issuer: if block_number == 0 {
+ None
+ } else {
+ Some(PubKey::Ed25519(ed25519::PublicKey::from_base58(get_str(
+ json_block,
+ "previousIssuer",
+ )?)?))
+ },
+ inner_hash: Some(Hash::from_hex(get_str(json_block, "inner_hash")?)?),
+ dividend: get_optional_usize(json_block, "dividend")?,
+ identities: crate::parsers::identities::parse_compact_identities(
+ currency,
+ get_str_array(json_block, "identities")?,
+ )?,
+ joiners: crate::parsers::memberships::parse_compact_memberships(
+ currency,
+ MembershipType::In(),
+ &get_str_array(json_block, "joiners")?,
+ )?,
+ actives: crate::parsers::memberships::parse_compact_memberships(
+ currency,
+ MembershipType::In(),
+ &get_str_array(json_block, "actives")?,
+ )?,
+ leavers: crate::parsers::memberships::parse_compact_memberships(
+ currency,
+ MembershipType::Out(),
+ &get_str_array(json_block, "leavers")?,
+ )?,
+ revoked: crate::parsers::revoked::parse_revocations_into_compact(&get_str_array(
+ json_block, "revoked",
+ )?),
+ excluded: get_str_array(json_block, "excluded")?
+ .iter()
+ .map(|p| ed25519::PublicKey::from_base58(p))
+ .map(|p| p.map(PubKey::Ed25519))
+ .collect::<Result<Vec<PubKey>, BaseConvertionError>>()?,
+ certifications: crate::parsers::certifications::parse_certifications_into_compact(
+ &get_str_array(json_block, "certifications")?,
+ ),
+ transactions: json_block
+ .get("transactions")
+ .ok_or_else(|| ParseJsonError {
+ cause: "Fail to parse json block : field 'transactions' must exist !".to_owned(),
+ })?
+ .to_array()
+ .ok_or_else(|| ParseJsonError {
+ cause: "Fail to parse json block : field 'transactions' must be an array !"
+ .to_owned(),
+ })?
+ .iter()
+ .map(|tx| crate::parsers::transactions::parse_json_transaction(tx))
+ .map(|tx_result| tx_result.map(|tx_doc| TxDocOrTxHash::TxDoc(Box::new(tx_doc))))
+ .collect::<Result<Vec<TxDocOrTxHash>, Error>>()?,
+ inner_hash_and_nonce_str: "".to_owned(),
+ })
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn parse_empty_json_block() {
+ let block_json_str = r#"{
+ "version": 10,
+ "nonce": 10200000037108,
+ "number": 7,
+ "powMin": 70,
+ "time": 1488987677,
+ "medianTime": 1488987394,
+ "membersCount": 59,
+ "monetaryMass": 59000,
+ "unitbase": 0,
+ "issuersCount": 1,
+ "issuersFrame": 6,
+ "issuersFrameVar": 0,
+ "currency": "g1",
+ "issuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+ "signature": "xaWNjdFeE4yr9+AKckgR6QuAvMzmKUWfY+uIlC3HKjn2apJqG70Gf59A71W+Ucz6E9WPXRzDDF/xOrf6GCGHCA==",
+ "hash": "0000407900D981FC17B5A6FBCF8E8AFA4C00FAD7AFC5BEA9A96FF505E5D105EC",
+ "parameters": "",
+ "previousHash": "0000379BBE6ABC18DCFD6E4733F9F76CB06593D10FAEDF722BE190C277AC16EA",
+ "previousIssuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+ "inner_hash": "CF2701092D5A34A55802E343B5F8D61D9B7E8089F1F13A19721234DF5B2F0F38",
+ "dividend": null,
+ "identities": [],
+ "joiners": [],
+ "actives": [],
+ "leavers": [],
+ "revoked": [],
+ "excluded": [],
+ "certifications": [],
+ "transactions": [],
+ "raw": "Version: 10\nType: Block\nCurrency: g1\nNumber: 7\nPoWMin: 70\nTime: 1488987677\nMedianTime: 1488987394\nUnitBase: 0\nIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nIssuersFrame: 6\nIssuersFrameVar: 0\nDifferentIssuersCount: 1\nPreviousHash: 0000379BBE6ABC18DCFD6E4733F9F76CB06593D10FAEDF722BE190C277AC16EA\nPreviousIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nMembersCount: 59\nIdentities:\nJoiners:\nActives:\nLeavers:\nRevoked:\nExcluded:\nCertifications:\nTransactions:\nInnerHash: CF2701092D5A34A55802E343B5F8D61D9B7E8089F1F13A19721234DF5B2F0F38\nNonce: 10200000037108\n"
+ }"#;
+
+ let block_json_value = json_pest_parser::parse_json_string(block_json_str)
+ .expect("Fail to parse json block !");
+ assert_eq!(
+ BlockDocument {
+ version: 10,
+ nonce: 10200000037108,
+ number: BlockId(7),
+ pow_min: 70,
+ time: 1488987677,
+ median_time: 1488987394,
+ members_count: 59,
+ monetary_mass: 59000,
+ unit_base: 0,
+ issuers_count: 1,
+ issuers_frame: 6,
+ issuers_frame_var: 0,
+ currency: CurrencyName("g1".to_owned()),
+ issuers: vec![PubKey::Ed25519(
+ ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+ .expect("Fail to parse issuer !")
+ )],
+ signatures: vec![Sig::Ed25519(
+ ed25519::Signature::from_base64("xaWNjdFeE4yr9+AKckgR6QuAvMzmKUWfY+uIlC3HKjn2apJqG70Gf59A71W+Ucz6E9WPXRzDDF/xOrf6GCGHCA==").expect("Fail to parse sig !")
+ )],
+ hash: Some(BlockHash(
+ Hash::from_hex(
+ "0000407900D981FC17B5A6FBCF8E8AFA4C00FAD7AFC5BEA9A96FF505E5D105EC"
+ )
+ .expect("Fail to parse hash !")
+ )),
+ parameters: None,
+ previous_hash: Hash::from_hex(
+ "0000379BBE6ABC18DCFD6E4733F9F76CB06593D10FAEDF722BE190C277AC16EA"
+ )
+ .expect("Fail to parse previous_hash !"),
+ previous_issuer: Some(PubKey::Ed25519(
+ ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+ .expect("Fail to parse previous issuer !")
+ )),
+ inner_hash: Some(
+ Hash::from_hex(
+ "CF2701092D5A34A55802E343B5F8D61D9B7E8089F1F13A19721234DF5B2F0F38"
+ )
+ .expect("Fail to parse inner hash !")
+ ),
+ dividend: None,
+ identities: vec![],
+ joiners: vec![],
+ actives: vec![],
+ leavers: vec![],
+ revoked: vec![],
+ excluded: vec![],
+ certifications: vec![],
+ transactions: vec![],
+ inner_hash_and_nonce_str: "".to_owned(),
+ },
+ parse_json_block(&block_json_value).expect("Fail to parse block_json_value !")
+ );
+ }
+
+ #[test]
+ fn parse_json_block_with_one_tx() {
+ let block_json_str = r#"{
+ "version": 10,
+ "nonce": 10100000033688,
+ "number": 52,
+ "powMin": 74,
+ "time": 1488990898,
+ "medianTime": 1488990117,
+ "membersCount": 59,
+ "monetaryMass": 59000,
+ "unitbase": 0,
+ "issuersCount": 1,
+ "issuersFrame": 6,
+ "issuersFrameVar": 0,
+ "currency": "g1",
+ "issuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+ "signature": "4/UIwXzWQekbYw7fpD8ueMH4GnDEwCM+DvDaTfquBXOvFXLRYo/S+Vrk5u7so/98gYaZ2O7Myh20xgQvhh5FDQ==",
+ "hash": "000057D4B29AF6DADB16F841F19C54C00EB244CECA9C8F2D4839D54E5F91451C",
+ "parameters": "",
+ "previousHash": "00000FEDA61240DD125A26886FEB2E6995B52A94778C71224CAF8492FF257D47",
+ "previousIssuer": "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ",
+ "inner_hash": "6B27ACDA51F416449E5A61FC69438F8974D11FC27EB7A992410C276FC0B9BA5F",
+ "dividend": null,
+ "identities": [],
+ "joiners": [],
+ "actives": [],
+ "leavers": [],
+ "revoked": [],
+ "excluded": [],
+ "certifications": [],
+ "transactions": [
+ {
+ "version": 10,
+ "currency": "g1",
+ "locktime": 0,
+ "blockstamp": "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+ "blockstampTime": 1488990016,
+ "issuers": [
+ "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ"
+ ],
+ "inputs": [
+ "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1"
+ ],
+ "outputs": [
+ "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+ "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)"
+ ],
+ "unlocks": [
+ "0:SIG(0)"
+ ],
+ "signatures": [
+ "fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw=="
+ ],
+ "comment": "TEST",
+ "block_number": 0,
+ "time": 0
+ }
+ ],
+ "raw": "Version: 10\nType: Block\nCurrency: g1\nNumber: 52\nPoWMin: 74\nTime: 1488990898\nMedianTime: 1488990117\nUnitBase: 0\nIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nIssuersFrame: 6\nIssuersFrameVar: 0\nDifferentIssuersCount: 1\nPreviousHash: 00000FEDA61240DD125A26886FEB2E6995B52A94778C71224CAF8492FF257D47\nPreviousIssuer: 2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\nMembersCount: 59\nIdentities:\nJoiners:\nActives:\nLeavers:\nRevoked:\nExcluded:\nCertifications:\nTransactions:\nTX:10:1:1:1:2:1:0\n50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7\n2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ\n1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1\n0:SIG(0)\n1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)\n999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)\nTEST\nfAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw==\nInnerHash: 6B27ACDA51F416449E5A61FC69438F8974D11FC27EB7A992410C276FC0B9BA5F\nNonce: 10100000033688\n"
+ }"#;
+
+ let block_json_value = json_pest_parser::parse_json_string(block_json_str)
+ .expect("Fail to parse json block !");
+
+ let expected_block = BlockDocument {
+ version: 10,
+ nonce: 10100000033688,
+ number: BlockId(52),
+ pow_min: 74,
+ time: 1488990898,
+ median_time: 1488990117,
+ members_count: 59,
+ monetary_mass: 59000,
+ unit_base: 0,
+ issuers_count: 1,
+ issuers_frame: 6,
+ issuers_frame_var: 0,
+ currency: CurrencyName("g1".to_owned()),
+ issuers: vec![PubKey::Ed25519(
+ ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+ .expect("Fail to parse issuer !")
+ )],
+ signatures: vec![Sig::Ed25519(
+ ed25519::Signature::from_base64("4/UIwXzWQekbYw7fpD8ueMH4GnDEwCM+DvDaTfquBXOvFXLRYo/S+Vrk5u7so/98gYaZ2O7Myh20xgQvhh5FDQ==").expect("Fail to parse sig !")
+ )],
+ hash: Some(BlockHash(
+ Hash::from_hex(
+ "000057D4B29AF6DADB16F841F19C54C00EB244CECA9C8F2D4839D54E5F91451C"
+ )
+ .expect("Fail to parse hash !")
+ )),
+ parameters: None,
+ previous_hash: Hash::from_hex(
+ "00000FEDA61240DD125A26886FEB2E6995B52A94778C71224CAF8492FF257D47"
+ )
+ .expect("Fail to parse previous_hash !"),
+ previous_issuer: Some(PubKey::Ed25519(
+ ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+ .expect("Fail to parse previous issuer !")
+ )),
+ inner_hash: Some(
+ Hash::from_hex(
+ "6B27ACDA51F416449E5A61FC69438F8974D11FC27EB7A992410C276FC0B9BA5F"
+ )
+ .expect("Fail to parse inner hash !")
+ ),
+ dividend: None,
+ identities: vec![],
+ joiners: vec![],
+ actives: vec![],
+ leavers: vec![],
+ revoked: vec![],
+ excluded: vec![],
+ certifications: vec![],
+ transactions: vec![TxDocOrTxHash::TxDoc(Box::new(crate::parsers::tests::first_g1_tx_doc()))],
+ inner_hash_and_nonce_str: "".to_owned(),
+ };
+ assert_eq!(
+ expected_block,
+ parse_json_block(&block_json_value).expect("Fail to parse block_json_value !")
+ );
+ assert!(expected_block.verify_inner_hash());
+ }
+}
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs b/lib/tools/documents/src/parsers/certifications.rs
similarity index 79%
rename from lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
rename to lib/tools/documents/src/parsers/certifications.rs
index 868c828645ff6a482045b57e165ad60fa17e070f..c8653ec38db695d0b307dfaf99355f1eb5798bb5 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/certifications.rs
+++ b/lib/tools/documents/src/parsers/certifications.rs
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 The Duniter Project Developers.
+// Copyright (C) 2018 The Durs Project Developers.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
@@ -13,23 +13,18 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::certification::{CertificationDocument, CompactCertificationDocument};
-use dubp_documents::v10::TextDocumentFormat;
-use dubp_documents::BlockId;
+use crate::documents::certification::{CertificationDocument, CompactCertificationDocument};
+use crate::text_document_traits::TextDocumentFormat;
+use crate::BlockId;
use dup_crypto::keys::*;
-use serde_json;
/// Parse array of certification json documents into vector of `CompactCertificationDocument`
pub fn parse_certifications_into_compact(
- json_certs: &[serde_json::Value],
+ str_certs: &[&str],
) -> Vec<TextDocumentFormat<CertificationDocument>> {
let mut certifications: Vec<TextDocumentFormat<CertificationDocument>> = Vec::new();
- for certification in json_certs.iter() {
- let certifications_datas: Vec<&str> = certification
- .as_str()
- .expect("Receive block in wrong format : fail to split cert !")
- .split(':')
- .collect();
+ for certification in str_certs {
+ let certifications_datas: Vec<&str> = certification.split(':').collect();
if certifications_datas.len() == 4 {
certifications.push(TextDocumentFormat::Compact(CompactCertificationDocument {
issuer: PubKey::Ed25519(
diff --git a/lib/tools/documents/src/parsers/identities.rs b/lib/tools/documents/src/parsers/identities.rs
new file mode 100644
index 0000000000000000000000000000000000000000..ab583f85348d1294846a3865066150e82910fd45
--- /dev/null
+++ b/lib/tools/documents/src/parsers/identities.rs
@@ -0,0 +1,71 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::identity::*;
+use crate::Blockstamp;
+use crate::DocumentBuilder;
+use dup_crypto::keys::*;
+
+#[derive(Debug, Fail)]
+#[fail(display = "Fail to parse identity : {:?} !", cause)]
+pub struct ParseIdentityError {
+ pub cause: String,
+}
+
+/// Parse an array of compact identities
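+///
+/// Each entry is expected in the compact form `PUBKEY:SIGNATURE:BLOCKSTAMP:USERNAME`.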
+pub fn parse_compact_identities(
+ currency: &str,
+ str_identities: Vec<&str>,
+) -> Result<Vec<IdentityDocument>, ParseIdentityError> {
+ let mut identities = Vec::with_capacity(str_identities.len());
+
+ for str_identity in str_identities {
+ let idty_elements: Vec<&str> = str_identity.split(':').collect();
+ let issuer = match ed25519::PublicKey::from_base58(idty_elements[0]) {
+ Ok(pubkey) => PubKey::Ed25519(pubkey),
+ Err(_) => {
+ return Err(ParseIdentityError {
+ cause: "invalid pubkey".to_owned(),
+ });
+ }
+ };
+ let signature = match ed25519::Signature::from_base64(idty_elements[1]) {
+ Ok(sig) => Sig::Ed25519(sig),
+ Err(_) => {
+ return Err(ParseIdentityError {
+ cause: "invalid signature".to_owned(),
+ });
+ }
+ };
+ let blockstamp = match Blockstamp::from_string(idty_elements[2]) {
+ Ok(blockstamp) => blockstamp,
+ Err(_) => {
+ return Err(ParseIdentityError {
+ cause: "invalid blockstamp".to_owned(),
+ });
+ }
+ };
+ let username = idty_elements[3];
+ let idty_doc_builder = IdentityDocumentBuilder {
+ currency,
+ username,
+ blockstamp: &blockstamp,
+ issuer: &issuer,
+ };
+ identities.push(idty_doc_builder.build_with_signature(vec![signature]))
+ }
+
+ Ok(identities)
+}
diff --git a/lib/tools/documents/src/parsers/memberships.rs b/lib/tools/documents/src/parsers/memberships.rs
new file mode 100644
index 0000000000000000000000000000000000000000..7225812baec2724054ff6b9d721f2e5f6190f2aa
--- /dev/null
+++ b/lib/tools/documents/src/parsers/memberships.rs
@@ -0,0 +1,56 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::membership::*;
+use crate::Blockstamp;
+use crate::DocumentBuilder;
+use dup_crypto::keys::*;
+use failure::Error;
+
+#[derive(Debug, Fail, Copy, Clone)]
+pub enum ParseMembershipError {
+ #[fail(display = "Fail to parse membership : wrong format !")]
+ WrongFormat,
+}
+
+/// Parse membership documents from an array of str
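+///
+/// Each entry is expected in the compact form
+/// `ISSUER:SIGNATURE:MEMBERSHIP_BLOCKSTAMP:IDTY_BLOCKSTAMP:USERNAME`.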
+pub fn parse_compact_memberships(
+ currency: &str,
+ membership_type: MembershipType,
+ array_memberships: &[&str],
+) -> Result<Vec<MembershipDocument>, Error> {
+ //let memberships: Vec<MembershipDocument> = Vec::new();
+ array_memberships
+ .iter()
+ .map(|membership| {
+ let membership_datas: Vec<&str> = membership.split(':').collect();
+ if membership_datas.len() == 5 {
+ let membership_doc_builder = MembershipDocumentBuilder {
+ currency,
+ issuer: &PubKey::Ed25519(ed25519::PublicKey::from_base58(membership_datas[0])?),
+ blockstamp: &Blockstamp::from_string(membership_datas[2])?,
+ membership: membership_type,
+ identity_username: membership_datas[4],
+ identity_blockstamp: &Blockstamp::from_string(membership_datas[3])?,
+ };
+ let membership_sig =
+ Sig::Ed25519(ed25519::Signature::from_base64(membership_datas[1])?);
+ Ok(membership_doc_builder.build_with_signature(vec![membership_sig]))
+ } else {
+ Err(ParseMembershipError::WrongFormat.into())
+ }
+ })
+ .collect()
+}
diff --git a/lib/tools/documents/src/parsers/mod.rs b/lib/tools/documents/src/parsers/mod.rs
new file mode 100644
index 0000000000000000000000000000000000000000..6a85ecb98c0ddbd5b440627990bc1258a6a872f4
--- /dev/null
+++ b/lib/tools/documents/src/parsers/mod.rs
@@ -0,0 +1,80 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+/// Parsers for block
+pub mod blocks;
+
+/// Parsers for certifications
+pub mod certifications;
+
+/// Parsers for identities
+pub mod identities;
+
+/// Parsers for memberships
+pub mod memberships;
+
+/// Parsers for revocations
+pub mod revoked;
+
+/// Parsers for transactions
+pub mod transactions;
+
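+/// Hasher used for the JSON objects handled by these parsers (wraps the
+/// standard library's default hasher in a `BuildHasherDefault`).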
+type DefaultHasher = std::hash::BuildHasherDefault<std::collections::hash_map::DefaultHasher>;
+
+#[cfg(test)]
+mod tests {
+ use crate::blockstamp::Blockstamp;
+ use crate::documents::transaction::*;
+ use crate::*;
+ use std::str::FromStr;
+
+ pub fn first_g1_tx_doc() -> TransactionDocument {
+ let expected_tx_builder = TransactionDocumentBuilder {
+ currency: &"g1",
+ blockstamp: &Blockstamp::from_string(
+ "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+ )
+ .expect("Fail to parse blockstamp"),
+ locktime: &0,
+ issuers: &vec![PubKey::Ed25519(
+ ed25519::PublicKey::from_base58("2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ")
+ .expect("Fail to parse issuer !"),
+ )],
+ inputs: &vec![TransactionInput::from_str(
+ "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1",
+ )
+ .expect("Fail to parse inputs")],
+ unlocks: &vec![
+ TransactionInputUnlocks::from_str("0:SIG(0)").expect("Fail to parse unlocks")
+ ],
+ outputs: &vec![
+ TransactionOutput::from_str(
+ "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+ )
+ .expect("Fail to parse outputs"),
+ TransactionOutput::from_str(
+ "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)",
+ )
+ .expect("Fail to parse outputs"),
+ ],
+ comment: "TEST",
+ hash: None,
+ };
+
+ expected_tx_builder.build_with_signature(vec![Sig::Ed25519(
+ ed25519::Signature::from_base64("fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw==").expect("Fail to parse sig !")
+ )])
+ }
+}
diff --git a/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs b/lib/tools/documents/src/parsers/revoked.rs
similarity index 76%
rename from lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
rename to lib/tools/documents/src/parsers/revoked.rs
index c47b486a60f5f6e43015feced71a8cfec756bcf0..6b57990ff8c64e70b2d0dda3c3680c357f3e0800 100644
--- a/lib/modules/blockchain/blockchain-dal/parsers/revoked.rs
+++ b/lib/tools/documents/src/parsers/revoked.rs
@@ -1,4 +1,4 @@
-// Copyright (C) 2018 The Duniter Project Developers.
+// Copyright (C) 2018 The Durs Project Developers.
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU Affero General Public License as
@@ -13,22 +13,17 @@
// You should have received a copy of the GNU Affero General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
-use dubp_documents::v10::revocation::{CompactRevocationDocument, RevocationDocument};
-use dubp_documents::v10::TextDocumentFormat;
+use crate::documents::revocation::{CompactRevocationDocument, RevocationDocument};
+use crate::text_document_traits::TextDocumentFormat;
use dup_crypto::keys::*;
-use serde_json;
/// Parse array of revocations json documents into vector of `CompactRevocationDocument`
pub fn parse_revocations_into_compact(
- json_revocations: &[serde_json::Value],
+ str_revocations: &[&str],
) -> Vec<TextDocumentFormat<RevocationDocument>> {
let mut revocations: Vec<TextDocumentFormat<RevocationDocument>> = Vec::new();
- for revocation in json_revocations.iter() {
- let revocations_datas: Vec<&str> = revocation
- .as_str()
- .expect("Receive block in wrong format !")
- .split(':')
- .collect();
+ for revocation in str_revocations {
+ let revocations_datas: Vec<&str> = revocation.split(':').collect();
if revocations_datas.len() == 2 {
revocations.push(TextDocumentFormat::Compact(CompactRevocationDocument {
issuer: PubKey::Ed25519(
diff --git a/lib/tools/documents/src/parsers/transactions.rs b/lib/tools/documents/src/parsers/transactions.rs
new file mode 100644
index 0000000000000000000000000000000000000000..5ce785705d415d7f4002a8db839b5637bf706a5b
--- /dev/null
+++ b/lib/tools/documents/src/parsers/transactions.rs
@@ -0,0 +1,125 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+use crate::documents::transaction::*;
+use crate::parsers::DefaultHasher;
+use crate::TextDocumentParseError;
+use crate::*;
+use dup_crypto::hashs::Hash;
+use dup_crypto::keys::*;
+use failure::Error;
+use json_pest_parser::*;
+use std::str::FromStr;
+
+#[derive(Debug, Fail, Copy, Clone)]
+pub enum ParseTxError {
+ #[fail(display = "Fail to parse transaction : wrong format !")]
+ WrongFormat,
+}
+
+/// Parse a transaction from a JSON value
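+///
+/// Expects the object form produced by `json_pest_parser::parse_json_string`;
+/// the test below shows a complete input example.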
+pub fn parse_json_transaction(
+ json_tx: &JSONValue<DefaultHasher>,
+) -> Result<TransactionDocument, Error> {
+ if !json_tx.is_object() {
+ return Err(ParseJsonError {
+ cause: "Json transaction must be an object !".to_owned(),
+ }
+ .into());
+ }
+
+ let json_tx = json_tx.to_object().expect("safe unwrap");
+
+ let tx_doc_builder = TransactionDocumentBuilder {
+ currency: get_str(json_tx, "currency")?,
+ blockstamp: &Blockstamp::from_string(get_str(json_tx, "blockstamp")?)?,
+ locktime: &(get_number(json_tx, "locktime")?.trunc() as u64),
+ issuers: &get_str_array(json_tx, "issuers")?
+ .iter()
+ .map(|p| ed25519::PublicKey::from_base58(p))
+ .map(|p| p.map(PubKey::Ed25519))
+ .collect::<Result<Vec<PubKey>, BaseConvertionError>>()?,
+ inputs: &get_str_array(json_tx, "inputs")?
+ .iter()
+ .map(|i| TransactionInput::from_str(i))
+ .collect::<Result<Vec<TransactionInput>, TextDocumentParseError>>()?,
+ unlocks: &get_str_array(json_tx, "unlocks")?
+ .iter()
+ .map(|i| TransactionInputUnlocks::from_str(i))
+ .collect::<Result<Vec<TransactionInputUnlocks>, TextDocumentParseError>>()?,
+ outputs: &get_str_array(json_tx, "outputs")?
+ .iter()
+ .map(|i| TransactionOutput::from_str(i))
+ .collect::<Result<Vec<TransactionOutput>, TextDocumentParseError>>()?,
+ comment: &durs_common_tools::unescape_str(get_str(json_tx, "comment")?),
+ hash: if let Some(hash_str) = get_optional_str(json_tx, "hash")? {
+ Some(Hash::from_hex(hash_str)?)
+ } else {
+ None
+ },
+ };
+
+ Ok(tx_doc_builder.build_with_signature(
+ get_str_array(json_tx, "signatures")?
+ .iter()
+ .map(|p| ed25519::Signature::from_base64(p))
+ .map(|p| p.map(Sig::Ed25519))
+ .collect::<Result<Vec<Sig>, BaseConvertionError>>()?,
+ ))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn parse_empty_json_block() {
+ let tx_json_str = r#"{
+ "version": 10,
+ "currency": "g1",
+ "locktime": 0,
+ "blockstamp": "50-00001DAA4559FEDB8320D1040B0F22B631459F36F237A0D9BC1EB923C12A12E7",
+ "blockstampTime": 1488990016,
+ "issuers": [
+ "2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ"
+ ],
+ "inputs": [
+ "1000:0:D:2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ:1"
+ ],
+ "outputs": [
+ "1:0:SIG(Com8rJukCozHZyFao6AheSsfDQdPApxQRnz7QYFf64mm)",
+ "999:0:SIG(2ny7YAdmzReQxAayyJZsyVYwYhVyax2thKcGknmQy5nQ)"
+ ],
+ "unlocks": [
+ "0:SIG(0)"
+ ],
+ "signatures": [
+ "fAH5Gor+8MtFzQZ++JaJO6U8JJ6+rkqKtPrRr/iufh3MYkoDGxmjzj6jCADQL+hkWBt8y8QzlgRkz0ixBcKHBw=="
+ ],
+ "comment": "TEST",
+ "block_number": 0,
+ "time": 0
+ }"#;
+
+ let tx_json_value =
+ json_pest_parser::parse_json_string(tx_json_str).expect("Fail to parse json tx !");
+
+ assert_eq!(
+ crate::parsers::tests::first_g1_tx_doc(),
+ parse_json_transaction(&tx_json_value).expect("Fail to parse tx_json_value !")
+ );
+ }
+
+}
diff --git a/lib/tools/documents/src/text_document_traits.rs b/lib/tools/documents/src/text_document_traits.rs
new file mode 100644
index 0000000000000000000000000000000000000000..0c4ce9146cbccc6ec6d157aa597d58dec5a0140e
--- /dev/null
+++ b/lib/tools/documents/src/text_document_traits.rs
@@ -0,0 +1,132 @@
+// Copyright (C) 2018 The Durs Project Developers.
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+//! Define the Text Document Traits.
+
+use crate::*;
+use dup_crypto::keys::*;
+
+#[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)]
+/// Contains a document in full or compact format
+pub enum TextDocumentFormat<D: TextDocument> {
+ /// Complete format (allows checking the validity of the signature)
+ Complete(D),
+ /// Format present in the blocks (does not always allow verifying the signature)
+ Compact(D::CompactTextDocument_),
+}
+
+impl<D: TextDocument> TextDocumentFormat<D> {
+ /// To compact document
+ pub fn to_compact_document(&self) -> D::CompactTextDocument_ {
+ match *self {
+ TextDocumentFormat::Complete(ref doc) => doc.to_compact_document(),
+ TextDocumentFormat::Compact(ref compact_doc) => (*compact_doc).clone(),
+ }
+ }
+}
+
+/// Trait for a compact text document.
+pub trait CompactTextDocument: Sized + Clone {
+ /// Generate document compact text.
+ /// The compact format is the one used in the blocks.
+ ///
+ /// - Doesn't contain leading signatures
+ /// - Contains line breaks on every line.
+ fn as_compact_text(&self) -> String;
+}
+
+impl<D: TextDocument> CompactTextDocument for TextDocumentFormat<D> {
+ fn as_compact_text(&self) -> String {
+ match *self {
+ TextDocumentFormat::Complete(ref doc) => doc.generate_compact_text(),
+ TextDocumentFormat::Compact(ref doc) => doc.as_compact_text(),
+ }
+ }
+}
+
+/// Trait for a V10 document.
+pub trait TextDocument: Document {
+ /// Type of associated compact document.
+ type CompactTextDocument_: CompactTextDocument;
+
+ /// Return document as text.
+ fn as_text(&self) -> &str;
+
+ /// Return document as text without signature.
+ fn as_text_without_signature(&self) -> &str {
+ let text = self.as_text();
+ let mut lines: Vec<&str> = self.as_text().split('\n').collect();
+ let sigs = self.signatures();
+ let mut sigs_str_len = sigs.len() - 1;
+ for _ in sigs {
+ sigs_str_len += lines.pop().unwrap_or("").len();
+ }
+ &text[0..(text.len() - sigs_str_len)]
+ }
+
+ /*/// Return document as text with leading signatures.
+ fn as_text_with_signatures(&self) -> String {
+ let mut text = self.as_text().to_string();
+
+ for sig in self.signatures() {
+ text = format!("{}{}\n", text, sig.to_base64());
+ }
+
+ text
+ }*/
+
+ /// Generate compact document.
+ /// The compact format is the one used in the blocks.
+ /// - Doesn't contain leading signatures
+ fn to_compact_document(&self) -> Self::CompactTextDocument_;
+
+ /// Generate document compact text.
+ /// The compact format is the one used in the blocks.
+ ///
+ /// - Doesn't contain leading signatures
+ /// - Contains line breaks on every line.
+ fn generate_compact_text(&self) -> String {
+ self.to_compact_document().as_compact_text()
+ }
+}
+
+/// Trait for a V10 document builder.
+pub trait TextDocumentBuilder: DocumentBuilder {
+ /// Generate document text.
+ ///
+ /// - Doesn't contain leading signatures
+ /// - Contains line breaks on every line.
+ fn generate_text(&self) -> String;
+
+ /// Generate final document with signatures, and also return them in an array.
+ ///
+ /// Returns:
+ ///
+ /// - Text without signatures
+ /// - Signatures
+ fn build_signed_text(&self, private_keys: Vec<PrivKey>) -> (String, Vec<Sig>) {
+ let text = self.generate_text();
+
+ let signatures: Vec<_> = {
+ let text_bytes = text.as_bytes();
+ private_keys
+ .iter()
+ .map(|key| key.sign(text_bytes))
+ .collect()
+ };
+
+ (text, signatures)
+ }
+}
diff --git a/lib/tools/json-pest-parser/Cargo.toml b/lib/tools/json-pest-parser/Cargo.toml
new file mode 100644
index 0000000000000000000000000000000000000000..317f02892798d4d9e545015de34ce94a89c91807
--- /dev/null
+++ b/lib/tools/json-pest-parser/Cargo.toml
@@ -0,0 +1,21 @@
+[package]
+name = "json-pest-parser"
+version = "0.1.0"
+authors = ["elois "]
+description = "Parse JSON with pest grammar."
+repository = "https://git.duniter.org/nodes/rust/duniter-rs"
+readme = "README.md"
+keywords = ["json", "pest", "parser"]
+license = "AGPL-3.0"
+edition = "2018"
+
+[lib]
+path = "src/lib.rs"
+
+[dependencies]
+failure = "0.1.5"
+pest = "2.1.0"
+pest_derive = "2.1.0"
+
+[dev-dependencies]
+pretty_assertions = "0.5.1"
\ No newline at end of file
diff --git a/lib/tools/json-pest-parser/src/json_grammar.pest b/lib/tools/json-pest-parser/src/json_grammar.pest
new file mode 100644
index 0000000000000000000000000000000000000000..adc4149c840e38bb467a1f45d9614aadce478196
--- /dev/null
+++ b/lib/tools/json-pest-parser/src/json_grammar.pest
@@ -0,0 +1,33 @@
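+// JSON grammar for pest: `json` is the entry rule and accepts a single
+// top-level object or array; whitespace is skipped implicitly through the
+// special WHITESPACE rule.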
+WHITESPACE = _{ " " | "\t" | "\r" | "\n" }
+
+null = { "null" }
+boolean = { "true" | "false" }
+number = @{
+ "-"?
+ ~ ("0" | ASCII_NONZERO_DIGIT ~ ASCII_DIGIT*)
+ ~ ("." ~ ASCII_DIGIT*)?
+ ~ (^"e" ~ ("+" | "-")? ~ ASCII_DIGIT+)?
+}
+
+string = ${ "\"" ~ inner_string ~ "\"" }
+inner_string = @{ char* }
+char = {
+ !("\"" | "\\") ~ ANY
+ | "\\" ~ ("\"" | "\\" | "/" | "b" | "f" | "n" | "r" | "t")
+ | "\\" ~ ("u" ~ ASCII_HEX_DIGIT{4})
+}
+
+object = {
+ "{" ~ "}" |
+ "{" ~ pair ~ ("," ~ pair)* ~ "}"
+}
+pair = { string ~ ":" ~ value }
+
+array = {
+ "[" ~ "]" |
+ "[" ~ value ~ ("," ~ value)* ~ "]"
+}
+
+value = _{ object | array | string | number | boolean | null }
+
+json = _{ SOI ~ (object | array) ~ EOI }
diff --git a/lib/tools/json-pest-parser/src/lib.rs b/lib/tools/json-pest-parser/src/lib.rs
new file mode 100644
index 0000000000000000000000000000000000000000..0b364451902e14813e18f418c2c5630ec9c4729e
--- /dev/null
+++ b/lib/tools/json-pest-parser/src/lib.rs
@@ -0,0 +1,413 @@
+// Copyright (C) 2019 Éloïs SANCHEZ
+//
+// This program is free software: you can redistribute it and/or modify
+// it under the terms of the GNU Affero General Public License as
+// published by the Free Software Foundation, either version 3 of the
+// License, or (at your option) any later version.
+//
+// This program is distributed in the hope that it will be useful,
+// but WITHOUT ANY WARRANTY; without even the implied warranty of
+// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+// GNU Affero General Public License for more details.
+//
+// You should have received a copy of the GNU Affero General Public License
+// along with this program. If not, see <https://www.gnu.org/licenses/>.
+
+//! Parse JSON String.
+
+#![deny(
+ missing_debug_implementations,
+ missing_copy_implementations,
+ trivial_casts,
+ trivial_numeric_casts,
+ unsafe_code,
+ unstable_features,
+ unused_import_braces
+)]
+
+#[macro_use]
+extern crate failure;
+#[macro_use]
+extern crate pest_derive;
+
+#[cfg(test)]
+#[macro_use]
+extern crate pretty_assertions;
+
+use failure::Error;
+use pest::iterators::Pair;
+use pest::Parser;
+use std::collections::HashMap;
+
+#[derive(Parser)]
+#[grammar = "json_grammar.pest"]
+struct JSONParser;
+
+#[derive(Debug, PartialEq)]
+pub enum JSONValue<'a, S: std::hash::BuildHasher> {
+ Object(HashMap<&'a str, JSONValue<'a, S>, S>),
+ Array(Vec<JSONValue<'a, S>>),
+ String(&'a str),
+ Number(f64),
+ Boolean(bool),
+ Null,
+}
+
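+/// Convenience alias for the map type backing `JSONValue::Object`.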
+type JsonObject<'a, S> = HashMap<&'a str, JSONValue<'a, S>, S>;
+
+impl<'a, S: std::hash::BuildHasher> JSONValue<'a, S> {
+ pub fn is_object(&self) -> bool {
+ if let JSONValue::Object(_) = self {
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn to_object(&self) -> Option<&HashMap<&'a str, JSONValue<'a, S>, S>> {
+ if let JSONValue::Object(object) = self {
+ Some(object)
+ } else {
+ None
+ }
+ }
+
+ pub fn is_array(&self) -> bool {
+ if let JSONValue::Array(_) = self {
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn to_array(&self) -> Option<&Vec<JSONValue<'a, S>>> {
+ if let JSONValue::Array(array) = self {
+ Some(array)
+ } else {
+ None
+ }
+ }
+
+ pub fn is_str(&self) -> bool {
+ if let JSONValue::String(_) = self {
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn to_str(&self) -> Option<&'a str> {
+ if let JSONValue::String(string) = self {
+ Some(string)
+ } else {
+ None
+ }
+ }
+
+ pub fn is_number(&self) -> bool {
+ if let JSONValue::Number(_) = self {
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn to_number(&self) -> Option<f64> {
+ if let JSONValue::Number(number) = self {
+ Some(*number)
+ } else {
+ None
+ }
+ }
+
+ pub fn is_bool(&self) -> bool {
+ if let JSONValue::Boolean(_) = self {
+ true
+ } else {
+ false
+ }
+ }
+
+ pub fn to_bool(&self) -> Option<bool> {
+ if let JSONValue::Boolean(boolean) = self {
+ Some(*boolean)
+ } else {
+ None
+ }
+ }
+
+ pub fn is_null(&self) -> bool {
+ if let JSONValue::Null = self {
+ true
+ } else {
+ false
+ }
+ }
+}
+
+impl<'a, S: std::hash::BuildHasher> ToString for JSONValue<'a, S> {
+ fn to_string(&self) -> String {
+ match self {
+ JSONValue::Object(o) => {
+ let contents: Vec<_> = o
+ .iter()
+ .map(|(name, value)| format!("\"{}\":{}", name, value.to_string()))
+ .collect();
+ format!("{{{}}}", contents.join(","))
+ }
+ JSONValue::Array(a) => {
+ let contents: Vec<_> = a.iter().map(Self::to_string).collect();
+ format!("[{}]", contents.join(","))
+ }
+ JSONValue::String(s) => format!("\"{}\"", s),
+ JSONValue::Number(n) => format!("{}", n),
+ JSONValue::Boolean(b) => format!("{}", b),
+ JSONValue::Null => "null".to_owned(),
+ }
+ }
+}
+
+#[derive(Debug, Fail)]
+#[fail(display = "Fail to parse JSON String : {:?}", cause)]
+pub struct ParseJsonError {
+ pub cause: String,
+}
+
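+/// Parse a JSON string using the standard library's default hasher.
+///
+/// Minimal usage sketch:
+///
+/// ```ignore
+/// let json_value = parse_json_string(r#"{"name": "toto"}"#).expect("invalid JSON");
+/// assert!(json_value.is_object());
+/// ```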
+pub fn parse_json_string<'a>(
+ source: &'a str,
+) -> Result<
+ JSONValue<'a, std::hash::BuildHasherDefault<std::collections::hash_map::DefaultHasher>>,
+ ParseJsonError,
+> {
+ parse_json_string_with_specific_hasher::<
+ std::hash::BuildHasherDefault<std::collections::hash_map::DefaultHasher>,
+ >(source)
+}
+
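+/// Parse a JSON string, letting the caller choose the `BuildHasher` used for JSON objects.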
+pub fn parse_json_string_with_specific_hasher<S: std::hash::BuildHasher + Default>(
+ source: &str,
+) -> Result<JSONValue<S>, ParseJsonError> {
+ match JSONParser::parse(Rule::json, source) {
+ Ok(mut pair) => Ok(parse_value(pair.next().unwrap())),
+ Err(pest_error) => Err(ParseJsonError {
+ cause: format!("{:?}", pest_error),
+ }),
+ }
+}
+
+fn parse_value<S: std::hash::BuildHasher + Default>(pair: Pair<Rule>) -> JSONValue<S> {
+ match pair.as_rule() {
+ Rule::object => JSONValue::Object(
+ pair.into_inner()
+ .map(|pair| {
+ let mut inner_rules = pair.into_inner();
+ let name = inner_rules
+ .next()
+ .unwrap()
+ .into_inner()
+ .next()
+ .unwrap()
+ .as_str();
+ let value = parse_value(inner_rules.next().unwrap());
+ (name, value)
+ })
+ .collect(),
+ ),
+ Rule::array => JSONValue::Array(pair.into_inner().map(parse_value).collect()),
+ Rule::string => JSONValue::String(pair.into_inner().next().unwrap().as_str()),
+ Rule::number => JSONValue::Number(pair.as_str().parse().unwrap()),
+ Rule::boolean => JSONValue::Boolean(pair.as_str().parse().unwrap()),
+ Rule::null => JSONValue::Null,
+ Rule::json
+ | Rule::EOI
+ | Rule::pair
+ | Rule::value
+ | Rule::inner_string
+ | Rule::char
+ | Rule::WHITESPACE => unreachable!(),
+ }
+}
+
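+/// Read an optional `usize` field from a parsed JSON object.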
+pub fn get_optional_usize<S: std::hash::BuildHasher>(
+ json_block: &HashMap<&str, JSONValue<S>, S>,
+ field: &str,
+) -> Result