diff --git a/Cargo.lock b/Cargo.lock index de991b0325611281e9ed9d0c4990a8ee086f8aea..f73bbd70320c150054c3a3d0aa04530161277b07 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -168,6 +168,15 @@ dependencies = [ "iovec 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "c2-chacha" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "lazy_static 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)", + "ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "cc" version = "1.0.41" @@ -481,43 +490,39 @@ dependencies = [ ] [[package]] -name = "durs-blockchain" +name = "durs-bc-db-reader" version = "0.3.0-dev" dependencies = [ - "dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", "dubp-block-doc 0.1.0", "dubp-common-doc 0.1.0", "dubp-currency-params 0.2.0", + "dubp-indexes 0.1.0", "dubp-user-docs 0.14.0", "dubp-user-docs-tests-tools 0.1.0", "dup-crypto 0.7.0", "dup-crypto-tests-tools 0.1.0", - "durs-blockchain-dal 0.3.0-dev", + "durs-common-tests-tools 0.1.0", "durs-common-tools 0.2.0", "durs-conf 0.3.0-dev", - "durs-message 0.3.0-dev", + "durs-dbs-tools 0.1.0-a", "durs-module 0.3.0-dev", - "durs-network 0.3.0-dev", - "durs-network-documents 0.4.0", "durs-wot 0.8.0-a0.9", - "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", - "json-pest-parser 0.2.0", + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "maplit 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)", "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", - "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "unwrap 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = "durs-blockchain-dal" +name = "durs-bc-db-writer" version = "0.3.0-dev" dependencies = [ - "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", "dubp-block-doc 0.1.0", "dubp-common-doc 0.1.0", "dubp-currency-params 0.2.0", @@ -526,33 +531,53 @@ dependencies = [ "dubp-user-docs-tests-tools 0.1.0", "dup-crypto 0.7.0", "dup-crypto-tests-tools 0.1.0", - "durs-common-dal 0.1.0-a", + "durs-bc-db-reader 0.3.0-dev", "durs-common-tests-tools 0.1.0", "durs-common-tools 0.2.0", "durs-conf 0.3.0-dev", + "durs-dbs-tools 0.1.0-a", "durs-module 0.3.0-dev", "durs-wot 0.8.0-a0.9", "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "rkv 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", "serde_derive 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", - "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", "unwrap 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] [[package]] -name = 
"durs-common-dal" -version = "0.1.0-a" +name = "durs-blockchain" +version = "0.3.0-dev" dependencies = [ - "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dirs 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)", + "dubp-block-doc 0.1.0", + "dubp-common-doc 0.1.0", + "dubp-currency-params 0.2.0", + "dubp-user-docs 0.14.0", + "dubp-user-docs-tests-tools 0.1.0", + "dup-crypto 0.7.0", + "dup-crypto-tests-tools 0.1.0", + "durs-bc-db-reader 0.3.0-dev", + "durs-bc-db-writer 0.3.0-dev", "durs-common-tools 0.2.0", - "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "durs-conf 0.3.0-dev", + "durs-message 0.3.0-dev", + "durs-module 0.3.0-dev", + "durs-network 0.3.0-dev", + "durs-network-documents 0.4.0", + "durs-wot 0.8.0-a0.9", + "failure 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)", + "json-pest-parser 0.2.0", "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", - "rkv 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)", - "rustbreak 2.0.0-rc3 (registry+https://github.com/rust-lang/crates.io-index)", + "num_cpus 1.10.1 (registry+https://github.com/rust-lang/crates.io-index)", + "pbr 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)", + "rayon 1.0.3 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "threadpool 1.7.1 (registry+https://github.com/rust-lang/crates.io-index)", "unwrap 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -615,6 +640,21 @@ dependencies = [ "unwrap 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "durs-dbs-tools" +version = "0.1.0-a" +dependencies = [ + "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "durs-common-tools 0.2.0", + "fnv 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", + "rkv 0.9.7 (registry+https://github.com/rust-lang/crates.io-index)", + "rustbreak 2.0.0-rc3 (registry+https://github.com/rust-lang/crates.io-index)", + "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", + "tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)", + "unwrap 1.2.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "durs-message" version = "0.3.0-dev" @@ -624,7 +664,6 @@ dependencies = [ "dubp-currency-params 0.2.0", "dubp-user-docs 0.14.0", "dup-crypto 0.7.0", - "durs-blockchain-dal 0.3.0-dev", "durs-module 0.3.0-dev", "durs-network 0.3.0-dev", "durs-network-documents 0.4.0", @@ -739,7 +778,9 @@ name = "durs-wot" version = "0.8.0-a0.9" dependencies = [ "bincode 1.0.1 (registry+https://github.com/rust-lang/crates.io-index)", + "dup-crypto 0.7.0", "durs-common-tools 0.2.0", + "log 0.4.8 (registry+https://github.com/rust-lang/crates.io-index)", "rayon 1.2.0 (registry+https://github.com/rust-lang/crates.io-index)", "serde 1.0.99 (registry+https://github.com/rust-lang/crates.io-index)", ] @@ -932,6 +973,16 @@ dependencies = [ "typenum 1.11.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "getrandom" +version = "0.1.11" +source = "registry+https://github.com/rust-lang/crates.io-index" 
+dependencies = [ + "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "wasi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "half" version = "1.3.0" @@ -1371,6 +1422,11 @@ dependencies = [ "serde_json 1.0.40 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "ppv-lite86" +version = "0.2.5" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "pretty_assertions" version = "0.5.1" @@ -1482,6 +1538,18 @@ dependencies = [ "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rand" +version = "0.7.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "getrandom 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand_chacha" version = "0.1.1" @@ -1491,6 +1559,15 @@ dependencies = [ "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rand_chacha" +version = "0.2.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)", + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand_core" version = "0.3.1" @@ -1504,6 +1581,14 @@ name = "rand_core" version = "0.4.2" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "rand_core" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "getrandom 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand_hc" version = "0.1.0" @@ -1512,6 +1597,14 @@ dependencies = [ "rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "rand_hc" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "rand_isaac" version = "0.1.1" @@ -1959,6 +2052,19 @@ dependencies = [ "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", ] +[[package]] +name = "tempfile" +version = "3.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +dependencies = [ + "cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)", + "libc 0.2.62 (registry+https://github.com/rust-lang/crates.io-index)", + "rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)", + "redox_syscall 0.1.56 (registry+https://github.com/rust-lang/crates.io-index)", + "remove_dir_all 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)", + "winapi 0.3.8 (registry+https://github.com/rust-lang/crates.io-index)", +] + [[package]] name = "term" version = "0.5.2" @@ -2129,6 +2235,11 @@ name = "version_check" version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" +[[package]] +name = "wasi" +version = "0.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" + [[package]] name = "wasm-bindgen" version = 
"0.2.50" @@ -2302,6 +2413,7 @@ dependencies = [ "checksum byte-tools 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "e3b5ca7a04898ad4bcd41c90c5285445ff5b791899bb1b0abdd2a2aa791211d7" "checksum byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "94f88df23a25417badc922ab0f5716cc1330e87f71ddd9203b3a3ccd9cedf75d" "checksum bytes 0.4.12 (registry+https://github.com/rust-lang/crates.io-index)" = "206fdffcfa2df7cbe15601ef46c813fce0965eb3286db6b56c583b814b51c81c" +"checksum c2-chacha 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)" = "7d64d04786e0f528460fc884753cf8dddcc466be308f6026f8e355c41a0e4101" "checksum cc 1.0.41 (registry+https://github.com/rust-lang/crates.io-index)" = "8dae9c4b8fedcae85592ba623c4fd08cfdab3e3b72d6df780c6ead964a69bfff" "checksum cfg-if 0.1.9 (registry+https://github.com/rust-lang/crates.io-index)" = "b486ce3ccf7ffd79fdeb678eac06a9e6c09fc88d33836340becb8fffe87c5e33" "checksum chacha20-poly1305-aead 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)" = "77d2058ba29594f69c75e8a9018e0485e3914ca5084e3613cd64529042f5423b" @@ -2335,6 +2447,7 @@ dependencies = [ "checksum fuchsia-zircon 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "2e9763c69ebaae630ba35f74888db465e49e259ba1bc0eda7d06f4a067615d82" "checksum fuchsia-zircon-sys 0.3.3 (registry+https://github.com/rust-lang/crates.io-index)" = "3dcaa9ae7725d12cdb85b3ad99a434db70b468c09ded17e012d86b5c1010f7a7" "checksum generic-array 0.12.3 (registry+https://github.com/rust-lang/crates.io-index)" = "c68f0274ae0e023facc3c97b2e00f076be70e254bc851d972503b328db79b2ec" +"checksum getrandom 0.1.11 (registry+https://github.com/rust-lang/crates.io-index)" = "fc344b02d3868feb131e8b5fe2b9b0a1cc42942679af493061fc13b853243872" "checksum half 1.3.0 (registry+https://github.com/rust-lang/crates.io-index)" = "9353c2a89d550b58fa0061d8ed8d002a7d8cdf2494eb0e432859bd3a9e543836" "checksum heck 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "20564e78d53d2bb135c343b3f47714a56af2061f1c928fdb541dc7b9fdd94205" "checksum hex 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)" = "805026a5d0141ffc30abb3be3173848ad46a1b1664fe632428479619a3644d77" @@ -2384,6 +2497,7 @@ dependencies = [ "checksum pest_generator 2.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "63120576c4efd69615b5537d3d052257328a4ca82876771d6944424ccfd9f646" "checksum pest_meta 2.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "f249ea6de7c7b7aba92b4ff4376a994c6dbd98fd2166c89d5c4947397ecb574d" "checksum pkg-config 0.3.15 (registry+https://github.com/rust-lang/crates.io-index)" = "a7c1d2cfa5a714db3b5f24f0915e74fcdf91d09d496ba61329705dda7774d2af" +"checksum ppv-lite86 0.2.5 (registry+https://github.com/rust-lang/crates.io-index)" = "e3cbf9f658cdb5000fcf6f362b8ea2ba154b9f146a61c7a20d647034c6b6561b" "checksum pretty_assertions 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3a029430f0d744bc3d15dd474d591bed2402b645d024583082b9f63bb936dac6" "checksum pretty_assertions 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)" = "3f81e1644e1b54f5a68959a29aa86cde704219254669da328ecfdf6a1f09d427" "checksum prettytable-rs 0.8.0 (registry+https://github.com/rust-lang/crates.io-index)" = "0fd04b170004fa2daccf418a7f8253aaf033c27760b5f225889024cf66d7ac2e" @@ -2395,10 +2509,14 @@ dependencies = [ "checksum quote 1.0.2 (registry+https://github.com/rust-lang/crates.io-index)" = "053a8c8bcc71fcce321828dc897a98ab9760bef03a4fc36693c231e5b3216cfe" "checksum 
rand 0.4.6 (registry+https://github.com/rust-lang/crates.io-index)" = "552840b97013b1a26992c11eac34bdd778e464601a4c2054b5f0bff7c6761293" "checksum rand 0.6.5 (registry+https://github.com/rust-lang/crates.io-index)" = "6d71dacdc3c88c1fde3885a3be3fbab9f35724e6ce99467f7d9c5026132184ca" +"checksum rand 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d47eab0e83d9693d40f825f86948aa16eff6750ead4bdffc4ab95b8b3a7f052c" "checksum rand_chacha 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "556d3a1ca6600bfcbab7c7c91ccb085ac7fbbcd70e008a98742e7847f4f7bcef" +"checksum rand_chacha 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "03a2a90da8c7523f554344f921aa97283eadf6ac484a6d2a7d0212fa7f8d6853" "checksum rand_core 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6fdeb83b075e8266dcc8762c22776f6877a63111121f5f8c7411e5be7eed4b" "checksum rand_core 0.4.2 (registry+https://github.com/rust-lang/crates.io-index)" = "9c33a3c44ca05fa6f1807d8e6743f3824e8509beca625669633be0acbdf509dc" +"checksum rand_core 0.5.1 (registry+https://github.com/rust-lang/crates.io-index)" = "90bde5296fc891b0cef12a6d03ddccc162ce7b2aff54160af9338f8d40df6d19" "checksum rand_hc 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7b40677c7be09ae76218dc623efbf7b18e34bced3f38883af07bb75630a21bc4" +"checksum rand_hc 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "ca3129af7b92a17112d59ad498c6f81eaf463253766b90396d39ea7a39d6613c" "checksum rand_isaac 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)" = "ded997c9d5f13925be2a6fd7e66bf1872597f759fd9dd93513dd7e92e5a5ee08" "checksum rand_jitter 0.1.4 (registry+https://github.com/rust-lang/crates.io-index)" = "1166d5c91dc97b88d1decc3285bb0a99ed84b05cfd0bc2341bdf2d43fc41e39b" "checksum rand_os 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "7b75f676a1e053fc562eafbb47838d67c84801e38fc1ba459e8f180deabd5071" @@ -2449,6 +2567,7 @@ dependencies = [ "checksum syn 1.0.5 (registry+https://github.com/rust-lang/crates.io-index)" = "66850e97125af79138385e9b88339cbcd037e3f28ceab8c5ad98e64f0f1f80bf" "checksum synstructure 0.10.2 (registry+https://github.com/rust-lang/crates.io-index)" = "02353edf96d6e4dc81aea2d8490a7e9db177bf8acb0e951c24940bf866cb313f" "checksum tempdir 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "15f2b5fb00ccdf689e0149d1b1b3c03fead81c2b37735d812fa8bddbbf41b6d8" +"checksum tempfile 3.1.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a6e24d9338a0a5be79593e2fa15a648add6138caa803e2d5bc782c371732ca9" "checksum term 0.5.2 (registry+https://github.com/rust-lang/crates.io-index)" = "edd106a334b7657c10b7c540a0106114feadeb4dc314513e97df481d5d966f42" "checksum termcolor 0.3.6 (registry+https://github.com/rust-lang/crates.io-index)" = "adc4587ead41bf016f11af03e55a624c06568b5a19db4e90fde573d805074f83" "checksum termion 1.5.3 (registry+https://github.com/rust-lang/crates.io-index)" = "6a8fb22f7cde82c8220e5aeacb3258ed7ce996142c77cba193f203515e26c330" @@ -2473,6 +2592,7 @@ dependencies = [ "checksum vcpkg 0.2.7 (registry+https://github.com/rust-lang/crates.io-index)" = "33dd455d0f96e90a75803cfeb7f948768c08d70a6de9a8d2362461935698bf95" "checksum vec_map 0.8.1 (registry+https://github.com/rust-lang/crates.io-index)" = "05c78687fb1a80548ae3250346c3db86a80a7cdd77bda190189f2d0a0987c81a" "checksum version_check 0.1.5 (registry+https://github.com/rust-lang/crates.io-index)" = "914b1a6776c4c929a602fafd8bc742e06365d4bcbe48c30f9cca5824f70dc9dd" +"checksum 
wasi 0.5.0 (registry+https://github.com/rust-lang/crates.io-index)" = "fd5442abcac6525a045cc8c795aedb60da7a2e5e89c7bf18a0d5357849bb23c7" "checksum wasm-bindgen 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "dcddca308b16cd93c2b67b126c688e5467e4ef2e28200dc7dfe4ae284f2faefc" "checksum wasm-bindgen-backend 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "f805d9328b5fc7e5c6399960fd1889271b9b58ae17bdb2417472156cc9fafdd0" "checksum wasm-bindgen-macro 0.2.50 (registry+https://github.com/rust-lang/crates.io-index)" = "3ff88201a482abfc63921621f6cb18eb1efd74f136b05e5841e7f8ca434539e9" diff --git a/Cargo.toml b/Cargo.toml index bc8bd99bc875c6f7075a950bb05aed66fabc923b..6298c974568d0ccc30b4821176069c26e34a5444 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -14,8 +14,9 @@ members = [ "lib/dubp/user-docs", "lib/dubp/wot", "lib/dunp/network-documents", + "lib/modules-lib/bc-db-reader", "lib/modules/blockchain/blockchain", - "lib/modules/blockchain/blockchain-dal", + "lib/modules/blockchain/bc-db-writer", "lib/modules/skeleton", "lib/modules/tui", "lib/modules/ws2p-v1-legacy", diff --git a/lib/core/core/src/router.rs b/lib/core/core/src/router.rs index 705f0137f8d083d5e75e945799bb558e5a43c479..acba54faf64e3b9e06c98bc6eec6204248f6acb1 100644 --- a/lib/core/core/src/router.rs +++ b/lib/core/core/src/router.rs @@ -413,7 +413,7 @@ pub fn start_router( trace!("Router thread receive ModuleMessage({:?})", msg); match msg { DursMsg::Stop => { - info!("TMP: Router: RECEIVE STOP MESSAGE !"); + info!("Router: RECEIVE STOP MESSAGE."); // Relay stop signal to broadcasting thread broadcasting_sender .send(RouterThreadMessage::ModuleMessage(msg)) diff --git a/lib/core/message/Cargo.toml b/lib/core/message/Cargo.toml index 04ed969b07be3014011754e09cbd0b112c5b9567..96c9a980002b11cf0ee066691eb6f7cc01d28e0e 100644 --- a/lib/core/message/Cargo.toml +++ b/lib/core/message/Cargo.toml @@ -17,7 +17,6 @@ dubp-user-docs= { path = "../../dubp/user-docs" } dup-crypto = { path = "../../crypto" } durs-module = { path = "../module" } durs-network = { path = "../network" } -durs-blockchain-dal = { path = "../../modules/blockchain/blockchain-dal" } durs-network-documents = { path = "../../dunp/network-documents" } serde = "1.0.*" serde_derive = "1.0.*" diff --git a/lib/core/message/src/requests.rs b/lib/core/message/src/requests.rs index 96411b6a5e6ce4c21dfb97d5f24703451d28ff0d..c598dc39361e6fd301bf3f1a0c984b40ebd47391 100644 --- a/lib/core/message/src/requests.rs +++ b/lib/core/message/src/requests.rs @@ -17,7 +17,6 @@ use crate::*; use dubp_common_doc::BlockNumber; use dup_crypto::hashs::Hash; use dup_crypto::keys::*; -use durs_blockchain_dal::filters::identities::IdentitiesFilter; use durs_network::requests::OldNetworkRequest; #[derive(Debug, Clone)] @@ -58,8 +57,6 @@ pub enum BlockchainRequest { }, /// Usernames corresponding to the public keys in parameter UIDs(Vec<PubKey>), - /// Get identities - GetIdentities(IdentitiesFilter), } #[derive(Debug, Copy, Clone)] diff --git a/lib/dubp/common-doc/src/blockstamp.rs b/lib/dubp/common-doc/src/blockstamp.rs index 9ca3cc8cdbd7d749fbdf9299a3cdbc904e880c13..3f87b514053e38da765ceedb0f359076cfb860c0 100644 --- a/lib/dubp/common-doc/src/blockstamp.rs +++ b/lib/dubp/common-doc/src/blockstamp.rs @@ -28,6 +28,9 @@ use std::fmt::{Debug, Display, Error, Formatter}; /// [`Blockstamp`]: struct.Blockstamp.html #[derive(Debug, Copy, Clone, PartialEq, Eq, Fail)] pub enum BlockstampParseError { + /// Given string have invalid format + #[fail(display = "Given bytes with 
invalid length")] + InvalidLen, /// Given string have invalid format #[fail(display = "Given string have invalid format")] InvalidFormat(), @@ -72,6 +75,15 @@ impl Blockstamp { pub const SIZE_IN_BYTES: usize = 36; } +impl Into<Vec<u8>> for Blockstamp { + fn into(self) -> Vec<u8> { + let mut bytes = Vec::with_capacity(Self::SIZE_IN_BYTES); + bytes.append(&mut self.id.0.to_be_bytes().to_vec()); + bytes.append(&mut (self.hash.0).0.to_vec()); + bytes + } +} + impl Display for Blockstamp { fn fmt(&self, f: &mut Formatter) -> Result<(), Error> { write!(f, "{}-{}", self.id, self.hash) @@ -127,6 +139,22 @@ impl From<std::io::Error> for ReadBytesBlockstampError { } impl Blockstamp { + /// Create a `Blockstamp` from bytes. + pub fn from_bytes(src: &[u8]) -> Result<Blockstamp, BlockstampParseError> { + if src.len() != Blockstamp::SIZE_IN_BYTES { + Err(BlockstampParseError::InvalidLen) + } else { + let mut id_bytes = [0u8; 4]; + id_bytes.copy_from_slice(&src[..4]); + let mut hash_bytes = [0u8; 32]; + hash_bytes.copy_from_slice(&src[4..]); + Ok(Blockstamp { + id: BlockNumber(u32::from_be_bytes(id_bytes)), + hash: BlockHash(Hash(hash_bytes)), + }) + } + } + /// Create a `Blockstamp` from a text. pub fn from_string(src: &str) -> Result<Blockstamp, BlockstampParseError> { let mut split = src.split('-'); diff --git a/lib/dubp/wot/Cargo.toml b/lib/dubp/wot/Cargo.toml index 861e1ae4e712bee5a37e54bdcecf3cfb216ac3cc..a0a9ba81e6911fcecef424cb61f820b1f523b7cd 100644 --- a/lib/dubp/wot/Cargo.toml +++ b/lib/dubp/wot/Cargo.toml @@ -13,11 +13,13 @@ edition = "2018" path = "lib.rs" [dependencies] +dup-crypto = { path = "../../crypto", version = "0.7.0" } +durs-common-tools = { path = "../../tools/common-tools", version = "0.2.0" } +log = "0.4.*" rayon = "1.2.0" serde = { version = "1.0.*", features = ["derive"] } [dev-dependencies] bincode = "1.0.*" -durs-common-tools = { path = "../../tools/common-tools", version = "0.2.0" } [features] \ No newline at end of file diff --git a/lib/dubp/wot/operations/density.rs b/lib/dubp/wot/operations/density.rs new file mode 100644 index 0000000000000000000000000000000000000000..f77def8a09d62584f98d1ff3a431d90e74a44e8a --- /dev/null +++ b/lib/dubp/wot/operations/density.rs @@ -0,0 +1,33 @@ +// Copyright (C) 2017-2019 The AXIOM TEAM Association. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see <https://www.gnu.org/licenses/>. + +//! Provide function to compute average density. 
+ +use crate::data::WebOfTrust; +use durs_common_tools::fatal_error; +use log::error; + +/// Compute average density +pub fn calculate_average_density<T: WebOfTrust>(wot: &T) -> usize { + let enabled_members = wot.get_enabled(); + let enabled_members_count = enabled_members.len(); + let mut count_actives_links: usize = 0; + for member in &enabled_members { + count_actives_links += wot + .issued_count(*member) + .unwrap_or_else(|| fatal_error!("Fail to get issued_count of wot_id {}", (*member).0)); + } + ((count_actives_links as f32 / enabled_members_count as f32) * 1_000.0) as usize +} diff --git a/lib/dubp/wot/operations/distance.rs b/lib/dubp/wot/operations/distance.rs index 4fb1dd12932b785766c48b7d82504c8852fcde8f..5c6ccb5fc800b855452c6a1299d45c80f05f5658 100644 --- a/lib/dubp/wot/operations/distance.rs +++ b/lib/dubp/wot/operations/distance.rs @@ -56,6 +56,15 @@ pub trait DistanceCalculator<T: WebOfTrust> { /// Returns `None` if this node doesn't exist. fn compute_distance(&self, wot: &T, params: WotDistanceParameters) -> Option<WotDistance>; + /// Compute distances of all members + fn compute_distances( + &self, + wot: &T, + sentry_requirement: u32, + step_max: u32, + x_percent: f64, + ) -> (usize, Vec<usize>, usize, Vec<usize>); + /// Test if a node is outdistanced in the network. /// Returns `Node` if this node doesn't exist. fn is_outdistanced(&self, wot: &T, params: WotDistanceParameters) -> Option<bool>; @@ -125,4 +134,50 @@ impl<T: WebOfTrust + Sync> DistanceCalculator<T> for RustyDistanceCalculator { fn is_outdistanced(&self, wot: &T, params: WotDistanceParameters) -> Option<bool> { Self::compute_distance(&self, wot, params).map(|result| result.outdistanced) } + + fn compute_distances( + &self, + wot: &T, + sentry_requirement: u32, + step_max: u32, + x_percent: f64, + ) -> (usize, Vec<usize>, usize, Vec<usize>) { + let members_count = wot.get_enabled().len(); + let mut distances = Vec::new(); + let mut average_distance: usize = 0; + let mut connectivities = Vec::new(); + let mut average_connectivity: usize = 0; + for i in 0..wot.size() { + let distance_datas: WotDistance = Self::compute_distance( + &self, + wot, + WotDistanceParameters { + node: WotId(i), + sentry_requirement, + step_max, + x_percent, + }, + ) + .expect("Fatal Error: compute_distance return None !"); + let distance = ((f64::from(distance_datas.success) + / (x_percent * f64::from(distance_datas.sentries))) + * 100.0) as usize; + distances.push(distance); + average_distance += distance; + let connectivity = + ((f64::from(distance_datas.success - distance_datas.success_at_border) + / (x_percent * f64::from(distance_datas.sentries))) + * 100.0) as usize; + connectivities.push(connectivity); + average_connectivity += connectivity; + } + average_distance /= members_count; + average_connectivity /= members_count; + ( + average_distance, + distances, + average_connectivity, + connectivities, + ) + } } diff --git a/lib/dubp/wot/operations/mod.rs b/lib/dubp/wot/operations/mod.rs index cbb938ee7381e0f6cb5c1f21cacbb0f863e184c5..376ad1c281812d82094c2a66810bedaf351051ee 100644 --- a/lib/dubp/wot/operations/mod.rs +++ b/lib/dubp/wot/operations/mod.rs @@ -16,5 +16,6 @@ //! Provide operation traits and implementations on `WebOfTrust` objects. 
pub mod centrality; +pub mod density; pub mod distance; pub mod path; diff --git a/lib/dunp/network-documents/src/network_documents.pest b/lib/dunp/network-documents/src/network_documents.pest index 4b0d72b878d66da4f00da5e83f6926470732b5a7..83ec90488d7a315f33f42c722f2d2e026f1c9f61 100644 --- a/lib/dunp/network-documents/src/network_documents.pest +++ b/lib/dunp/network-documents/src/network_documents.pest @@ -29,7 +29,7 @@ host = @{ ASCII_ALPHA_LOWER ~ (alphanum_lower | "-" | "_" | ".")* } port = @{ u_int } path_inner = @{ (ASCII_ALPHANUMERIC | "-" | "_" | ".")+ } -endpoint_v1 = ${ api_name ~ (" " ~ node_id)? ~ " " ~ host ~ " " ~ port ~ (" " ~ "/"? ~ path_inner)? ~ " "? } +endpoint_v1 = ${ api_name ~ (" " ~ node_id)? ~ " " ~ (ip4 | ip6 | host) ~ " " ~ port ~ (" " ~ "/"? ~ path_inner)? ~ " "? } // IP v6 rules ip6_seg = _{ hexa_lower{1,4} } diff --git a/lib/modules-lib/bc-db-reader/Cargo.toml b/lib/modules-lib/bc-db-reader/Cargo.toml new file mode 100644 index 0000000000000000000000000000000000000000..20bbab46c05a8d09158222f415d5e32caee36e27 --- /dev/null +++ b/lib/modules-lib/bc-db-reader/Cargo.toml @@ -0,0 +1,36 @@ +[package] +name = "durs-bc-db-reader" +version = "0.3.0-dev" +authors = ["librelois <elois@ifee.fr>"] +description = "Reader for Dunitrust Blockchain Database." +license = "AGPL-3.0" +edition = "2018" + +[lib] +path = "src/lib.rs" + +[dependencies] +dubp-block-doc = { path = "../../dubp/block-doc"} #, version = "0.1.0" } +dubp-common-doc = { path = "../../dubp/common-doc"} #, version = "0.1.0" } +dubp-indexes = { path = "../../dubp/indexes"} #, version = "0.1.0" } +dup-crypto = { path = "../../crypto" } +dubp-currency-params = { path = "../../dubp/currency-params" } +dubp-user-docs= { path = "../../dubp/user-docs" } +durs-conf = { path = "../../core/conf" } +durs-module = { path = "../../core/module" } +durs-common-tools = { path = "../../tools/common-tools" } +durs-dbs-tools = { path = "../../tools/dbs-tools" } +durs-wot = { path = "../../dubp/wot" } +log = "0.4.*" +maplit = "1.0.1" +fnv = "1.0.6" +serde = { version = "1.0.*", features = ["derive"] } +unwrap = "1.2.1" + +[dev-dependencies] +dup-crypto-tests-tools = { path = "../../tests-tools/crypto-tests-tools" } +dubp-user-docs-tests-tools = { path = "../../tests-tools/user-docs-tests-tools" } +durs-common-tests-tools = { path = "../../tests-tools/common-tests-tools" } +tempfile = "3.1.0" + +[features] diff --git a/lib/modules-lib/bc-db-reader/src/constants.rs b/lib/modules-lib/bc-db-reader/src/constants.rs new file mode 100644 index 0000000000000000000000000000000000000000..a7aecb4ca7f6ea3f54fd144c1af257ab46421c1f --- /dev/null +++ b/lib/modules-lib/bc-db-reader/src/constants.rs @@ -0,0 +1,38 @@ +// Copyright (C) 2017-2019 The AXIOM TEAM Association. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see <https://www.gnu.org/licenses/>. + +//! Define BlockChain database constants needed for read operations. 
+ +/// Default page size for request responses +pub static DEFAULT_PAGE_SIZE: &usize = &50; + +//////////////////////////////// +// BLOCKCHAIN DATABASE STORES // +//////////////////////////////// + +/// Current metadata (CurrentMetaDataKey, ?) +pub static CURRENT_METAS_DATAS: &str = "cmd"; + +/// Fork blocks referenced in tree or in orphan blockstamps (Blockstamp, DbBlock) +pub static FORK_BLOCKS: &str = "fb"; + +/// Blocks in main branch (BlockNumber, DbBlock) +pub static MAIN_BLOCKS: &str = "bc"; + +/// Orphan blockstamps (no parent block) indexed by their previous blockstamp (PreviousBlockstamp, Vec<Blockstamp>) +pub static ORPHAN_BLOCKSTAMP: &str = "ob"; + +/// Identities (PubKey, DbIdentity) +pub static IDENTITIES: &str = "idty"; diff --git a/lib/modules/blockchain/blockchain-dal/src/entities/mod.rs b/lib/modules-lib/bc-db-reader/src/entities.rs similarity index 93% rename from lib/modules/blockchain/blockchain-dal/src/entities/mod.rs rename to lib/modules-lib/bc-db-reader/src/entities.rs index 3b96d4d91d8cebddaf55f69046ccfe3f83e44b8e..1eff00ee87e3dfbfc43f7be82569a8a10148ea41 100644 --- a/lib/modules/blockchain/blockchain-dal/src/entities/mod.rs +++ b/lib/modules-lib/bc-db-reader/src/entities.rs @@ -13,9 +13,13 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. +//! Define database entities. + /// Block pub mod block; +pub mod current_meta_datas; + /// Forks tree pub mod fork_tree; diff --git a/lib/modules/blockchain/blockchain-dal/src/entities/block.rs b/lib/modules-lib/bc-db-reader/src/entities/block.rs similarity index 91% rename from lib/modules/blockchain/blockchain-dal/src/entities/block.rs rename to lib/modules-lib/bc-db-reader/src/entities/block.rs index 67e27062cabcf6b1af837dd35566c3d682bcdbd8..eaf40ea416cb3ec1f1a3acb740f200ad70026ec1 100644 --- a/lib/modules/blockchain/blockchain-dal/src/entities/block.rs +++ b/lib/modules-lib/bc-db-reader/src/entities/block.rs @@ -13,16 +13,16 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. -use crate::*; use dubp_block_doc::block::{BlockDocument, BlockDocumentTrait}; use dubp_common_doc::traits::Document; -use dubp_common_doc::BlockNumber; +use dubp_common_doc::{BlockNumber, Blockstamp, PreviousBlockstamp}; use durs_wot::WotId; +use serde::{Deserialize, Serialize}; use std::collections::HashMap; #[derive(Clone, Debug, Deserialize, Serialize)] /// A block as it is saved in a database -pub struct DALBlock { +pub struct DbBlock { /// Block document pub block: BlockDocument, /// List of certifications that expire in this block.
@@ -31,7 +31,7 @@ pub struct DALBlock { pub expire_certs: Option<HashMap<(WotId, WotId), BlockNumber>>, } -impl DALBlock { +impl DbBlock { /// Get blockstamp pub fn blockstamp(&self) -> Blockstamp { self.block.blockstamp() diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/fork_tree.rs b/lib/modules-lib/bc-db-reader/src/entities/current_meta_datas.rs similarity index 53% rename from lib/modules/blockchain/blockchain-dal/src/readers/fork_tree.rs rename to lib/modules-lib/bc-db-reader/src/entities/current_meta_datas.rs index 37c448f6cc272819d30d08fe8b6334b0cb2e9793..d479f0bdf5feb8a8dff012ba7a8ec4a823b9b7b0 100644 --- a/lib/modules/blockchain/blockchain-dal/src/readers/fork_tree.rs +++ b/lib/modules-lib/bc-db-reader/src/entities/current_meta_datas.rs @@ -13,27 +13,32 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. -use crate::*; -use dubp_common_doc::Blockstamp; +//! Describe current metadata -/// get current blockstamp -pub fn get_current_blockstamp(forks_dbs: &ForksDBs) -> Result<Option<Blockstamp>, DALError> { - Ok(forks_dbs - .fork_tree_db - .read(|fork_tree| fork_tree.get_current_blockstamp())?) +#[derive(Clone, Copy, Debug)] +/// Current metadata key +pub enum CurrentMetaDataKey { + /// Version of the database structure + DbVersion, + /// Currency name + CurrencyName, + /// Current blockstamp + CurrentBlockstamp, + /// Current "blockchain" time + CurrentBlockchainTime, + /// Fork tree + ForkTree, } -/// Get stackables blocks -pub fn get_stackables_blocks( - forks_dbs: &ForksDBs, - current_blockstamp: &Blockstamp, -) -> Result<Vec<DALBlock>, DALError> { - if let Some(stackables_blocks) = forks_dbs - .orphan_blocks_db - .read(|db| db.get(&current_blockstamp).cloned())? - { - Ok(stackables_blocks) - } else { - Ok(vec![]) +impl CurrentMetaDataKey { + /// To u32 + pub fn to_u32(self) -> u32 { + match self { + Self::DbVersion => 0, + Self::CurrencyName => 1, + Self::CurrentBlockstamp => 2, + Self::CurrentBlockchainTime => 3, + Self::ForkTree => 4, + } } } diff --git a/lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs b/lib/modules-lib/bc-db-reader/src/entities/fork_tree.rs similarity index 98% rename from lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs rename to lib/modules-lib/bc-db-reader/src/entities/fork_tree.rs index 2838c59feaf2630e9ce11fbc9acfa545ecd2d53e..ba2bd95c8bba0e5aa5fd0994a931a8cc04e58671 100644 --- a/lib/modules/blockchain/blockchain-dal/src/entities/fork_tree.rs +++ b/lib/modules-lib/bc-db-reader/src/entities/fork_tree.rs @@ -16,8 +16,9 @@ //!
Describe fork tree use dubp_common_doc::{BlockHash, BlockNumber, Blockstamp}; -use serde::de::{self, Deserialize, Deserializer, Visitor}; -use serde::{Serialize, Serializer}; +use log::error; +use serde::de::{self, Deserializer, Visitor}; +use serde::{Deserialize, Serialize, Serializer}; use std::collections::{HashMap, HashSet}; use std::fmt; @@ -112,7 +113,6 @@ impl TreeNode { #[derive(Debug, Clone, Serialize, Deserialize)] /// Tree store all forks branchs pub struct ForkTree { - current_blockstamp: Option<Blockstamp>, main_branch: HashMap<BlockNumber, TreeNodeId>, max_depth: usize, nodes: Vec<Option<TreeNode>>, @@ -133,7 +133,6 @@ impl ForkTree { #[inline] pub fn new(max_depth: usize) -> Self { ForkTree { - current_blockstamp: None, main_branch: HashMap::with_capacity(max_depth + 1), max_depth, nodes: Vec::with_capacity(max_depth * 2), @@ -142,11 +141,6 @@ impl ForkTree { sheets: HashSet::new(), } } - /// Get tree size - #[inline] - pub fn get_current_blockstamp(&self) -> Option<Blockstamp> { - self.current_blockstamp - } /// Set max depth #[inline] pub fn set_max_depth(&mut self, max_depth: usize) { @@ -364,9 +358,6 @@ impl ForkTree { self.pruning(); } } - - // Update current blockstamp - self.current_blockstamp = Some(new_current_blockstamp); } /// Find node with specific blockstamp pub fn find_node_with_blockstamp(&self, blockstamp: &Blockstamp) -> Option<TreeNodeId> { @@ -419,7 +410,6 @@ impl ForkTree { self.removed_blockstamps.clear(); if main_branch { self.main_branch.insert(data.id, new_node_id); - self.current_blockstamp = Some(data); if self.main_branch.len() > self.max_depth { self.pruning(); } diff --git a/lib/modules/blockchain/blockchain-dal/src/entities/identity.rs b/lib/modules-lib/bc-db-reader/src/entities/identity.rs similarity index 94% rename from lib/modules/blockchain/blockchain-dal/src/entities/identity.rs rename to lib/modules-lib/bc-db-reader/src/entities/identity.rs index 934002ffea1942edb9c3c184820cadc7cc762423..f15e245aa14b4ff33f907ae16c6108ec32f7ddd1 100644 --- a/lib/modules/blockchain/blockchain-dal/src/entities/identity.rs +++ b/lib/modules-lib/bc-db-reader/src/entities/identity.rs @@ -16,10 +16,11 @@ use dubp_common_doc::{BlockNumber, Blockstamp}; use dubp_user_docs::documents::identity::IdentityDocumentV10; use durs_wot::WotId; +use serde::{Deserialize, Serialize}; #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Hash)] /// Identity state -pub enum DALIdentityState { +pub enum DbIdentityState { /// Member Member(Vec<usize>), /// Expire Member @@ -34,11 +35,11 @@ pub enum DALIdentityState { #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq, Hash)] /// Identity in database -pub struct DALIdentity { +pub struct DbIdentity { /// Identity hash pub hash: String, /// Identity state - pub state: DALIdentityState, + pub state: DbIdentityState, /// Blockstamp the identity was written pub joined_on: Blockstamp, /// Blockstamp the identity was expired diff --git a/lib/modules/blockchain/blockchain-dal/src/entities/sources.rs b/lib/modules-lib/bc-db-reader/src/entities/sources.rs similarity index 98% rename from lib/modules/blockchain/blockchain-dal/src/entities/sources.rs rename to lib/modules-lib/bc-db-reader/src/entities/sources.rs index ca2e642088a6893eeefe4857f646d6aa7236596f..fff15894960ebfc01545c5913233fffe0cc925da 100644 --- a/lib/modules/blockchain/blockchain-dal/src/entities/sources.rs +++ b/lib/modules-lib/bc-db-reader/src/entities/sources.rs @@ -16,6 +16,8 @@ use dubp_indexes::sindex::UniqueIdUTXOv10; use 
dubp_user_docs::documents::transaction::*; use durs_common_tools::fatal_error; +use log::error; +use serde::{Deserialize, Serialize}; use std::cmp::Ordering; use std::ops::{Add, Sub}; diff --git a/lib/modules/blockchain/blockchain-dal/src/filters/mod.rs b/lib/modules-lib/bc-db-reader/src/filters.rs similarity index 96% rename from lib/modules/blockchain/blockchain-dal/src/filters/mod.rs rename to lib/modules-lib/bc-db-reader/src/filters.rs index ff4bee88904f8c9974bb7e7d33d83c20d1e39884..47bd2513303a3ddb86b3a43e574a07dfc3e41258 100644 --- a/lib/modules/blockchain/blockchain-dal/src/filters/mod.rs +++ b/lib/modules-lib/bc-db-reader/src/filters.rs @@ -13,6 +13,8 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. +//! Define all filters applicable to database entities. + pub mod identities; use dubp_common_doc::BlockNumber; diff --git a/lib/modules/blockchain/blockchain-dal/src/filters/identities.rs b/lib/modules-lib/bc-db-reader/src/filters/identities.rs similarity index 100% rename from lib/modules/blockchain/blockchain-dal/src/filters/identities.rs rename to lib/modules-lib/bc-db-reader/src/filters/identities.rs diff --git a/lib/modules-lib/bc-db-reader/src/lib.rs b/lib/modules-lib/bc-db-reader/src/lib.rs new file mode 100644 index 0000000000000000000000000000000000000000..ac91cbe5a803099d4914075a52d1326db8fa8b0c --- /dev/null +++ b/lib/modules-lib/bc-db-reader/src/lib.rs @@ -0,0 +1,101 @@ +// Copyright (C) 2017-2019 The AXIOM TEAM Association. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see <https://www.gnu.org/licenses/>. + +//! BlockChain Data Access Layer in Read-Only mode.
+ +#![allow(clippy::large_enum_variant)] +#![deny( + missing_docs, + missing_copy_implementations, + trivial_casts, + trivial_numeric_casts, + unsafe_code, + unstable_features, + unused_import_braces, + unused_qualifications +)] + +pub mod constants; +pub mod entities; +pub mod filters; +pub mod readers; +pub mod tools; + +pub use durs_dbs_tools::kv_db::{ + KvFileDbRead as DbReadable, KvFileDbRoHandler as BcDbRo, KvFileDbSchema, KvFileDbStoreType, + KvFileDbValue as DbValue, +}; + +use constants::*; +use durs_dbs_tools::DbError; +use maplit::hashmap; +use std::path::Path; + +#[inline] +/// Get BlockChain DB Schema +pub fn bc_db_schema() -> KvFileDbSchema { + KvFileDbSchema { + stores: hashmap![ + CURRENT_METAS_DATAS.to_owned() => KvFileDbStoreType::SingleIntKey, + MAIN_BLOCKS.to_owned() => KvFileDbStoreType::SingleIntKey, + FORK_BLOCKS.to_owned() => KvFileDbStoreType::Single, + ORPHAN_BLOCKSTAMP.to_owned() => KvFileDbStoreType::Single, + IDENTITIES.to_owned() => KvFileDbStoreType::Single, + ], + } +} + +/// Open database +#[inline] +pub fn open_db_ro(path: &Path) -> Result<BcDbRo, DbError> { + BcDbRo::open_db_ro(path, &bc_db_schema()) +} + +/////////////////////////// +// Migration in progress // +/////////////////////////// + +/// Certifications sorted by created block +pub type CertsExpirV10Datas = fnv::FnvHashMap< + dubp_common_doc::BlockNumber, + std::collections::HashSet<(durs_wot::WotId, durs_wot::WotId)>, +>; + +/// V10 Balances accounts +pub type BalancesV10Datas = std::collections::HashMap< + dubp_user_docs::documents::transaction::UTXOConditionsGroup, + ( + crate::entities::sources::SourceAmount, + std::collections::HashSet<dubp_indexes::sindex::UniqueIdUTXOv10>, + ), +>; + +#[cfg(test)] +pub mod tests { + + use super::*; + use durs_dbs_tools::kv_db::KvFileDbHandler; + use tempfile::tempdir; + + #[inline] + /// Open database in an arbitrary temporary directory given by OS + /// and automatically cleaned when `Db` is dropped + pub fn open_tmp_db() -> Result<KvFileDbHandler, DbError> { + KvFileDbHandler::open_db( + tempdir().map_err(DbError::FileSystemError)?.path(), + &bc_db_schema(), + ) + } +} diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/mod.rs b/lib/modules-lib/bc-db-reader/src/readers.rs similarity index 90% rename from lib/modules/blockchain/blockchain-dal/src/readers/mod.rs rename to lib/modules-lib/bc-db-reader/src/readers.rs index b53bf67d63640063f5448161d2c9ccb1b65fa623..175d39025f4df2adaed11d496d8dddee838a19ae 100644 --- a/lib/modules/blockchain/blockchain-dal/src/readers/mod.rs +++ b/lib/modules-lib/bc-db-reader/src/readers.rs @@ -13,6 +13,8 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. +//! Blockchain database readers. 
+ /// Balances read functions pub mod balance; @@ -25,8 +27,8 @@ pub mod certs; /// Currency params read functions pub mod currency_params; -/// Fork tree read functions -pub mod fork_tree; +/// Current meta datas read functions +pub mod current_meta_datas; /// Identities read functions pub mod identity; diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/balance.rs b/lib/modules-lib/bc-db-reader/src/readers/balance.rs similarity index 85% rename from lib/modules/blockchain/blockchain-dal/src/readers/balance.rs rename to lib/modules-lib/bc-db-reader/src/readers/balance.rs index df07d5d0dc3c9e2a8ffc8df96cee8cdea37d4b4b..93f6975c05633dd30135b918603cd4bfac736535 100644 --- a/lib/modules/blockchain/blockchain-dal/src/readers/balance.rs +++ b/lib/modules-lib/bc-db-reader/src/readers/balance.rs @@ -14,13 +14,15 @@ // along with this program. If not, see <https://www.gnu.org/licenses/>. use crate::entities::sources::*; -use crate::*; +use crate::BalancesV10Datas; +use dubp_user_docs::documents::transaction::UTXOConditionsGroup; +use durs_dbs_tools::{BinFreeStructDb, DbError}; /// Get address balance pub fn get_address_balance( balances_db: &BinFreeStructDb<BalancesV10Datas>, address: &UTXOConditionsGroup, -) -> Result<Option<SourceAmount>, DALError> { +) -> Result<Option<SourceAmount>, DbError> { Ok(balances_db.read(|db| { if let Some(balance_and_utxos) = db.get(address) { Some(balance_and_utxos.0) diff --git a/lib/modules-lib/bc-db-reader/src/readers/block.rs b/lib/modules-lib/bc-db-reader/src/readers/block.rs new file mode 100644 index 0000000000000000000000000000000000000000..f2a38f6c0f75d7273411ea96997e2486dea64b54 --- /dev/null +++ b/lib/modules-lib/bc-db-reader/src/readers/block.rs @@ -0,0 +1,218 @@ +// Copyright (C) 2017-2019 The AXIOM TEAM Association. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see <https://www.gnu.org/licenses/>. + +use crate::constants::*; +use crate::entities::block::DbBlock; +use crate::*; +use dubp_block_doc::block::{BlockDocument, BlockDocumentTrait}; +use dubp_common_doc::traits::Document; +use dubp_common_doc::{BlockHash, BlockNumber, Blockstamp, PreviousBlockstamp}; +use dup_crypto::hashs::Hash; +use dup_crypto::keys::*; +use durs_dbs_tools::DbError; +use std::collections::HashMap; + +/// Return true if the node already knows this block +pub fn already_have_block<DB: DbReadable>( + db: &DB, + blockstamp: Blockstamp, + previous_hash: Option<Hash>, +) -> Result<bool, DbError> { + db.read(|r| { + let blockstamp_bytes: Vec<u8> = blockstamp.into(); + if db + .get_store(FORK_BLOCKS) + .get(r, &blockstamp_bytes)? 
+ .is_some() + { + return Ok(true); + } else if blockstamp.id > BlockNumber(0) { + let previous_blockstamp_bytes: Vec<u8> = PreviousBlockstamp { + id: BlockNumber(blockstamp.id.0 - 1), + hash: BlockHash(previous_hash.expect("no genesis block must have previous hash")), + } + .into(); + if let Some(v) = db + .get_store(ORPHAN_BLOCKSTAMP) + .get(r, &previous_blockstamp_bytes)? + { + for orphan_blockstamp in DB::from_db_value::<Vec<Blockstamp>>(v)? { + if orphan_blockstamp == blockstamp { + return Ok(true); + } + } + } + } + if let Some(v) = db.get_int_store(MAIN_BLOCKS).get(r, blockstamp.id.0)? { + if DB::from_db_value::<DbBlock>(v)?.block.blockstamp() == blockstamp { + Ok(true) + } else { + Ok(false) + } + } else { + Ok(false) + } + }) +} + +/// Get block +pub fn get_block<DB: DbReadable>( + db: &DB, + blockstamp: Blockstamp, +) -> Result<Option<DbBlock>, DbError> { + let opt_dal_block = get_dal_block_in_local_blockchain(db, blockstamp.id)?; + if opt_dal_block.is_none() { + get_fork_block(db, blockstamp) + } else { + Ok(opt_dal_block) + } +} + +/// Get fork block +pub fn get_fork_block<DB: DbReadable>( + db: &DB, + blockstamp: Blockstamp, +) -> Result<Option<DbBlock>, DbError> { + let blockstamp_bytes: Vec<u8> = blockstamp.into(); + db.read(|r| { + if let Some(v) = db.get_store(FORK_BLOCKS).get(r, &blockstamp_bytes)? { + Ok(Some(DB::from_db_value(v)?)) + } else { + Ok(None) + } + }) +} + +/// Get block hash +pub fn get_block_hash<DB: DbReadable>( + db: &DB, + block_number: BlockNumber, +) -> Result<Option<BlockHash>, DbError> { + Ok( + if let Some(block) = get_block_in_local_blockchain(db, block_number)? { + block.hash() + } else { + None + }, + ) +} + +/// Get block in local blockchain +#[inline] +pub fn get_block_in_local_blockchain<DB: DbReadable>( + db: &DB, + block_number: BlockNumber, +) -> Result<Option<BlockDocument>, DbError> { + Ok(get_dal_block_in_local_blockchain(db, block_number)?.map(|dal_block| dal_block.block)) +} + +/// Get block in local blockchain +pub fn get_dal_block_in_local_blockchain<DB: DbReadable>( + db: &DB, + block_number: BlockNumber, +) -> Result<Option<DbBlock>, DbError> { + db.read(|r| { + if let Some(v) = db.get_int_store(MAIN_BLOCKS).get(r, block_number.0)? { + Ok(Some(DB::from_db_value(v)?)) + } else { + Ok(None) + } + }) +} + +/// Get several blocks in local blockchain +pub fn get_blocks_in_local_blockchain<DB: DbReadable>( + db: &DB, + first_block_number: BlockNumber, + mut count: u32, +) -> Result<Vec<BlockDocument>, DbError> { + db.read(|r| { + let bc_store = db.get_int_store(MAIN_BLOCKS); + let mut blocks = Vec::with_capacity(count as usize); + let mut current_block_number = first_block_number; + + while let Some(v) = bc_store.get(r, current_block_number.0)? 
{ + blocks.push(DB::from_db_value::<DbBlock>(v)?.block); + count -= 1; + if count > 0 { + current_block_number = BlockNumber(current_block_number.0 + 1); + } else { + return Ok(blocks); + } + } + Ok(blocks) + }) +} + +/// Get current frame of calculating members +pub fn get_current_frame<DB: DbReadable>( + current_block: &BlockDocument, + db: &DB, +) -> Result<HashMap<PubKey, usize>, DbError> { + let frame_begin = current_block.number().0 - current_block.current_frame_size() as u32; + + let blocks = get_blocks_in_local_blockchain( + db, + BlockNumber(frame_begin), + current_block.current_frame_size() as u32, + )?; + + let mut current_frame: HashMap<PubKey, usize> = HashMap::new(); + for block in blocks { + let issuer = block.issuers()[0]; + let issuer_count_blocks = if let Some(issuer_count_blocks) = current_frame.get(&issuer) { + issuer_count_blocks + 1 + } else { + 1 + }; + current_frame.insert(issuer, issuer_count_blocks); + } + + Ok(current_frame) +} + +/// Get stackables blocks +#[inline] +pub fn get_stackables_blocks<DB: DbReadable>( + db: &DB, + current_blockstamp: Blockstamp, +) -> Result<Vec<DbBlock>, DbError> { + get_orphan_blocks(db, current_blockstamp) +} + +/// Get orphan blocks +pub fn get_orphan_blocks<DB: DbReadable>( + db: &DB, + blockstamp: PreviousBlockstamp, +) -> Result<Vec<DbBlock>, DbError> { + let blockstamp_bytes: Vec<u8> = blockstamp.into(); + db.read(|r| { + if let Some(v) = db.get_store(ORPHAN_BLOCKSTAMP).get(r, &blockstamp_bytes)? { + let orphan_blockstamps = DB::from_db_value::<Vec<Blockstamp>>(v)?; + let mut orphan_blocks = Vec::with_capacity(orphan_blockstamps.len()); + for orphan_blockstamp in orphan_blockstamps { + let orphan_blockstamp_bytes: Vec<u8> = orphan_blockstamp.into(); + if let Some(v) = db.get_store(FORK_BLOCKS).get(r, &orphan_blockstamp_bytes)? { + orphan_blocks.push(DB::from_db_value::<DbBlock>(v)?); + } else { + return Err(DbError::DBCorrupted); + } + } + Ok(orphan_blocks) + } else { + Ok(vec![]) + } + }) +} diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/certs.rs b/lib/modules-lib/bc-db-reader/src/readers/certs.rs similarity index 90% rename from lib/modules/blockchain/blockchain-dal/src/readers/certs.rs rename to lib/modules-lib/bc-db-reader/src/readers/certs.rs index ec57457899aca37548ee2e3777d45e4cc9ac49f0..0e2433d627c2acff9f972752f3e9b51a168a2c39 100644 --- a/lib/modules/blockchain/blockchain-dal/src/readers/certs.rs +++ b/lib/modules-lib/bc-db-reader/src/readers/certs.rs @@ -13,8 +13,9 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. 
-use crate::{BinFreeStructDb, CertsExpirV10Datas, DALError}; +use crate::CertsExpirV10Datas; use dubp_common_doc::BlockNumber; +use durs_dbs_tools::{BinFreeStructDb, DbError}; use durs_wot::WotId; use std::collections::HashMap; @@ -22,7 +23,7 @@ use std::collections::HashMap; pub fn find_expire_certs( certs_db: &BinFreeStructDb<CertsExpirV10Datas>, blocks_expiring: Vec<BlockNumber>, -) -> Result<HashMap<(WotId, WotId), BlockNumber>, DALError> { +) -> Result<HashMap<(WotId, WotId), BlockNumber>, DbError> { Ok(certs_db.read(|db| { let mut all_expire_certs = HashMap::new(); for expire_block_id in blocks_expiring { diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/currency_params.rs b/lib/modules-lib/bc-db-reader/src/readers/currency_params.rs similarity index 96% rename from lib/modules/blockchain/blockchain-dal/src/readers/currency_params.rs rename to lib/modules-lib/bc-db-reader/src/readers/currency_params.rs index 70aa7f536541d0accea97ee63fcc544f74b4f7d6..0e5eba74dc3ee1745d1649bbeec5af7606c06dc5 100644 --- a/lib/modules/blockchain/blockchain-dal/src/readers/currency_params.rs +++ b/lib/modules-lib/bc-db-reader/src/readers/currency_params.rs @@ -13,12 +13,14 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. -use crate::*; use dubp_block_doc::block::{BlockDocument, BlockDocumentTrait}; use dubp_common_doc::traits::Document; use dubp_currency_params::db::write_currency_params; use dubp_currency_params::genesis_block_params::GenesisBlockParams; use dubp_currency_params::CurrencyParameters; +use durs_common_tools::fatal_error; +use log::error; +use std::path::PathBuf; use unwrap::unwrap; /// Get and write currency params diff --git a/lib/modules-lib/bc-db-reader/src/readers/current_meta_datas.rs b/lib/modules-lib/bc-db-reader/src/readers/current_meta_datas.rs new file mode 100644 index 0000000000000000000000000000000000000000..5dc9f6beccc165413bb772ead8687dc45689814d --- /dev/null +++ b/lib/modules-lib/bc-db-reader/src/readers/current_meta_datas.rs @@ -0,0 +1,110 @@ +// Copyright (C) 2017-2019 The AXIOM TEAM Association. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see <https://www.gnu.org/licenses/>. + +use crate::constants::*; +use crate::entities::current_meta_datas::CurrentMetaDataKey; +use crate::entities::fork_tree::ForkTree; +use crate::{DbReadable, DbValue}; +use dubp_common_doc::{Blockstamp, CurrencyName}; +use durs_dbs_tools::DbError; + +/// Get DB version +pub fn get_db_version<DB: DbReadable>(db: &DB) -> Result<usize, DbError> { + db.read(|r| { + if let Some(v) = db + .get_int_store(CURRENT_METAS_DATAS) + .get(r, CurrentMetaDataKey::DbVersion.to_u32())? 
+ { + if let DbValue::U64(db_version) = v { + Ok(db_version as usize) + } else { + Err(DbError::DBCorrupted) + } + } else { + Err(DbError::DBCorrupted) + } + }) +} + +/// Get currency name +pub fn get_currency_name<DB: DbReadable>(db: &DB) -> Result<Option<CurrencyName>, DbError> { + db.read(|r| { + if let Some(v) = db + .get_int_store(CURRENT_METAS_DATAS) + .get(r, CurrentMetaDataKey::CurrencyName.to_u32())? + { + if let DbValue::Str(curency_name) = v { + Ok(Some(CurrencyName(curency_name.to_owned()))) + } else { + Err(DbError::DBCorrupted) + } + } else { + Ok(None) + } + }) +} + +/// Get current blockstamp +pub fn get_current_blockstamp<DB: DbReadable>(db: &DB) -> Result<Option<Blockstamp>, DbError> { + db.read(|r| { + if let Some(v) = db + .get_int_store(CURRENT_METAS_DATAS) + .get(r, CurrentMetaDataKey::CurrentBlockstamp.to_u32())? + { + if let DbValue::Blob(current_blockstamp_bytes) = v { + Ok(Some( + Blockstamp::from_bytes(current_blockstamp_bytes) + .map_err(|_| DbError::DBCorrupted)?, + )) + } else { + Err(DbError::DBCorrupted) + } + } else { + Ok(None) + } + }) +} + +/// Get current common time (also named "blockchain time") +pub fn get_current_common_time<DB: DbReadable>(db: &DB) -> Result<u64, DbError> { + db.read(|r| { + if let Some(v) = db + .get_int_store(CURRENT_METAS_DATAS) + .get(r, CurrentMetaDataKey::CurrentBlockchainTime.to_u32())? + { + if let DbValue::U64(current_common_time) = v { + Ok(current_common_time) + } else { + Err(DbError::DBCorrupted) + } + } else { + Ok(0u64) + } + }) +} + +/// Get fork tree root +pub fn get_fork_tree<DB: DbReadable>(db: &DB) -> Result<ForkTree, DbError> { + db.read(|r| { + if let Some(v) = db + .get_int_store(CURRENT_METAS_DATAS) + .get(r, CurrentMetaDataKey::ForkTree.to_u32())? + { + Ok(DB::from_db_value::<ForkTree>(v)?) + } else { + Ok(ForkTree::default()) + } + }) +} diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/identity.rs b/lib/modules-lib/bc-db-reader/src/readers/identity.rs similarity index 54% rename from lib/modules/blockchain/blockchain-dal/src/readers/identity.rs rename to lib/modules-lib/bc-db-reader/src/readers/identity.rs index bd25cfa9e3bae493d580d88c5a66b665365fdaac..4d05e3942a2d5167c1d12bb19cce4d2ae609af14 100644 --- a/lib/modules/blockchain/blockchain-dal/src/readers/identity.rs +++ b/lib/modules-lib/bc-db-reader/src/readers/identity.rs @@ -13,105 +13,119 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. -use crate::entities::identity::DALIdentity; +use crate::constants::*; +use crate::entities::identity::DbIdentity; use crate::filters::identities::IdentitiesFilter; -use crate::{BinFreeStructDb, DALError, IdentitiesV10Datas}; +use crate::*; use dubp_common_doc::traits::Document; use dubp_common_doc::BlockNumber; use dup_crypto::keys::*; +use durs_dbs_tools::DbError; use durs_wot::WotId; use std::collections::HashMap; /// Get identities in databases -pub fn get_identities( - db: &BinFreeStructDb<IdentitiesV10Datas>, +pub fn get_identities<DB: DbReadable>( + db: &DB, filters: IdentitiesFilter, current_block_id: BlockNumber, -) -> Result<Vec<DALIdentity>, DALError> { +) -> Result<Vec<DbIdentity>, DbError> { if let Some(pubkey) = filters.by_pubkey { - if let Some(idty) = db.read(|db| db.get(&pubkey).cloned())? { + if let Some(idty) = get_identity(db, &pubkey)? 
{ Ok(vec![idty]) } else { Ok(vec![]) } } else { - Ok(db.read(|db| { - let mut identities: Vec<&DALIdentity> = db - .values() - .filter(|idty| { - filters + db.read(|r| { + let mut identities: Vec<DbIdentity> = Vec::new(); + for entry in db.get_store(IDENTITIES).iter_start(r)? { + if let Some(v) = entry?.1 { + let db_idty = DB::from_db_value::<DbIdentity>(v)?; + if filters .paging - .check_created_on(idty.idty_doc.blockstamp().id, current_block_id) - }) - .collect(); + .check_created_on(db_idty.idty_doc.blockstamp().id, current_block_id) + { + identities.push(db_idty); + } + } + } identities.sort_by(|i1, i2| { i1.idty_doc .blockstamp() .id .cmp(&i2.idty_doc.blockstamp().id) }); - identities + Ok(identities .into_iter() .skip(filters.paging.page_size * filters.paging.page_number) .take(filters.paging.page_size) - .cloned() - .collect() - })?) + .collect()) + }) } } /// Get identity in databases -pub fn get_identity( - db: &BinFreeStructDb<IdentitiesV10Datas>, +pub fn get_identity<DB: DbReadable>( + db: &DB, pubkey: &PubKey, -) -> Result<Option<DALIdentity>, DALError> { - Ok(db.read(|db| { - if let Some(member_datas) = db.get(&pubkey) { - Some(member_datas.clone()) +) -> Result<Option<DbIdentity>, DbError> { + db.read(|r| { + if let Some(v) = db.get_store(IDENTITIES).get(r, &pubkey.to_bytes_vector())? { + Ok(Some(DB::from_db_value(v)?)) } else { - None + Ok(None) } - })?) + }) } /// Get uid from pubkey -pub fn get_uid( - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, - pubkey: PubKey, -) -> Result<Option<String>, DALError> { - Ok(identities_db.read(|db| { - if let Some(dal_idty) = db.get(&pubkey) { - Some(String::from(dal_idty.idty_doc.username())) - } else { - None - } - })?) +#[inline] +pub fn get_uid<DB: DbReadable>(db: &DB, pubkey: &PubKey) -> Result<Option<String>, DbError> { + Ok(get_identity(db, pubkey)?.map(|db_idty| db_idty.idty_doc.username().to_owned())) } /// Get pubkey from uid -pub fn get_pubkey_from_uid( - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, - uid: &str, -) -> Result<Option<PubKey>, DALError> { - Ok(identities_db.read(|db| { - for (pubkey, dal_idty) in db { - if uid == dal_idty.idty_doc.username() { - return Some(*pubkey); +pub fn get_pubkey_from_uid<DB: DbReadable>(db: &DB, uid: &str) -> Result<Option<PubKey>, DbError> { + db.read(|r| { + for entry in db.get_store(IDENTITIES).iter_start(r)? { + if let Some(v) = entry?.1 { + let idty_doc = DB::from_db_value::<DbIdentity>(v)?.idty_doc; + if idty_doc.username() == uid { + return Ok(Some(idty_doc.issuers()[0])); + } } } - None - })?) + Ok(None) + }) } /// Get wot_id index -pub fn get_wot_index( - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, -) -> Result<HashMap<PubKey, WotId>, DALError> { - Ok(identities_db.read(|db| { - db.iter() - .map(|(pubkey, member_datas)| (*pubkey, member_datas.wot_id)) - .collect() - })?) +pub fn get_wot_index<DB: DbReadable>(db: &DB) -> Result<HashMap<PubKey, WotId>, DbError> { + db.read(|r| { + let mut wot_index = HashMap::new(); + for entry in db.get_store(IDENTITIES).iter_start(r)? { + if let Some(v) = entry?.1 { + let db_idty = DB::from_db_value::<DbIdentity>(v)?; + wot_index.insert(db_idty.idty_doc.issuers()[0], db_idty.wot_id); + } + } + Ok(wot_index) + }) +} + +/// Get wot_uid index +pub fn get_wot_uid_index<DB: DbReadable>(db: &DB) -> Result<HashMap<WotId, String>, DbError> { + db.read(|r| { + let mut wot_uid_index = HashMap::new(); + for entry in db.get_store(IDENTITIES).iter_start(r)? 
{ + if let Some(v) = entry?.1 { + let db_idty = DB::from_db_value::<DbIdentity>(v)?; + wot_uid_index.insert(db_idty.wot_id, db_idty.idty_doc.username().to_owned()); + } + } + Ok(wot_uid_index) + }) } #[cfg(test)] @@ -120,15 +134,15 @@ mod test { use super::*; use crate::entities::identity::*; use crate::filters::PagingFilter; - use crate::*; use dubp_common_doc::Blockstamp; use dup_crypto_tests_tools::mocks::pubkey; use durs_common_tests_tools::collections::slice_same_elems; + use durs_dbs_tools::kv_db::KvFileDbHandler; - fn gen_mock_dal_idty(pubkey: PubKey, created_block_id: BlockNumber) -> DALIdentity { - DALIdentity { + fn gen_mock_dal_idty(pubkey: PubKey, created_block_id: BlockNumber) -> DbIdentity { + DbIdentity { hash: "".to_owned(), - state: DALIdentityState::Member(vec![]), + state: DbIdentityState::Member(vec![]), joined_on: Blockstamp::default(), expired_on: None, revoked_on: None, @@ -144,7 +158,7 @@ mod test { } #[test] - fn test_get_identities() -> Result<(), DALError> { + fn test_get_identities() -> Result<(), DbError> { // Create mock identities let mock_identities = vec![ gen_mock_dal_idty(pubkey('A'), BlockNumber(0)), @@ -155,12 +169,16 @@ mod test { ]; // Write mock identities in DB - let identities_db = BinFreeStructDb::Mem( - open_free_struct_memory_db::<IdentitiesV10Datas>().expect("Fail to create memory DB !"), - ); + let db = crate::tests::open_tmp_db()?; for idty in &mock_identities { - identities_db.write(|db| { - db.insert(idty.idty_doc.issuers()[0], idty.clone()); + let idty_bin = durs_dbs_tools::to_bytes(idty)?; + db.write(|mut w| { + db.get_store(IDENTITIES).put( + w.as_mut(), + &idty.idty_doc.issuers()[0].to_bytes_vector(), + &KvFileDbHandler::db_value(&idty_bin)?, + )?; + Ok(w) })?; } @@ -168,18 +186,18 @@ mod test { let mut filters = IdentitiesFilter::default(); assert!(slice_same_elems( &mock_identities, - &get_identities(&identities_db, filters, BlockNumber(5))? + &get_identities(&db, filters, BlockNumber(5))? )); // Test by pubkey filter filters = IdentitiesFilter::by_pubkey(pubkey('A')); assert_eq!( vec![mock_identities[0].clone()], - get_identities(&identities_db, filters, BlockNumber(5))? + get_identities(&db, filters, BlockNumber(5))? ); filters = IdentitiesFilter::by_pubkey(pubkey('C')); assert_eq!( vec![mock_identities[2].clone()], - get_identities(&identities_db, filters, BlockNumber(5))? + get_identities(&db, filters, BlockNumber(5))? ); // Test paging filter with little page size @@ -194,7 +212,7 @@ mod test { }; assert!(slice_same_elems( &vec![mock_identities[2].clone(), mock_identities[3].clone()], - &get_identities(&identities_db, filters, BlockNumber(5))? + &get_identities(&db, filters, BlockNumber(5))? )); // Test paging filter with limited interval @@ -209,7 +227,7 @@ mod test { }; assert_eq!( vec![mock_identities[2].clone()], - get_identities(&identities_db, filters, BlockNumber(5))? + get_identities(&db, filters, BlockNumber(5))? ); Ok(()) diff --git a/lib/modules/blockchain/blockchain-dal/src/tools.rs b/lib/modules-lib/bc-db-reader/src/tools.rs similarity index 56% rename from lib/modules/blockchain/blockchain-dal/src/tools.rs rename to lib/modules-lib/bc-db-reader/src/tools.rs index 507a31fe65ed2edd4fc02d009e7cc4b174ea01fa..6142df902a03ec5b9ad9357269203a3d600d488f 100644 --- a/lib/modules/blockchain/blockchain-dal/src/tools.rs +++ b/lib/modules-lib/bc-db-reader/src/tools.rs @@ -13,23 +13,47 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. 
If not, see <https://www.gnu.org/licenses/>. -use crate::entities::block::DALBlock; +//! Data calculation tools + +use crate::entities::block::DbBlock; use dubp_block_doc::block::BlockDocumentTrait; use dup_crypto::keys::PubKey; use durs_common_tools::fatal_error; -use durs_wot::operations::centrality::{CentralitiesCalculator, UlrikBrandesCentralityCalculator}; -use durs_wot::operations::distance::{ - DistanceCalculator, RustyDistanceCalculator, WotDistance, WotDistanceParameters, -}; -use durs_wot::{WebOfTrust, WotId}; +use log::error; use std::collections::HashMap; -/// CENTRALITY_CALCULATOR -pub static CENTRALITY_CALCULATOR: UlrikBrandesCentralityCalculator = - UlrikBrandesCentralityCalculator {}; +/// Compute median issuers frame +pub fn compute_median_issuers_frame<S: std::hash::BuildHasher>( + current_block: &DbBlock, + current_frame: &HashMap<PubKey, usize, S>, +) -> usize { + if !current_frame.is_empty() { + let mut current_frame_vec: Vec<_> = current_frame.values().cloned().collect(); + current_frame_vec.sort_unstable(); + + // Calculate median + let mut median_index = match current_block.block.issuers_count() % 2 { + 1 => (current_block.block.issuers_count() / 2) + 1, + _ => current_block.block.issuers_count() / 2, + }; + if median_index >= current_block.block.issuers_count() { + median_index = current_block.block.issuers_count() - 1; + } + current_frame_vec[median_index] -/// DISTANCE_CALCULATOR -pub static DISTANCE_CALCULATOR: RustyDistanceCalculator = RustyDistanceCalculator {}; + /*// Calculate second tiercile index + let mut second_tiercile_index = match self.block.issuers_count % 3 { + 1 | 2 => (self.block.issuers_count as f64 * (2.0 / 3.0)) as usize + 1, + _ => (self.block.issuers_count as f64 * (2.0 / 3.0)) as usize, + }; + if second_tiercile_index >= self.block.issuers_count { + second_tiercile_index = self.block.issuers_count - 1; + } + self.second_tiercile_frame = current_frame_vec[second_tiercile_index];*/ + } else { + 0 + } +} /// Get sentry requirement pub fn get_sentry_requirement(members_count: usize, step_max: u32) -> u32 { @@ -78,99 +102,3 @@ pub fn get_sentry_requirement(members_count: usize, step_max: u32) -> u32 { _ => fatal_error!("get_sentry_requirement not define for step_max != 5 !"), } } - -/// Compute average density -pub fn calculate_average_density<T: WebOfTrust>(wot: &T) -> usize { - let enabled_members = wot.get_enabled(); - let enabled_members_count = enabled_members.len(); - let mut count_actives_links: usize = 0; - for member in &enabled_members { - count_actives_links += wot - .issued_count(*member) - .unwrap_or_else(|| fatal_error!("Fail to get issued_count of wot_id {}", (*member).0)); - } - ((count_actives_links as f32 / enabled_members_count as f32) * 1_000.0) as usize -} - -/// Compute distances -pub fn compute_distances<T: WebOfTrust + Sync>( - wot: &T, - sentry_requirement: u32, - step_max: u32, - x_percent: f64, -) -> (usize, Vec<usize>, usize, Vec<usize>) { - let members_count = wot.get_enabled().len(); - let mut distances = Vec::new(); - let mut average_distance: usize = 0; - let mut connectivities = Vec::new(); - let mut average_connectivity: usize = 0; - for i in 0..wot.size() { - let distance_datas: WotDistance = DISTANCE_CALCULATOR - .compute_distance( - wot, - WotDistanceParameters { - node: WotId(i), - sentry_requirement, - step_max, - x_percent, - }, - ) - .expect("Fatal Error: compute_distance return None !"); - let distance = ((f64::from(distance_datas.success) - / (x_percent * f64::from(distance_datas.sentries))) - * 100.0) 
as usize; - distances.push(distance); - average_distance += distance; - let connectivity = ((f64::from(distance_datas.success - distance_datas.success_at_border) - / (x_percent * f64::from(distance_datas.sentries))) - * 100.0) as usize; - connectivities.push(connectivity); - average_connectivity += connectivity; - } - average_distance /= members_count; - average_connectivity /= members_count; - ( - average_distance, - distances, - average_connectivity, - connectivities, - ) -} - -/// Compute distance stress centralities -pub fn calculate_distance_stress_centralities<T: WebOfTrust>(wot: &T, step_max: u32) -> Vec<u64> { - CENTRALITY_CALCULATOR.distance_stress_centralities(wot, step_max as usize) -} - -/// Compute median issuers frame -pub fn compute_median_issuers_frame<S: std::hash::BuildHasher>( - current_block: &DALBlock, - current_frame: &HashMap<PubKey, usize, S>, -) -> usize { - if !current_frame.is_empty() { - let mut current_frame_vec: Vec<_> = current_frame.values().cloned().collect(); - current_frame_vec.sort_unstable(); - - // Calculate median - let mut median_index = match current_block.block.issuers_count() % 2 { - 1 => (current_block.block.issuers_count() / 2) + 1, - _ => current_block.block.issuers_count() / 2, - }; - if median_index >= current_block.block.issuers_count() { - median_index = current_block.block.issuers_count() - 1; - } - current_frame_vec[median_index] - - /*// Calculate second tiercile index - let mut second_tiercile_index = match self.block.issuers_count % 3 { - 1 | 2 => (self.block.issuers_count as f64 * (2.0 / 3.0)) as usize + 1, - _ => (self.block.issuers_count as f64 * (2.0 / 3.0)) as usize, - }; - if second_tiercile_index >= self.block.issuers_count { - second_tiercile_index = self.block.issuers_count - 1; - } - self.second_tiercile_frame = current_frame_vec[second_tiercile_index];*/ - } else { - 0 - } -} diff --git a/lib/modules/blockchain/blockchain-dal/Cargo.toml b/lib/modules/blockchain/bc-db-writer/Cargo.toml similarity index 88% rename from lib/modules/blockchain/blockchain-dal/Cargo.toml rename to lib/modules/blockchain/bc-db-writer/Cargo.toml index c432e225b43aa2d819bf63ac552cd8fde795169a..4de7c2e9656d4ee7be66f1a79955fde8009e27a4 100644 --- a/lib/modules/blockchain/blockchain-dal/Cargo.toml +++ b/lib/modules/blockchain/bc-db-writer/Cargo.toml @@ -1,8 +1,8 @@ [package] -name = "durs-blockchain-dal" +name = "durs-bc-db-writer" version = "0.3.0-dev" authors = ["librelois <elois@ifee.fr>"] -description = "Data Access Layer for Dunitrust Blockchain module." +description = "Writer for Dunitrust Blockchain Database." 
license = "AGPL-3.0" edition = "2018" @@ -10,24 +10,22 @@ edition = "2018" path = "src/lib.rs" [dependencies] -bincode = "1.0.*" dubp-block-doc = { path = "../../../dubp/block-doc"} #, version = "0.1.0" } dubp-common-doc = { path = "../../../dubp/common-doc"} #, version = "0.1.0" } -dubp-indexes = { path = "../../../dubp/indexes"} #, version = "0.1.0" } -dup-crypto = { path = "../../../crypto" } dubp-currency-params = { path = "../../../dubp/currency-params" } +dubp-indexes = { path = "../../../dubp/indexes"} #, version = "0.1.0" } dubp-user-docs= { path = "../../../dubp/user-docs" } -durs-conf = { path = "../../../core/conf" } -durs-module = { path = "../../../core/module" } +dup-crypto = { path = "../../../crypto" } +durs-bc-db-reader = { path = "../../../modules-lib/bc-db-reader" } durs-common-tools = { path = "../../../tools/common-tools" } +durs-conf = { path = "../../../core/conf" } durs-dbs-tools = { path = "../../../tools/dbs-tools" } +durs-module = { path = "../../../core/module" } durs-wot = { path = "../../../dubp/wot" } fnv = "1.0.6" log = "0.4.*" -maplit = "1.0.1" serde = "1.0.*" serde_derive = "1.0.*" -serde_json = "1.0.*" unwrap = "1.2.1" [dev-dependencies] diff --git a/lib/modules/blockchain/blockchain-dal/clippy.toml b/lib/modules/blockchain/bc-db-writer/clippy.toml similarity index 100% rename from lib/modules/blockchain/blockchain-dal/clippy.toml rename to lib/modules/blockchain/bc-db-writer/clippy.toml diff --git a/lib/modules/blockchain/blockchain-dal/src/constants.rs b/lib/modules/blockchain/bc-db-writer/src/constants.rs similarity index 83% rename from lib/modules/blockchain/blockchain-dal/src/constants.rs rename to lib/modules/blockchain/bc-db-writer/src/constants.rs index 49557d05f161a13be41311c3ded4d6de2a696619..1e7fe0215c1ba1afd10c67e5393908b992757baf 100644 --- a/lib/modules/blockchain/blockchain-dal/src/constants.rs +++ b/lib/modules/blockchain/bc-db-writer/src/constants.rs @@ -13,11 +13,5 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. 
-/// Default page size for requests responses -pub static DEFAULT_PAGE_SIZE: &usize = &50; - /// Currency parameters DB name pub const CURRENCY_PARAMS_DB_NAME: &str = "params.db"; - -/// Local blockchain collection name -pub static LOCAL_BC: &str = "bc"; diff --git a/lib/modules/blockchain/blockchain-dal/src/lib.rs b/lib/modules/blockchain/bc-db-writer/src/lib.rs similarity index 63% rename from lib/modules/blockchain/blockchain-dal/src/lib.rs rename to lib/modules/blockchain/bc-db-writer/src/lib.rs index ef27366cb3133a10ce9eafb6ede9da69a84186f3..1d02c53297237d0de7f355b24cb033406adaa17c 100644 --- a/lib/modules/blockchain/blockchain-dal/src/lib.rs +++ b/lib/modules/blockchain/bc-db-writer/src/lib.rs @@ -35,20 +35,6 @@ extern crate serde_derive; /// Define crate constants pub mod constants; -/// Contains all entities stored in databases -pub mod entities; - -/// Define all filters applicable to entities -pub mod filters; - -/// Contains all read databases functions -pub mod readers; - -//pub mod storage; - -/// Tools -pub mod tools; - /// Contains all write databases functions pub mod writers; @@ -59,22 +45,19 @@ pub use durs_dbs_tools::kv_db::{ pub use durs_dbs_tools::{ open_free_struct_db, open_free_struct_file_db, open_free_struct_memory_db, }; -pub use durs_dbs_tools::{BinFreeStructDb, DALError}; +pub use durs_dbs_tools::{BinFreeStructDb, DbError}; -use crate::constants::LOCAL_BC; -use crate::entities::block::DALBlock; -use crate::entities::identity::DALIdentity; -use crate::entities::sources::{SourceAmount, UTXOContentV10}; -use crate::writers::transaction::DALTxV10; -use dubp_common_doc::{BlockNumber, Blockstamp, PreviousBlockstamp}; +use crate::writers::transaction::DbTxV10; +use dubp_common_doc::{BlockNumber, Blockstamp}; use dubp_indexes::sindex::UniqueIdUTXOv10; use dubp_user_docs::documents::transaction::*; use dup_crypto::hashs::Hash; use dup_crypto::keys::*; +use durs_bc_db_reader::entities::sources::UTXOContentV10; +use durs_bc_db_reader::{BalancesV10Datas, CertsExpirV10Datas}; use durs_common_tools::fatal_error; use durs_wot::data::{rusty::RustyWebOfTrust, WotId}; use fnv::FnvHashMap; -use maplit::hashmap; use serde::Serialize; use std::collections::{HashMap, HashSet}; use std::path::{Path, PathBuf}; @@ -86,79 +69,22 @@ pub type Db = KvFileDbHandler; pub type DbReader = KvFileDbRoHandler; /// Forks tree meta datas (block number and hash only) -pub type ForksTreeV10Datas = entities::fork_tree::ForkTree; -/// Forks blocks referenced in tree indexed by their blockstamp -pub type ForksBlocksV10Datas = HashMap<Blockstamp, DALBlock>; -/// Blocks orphaned (no parent block) indexed by their previous blockstamp -pub type OrphanBlocksV10Datas = HashMap<PreviousBlockstamp, Vec<DALBlock>>; +pub type ForksTreeV10Datas = durs_bc_db_reader::entities::fork_tree::ForkTree; /// Database containing the wot graph (each node of the graph in an u32) pub type WotDB = RustyWebOfTrust; -/// V10 Identities indexed by public key -pub type IdentitiesV10Datas = HashMap<PubKey, DALIdentity>; /// Memberships sorted by created block pub type MsExpirV10Datas = FnvHashMap<BlockNumber, HashSet<WotId>>; -/// Certifications sorted by created block -pub type CertsExpirV10Datas = FnvHashMap<BlockNumber, HashSet<(WotId, WotId)>>; /// V10 Transactions indexed by their hashs -pub type TxV10Datas = HashMap<Hash, DALTxV10>; +pub type TxV10Datas = HashMap<Hash, DbTxV10>; /// V10 Unused Transaction Output (=sources) pub type UTXOsV10Datas = HashMap<UniqueIdUTXOv10, UTXOContentV10>; /// V10 UDs sources pub type 
UDsV10Datas = HashMap<PubKey, HashSet<BlockNumber>>; -/// V10 Balances accounts -pub type BalancesV10Datas = HashMap<UTXOConditionsGroup, (SourceAmount, HashSet<UniqueIdUTXOv10>)>; /// Open database -pub fn open_db(path: &Path) -> Result<Db, DALError> { - Db::open_db( - path, - &KvFileDbSchema { - stores: hashmap![ - LOCAL_BC.to_owned() => KvFileDbStoreType::SingleIntKey, - ], - }, - ) -} - -#[derive(Debug)] -/// Set of databases storing forks informations -pub struct ForksDBs { - /// Fork tree (store only blockstamp) - pub fork_tree_db: BinFreeStructDb<ForksTreeV10Datas>, - /// Blocks in fork tree - pub fork_blocks_db: BinFreeStructDb<ForksBlocksV10Datas>, - /// Orphan blocks - pub orphan_blocks_db: BinFreeStructDb<OrphanBlocksV10Datas>, -} - -impl ForksDBs { - /// Open fork databases from their respective files - pub fn open(db_path: Option<&PathBuf>) -> ForksDBs { - ForksDBs { - fork_tree_db: open_free_struct_db::<ForksTreeV10Datas>(db_path, "fork_tree.db") - .expect("Fail to open ForksTreeV10Datas"), - fork_blocks_db: open_free_struct_db::<ForksBlocksV10Datas>(db_path, "fork_blocks.db") - .expect("Fail to open ForkForksBlocksV10DatassV10DB"), - orphan_blocks_db: open_free_struct_db::<OrphanBlocksV10Datas>( - db_path, - "orphan_blocks.db", - ) - .expect("Fail to open OrphanBlocksV10Datas"), - } - } - /// Save fork databases in their respective files - pub fn save_dbs(&self) { - info!("BLOCKCHAIN-DAL: Save ForksDBs."); - self.fork_tree_db - .save() - .expect("Fatal error : fail to save ForksTreeV10Datas !"); - self.fork_blocks_db - .save() - .expect("Fatal error : fail to save ForkForksBlocksV10DatassV10DB !"); - self.orphan_blocks_db - .save() - .expect("Fatal error : fail to save OrphanBlocksV10Datas !"); - } +#[inline] +pub fn open_db(path: &Path) -> Result<Db, DbError> { + Db::open_db(path, &durs_bc_db_reader::bc_db_schema()) } #[derive(Debug)] @@ -166,8 +92,6 @@ impl ForksDBs { pub struct WotsV10DBs { /// Store wot graph pub wot_db: BinFreeStructDb<WotDB>, - /// Store idrntities - pub identities_db: BinFreeStructDb<IdentitiesV10Datas>, /// Store memberships created_block_id (Use only to detect expirations) pub ms_db: BinFreeStructDb<MsExpirV10Datas>, /// Store certifications created_block_id (Use only to detect expirations) @@ -180,8 +104,6 @@ impl WotsV10DBs { WotsV10DBs { wot_db: open_free_struct_db::<RustyWebOfTrust>(db_path, "wot.db") .expect("Fail to open WotDB"), - identities_db: open_free_struct_db::<IdentitiesV10Datas>(db_path, "identities.db") - .expect("Fail to open IdentitiesV10DB"), ms_db: open_free_struct_db::<MsExpirV10Datas>(db_path, "ms.db") .expect("Fail to open MsExpirV10DB"), certs_db: open_free_struct_db::<CertsExpirV10Datas>(db_path, "certs.db") @@ -190,7 +112,7 @@ impl WotsV10DBs { } /// Save wot databases from their respective files pub fn save_dbs(&self) { - info!("BLOCKCHAIN-DAL: Save WotsV10DBs."); + info!("BC-DB-WRITER: Save WotsV10DBs."); self.wot_db .save() .expect("Fatal error : fail to save WotDB !"); @@ -198,9 +120,6 @@ impl WotsV10DBs { } /// Save wot databases from their respective files (except wot graph) pub fn save_dbs_except_graph(&self) { - self.identities_db - .save() - .expect("Fatal error : fail to save IdentitiesV10DB !"); self.ms_db .save() .expect("Fatal error : fail to save MsExpirV10DB !"); @@ -240,7 +159,7 @@ impl CurrencyV10DBs { /// Save currency databases in their respective files pub fn save_dbs(&self, tx: bool, du: bool) { if tx { - info!("BLOCKCHAIN-DAL: Save CurrencyV10DBs."); + info!("BC-DB-WRITER: Save CurrencyV10DBs."); 
self.tx_db .save() .expect("Fatal error : fail to save LocalBlockchainV10DB !"); @@ -282,7 +201,7 @@ pub mod tests { #[inline] /// Open database in an arbitrary temporary directory given by OS /// and automatically cleaned when `Db` is dropped - pub fn open_tmp_db() -> Result<Db, DALError> { - open_db(tempdir().map_err(DALError::FileSystemError)?.path()) + pub fn open_tmp_db() -> Result<Db, DbError> { + open_db(tempdir().map_err(DbError::FileSystemError)?.path()) } } diff --git a/lib/modules/blockchain/bc-db-writer/src/writers/block.rs b/lib/modules/blockchain/bc-db-writer/src/writers/block.rs new file mode 100644 index 0000000000000000000000000000000000000000..631bd52fb4146561683c4800d5705583d7eed043 --- /dev/null +++ b/lib/modules/blockchain/bc-db-writer/src/writers/block.rs @@ -0,0 +1,156 @@ +// Copyright (C) 2017-2019 The AXIOM TEAM Association. +// +// This program is free software: you can redistribute it and/or modify +// it under the terms of the GNU Affero General Public License as +// published by the Free Software Foundation, either version 3 of the +// License, or (at your option) any later version. +// +// This program is distributed in the hope that it will be useful, +// but WITHOUT ANY WARRANTY; without even the implied warranty of +// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the +// GNU Affero General Public License for more details. +// +// You should have received a copy of the GNU Affero General Public License +// along with this program. If not, see <https://www.gnu.org/licenses/>. + +use crate::DbError; +use crate::*; +use dubp_block_doc::block::BlockDocumentTrait; +use dubp_common_doc::traits::Document; +use durs_bc_db_reader::constants::*; +use durs_bc_db_reader::entities::block::DbBlock; +use durs_bc_db_reader::entities::current_meta_datas::CurrentMetaDataKey; +use durs_bc_db_reader::entities::fork_tree::ForkTree; +use durs_bc_db_reader::DbValue; +use unwrap::unwrap; + +/// Insert new head Block in databases +pub fn insert_new_head_block( + db: &Db, + fork_tree: Option<&mut ForkTree>, + dal_block: DbBlock, +) -> Result<(), DbError> { + // Serialize datas + let bin_dal_block = durs_dbs_tools::to_bytes(&dal_block)?; + let new_current_blockstamp_bytes: Vec<u8> = dal_block.blockstamp().into(); + + // Open write transaction + db.write(|mut w| { + let current_meta_datas_store = db.get_int_store(CURRENT_METAS_DATAS); + let main_blocks_store = db.get_int_store(MAIN_BLOCKS); + let fork_blocks_store = db.get_store(FORK_BLOCKS); + + // Insert block in MAIN_BLOCKS store + main_blocks_store.put( + w.as_mut(), + *dal_block.block.number(), + &Db::db_value(&bin_dal_block)?, + )?; + + // Update current blockstamp + current_meta_datas_store.put( + w.as_mut(), + CurrentMetaDataKey::CurrentBlockstamp.to_u32(), + &DbValue::Blob(&new_current_blockstamp_bytes), + )?; + // Update current common time (also named "blockchain time") + current_meta_datas_store.put( + w.as_mut(), + CurrentMetaDataKey::CurrentBlockchainTime.to_u32(), + &DbValue::U64(dal_block.block.common_time()), + )?; + + if let Some(fork_tree) = fork_tree { + // Insert head block in fork tree + let removed_blockstamps = crate::writers::fork_tree::insert_new_head_block( + fork_tree, + dal_block.blockstamp(), + )?; + // Insert head block in ForkBlocks + let blockstamp_bytes: Vec<u8> = dal_block.blockstamp().into(); + fork_blocks_store.put( + w.as_mut(), + &blockstamp_bytes, + &Db::db_value(&bin_dal_block)?, + )?; + // Remove too old blocks + for blockstamp in removed_blockstamps { + let blockstamp_bytes: 
Vec<u8> = blockstamp.into(); + fork_blocks_store.delete(w.as_mut(), &blockstamp_bytes)?; + } + } + + Ok(w) + }) +} + +/// Remove a block in local blockchain storage +pub fn remove_block(db: &Db, block_number: BlockNumber) -> Result<(), DbError> { + db.write(|mut w| { + db.get_int_store(MAIN_BLOCKS) + .delete(w.as_mut(), block_number.0)?; + Ok(w) + }) +} + +/// Insert new fork Block in databases +pub fn insert_new_fork_block( + db: &Db, + fork_tree: &mut ForkTree, + dal_block: DbBlock, +) -> Result<bool, DbError> { + let bin_dal_block = durs_dbs_tools::to_bytes(&dal_block)?; + let blockstamp_bytes: Vec<u8> = dal_block.blockstamp().into(); + if crate::writers::fork_tree::insert_new_fork_block( + fork_tree, + dal_block.block.blockstamp(), + unwrap!(dal_block.block.previous_hash()), + )? { + // Insert fork block FORK_BLOCKS + db.write(|mut w| { + db.get_store(FORK_BLOCKS).put( + w.as_mut(), + &blockstamp_bytes, + &Db::db_value(&bin_dal_block)?, + )?; + Ok(w) + })?; + + // As long as orphan blocks can succeed the last inserted block, they are inserted + for stackable_block in + durs_bc_db_reader::readers::block::get_stackables_blocks(db, dal_block.blockstamp())? + { + let _ = insert_new_fork_block(db, fork_tree, stackable_block); + } + + Ok(true) + } else { + // Insert block in OrphanBlocks store + let previous_blockstamp_bytes: Vec<u8> = dal_block.previous_blockstamp().into(); + db.write(|mut w| { + let orphan_blockstamps_store = db.get_store(ORPHAN_BLOCKSTAMP); + let mut orphan_blockstamps = if let Some(v) = + orphan_blockstamps_store.get(w.as_ref(), &previous_blockstamp_bytes)? + { + Db::from_db_value::<Vec<Blockstamp>>(v)? + } else { + vec![] + }; + orphan_blockstamps.push(dal_block.blockstamp()); + orphan_blockstamps_store.put( + w.as_mut(), + &previous_blockstamp_bytes, + &DbValue::Blob(&durs_dbs_tools::to_bytes(&orphan_blockstamps)?), + )?; + // Insert orphan block in FORK_BLOCKS + db.get_store(FORK_BLOCKS).put( + w.as_mut(), + &blockstamp_bytes, + &Db::db_value(&bin_dal_block)?, + )?; + // Commit + Ok(w) + })?; + Ok(false) + } +} diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/certification.rs b/lib/modules/blockchain/bc-db-writer/src/writers/certification.rs similarity index 70% rename from lib/modules/blockchain/blockchain-dal/src/writers/certification.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/certification.rs index a6faeb472a0f812c5c9aa82fcb5d49ec46043dad..0001db3814e4d92ff5bd442cc83ed02fca56cc27 100644 --- a/lib/modules/blockchain/blockchain-dal/src/writers/certification.rs +++ b/lib/modules/blockchain/bc-db-writer/src/writers/certification.rs @@ -13,37 +13,43 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. 
-use crate::{BinFreeStructDb, CertsExpirV10Datas, DALError, IdentitiesV10Datas}; +use crate::{BinFreeStructDb, Db, DbError}; use dubp_common_doc::BlockNumber; use dubp_currency_params::CurrencyParameters; use dubp_user_docs::documents::certification::CompactCertificationDocumentV10; use dup_crypto::keys::*; +use durs_bc_db_reader::constants::*; +use durs_bc_db_reader::entities::identity::DbIdentity; +use durs_bc_db_reader::{CertsExpirV10Datas, DbReadable, DbValue}; use durs_wot::WotId; /// Apply "certification" event in databases pub fn write_certification( currency_params: &CurrencyParameters, - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, + db: &Db, certs_db: &BinFreeStructDb<CertsExpirV10Datas>, source_pubkey: PubKey, source: WotId, target: WotId, created_block_id: BlockNumber, written_timestamp: u64, -) -> Result<(), DALError> { +) -> Result<(), DbError> { // Get cert_chainable_on - let mut member_datas = identities_db.read(|db| { - db.get(&source_pubkey) - .expect("Database Corrupted, please reset data !") - .clone() - })?; + let mut member_datas = durs_bc_db_reader::readers::identity::get_identity(db, &source_pubkey)? + .expect("Try to write certification with unexist certifier."); // Push new cert_chainable_on member_datas .cert_chainable_on .push(written_timestamp + currency_params.sig_period); // Write new identity datas - identities_db.write(|db| { - db.insert(source_pubkey, member_datas); + let bin_member_datas = durs_dbs_tools::to_bytes(&member_datas)?; + db.write(|mut w| { + db.get_store(IDENTITIES).put( + w.as_mut(), + &source_pubkey.to_bytes_vector(), + &DbValue::Blob(&bin_member_datas), + )?; + Ok(w) })?; // Add cert in certs_db certs_db.write(|db| { @@ -56,12 +62,12 @@ pub fn write_certification( /// Revert writtent certification pub fn revert_write_cert( - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, + db: &Db, certs_db: &BinFreeStructDb<CertsExpirV10Datas>, compact_doc: CompactCertificationDocumentV10, source: WotId, target: WotId, -) -> Result<(), DALError> { +) -> Result<(), DbError> { // Remove CertsExpirV10Datas entry certs_db.write(|db| { let mut certs = db @@ -72,11 +78,16 @@ pub fn revert_write_cert( db.insert(compact_doc.block_number, certs); })?; // Pop last cert_chainable_on - identities_db.write(|db| { - if let Some(mut member_datas) = db.get(&compact_doc.issuer).cloned() { + db.write(|mut w| { + let identities_store = db.get_store(IDENTITIES); + let pubkey_bytes = compact_doc.issuer.to_bytes_vector(); + if let Some(v) = identities_store.get(w.as_ref(), &pubkey_bytes)? { + let mut member_datas = Db::from_db_value::<DbIdentity>(v)?; member_datas.cert_chainable_on.pop(); - db.insert(compact_doc.issuer, member_datas); + let bin_member_datas = durs_dbs_tools::to_bytes(&member_datas)?; + identities_store.put(w.as_mut(), &pubkey_bytes, &DbValue::Blob(&bin_member_datas))? 
} + Ok(w) })?; Ok(()) } @@ -87,7 +98,7 @@ pub fn revert_expire_cert( source: WotId, target: WotId, created_block_id: BlockNumber, -) -> Result<(), DALError> { +) -> Result<(), DbError> { // Reinsert CertsExpirV10Datas entry certs_db.write(|db| { let mut certs = db.get(&created_block_id).cloned().unwrap_or_default(); @@ -101,7 +112,7 @@ pub fn revert_expire_cert( pub fn expire_certs( certs_db: &BinFreeStructDb<CertsExpirV10Datas>, created_block_id: BlockNumber, -) -> Result<(), DALError> { +) -> Result<(), DbError> { // Remove CertsExpirV10Datas entries certs_db.write(|db| { db.remove(&created_block_id); diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/dividend.rs b/lib/modules/blockchain/bc-db-writer/src/writers/dividend.rs similarity index 96% rename from lib/modules/blockchain/blockchain-dal/src/writers/dividend.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/dividend.rs index 25af8596ed74846898b981c945efd08db60f27ff..0943e99f31c25d43e10816964e7d6d0657e49893 100644 --- a/lib/modules/blockchain/blockchain-dal/src/writers/dividend.rs +++ b/lib/modules/blockchain/bc-db-writer/src/writers/dividend.rs @@ -13,11 +13,12 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. -use crate::entities::sources::SourceAmount; use crate::*; use dubp_common_doc::BlockNumber; use dubp_user_docs::documents::transaction::*; use dup_crypto::keys::PubKey; +use durs_bc_db_reader::entities::sources::SourceAmount; +use durs_bc_db_reader::BalancesV10Datas; use std::collections::{HashMap, HashSet}; /// Apply UD creation in databases @@ -28,7 +29,7 @@ pub fn create_du( du_block_id: BlockNumber, members: &[PubKey], revert: bool, -) -> Result<(), DALError> { +) -> Result<(), DbError> { debug!( "create_du(amount, block_id, members, revert)=({:?}, {}, {:?}, {})", du_amount, du_block_id.0, members, revert diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/fork_tree.rs b/lib/modules/blockchain/bc-db-writer/src/writers/fork_tree.rs similarity index 56% rename from lib/modules/blockchain/blockchain-dal/src/writers/fork_tree.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/fork_tree.rs index 1bff1dc617bfd1268f36893dc34636009e55a50f..3eaf641343c88049376ce21f3285bb3ae1490ab4 100644 --- a/lib/modules/blockchain/blockchain-dal/src/writers/fork_tree.rs +++ b/lib/modules/blockchain/bc-db-writer/src/writers/fork_tree.rs @@ -13,48 +13,58 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. 
-use crate::entities::fork_tree::ForkTree; use crate::*; use dubp_common_doc::BlockHash; +use durs_bc_db_reader::constants::*; +use durs_bc_db_reader::entities::current_meta_datas::CurrentMetaDataKey; +use durs_bc_db_reader::entities::fork_tree::ForkTree; + +/// SAve fork tree +pub fn save_fork_tree(db: &Db, fork_tree: &ForkTree) -> Result<(), DbError> { + let bin_fork_tree = durs_dbs_tools::to_bytes(&fork_tree)?; + db.write(|mut w| { + db.get_int_store(CURRENT_METAS_DATAS).put( + w.as_mut(), + CurrentMetaDataKey::ForkTree.to_u32(), + &Db::db_value(&bin_fork_tree)?, + )?; + Ok(w) + }) +} /// Insert new head Block in fork tree, /// return vector of removed blockstamps pub fn insert_new_head_block( - fork_tree_db: &BinFreeStructDb<ForksTreeV10Datas>, + fork_tree: &mut ForkTree, blockstamp: Blockstamp, -) -> Result<Vec<Blockstamp>, DALError> { - fork_tree_db.write(|fork_tree| { - let parent_id_opt = if blockstamp.id.0 > 0 && fork_tree.size() > 0 { - Some(fork_tree.get_main_branch_node_id(BlockNumber(blockstamp.id.0 - 1)) - .expect("Fatal error: fail to insert new head block : previous block not exist in main branch")) - } else { - None - }; - fork_tree.insert_new_node(blockstamp, parent_id_opt, true); - })?; +) -> Result<Vec<Blockstamp>, DbError> { + let parent_id_opt = if blockstamp.id.0 > 0 && fork_tree.size() > 0 { + Some(fork_tree.get_main_branch_node_id(BlockNumber(blockstamp.id.0 - 1)) + .expect("Fatal error: fail to insert new head block : previous block not exist in main branch")) + } else { + None + }; + fork_tree.insert_new_node(blockstamp, parent_id_opt, true); - Ok(fork_tree_db.read(ForkTree::get_removed_blockstamps)?) + Ok(fork_tree.get_removed_blockstamps()) } /// Insert new fork block in fork tree only if parent exist in fork tree (orphan block not inserted) /// Returns true if block has a parent and has therefore been inserted, return false if block is orphaned pub fn insert_new_fork_block( - fork_tree_db: &BinFreeStructDb<ForksTreeV10Datas>, + fork_tree: &mut ForkTree, blockstamp: Blockstamp, previous_hash: Hash, -) -> Result<bool, DALError> { +) -> Result<bool, DbError> { let previous_blockstamp = Blockstamp { id: BlockNumber(blockstamp.id.0 - 1), hash: BlockHash(previous_hash), }; - let parent_id_opt = - fork_tree_db.read(|fork_tree| fork_tree.find_node_with_blockstamp(&previous_blockstamp))?; + let parent_id_opt = fork_tree.find_node_with_blockstamp(&previous_blockstamp); if let Some(parent_id) = parent_id_opt { - fork_tree_db.write(|fork_tree| { - fork_tree.insert_new_node(blockstamp, Some(parent_id), false); - })?; + fork_tree.insert_new_node(blockstamp, Some(parent_id), false); Ok(true) } else { Ok(false) @@ -63,109 +73,105 @@ pub fn insert_new_fork_block( /// Modify the main branch (function to call after a successful roolback) pub fn change_main_branch( - forks_dbs: &ForksDBs, + db: &Db, + fork_tree: &mut ForkTree, old_current_blockstamp: Blockstamp, new_current_blockstamp: Blockstamp, -) -> Result<(), DALError> { - forks_dbs.fork_tree_db.write(|tree| { - tree.change_main_branch(old_current_blockstamp, new_current_blockstamp); - })?; +) -> Result<(), DbError> { + fork_tree.change_main_branch(old_current_blockstamp, new_current_blockstamp); - let removed_blockstamps = forks_dbs - .fork_tree_db - .read(ForkTree::get_removed_blockstamps)?; + let removed_blockstamps = fork_tree.get_removed_blockstamps(); // Remove too old blocks - forks_dbs.fork_blocks_db.write(|db| { + db.write(|mut w| { + let fork_blocks_store = db.get_store(FORK_BLOCKS); for blockstamp in 
removed_blockstamps { - db.remove(&blockstamp); + let blockstamp_bytes: Vec<u8> = blockstamp.into(); + fork_blocks_store.delete(w.as_mut(), &blockstamp_bytes)?; } - })?; - Ok(()) + Ok(w) + }) } #[cfg(test)] mod test { use super::*; - use crate::entities::fork_tree::TreeNodeId; use dubp_currency_params::constants::DEFAULT_FORK_WINDOW_SIZE; + use durs_bc_db_reader::entities::fork_tree::{ForkTree, TreeNodeId}; #[test] - fn test_insert_new_head_block() -> Result<(), DALError> { + fn test_insert_new_head_block() -> Result<(), DbError> { // Create mock datas let blockstamps = dubp_user_docs_tests_tools::mocks::generate_blockstamps(*DEFAULT_FORK_WINDOW_SIZE + 2); - let fork_tree_db = open_free_struct_db::<ForksTreeV10Datas>(None, "")?; + let mut fork_tree = ForkTree::default(); // Insert genesis block assert_eq!( Vec::<Blockstamp>::with_capacity(0), - insert_new_head_block(&fork_tree_db, blockstamps[0])? + insert_new_head_block(&mut fork_tree, blockstamps[0])? ); // Check tree state - assert_eq!(1, fork_tree_db.read(|tree| tree.size())?); + assert_eq!(1, fork_tree.size()); assert_eq!( vec![(TreeNodeId(0), blockstamps[0])], - fork_tree_db.read(|tree| tree.get_sheets())? + fork_tree.get_sheets() ); // Insert FORK_WINDOW_SIZE blocks for i in 1..*DEFAULT_FORK_WINDOW_SIZE { assert_eq!( Vec::<Blockstamp>::with_capacity(0), - insert_new_head_block(&fork_tree_db, blockstamps[i])? + insert_new_head_block(&mut fork_tree, blockstamps[i])? ); } // Check tree state - assert_eq!( - *DEFAULT_FORK_WINDOW_SIZE, - fork_tree_db.read(|tree| tree.size())? - ); + assert_eq!(*DEFAULT_FORK_WINDOW_SIZE, fork_tree.size()); assert_eq!( vec![( TreeNodeId(*DEFAULT_FORK_WINDOW_SIZE - 1), blockstamps[*DEFAULT_FORK_WINDOW_SIZE - 1] )], - fork_tree_db.read(|tree| tree.get_sheets())? + fork_tree.get_sheets() ); // Insert blocks after FORK_WINDOW_SIZE (firsts blocks must be removed) assert_eq!( vec![blockstamps[0]], - insert_new_head_block(&fork_tree_db, blockstamps[*DEFAULT_FORK_WINDOW_SIZE])? + insert_new_head_block(&mut fork_tree, blockstamps[*DEFAULT_FORK_WINDOW_SIZE])? ); assert_eq!( vec![blockstamps[1]], - insert_new_head_block(&fork_tree_db, blockstamps[*DEFAULT_FORK_WINDOW_SIZE + 1])? + insert_new_head_block(&mut fork_tree, blockstamps[*DEFAULT_FORK_WINDOW_SIZE + 1])? ); Ok(()) } #[test] - fn test_insert_new_fork_block() -> Result<(), DALError> { + fn test_insert_new_fork_block() -> Result<(), DbError> { // Create mock datas let blockstamps = dubp_user_docs_tests_tools::mocks::generate_blockstamps(*DEFAULT_FORK_WINDOW_SIZE + 3); - let fork_tree_db = open_free_struct_db::<ForksTreeV10Datas>(None, "")?; + let mut fork_tree = ForkTree::default(); // Insert 4 main blocks for i in 0..4 { assert_eq!( Vec::<Blockstamp>::with_capacity(0), - insert_new_head_block(&fork_tree_db, blockstamps[i])? + insert_new_head_block(&mut fork_tree, blockstamps[i])? ); } // Check tree state - assert_eq!(4, fork_tree_db.read(|tree| tree.size())?); + assert_eq!(4, fork_tree.size()); assert_eq!( vec![(TreeNodeId(3), blockstamps[3])], - fork_tree_db.read(|tree| tree.get_sheets())? + fork_tree.get_sheets() ); // Insert first fork block at child of block 2 @@ -175,17 +181,17 @@ mod test { }; assert_eq!( true, - insert_new_fork_block(&fork_tree_db, fork_blockstamp, blockstamps[2].hash.0)? + insert_new_fork_block(&mut fork_tree, fork_blockstamp, blockstamps[2].hash.0)? 
);      // Check tree state -        assert_eq!(5, fork_tree_db.read(|tree| tree.size())?); +        assert_eq!(5, fork_tree.size()); assert!(durs_common_tests_tools::collections::slice_same_elems( &vec![ (TreeNodeId(3), blockstamps[3]), (TreeNodeId(4), fork_blockstamp) ], -            &fork_tree_db.read(|tree| tree.get_sheets())? +            &fork_tree.get_sheets() )); // Insert second fork block at child of first fork block @@ -195,32 +201,29 @@ mod test { }; assert_eq!( true, -            insert_new_fork_block(&fork_tree_db, fork_blockstamp_2, fork_blockstamp.hash.0)? +            insert_new_fork_block(&mut fork_tree, fork_blockstamp_2, fork_blockstamp.hash.0)? ); // Check tree state -        assert_eq!(6, fork_tree_db.read(|tree| tree.size())?); +        assert_eq!(6, fork_tree.size()); assert!(durs_common_tests_tools::collections::slice_same_elems( &vec![ (TreeNodeId(3), blockstamps[3]), (TreeNodeId(5), fork_blockstamp_2) ], -            &fork_tree_db.read(|tree| tree.get_sheets())? +            &fork_tree.get_sheets() )); // Insert FORK_WINDOW_SIZE blocks for i in 4..*DEFAULT_FORK_WINDOW_SIZE { assert_eq!( Vec::<Blockstamp>::with_capacity(0), -                insert_new_head_block(&fork_tree_db, blockstamps[i])? +                insert_new_head_block(&mut fork_tree, blockstamps[i])? ); } // Check tree state -        assert_eq!( -            *DEFAULT_FORK_WINDOW_SIZE + 2, -            fork_tree_db.read(|tree| tree.size())? -        ); +        assert_eq!(*DEFAULT_FORK_WINDOW_SIZE + 2, fork_tree.size()); assert!(durs_common_tests_tools::collections::slice_same_elems( &vec![ ( @@ -229,32 +232,37 @@ mod test { ), (TreeNodeId(5), fork_blockstamp_2) ], -            &fork_tree_db.read(|tree| tree.get_sheets())? +            &fork_tree.get_sheets() )); // Insert 2 new main blocks (too old blocks must be removed) for i in 0..2 { assert_eq!( vec![blockstamps[i]], -                insert_new_head_block(&fork_tree_db, blockstamps[*DEFAULT_FORK_WINDOW_SIZE + i])? +                insert_new_head_block(&mut fork_tree, blockstamps[*DEFAULT_FORK_WINDOW_SIZE + i])? ); } // Insert one new main block (fork branch must be removed) assert_eq!( vec![blockstamps[2], fork_blockstamp_2, fork_blockstamp], -            insert_new_head_block(&fork_tree_db, blockstamps[*DEFAULT_FORK_WINDOW_SIZE + 2])? +            insert_new_head_block(&mut fork_tree, blockstamps[*DEFAULT_FORK_WINDOW_SIZE + 2])? ); // Check tree state +        assert_eq!(*DEFAULT_FORK_WINDOW_SIZE, fork_tree.size()); assert_eq!( -            *DEFAULT_FORK_WINDOW_SIZE, -            fork_tree_db.read(|tree| tree.size())? -        ); -        assert_eq!( -            vec![(TreeNodeId(1), blockstamps[*DEFAULT_FORK_WINDOW_SIZE + 2])], -            fork_tree_db.read(|tree| tree.get_sheets())? -        ); +            vec![( +                TreeNodeId(*DEFAULT_FORK_WINDOW_SIZE + 4), +                blockstamps[*DEFAULT_FORK_WINDOW_SIZE + 2] +            )], +            fork_tree.get_sheets() +            ); Ok(()) } diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/identity.rs b/lib/modules/blockchain/bc-db-writer/src/writers/identity.rs similarity index 60% rename from lib/modules/blockchain/blockchain-dal/src/writers/identity.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/identity.rs index 5e73017231bd6fd5748769d1759b7bbc326acdea..e1f1d36a7b1287bb9813183ac48d30611d57f690 100644 --- a/lib/modules/blockchain/blockchain-dal/src/writers/identity.rs +++ b/lib/modules/blockchain/bc-db-writer/src/writers/identity.rs @@ -13,27 +13,27 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. -use crate::entities::identity::{DALIdentity, DALIdentityState}; -use crate::{BinFreeStructDb, DALError, IdentitiesV10Datas, MsExpirV10Datas}; +use crate::{BinFreeStructDb, Db, DbError, MsExpirV10Datas}; use dubp_common_doc::traits::Document; use dubp_common_doc::{BlockNumber, Blockstamp}; use dubp_currency_params::CurrencyParameters; use dubp_user_docs::documents::identity::IdentityDocumentV10; use dup_crypto::keys::PubKey; +use dup_crypto::keys::PublicKey; +use durs_bc_db_reader::constants::*; +use durs_bc_db_reader::entities::identity::{DbIdentity, DbIdentityState}; +use durs_bc_db_reader::{DbReadable, DbValue}; use durs_common_tools::fatal_error; use durs_wot::WotId; /// Remove identity from databases pub fn revert_create_identity( -    identities_db: &BinFreeStructDb<IdentitiesV10Datas>, +    db: &Db, ms_db: &BinFreeStructDb<MsExpirV10Datas>, pubkey: &PubKey, -) -> Result<(), DALError> { -    let dal_idty = identities_db.read(|db| { -        db.get(&pubkey) -            .expect("Fatal error : try to revert unknow identity !") -            .clone() -    })?; +) -> Result<(), DbError> { +    let dal_idty = durs_bc_db_reader::readers::identity::get_identity(db, pubkey)?
+ .expect("Try to revert unexist idty."); // Remove membership ms_db.write(|db| { let mut memberships = db @@ -44,8 +44,12 @@ pub fn revert_create_identity( db.insert(dal_idty.ms_created_block_id, memberships); })?; // Remove identity - identities_db.write(|db| { - db.remove(&dal_idty.idty_doc.issuers()[0]); + db.write(|mut w| { + db.get_store(IDENTITIES).delete( + w.as_mut(), + &dal_idty.idty_doc.issuers()[0].to_bytes_vector(), + )?; + Ok(w) })?; Ok(()) } @@ -53,19 +57,19 @@ pub fn revert_create_identity( /// Write identity in databases pub fn create_identity( currency_params: &CurrencyParameters, - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, + db: &Db, ms_db: &BinFreeStructDb<MsExpirV10Datas>, idty_doc: &IdentityDocumentV10, ms_created_block_id: BlockNumber, wot_id: WotId, current_blockstamp: Blockstamp, current_bc_time: u64, -) -> Result<(), DALError> { +) -> Result<(), DbError> { let mut idty_doc = idty_doc.clone(); idty_doc.reduce(); - let idty = DALIdentity { + let idty = DbIdentity { hash: "0".to_string(), - state: DALIdentityState::Member(vec![0]), + state: DbIdentityState::Member(vec![0]), joined_on: current_blockstamp, expired_on: None, revoked_on: None, @@ -76,8 +80,14 @@ pub fn create_identity( cert_chainable_on: vec![], }; // Write Identity - identities_db.write(|db| { - db.insert(idty.idty_doc.issuers()[0], idty.clone()); + let bin_idty = durs_dbs_tools::to_bytes(&idty)?; + db.write(|mut w| { + db.get_store(IDENTITIES).put( + w.as_mut(), + &idty.idty_doc.issuers()[0].to_bytes_vector(), + &DbValue::Blob(&bin_idty), + )?; + Ok(w) })?; // Write membership ms_db.write(|db| { @@ -90,25 +100,24 @@ pub fn create_identity( /// Apply "exclude identity" event pub fn exclude_identity( - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, + db: &Db, pubkey: &PubKey, exclusion_blockstamp: &Blockstamp, revert: bool, -) -> Result<(), DALError> { - let mut idty_datas = identities_db - .read(|db| db.get(pubkey).cloned())? - .expect("Fatal error : try to renewal unknow identity !"); +) -> Result<(), DbError> { + let mut idty_datas = durs_bc_db_reader::readers::identity::get_identity(db, pubkey)? + .expect("Try to exclude unexist idty."); idty_datas.state = if revert { match idty_datas.state { - DALIdentityState::ExpireMember(renewed_counts) => { - DALIdentityState::Member(renewed_counts) + DbIdentityState::ExpireMember(renewed_counts) => { + DbIdentityState::Member(renewed_counts) } _ => fatal_error!("Try to revert exclusion for a no excluded identity !"), } } else { match idty_datas.state { - DALIdentityState::Member(renewed_counts) => { - DALIdentityState::ExpireMember(renewed_counts) + DbIdentityState::Member(renewed_counts) => { + DbIdentityState::ExpireMember(renewed_counts) } _ => fatal_error!("Try to exclude for an already excluded/revoked identity !"), } @@ -119,45 +128,50 @@ pub fn exclude_identity( Some(*exclusion_blockstamp) }; // Write new identity datas - identities_db.write(|db| { - db.insert(*pubkey, idty_datas); + let bin_idty = durs_dbs_tools::to_bytes(&idty_datas)?; + db.write(|mut w| { + db.get_store(IDENTITIES).put( + w.as_mut(), + &pubkey.to_bytes_vector(), + &DbValue::Blob(&bin_idty), + )?; + Ok(w) })?; Ok(()) } /// Apply "revoke identity" event pub fn revoke_identity( - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, + db: &Db, pubkey: &PubKey, renewal_blockstamp: &Blockstamp, explicit: bool, revert: bool, -) -> Result<(), DALError> { - let mut member_datas = identities_db - .read(|db| db.get(pubkey).cloned())? 
- .expect("Fatal error : Try to revoke unknow idty !"); +) -> Result<(), DbError> { + let mut member_datas = durs_bc_db_reader::readers::identity::get_identity(db, pubkey)? + .expect("Try to revoke unexist idty."); member_datas.state = if revert { match member_datas.state { - DALIdentityState::ExplicitRevoked(renewed_counts) => { - DALIdentityState::Member(renewed_counts) + DbIdentityState::ExplicitRevoked(renewed_counts) => { + DbIdentityState::Member(renewed_counts) } - DALIdentityState::ExplicitExpireRevoked(renewed_counts) - | DALIdentityState::ImplicitRevoked(renewed_counts) => { - DALIdentityState::ExpireMember(renewed_counts) + DbIdentityState::ExplicitExpireRevoked(renewed_counts) + | DbIdentityState::ImplicitRevoked(renewed_counts) => { + DbIdentityState::ExpireMember(renewed_counts) } _ => fatal_error!("Try to revert revoke_identity() for a no revoked idty !"), } } else { match member_datas.state { - DALIdentityState::ExpireMember(renewed_counts) => { - DALIdentityState::ExplicitExpireRevoked(renewed_counts) + DbIdentityState::ExpireMember(renewed_counts) => { + DbIdentityState::ExplicitExpireRevoked(renewed_counts) } - DALIdentityState::Member(renewed_counts) => { + DbIdentityState::Member(renewed_counts) => { if explicit { - DALIdentityState::ExplicitRevoked(renewed_counts) + DbIdentityState::ExplicitRevoked(renewed_counts) } else { - DALIdentityState::ImplicitRevoked(renewed_counts) + DbIdentityState::ImplicitRevoked(renewed_counts) } } _ => fatal_error!("Try to revert revoke an already revoked idty !"), @@ -169,8 +183,15 @@ pub fn revoke_identity( Some(*renewal_blockstamp) }; - identities_db.write(|db| { - db.insert(*pubkey, member_datas); + // Update idty + let bin_idty = durs_dbs_tools::to_bytes(&member_datas)?; + db.write(|mut w| { + db.get_store(IDENTITIES).put( + w.as_mut(), + &pubkey.to_bytes_vector(), + &DbValue::Blob(&bin_idty), + )?; + Ok(w) })?; Ok(()) } @@ -178,43 +199,42 @@ pub fn revoke_identity( /// Apply "renewal identity" event in databases pub fn renewal_identity( currency_params: &CurrencyParameters, - identities_db: &BinFreeStructDb<IdentitiesV10Datas>, + db: &Db, ms_db: &BinFreeStructDb<MsExpirV10Datas>, pubkey: &PubKey, idty_wot_id: WotId, renewal_timestamp: u64, ms_created_block_id: BlockNumber, revert: bool, -) -> Result<(), DALError> { +) -> Result<(), DbError> { // Get idty_datas - let mut idty_datas = identities_db - .read(|db| db.get(pubkey).cloned())? + let mut idty_datas = durs_bc_db_reader::readers::identity::get_identity(db, pubkey)? 
.expect("Fatal error : try to renewal unknow identity !"); // Calculate new state value idty_datas.state = if revert { match idty_datas.state { - DALIdentityState::Member(renewed_counts) => { + DbIdentityState::Member(renewed_counts) => { let mut new_renewed_counts = renewed_counts.clone(); new_renewed_counts[renewed_counts.len() - 1] -= 1; if new_renewed_counts[renewed_counts.len() - 1] > 0 { - DALIdentityState::Member(new_renewed_counts) + DbIdentityState::Member(new_renewed_counts) } else { - DALIdentityState::ExpireMember(new_renewed_counts) + DbIdentityState::ExpireMember(new_renewed_counts) } } _ => fatal_error!("Try to revert renewal_identity() for an excluded or revoked idty !"), } } else { match idty_datas.state { - DALIdentityState::Member(renewed_counts) => { + DbIdentityState::Member(renewed_counts) => { let mut new_renewed_counts = renewed_counts.clone(); new_renewed_counts[renewed_counts.len() - 1] += 1; - DALIdentityState::Member(new_renewed_counts) + DbIdentityState::Member(new_renewed_counts) } - DALIdentityState::ExpireMember(renewed_counts) => { + DbIdentityState::ExpireMember(renewed_counts) => { let mut new_renewed_counts = renewed_counts.clone(); new_renewed_counts.push(0); - DALIdentityState::Member(new_renewed_counts) + DbIdentityState::Member(new_renewed_counts) } _ => fatal_error!("Try to renewed a revoked identity !"), } @@ -228,8 +248,14 @@ pub fn renewal_identity( .push(renewal_timestamp + currency_params.ms_period); } // Write new identity datas - identities_db.write(|db| { - db.insert(*pubkey, idty_datas); + let bin_idty = durs_dbs_tools::to_bytes(&idty_datas)?; + db.write(|mut w| { + db.get_store(IDENTITIES).put( + w.as_mut(), + &pubkey.to_bytes_vector(), + &DbValue::Blob(&bin_idty), + )?; + Ok(w) })?; // Update MsExpirV10DB ms_db.write(|db| { @@ -241,12 +267,10 @@ pub fn renewal_identity( } /// Remove identity from databases -pub fn remove_identity( - db: &BinFreeStructDb<IdentitiesV10Datas>, - pubkey: PubKey, -) -> Result<(), DALError> { - db.write(|db| { - db.remove(&pubkey); - })?; - Ok(()) +pub fn remove_identity(db: &Db, pubkey: PubKey) -> Result<(), DbError> { + db.write(|mut w| { + db.get_store(IDENTITIES) + .delete(w.as_mut(), &pubkey.to_bytes_vector())?; + Ok(w) + }) } diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/mod.rs b/lib/modules/blockchain/bc-db-writer/src/writers/mod.rs similarity index 100% rename from lib/modules/blockchain/blockchain-dal/src/writers/mod.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/mod.rs diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/requests.rs b/lib/modules/blockchain/bc-db-writer/src/writers/requests.rs similarity index 85% rename from lib/modules/blockchain/blockchain-dal/src/writers/requests.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/requests.rs index 00e51d6d35615028b7761ea64ca281aa49775dbc..315855d0f04daa2782f9a455c66f9d65a81fcdc8 100644 --- a/lib/modules/blockchain/blockchain-dal/src/writers/requests.rs +++ b/lib/modules/blockchain/bc-db-writer/src/writers/requests.rs @@ -13,9 +13,7 @@ // You should have received a copy of the GNU Affero General Public License // along with this program. If not, see <https://www.gnu.org/licenses/>. 
-use crate::entities::block::DALBlock; -use crate::entities::sources::SourceAmount; -use crate::writers::transaction::DALTxV10; +use crate::writers::transaction::DbTxV10; use crate::*; use dubp_block_doc::block::{BlockDocument, BlockDocumentTrait}; use dubp_common_doc::Blockstamp; @@ -23,6 +21,9 @@ use dubp_currency_params::CurrencyParameters; use dubp_user_docs::documents::certification::CompactCertificationDocumentV10; use dubp_user_docs::documents::identity::IdentityDocumentV10; use dup_crypto::keys::PubKey; +use durs_bc_db_reader::entities::block::DbBlock; +use durs_bc_db_reader::entities::fork_tree::ForkTree; +use durs_bc_db_reader::entities::sources::SourceAmount; use durs_wot::WotId; use std::ops::Deref; @@ -41,9 +42,9 @@ pub enum DBsWriteRequest { /// Contain a pending write request for blocks databases pub enum BlocksDBsWriteQuery { /// Write block - WriteBlock(DALBlock), + WriteBlock(DbBlock), /// Revert block - RevertBlock(DALBlock), + RevertBlock(DbBlock), } impl BlocksDBsWriteQuery { @@ -58,19 +59,19 @@ impl BlocksDBsWriteQuery { pub fn apply( self, db: &Db, - forks_db: &ForksDBs, + fork_tree: &mut ForkTree, fork_window_size: usize, sync_target: Option<Blockstamp>, - ) -> Result<(), DALError> { + ) -> Result<(), DbError> { match self { BlocksDBsWriteQuery::WriteBlock(dal_block) => { - let dal_block: DALBlock = dal_block; + let dal_block: DbBlock = dal_block; trace!("BlocksDBsWriteQuery::WriteBlock..."); if sync_target.is_none() || dal_block.blockstamp().id.0 + fork_window_size as u32 >= sync_target.expect("safe unwrap").id.0 { - super::block::insert_new_head_block(db, Some(forks_db), dal_block)?; + super::block::insert_new_head_block(db, Some(fork_tree), dal_block)?; } else { super::block::insert_new_head_block(db, None, dal_block)?; } @@ -127,7 +128,8 @@ impl WotsDBsWriteQuery { _blockstamp: &Blockstamp, currency_params: &CurrencyParameters, databases: &WotsV10DBs, - ) -> Result<(), DALError> { + db: &Db, + ) -> Result<(), DbError> { match *self { WotsDBsWriteQuery::CreateIdentity( ref wot_id, @@ -138,7 +140,7 @@ impl WotsDBsWriteQuery { ) => { writers::identity::create_identity( currency_params, - &databases.identities_db, + &db, &databases.ms_db, idty_doc.deref(), *ms_created_block_id, @@ -148,11 +150,7 @@ impl WotsDBsWriteQuery { )?; } WotsDBsWriteQuery::RevertCreateIdentity(ref pubkey) => { - writers::identity::revert_create_identity( - &databases.identities_db, - &databases.ms_db, - pubkey, - )?; + writers::identity::revert_create_identity(&db, &databases.ms_db, pubkey)?; } WotsDBsWriteQuery::RenewalIdentity( ref pubkey, @@ -163,7 +161,7 @@ impl WotsDBsWriteQuery { trace!("WotsDBsWriteQuery::RenewalIdentity..."); writers::identity::renewal_identity( currency_params, - &databases.identities_db, + &db, &databases.ms_db, pubkey, *idty_wot_id, @@ -181,7 +179,7 @@ impl WotsDBsWriteQuery { ) => { writers::identity::renewal_identity( currency_params, - &databases.identities_db, + &db, &databases.ms_db, pubkey, *idty_wot_id, @@ -191,38 +189,16 @@ impl WotsDBsWriteQuery { )?; } WotsDBsWriteQuery::ExcludeIdentity(ref pubkey, ref blockstamp) => { - writers::identity::exclude_identity( - &databases.identities_db, - pubkey, - blockstamp, - false, - )?; + writers::identity::exclude_identity(&db, pubkey, blockstamp, false)?; } WotsDBsWriteQuery::RevertExcludeIdentity(ref pubkey, ref blockstamp) => { - writers::identity::exclude_identity( - &databases.identities_db, - pubkey, - blockstamp, - true, - )?; + writers::identity::exclude_identity(&db, pubkey, blockstamp, true)?; } 
WotsDBsWriteQuery::RevokeIdentity(ref pubkey, ref blockstamp, ref explicit) => { - writers::identity::revoke_identity( - &databases.identities_db, - pubkey, - blockstamp, - *explicit, - false, - )?; + writers::identity::revoke_identity(&db, pubkey, blockstamp, *explicit, false)?; } WotsDBsWriteQuery::RevertRevokeIdentity(ref pubkey, ref blockstamp, ref explicit) => { - writers::identity::revoke_identity( - &databases.identities_db, - pubkey, - blockstamp, - *explicit, - true, - )?; + writers::identity::revoke_identity(&db, pubkey, blockstamp, *explicit, true)?; } WotsDBsWriteQuery::CreateCert( ref source_pubkey, @@ -234,7 +210,7 @@ impl WotsDBsWriteQuery { trace!("WotsDBsWriteQuery::CreateCert..."); writers::certification::write_certification( currency_params, - &databases.identities_db, + &db, &databases.certs_db, *source_pubkey, *source, @@ -247,7 +223,7 @@ impl WotsDBsWriteQuery { WotsDBsWriteQuery::RevertCert(ref compact_doc, ref source, ref target) => { trace!("WotsDBsWriteQuery::CreateCert..."); writers::certification::revert_write_cert( - &databases.identities_db, + &db, &databases.certs_db, *compact_doc, *source, @@ -277,7 +253,7 @@ pub enum CurrencyDBsWriteQuery { /// Write transaction WriteTx(Box<TransactionDocument>), /// Revert transaction - RevertTx(Box<DALTxV10>), + RevertTx(Box<DbTxV10>), /// Create dividend CreateUD(SourceAmount, BlockNumber, Vec<PubKey>), /// Revert dividend @@ -290,7 +266,7 @@ impl CurrencyDBsWriteQuery { &self, blockstamp: &Blockstamp, databases: &CurrencyV10DBs, - ) -> Result<(), DALError> { + ) -> Result<(), DbError> { match *self { CurrencyDBsWriteQuery::WriteTx(ref tx_doc) => { super::transaction::apply_and_write_tx(blockstamp, &databases, tx_doc.deref())?; diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/transaction.rs b/lib/modules/blockchain/bc-db-writer/src/writers/transaction.rs similarity index 98% rename from lib/modules/blockchain/blockchain-dal/src/writers/transaction.rs rename to lib/modules/blockchain/bc-db-writer/src/writers/transaction.rs index 8714a46df2d23c85c1dd8e7ed7fbb8ac089dc08a..f9989386f8aacfc9afb11ecab7cbc0e23161a72f 100644 --- a/lib/modules/blockchain/blockchain-dal/src/writers/transaction.rs +++ b/lib/modules/blockchain/bc-db-writer/src/writers/transaction.rs @@ -16,28 +16,28 @@ use dubp_user_docs::documents::transaction::*; use durs_common_tools::fatal_error; -use crate::entities::sources::{SourceAmount, UTXOV10}; use crate::*; use dubp_indexes::sindex::{SourceUniqueIdV10, UniqueIdUTXOv10}; +use durs_bc_db_reader::entities::sources::{SourceAmount, UTXOV10}; #[derive(Debug)] /// Transaction error pub enum TxError { /// UnkonwError UnkonwError(), - /// DALError - DALError(DALError), + /// DbError + DbError(DbError), } -impl From<DALError> for TxError { - fn from(err: DALError) -> TxError { - TxError::DALError(err) +impl From<DbError> for TxError { + fn from(err: DbError) -> TxError { + TxError::DbError(err) } } #[derive(Debug, Clone, Deserialize, Serialize)] -/// DAL Transaction V10 -pub struct DALTxV10 { +/// DB Transaction V10 +pub struct DbTxV10 { /// Transaction document pub tx_doc: TransactionDocument, /// Index of sources destroyed by this transaction @@ -48,8 +48,8 @@ pub struct DALTxV10 { pub fn revert_tx( blockstamp: &Blockstamp, dbs: &CurrencyV10DBs, - dal_tx: &DALTxV10, -) -> Result<(), DALError> { + dal_tx: &DbTxV10, +) -> Result<(), DbError> { let mut tx_doc = dal_tx.tx_doc.clone(); let tx_hash = tx_doc.get_hash(); let sources_destroyed = &dal_tx.sources_destroyed; @@ -257,7 +257,7 @@ pub fn 
apply_and_write_tx( blockstamp: &Blockstamp, dbs: &CurrencyV10DBs, tx_doc: &TransactionDocument, -) -> Result<(), DALError> { +) -> Result<(), DbError> { let mut tx_doc = tx_doc.clone(); let tx_hash = tx_doc.get_hash(); let mut sources_destroyed = HashSet::new(); @@ -426,7 +426,7 @@ pub fn apply_and_write_tx( dbs.tx_db.write(|db| { db.insert( tx_hash, - DALTxV10 { + DbTxV10 { tx_doc, sources_destroyed, }, @@ -571,7 +571,7 @@ mod tests { revert_tx( &blockstamp, ¤cy_dbs, - &DALTxV10 { + &DbTxV10 { tx_doc: tx_doc.clone(), sources_destroyed: HashSet::with_capacity(0), }, diff --git a/lib/modules/blockchain/blockchain-dal/src/readers/block.rs b/lib/modules/blockchain/blockchain-dal/src/readers/block.rs deleted file mode 100644 index f795061bf6223626f31a9e6152ae1509cb744703..0000000000000000000000000000000000000000 --- a/lib/modules/blockchain/blockchain-dal/src/readers/block.rs +++ /dev/null @@ -1,184 +0,0 @@ -// Copyright (C) 2017-2019 The AXIOM TEAM Association. -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as -// published by the Free Software Foundation, either version 3 of the -// License, or (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see <https://www.gnu.org/licenses/>. - -use crate::constants::*; -use crate::*; -use dubp_block_doc::block::{BlockDocument, BlockDocumentTrait}; -use dubp_common_doc::traits::Document; -use dubp_common_doc::{BlockHash, BlockNumber, Blockstamp}; -use dup_crypto::keys::*; -use std::collections::HashMap; -use unwrap::unwrap; - -/// Get block hash -pub fn get_block_hash<DB: DbReadable>( - db: &DB, - block_number: BlockNumber, -) -> Result<Option<BlockHash>, DALError> { - Ok( - if let Some(block) = get_block_in_local_blockchain(db, block_number)? { - block.hash() - } else { - None - }, - ) -} -/// Return true if the node already knows this block -pub fn already_have_block<DB: DbReadable>( - db: &DB, - forks_dbs: &ForksDBs, - blockstamp: Blockstamp, - previous_hash: Option<Hash>, -) -> Result<bool, DALError> { - let previous_blockstamp = if blockstamp.id.0 > 0 { - Blockstamp { - id: BlockNumber(blockstamp.id.0 - 1), - hash: BlockHash(unwrap!(previous_hash)), - } - } else { - Blockstamp::default() - }; - - if forks_dbs - .fork_blocks_db - .read(|db| db.contains_key(&blockstamp))? - { - return Ok(true); - } else if let Some(orphan_blockstamps) = forks_dbs.orphan_blocks_db.read(|db| { - if let Some(orphan_blocks) = db.get(&previous_blockstamp) { - let orphan_blockstamps: Vec<Blockstamp> = - orphan_blocks.iter().map(DALBlock::blockstamp).collect(); - Some(orphan_blockstamps) - } else { - None - } - })? 
{ - for orphan_blockstamp in orphan_blockstamps { - if orphan_blockstamp == blockstamp { - return Ok(true); - } - } - } else { - return Ok(get_block_in_local_blockchain(db, blockstamp.id)?.is_some()); - } - - Ok(false) -} - -/// Get block -pub fn get_block<DB: DbReadable>( - db: &DB, - forks_blocks_db: Option<&BinFreeStructDb<ForksBlocksV10Datas>>, - blockstamp: &Blockstamp, -) -> Result<Option<DALBlock>, DALError> { - let opt_dal_block = get_dal_block_in_local_blockchain(db, blockstamp.id)?; - if opt_dal_block.is_none() && forks_blocks_db.is_some() { - Ok(forks_blocks_db - .expect("safe unwrap") - .read(|db| db.get(&blockstamp).cloned())?) - } else { - Ok(opt_dal_block) - } -} - -/// Get block in local blockchain -#[inline] -pub fn get_block_in_local_blockchain<DB: DbReadable>( - db: &DB, - block_number: BlockNumber, -) -> Result<Option<BlockDocument>, DALError> { - Ok(get_dal_block_in_local_blockchain(db, block_number)?.map(|dal_block| dal_block.block)) -} - -/// Get block in local blockchain -pub fn get_dal_block_in_local_blockchain<DB: DbReadable>( - db: &DB, - block_number: BlockNumber, -) -> Result<Option<DALBlock>, DALError> { - db.read(|r| { - if let Some(v) = db.get_int_store(LOCAL_BC).get(r, block_number.0)? { - Ok(Some(DB::from_db_value(v)?)) - } else { - Ok(None) - } - }) - //local_bc_db.read(|r| local_bc_db.get(&r, block_number.0)) -} - -/// Get several blocks in local blockchain -pub fn get_blocks_in_local_blockchain<DB: DbReadable>( - db: &DB, - first_block_number: BlockNumber, - mut count: u32, -) -> Result<Vec<BlockDocument>, DALError> { - db.read(|r| { - let bc_store = db.get_int_store(LOCAL_BC); - let mut blocks = Vec::with_capacity(count as usize); - let mut current_block_number = first_block_number; - - while let Some(v) = bc_store.get(r, current_block_number.0)? { - blocks.push(DB::from_db_value::<DALBlock>(v)?.block); - count -= 1; - if count > 0 { - current_block_number = BlockNumber(current_block_number.0 + 1); - } else { - return Ok(blocks); - } - } - Ok(blocks) - }) - /*bc_db.read(|r| { - let mut blocks = Vec::with_capacity(count as usize); - let mut current_block_number = first_block_number; - while let Some(dal_block) = bc_db.get(&r, current_block_number.0)? 
{ - blocks.push(dal_block.block); - count -= 1; - if count > 0 { - current_block_number = BlockNumber(current_block_number.0 + 1); - } else { - return Ok(blocks); - } - } - Ok(blocks) - })*/ -} - -/// Get current frame of calculating members -pub fn get_current_frame<DB: DbReadable>( - current_block: &DALBlock, - db: &DB, -) -> Result<HashMap<PubKey, usize>, DALError> { - let frame_begin = - current_block.block.number().0 - current_block.block.current_frame_size() as u32; - - let blocks = get_blocks_in_local_blockchain( - db, - BlockNumber(frame_begin), - current_block.block.current_frame_size() as u32, - )?; - - let mut current_frame: HashMap<PubKey, usize> = HashMap::new(); - for block in blocks { - let issuer = block.issuers()[0]; - let issuer_count_blocks = if let Some(issuer_count_blocks) = current_frame.get(&issuer) { - issuer_count_blocks + 1 - } else { - 1 - }; - current_frame.insert(issuer, issuer_count_blocks); - } - - Ok(current_frame) -} diff --git a/lib/modules/blockchain/blockchain-dal/src/storage.rs b/lib/modules/blockchain/blockchain-dal/src/storage.rs deleted file mode 100644 index 041a2d0ebf07a0d583d0e9629041775a4d297b51..0000000000000000000000000000000000000000 --- a/lib/modules/blockchain/blockchain-dal/src/storage.rs +++ /dev/null @@ -1,66 +0,0 @@ -// Copyright (C) 2017-2019 The AXIOM TEAM Association. -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as -// published by the Free Software Foundation, either version 3 of the -// License, or (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see <https://www.gnu.org/licenses/>. 
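The block readers deleted here are not dropped: per the call sites further down (`durs_bc_db_reader::readers::block::…`), they move into the new `durs-bc-db-reader` crate with `DbBlock` in place of `DALBlock`. The rkv read pattern itself is unchanged; a sketch under that assumption, using the `DbReadable` trait and the `LOCAL_BC` integer store exactly as in the deleted code (the function name `read_db_block` is illustrative):

use dubp_common_doc::BlockNumber;
use durs_bc_db_reader::entities::block::DbBlock;

// Fetch one serialized block from the LOCAL_BC integer store and decode it,
// as get_dal_block_in_local_blockchain did before the move.
fn read_db_block<DB: DbReadable>(
    db: &DB,
    block_number: BlockNumber,
) -> Result<Option<DbBlock>, DbError> {
    db.read(|r| {
        if let Some(v) = db.get_int_store(LOCAL_BC).get(r, block_number.0)? {
            Ok(Some(DB::from_db_value(v)?))
        } else {
            Ok(None)
        }
    })
}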
- -use crate::DALError; - -/// Storage type -pub enum StorageType { - Single, - SingleInteger, - Multi, - MultiInteger, -} - -pub enum DB { - File(Arc<RwLock<Rkv>>), - Mem(), -} - -impl DB { - /// Open database - pub fn open(db_path: Option<&PathBuf>) -> DB { - let bc_backend = if let Some(db_path) = db_path { - let mut manager = Manager::singleton() - .write() - .expect("fail to get rkb manager !"); - let db = manager - .get_or_create(db_path.as_path(), Rkv::new) - .expect("Fail to open LMDB blockchain database !"); - DB::File(db) - } else { - DB::Mem() - }; - } - /// Open integer storage (astorage is like a table or collection) - pub fn open_integer_storage(&self, storage_name: &str) -> Result<IntegerStore<u32>, DalError> { - let rkv = self.clone().read().expect("Fail to read lock Rkv"); - rkv.open_integer(storage_name, StoreOptions::create())?; - } -} - -/*pub trait MapStorage<K, V> { - - open(Option<&PathBuf>) -> Result<Self, DALError>; - - get(&self, key: &K) -> Result<Option<V>, DALError>; - put(&self, key: K, value: V) -> Result<(), DALError>; - delete(&self, key: &K) -> Result<(), DALError>; - - get_values(&self, keys: Vec<&K>) -> Result<Vec<(&K, Option<V>)>, DALError>; - put_values(&self, datas: Vec<(K, V)>) -> Result<Vec<()>, DALError>; - delete_values(&self, keys: Vec<&K>) -> Result<(), DALError>; - - fn save(&self) -> Result<(), DALError>; -}*/ diff --git a/lib/modules/blockchain/blockchain-dal/src/storage/local_blockchain.rs b/lib/modules/blockchain/blockchain-dal/src/storage/local_blockchain.rs deleted file mode 100644 index a5cf34f2ff2361f84328ba61fce24de576e5378b..0000000000000000000000000000000000000000 --- a/lib/modules/blockchain/blockchain-dal/src/storage/local_blockchain.rs +++ /dev/null @@ -1,42 +0,0 @@ -// Copyright (C) 2017-2019 The AXIOM TEAM Association. -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as -// published by the Free Software Foundation, either version 3 of the -// License, or (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see <https://www.gnu.org/licenses/>. - -//! 
Local blockchain storage - -use crate::entities::block::DALBlock; -use super::MapStorage; -use dubp_common_doc::BlockNumber; -use fnv::FnvHashMap; -use rkv::{IntegerStore, Manager, Rkv, StoreOptions, Value}; - -impl MapStorage<BlockNumber, DALBlock> { - - open(Option<&PathBuf>) -> Result<Self, DALError>; - - get(&self, key: &K) -> Result<Option<V>, DALError>; - put(&self, key: K, value: V) -> Result<(), DALError>; - delete(&self, key: &K) -> Result<(), DALError>; - - get_values(&self, keys: Vec<&K>) -> Result<Vec<(&K, Option<V>)>, DALError>; - put_values(&self, datas: Vec<(K, V)>) -> Result<Vec<()>, DALError>; - delete_values(&self, keys: Vec<&K>) -> Result<(), DALError>; - - fn save(&self) -> Result<(), DALError> { - if let Some(file_backend) = self.open_file_backend() { - file_backend.sync(true)?; - } - Ok(()) - } -} \ No newline at end of file diff --git a/lib/modules/blockchain/blockchain-dal/src/writers/block.rs b/lib/modules/blockchain/blockchain-dal/src/writers/block.rs deleted file mode 100644 index 21316eef45edce18813f1d43e96494d9be4ee067..0000000000000000000000000000000000000000 --- a/lib/modules/blockchain/blockchain-dal/src/writers/block.rs +++ /dev/null @@ -1,118 +0,0 @@ -// Copyright (C) 2017-2019 The AXIOM TEAM Association. -// -// This program is free software: you can redistribute it and/or modify -// it under the terms of the GNU Affero General Public License as -// published by the Free Software Foundation, either version 3 of the -// License, or (at your option) any later version. -// -// This program is distributed in the hope that it will be useful, -// but WITHOUT ANY WARRANTY; without even the implied warranty of -// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -// GNU Affero General Public License for more details. -// -// You should have received a copy of the GNU Affero General Public License -// along with this program. If not, see <https://www.gnu.org/licenses/>. 
- -use crate::constants::*; -use crate::entities::block::DALBlock; -use crate::DALError; -use crate::*; -use dubp_block_doc::block::BlockDocumentTrait; -use dubp_common_doc::traits::Document; -use unwrap::unwrap; - -/// Insert new head Block in databases -pub fn insert_new_head_block( - db: &Db, - forks_dbs: Option<&ForksDBs>, - dal_block: DALBlock, -) -> Result<(), DALError> { - // Insert head block in blockchain - let bin_block = durs_dbs_tools::to_bytes(&dal_block)?; - db.write(|mut w| { - db.get_int_store(LOCAL_BC).put( - w.as_mut(), - *dal_block.block.number(), - &Db::db_value(&bin_block)?, - )?; - Ok(w) - })?; - - if let Some(forks_dbs) = forks_dbs { - // Insert head block in fork tree - let removed_blockstamps = crate::writers::fork_tree::insert_new_head_block( - &forks_dbs.fork_tree_db, - dal_block.blockstamp(), - )?; - - // Insert head block in ForksBlocks - forks_dbs.fork_blocks_db.write(|db| { - db.insert(dal_block.blockstamp(), dal_block); - })?; - - // Remove too old blocks - forks_dbs.fork_blocks_db.write(|db| { - for blockstamp in removed_blockstamps { - db.remove(&blockstamp); - } - })?; - } - Ok(()) -} - -/// Remove a block in local blockchain storage -pub fn remove_block(db: &Db, block_number: BlockNumber) -> Result<(), DALError> { - db.write(|mut w| { - db.get_int_store(LOCAL_BC) - .delete(w.as_mut(), block_number.0)?; - Ok(w) - }) -} - -/// Insert new fork Block in databases -pub fn insert_new_fork_block(forks_dbs: &ForksDBs, dal_block: DALBlock) -> Result<bool, DALError> { - if crate::writers::fork_tree::insert_new_fork_block( - &forks_dbs.fork_tree_db, - dal_block.block.blockstamp(), - unwrap!(dal_block.block.previous_hash()), - )? { - // Insert in ForksBlocks - forks_dbs.fork_blocks_db.write(|db| { - db.insert(dal_block.blockstamp(), dal_block.clone()); - })?; - - // As long as orphan blocks can succeed the last inserted block, they are inserted - if let Some(stackables_blocks) = forks_dbs - .orphan_blocks_db - .read(|db| db.get(&dal_block.blockstamp()).cloned())? - { - for stackable_block in stackables_blocks { - let _ = insert_new_fork_block(forks_dbs, stackable_block); - } - } - - Ok(true) - } else { - let previous_blockstamp = dal_block.previous_blockstamp(); - - // Get orphanBlocks vector - let mut orphan_blocks = if let Some(orphan_blocks) = forks_dbs - .orphan_blocks_db - .read(|db| db.get(&previous_blockstamp).cloned())? 
- { - orphan_blocks - } else { - Vec::new() - }; - - // Add fork block - orphan_blocks.push(dal_block); - - // Update OrphanBlocks DB - forks_dbs.orphan_blocks_db.write(|db| { - db.insert(previous_blockstamp, orphan_blocks); - })?; - - Ok(false) - } -} diff --git a/lib/modules/blockchain/blockchain/Cargo.toml b/lib/modules/blockchain/blockchain/Cargo.toml index 364115a9ad2f4300fb026301d209cc65e689828d..8d494dbb4129f0cb04268fa43955b44aa8faea8c 100644 --- a/lib/modules/blockchain/blockchain/Cargo.toml +++ b/lib/modules/blockchain/blockchain/Cargo.toml @@ -15,7 +15,8 @@ durs-conf = { path = "../../../core/conf" } dubp-block-doc = { path = "../../../dubp/block-doc"} #, version = "0.1.0" } dubp-common-doc = { path = "../../../dubp/common-doc"} #, version = "0.1.0" } dubp-currency-params = { path = "../../../dubp/currency-params" } -durs-blockchain-dal = { path = "../blockchain-dal" } +durs-bc-db-reader = { path = "../../../modules-lib/bc-db-reader" } +durs-bc-db-writer = { path = "../bc-db-writer" } dup-crypto = { path = "../../../crypto" } dubp-user-docs= { path = "../../../dubp/user-docs" } durs-common-tools = { path = "../../../tools/common-tools" } diff --git a/lib/modules/blockchain/blockchain/src/dbex.rs b/lib/modules/blockchain/blockchain/src/dbex.rs index 7f4131a7acb745a57ce9df2abd24ec664bee9411..4dda821e85f64ceef1a72636c53e3cde5a84142c 100644 --- a/lib/modules/blockchain/blockchain/src/dbex.rs +++ b/lib/modules/blockchain/blockchain/src/dbex.rs @@ -20,6 +20,7 @@ use dubp_block_doc::block::BlockDocumentTrait; use dubp_common_doc::BlockNumber; use dubp_user_docs::documents::transaction::*; use dup_crypto::keys::*; +use durs_bc_db_reader::BcDbRo; use durs_wot::data::rusty::RustyWebOfTrust; use durs_wot::data::WebOfTrust; use durs_wot::operations::distance::{DistanceCalculator, WotDistance, WotDistanceParameters}; @@ -74,6 +75,23 @@ pub enum DbExQuery { WotQuery(DbExWotQuery), } +fn open_bc_db_ro(profile_path: PathBuf) -> Option<BcDbRo> { + // Get db path + let db_path = durs_conf::get_blockchain_db_path(profile_path); + + match durs_bc_db_reader::open_db_ro(&db_path) { + Ok(db) => Some(db), + Err(DbError::DBNotExist) => { + println!("DB not exist, please sync."); + None + } + Err(e) => { + println!("Fail to open DB: {:?}", e); + None + } + } +} + /// Execute DbExQuery pub fn dbex(profile_path: PathBuf, csv: bool, query: &DbExQuery) { match *query { @@ -87,15 +105,13 @@ pub fn dbex(profile_path: PathBuf, csv: bool, query: &DbExQuery) { } /// Execute DbExBcQuery -pub fn dbex_bc(profile_path: PathBuf, _csv: bool, _query: DbExBcQuery) -> Result<(), DALError> { +pub fn dbex_bc(profile_path: PathBuf, _csv: bool, _query: DbExBcQuery) -> Result<(), DbError> { // Get db path let db_path = durs_conf::get_blockchain_db_path(profile_path); // Open databases let load_dbs_begin = SystemTime::now(); let db = open_db(&db_path.as_path())?; - let forks_dbs = ForksDBs::open(Some(&db_path)); - let wot_databases = WotsV10DBs::open(Some(&db_path)); let load_dbs_duration = SystemTime::now() .duration_since(load_dbs_begin) @@ -107,14 +123,17 @@ pub fn dbex_bc(profile_path: PathBuf, _csv: bool, _query: DbExBcQuery) -> Result ); if let Some(current_blockstamp) = - durs_blockchain_dal::readers::fork_tree::get_current_blockstamp(&forks_dbs)? + durs_bc_db_reader::readers::current_meta_datas::get_current_blockstamp(&db)? { println!("Current block: #{}.", current_blockstamp); if let Some(current_block) = - durs_blockchain_dal::readers::block::get_block(&db, None, ¤t_blockstamp)? 
+ durs_bc_db_reader::readers::block::get_block_in_local_blockchain( + &db, + current_blockstamp.id, + )? { let map_pubkey = - durs_blockchain_dal::readers::block::get_current_frame(¤t_block, &db)?; + durs_bc_db_reader::readers::block::get_current_frame(¤t_block, &db)?; let mut vec = map_pubkey.iter().collect::<Vec<(&PubKey, &usize)>>(); vec.sort_by(|a, b| b.1.cmp(&a.1)); @@ -122,10 +141,9 @@ pub fn dbex_bc(profile_path: PathBuf, _csv: bool, _query: DbExBcQuery) -> Result if _csv { println!("{},{},{}", &BLOCK, &USERNAME, &PUB_KEY); for (pub_key, v) in &vec { - if let Ok(Some(identity)) = durs_blockchain_dal::readers::identity::get_identity( - &wot_databases.identities_db, - &pub_key, - ) { + if let Ok(Some(identity)) = + durs_bc_db_reader::readers::identity::get_identity(&db, &pub_key) + { println!( "{},{},{}", v, @@ -138,10 +156,9 @@ pub fn dbex_bc(profile_path: PathBuf, _csv: bool, _query: DbExBcQuery) -> Result let mut table = Table::new(); table.add_row(row![&BLOCK, &USERNAME, &PUB_KEY]); for (pub_key, v) in &vec { - if let Ok(Some(identity)) = durs_blockchain_dal::readers::identity::get_identity( - &wot_databases.identities_db, - &pub_key, - ) { + if let Ok(Some(identity)) = + durs_bc_db_reader::readers::identity::get_identity(&db, &pub_key) + { table.add_row(row![v, identity.idty_doc.username(), pub_key.to_string()]); } } @@ -155,30 +172,23 @@ pub fn dbex_bc(profile_path: PathBuf, _csv: bool, _query: DbExBcQuery) -> Result /// Print fork tree pub fn dbex_fork_tree(profile_path: PathBuf, _csv: bool) { - // Get db path - let db_path = durs_conf::get_blockchain_db_path(profile_path); - - // Open forks databases - let load_dbs_begin = SystemTime::now(); - let forks_dbs = ForksDBs::open(Some(&db_path)); - let load_dbs_duration = SystemTime::now() - .duration_since(load_dbs_begin) + // Open DB + let load_db_begin = SystemTime::now(); + let db = if let Some(db) = open_bc_db_ro(profile_path) { + db + } else { + return; + }; + let load_db_duration = SystemTime::now() + .duration_since(load_db_begin) .expect("duration_since error !"); println!( "Databases loaded in {}.{:03} seconds.", - load_dbs_duration.as_secs(), - load_dbs_duration.subsec_millis() + load_db_duration.as_secs(), + load_db_duration.subsec_millis() ); - let fork_tree_db = forks_dbs.fork_tree_db; - let fork_tree = fork_tree_db - .read(|fork_tree| fork_tree.clone()) - .expect("Fail to read fork tree DB !"); - - // Print all sheets - println!("-----------------------------------"); - println!("sheets={:?}", fork_tree.get_sheets()); - println!("-----------------------------------"); - + let fork_tree = durs_bc_db_reader::readers::current_meta_datas::get_fork_tree(&db) + .expect("fail to get fork tree"); // Print all fork branches for (tree_node_id, blockstamp) in fork_tree.get_sheets() { debug!( @@ -196,15 +206,18 @@ pub fn dbex_fork_tree(profile_path: PathBuf, _csv: bool) { /// Execute DbExTxQuery pub fn dbex_tx(profile_path: PathBuf, _csv: bool, query: &DbExTxQuery) { // Get db path - let db_path = durs_conf::get_blockchain_db_path(profile_path); + let db_path = durs_conf::get_blockchain_db_path(profile_path.clone()); - // Open databases - let load_dbs_begin = SystemTime::now(); - //let blocks_databases = BlocksV10DBs::open(Some(&db_path)); + // Open DB + let load_db_begin = SystemTime::now(); + let db = if let Some(db) = open_bc_db_ro(profile_path) { + db + } else { + return; + }; let currency_databases = CurrencyV10DBs::open(Some(&db_path)); - let wot_databases = WotsV10DBs::open(Some(&db_path)); let load_dbs_duration = 
SystemTime::now() - .duration_since(load_dbs_begin) + .duration_since(load_db_begin) .expect("duration_since error !"); println!( "Databases loaded in {}.{:03} seconds.", @@ -217,11 +230,8 @@ pub fn dbex_tx(profile_path: PathBuf, _csv: bool, query: &DbExTxQuery) { let pubkey = if let Ok(ed25519_pubkey) = ed25519::PublicKey::from_base58(address_str) { PubKey::Ed25519(ed25519_pubkey) } else if let Some(pubkey) = - durs_blockchain_dal::readers::identity::get_pubkey_from_uid( - &wot_databases.identities_db, - address_str, - ) - .expect("get_uid : DALError") + durs_bc_db_reader::readers::identity::get_pubkey_from_uid(&db, address_str) + .expect("get_uid : DbError") { pubkey } else { @@ -229,11 +239,11 @@ pub fn dbex_tx(profile_path: PathBuf, _csv: bool, query: &DbExTxQuery) { return; }; let address = UTXOConditionsGroup::Single(TransactionOutputCondition::Sig(pubkey)); - let address_balance = durs_blockchain_dal::readers::balance::get_address_balance( + let address_balance = durs_bc_db_reader::readers::balance::get_address_balance( ¤cy_databases.balances_db, &address, ) - .expect("get_address_balance : DALError") + .expect("get_address_balance : DbError") .expect("Address not found in balances DB."); println!( "Balance={},{} Äž1", @@ -258,11 +268,16 @@ pub fn dbex_wot(profile_path: PathBuf, csv: bool, query: &DbExWotQuery) { // Get db path let db_path = durs_conf::get_blockchain_db_path(profile_path.clone()); - // Open databases - let load_dbs_begin = SystemTime::now(); + // Open DB + let load_db_begin = SystemTime::now(); + let db = if let Some(db) = open_bc_db_ro(profile_path.clone()) { + db + } else { + return; + }; let wot_databases = WotsV10DBs::open(Some(&db_path)); let load_dbs_duration = SystemTime::now() - .duration_since(load_dbs_begin) + .duration_since(load_db_begin) .expect("duration_since error"); println!( "Databases loaded in {}.{:03} seconds.", @@ -281,22 +296,15 @@ pub fn dbex_wot(profile_path: PathBuf, csv: bool, query: &DbExWotQuery) { let currency_params = unwrap!(currency_params_db_datas).1; // get wot_index - let wot_index = - readers::identity::get_wot_index(&wot_databases.identities_db).expect("DALError"); + let wot_index = durs_bc_db_reader::readers::identity::get_wot_index(&db).expect("DbError"); // get wot_reverse_index let wot_reverse_index: HashMap<WotId, &PubKey> = wot_index.iter().map(|(p, id)| (*id, p)).collect(); // get wot uid index - let wot_uid_index: HashMap<WotId, String> = wot_databases - .identities_db - .read(|db| { - db.iter() - .map(|(_, idty)| (idty.wot_id, String::from(idty.idty_doc.username()))) - .collect() - }) - .expect("Fail to read IdentitiesDB !"); + let wot_uid_index = + durs_bc_db_reader::readers::identity::get_wot_uid_index(&db).expect("DbError"); // Open wot db let wot_db = BinFreeStructDb::File( @@ -373,7 +381,7 @@ pub fn dbex_wot(profile_path: PathBuf, csv: bool, query: &DbExWotQuery) { // Open blockchain database let db = open_db(&db_path.as_path()).expect("Fail to open DB."); // Get blocks_times - let all_blocks = durs_blockchain_dal::readers::block::get_blocks_in_local_blockchain( + let all_blocks = durs_bc_db_reader::readers::block::get_blocks_in_local_blockchain( &db, BlockNumber(0), 10_000_000, @@ -415,11 +423,9 @@ pub fn dbex_wot(profile_path: PathBuf, csv: bool, query: &DbExWotQuery) { } DbExWotQuery::MemberDatas(ref uid) => { println!(" Members count = {}.", members_count); - if let Some(pubkey) = durs_blockchain_dal::readers::identity::get_pubkey_from_uid( - &wot_databases.identities_db, - uid, - ) - 
.expect("get_pubkey_from_uid() : DALError !") + if let Some(pubkey) = + durs_bc_db_reader::readers::identity::get_pubkey_from_uid(&db, uid) + .expect("get_pubkey_from_uid() : DbError !") { let wot_id = wot_index[&pubkey]; println!( @@ -454,11 +460,11 @@ pub fn dbex_wot(profile_path: PathBuf, csv: bool, query: &DbExWotQuery) { .expect("Fail to get links source !"); println!("Certifiers : {}", sources.len()); for (i, source) in sources.iter().enumerate() { - let source_uid = durs_blockchain_dal::readers::identity::get_uid( - &wot_databases.identities_db, - *(wot_reverse_index[&source]), + let source_uid = durs_bc_db_reader::readers::identity::get_uid( + &db, + wot_reverse_index[&source], ) - .expect("get_uid() : DALError") + .expect("get_uid() : DbError") .expect("Not found source_uid !"); println!("{}: {}", i + 1, source_uid); } diff --git a/lib/modules/blockchain/blockchain/src/dubp/apply/mod.rs b/lib/modules/blockchain/blockchain/src/dubp/apply/mod.rs index 571c7708ec7aa41f72f1a93298166a965942d3d1..c8519cbe163ae7a1266715f0d330a756450e526e 100644 --- a/lib/modules/blockchain/blockchain/src/dubp/apply/mod.rs +++ b/lib/modules/blockchain/blockchain/src/dubp/apply/mod.rs @@ -20,10 +20,10 @@ use dubp_common_doc::traits::Document; use dubp_common_doc::BlockNumber; use dubp_user_docs::documents::transaction::{TxAmount, TxBase}; use dup_crypto::keys::*; -use durs_blockchain_dal::entities::block::DALBlock; -use durs_blockchain_dal::entities::sources::SourceAmount; -use durs_blockchain_dal::writers::requests::*; -use durs_blockchain_dal::BinFreeStructDb; +use durs_bc_db_reader::entities::block::DbBlock; +use durs_bc_db_reader::entities::sources::SourceAmount; +use durs_bc_db_writer::writers::requests::*; +use durs_bc_db_writer::BinFreeStructDb; use durs_common_tools::fatal_error; use durs_wot::data::NewLinkResult; use durs_wot::{WebOfTrust, WotId}; @@ -257,7 +257,7 @@ pub fn apply_valid_block_v10<W: WebOfTrust>( (centralities.iter().sum::<u64>() as f64 / centralities.len() as f64) as usize; // Register the state of the wot let max_connectivity = currency_params.max_connectivity(); - durs_blockchain_dal::register_wot_state( + durs_bc_db_writer::register_wot_state( db, &WotState { block_number: block.number.0, @@ -282,9 +282,9 @@ pub fn apply_valid_block_v10<W: WebOfTrust>( }, ); }*/ - // Create DALBlock + // Create DbBlock block.reduce(); - let dal_block = DALBlock { + let dal_block = DbBlock { block: BlockDocument::V10(block), expire_certs: Some(expire_certs.clone()), }; diff --git a/lib/modules/blockchain/blockchain/src/dubp/check/mod.rs b/lib/modules/blockchain/blockchain/src/dubp/check/mod.rs index 2ffefd8be2065781e6fa5e68497e4ea6815030af..efd20a331e4deea13bd191202520544ffc485a4b 100644 --- a/lib/modules/blockchain/blockchain/src/dubp/check/mod.rs +++ b/lib/modules/blockchain/blockchain/src/dubp/check/mod.rs @@ -22,7 +22,8 @@ use dubp_block_doc::block::{BlockDocument, BlockDocumentTrait}; use dubp_common_doc::traits::Document; use dubp_common_doc::BlockNumber; use dup_crypto::keys::PubKey; -use durs_blockchain_dal::*; +use durs_bc_db_reader::CertsExpirV10Datas; +use durs_bc_db_writer::*; use durs_wot::*; use std::collections::HashMap; @@ -46,8 +47,10 @@ where // Rules that do not concern genesis block if block.number().0 > 0 { // Get previous block - let previous_block_opt = - readers::block::get_block_in_local_blockchain(db, BlockNumber(block.number().0 - 1))?; + let previous_block_opt = durs_bc_db_reader::readers::block::get_block_in_local_blockchain( + db, + BlockNumber(block.number().0 - 1), 
+ )?; // Previous block must exist if previous_block_opt.is_none() { diff --git a/lib/modules/blockchain/blockchain/src/dubp/mod.rs b/lib/modules/blockchain/blockchain/src/dubp/mod.rs index 8a86e0351b4e3ce8217b99563e36b35b2ea65b25..e4207b3830d661f7ab0aa2f84cf01c1e21e0ecb7 100644 --- a/lib/modules/blockchain/blockchain/src/dubp/mod.rs +++ b/lib/modules/blockchain/blockchain/src/dubp/mod.rs @@ -24,8 +24,8 @@ use check::*; use dubp_block_doc::block::{BlockDocumentTrait, VerifyBlockHashError}; use dubp_common_doc::traits::Document; use dubp_common_doc::{BlockNumber, Blockstamp}; -use durs_blockchain_dal::entities::block::DALBlock; -use durs_blockchain_dal::*; +use durs_bc_db_reader::entities::block::DbBlock; +use durs_bc_db_writer::*; use unwrap::unwrap; #[derive(Debug, Clone)] @@ -40,7 +40,7 @@ pub enum BlockError { AlreadyHaveBlock, BlockOrOutForkWindow, VerifyBlockHashError(VerifyBlockHashError), - DALError(DALError), + DbError(DbError), InvalidBlock(InvalidBlockError), ApplyValidBlockError(ApplyValidBlockError), } @@ -51,9 +51,9 @@ impl From<VerifyBlockHashError> for BlockError { } } -impl From<DALError> for BlockError { - fn from(err: DALError) -> Self { - BlockError::DALError(err) +impl From<DbError> for BlockError { + fn from(err: DbError) -> Self { + BlockError::DbError(err) } } @@ -68,9 +68,8 @@ pub fn check_and_apply_block( block_doc: BlockDocument, ) -> Result<CheckAndApplyBlockReturn, BlockError> { // Get BlockDocument && check if already have block - let already_have_block = readers::block::already_have_block( + let already_have_block = durs_bc_db_reader::readers::block::already_have_block( &bc.db, - &bc.forks_dbs, block_doc.blockstamp(), block_doc.previous_hash(), )?; @@ -90,7 +89,7 @@ pub fn check_and_apply_block( ); // Detect expire_certs let blocks_expiring = Vec::with_capacity(0); - let expire_certs = durs_blockchain_dal::readers::certs::find_expire_certs( + let expire_certs = durs_bc_db_reader::readers::certs::find_expire_certs( &bc.wot_databases.certs_db, blocks_expiring, )?; @@ -110,7 +109,7 @@ pub fn check_and_apply_block( let datas_path = durs_conf::get_datas_path(bc.profile_path.clone()); // Get and write currency params bc.currency_params = Some( - durs_blockchain_dal::readers::currency_params::get_and_write_currency_params( + durs_bc_db_reader::readers::currency_params::get_and_write_currency_params( &datas_path, &block_doc, ), @@ -134,13 +133,17 @@ pub fn check_and_apply_block( block_doc.blockstamp() ); - let dal_block = DALBlock { + let dal_block = DbBlock { block: block_doc.clone(), expire_certs: None, }; - if durs_blockchain_dal::writers::block::insert_new_fork_block(&bc.forks_dbs, dal_block) - .expect("durs_blockchain_dal::writers::block::insert_new_fork_block() : DALError") + if durs_bc_db_writer::writers::block::insert_new_fork_block( + &bc.db, + &mut bc.fork_tree, + dal_block, + ) + .expect("durs_bc_db_writer::writers::block::insert_new_fork_block() : DbError") { Ok(CheckAndApplyBlockReturn::ForkBlock) } else { diff --git a/lib/modules/blockchain/blockchain/src/dunp/queries.rs b/lib/modules/blockchain/blockchain/src/dunp/queries.rs index eb7e905cc13db1be0d043aec7f61fac4b2f14d15..0efeb7632b14503080f33b83a3f0f03d1d29db21 100644 --- a/lib/modules/blockchain/blockchain/src/dunp/queries.rs +++ b/lib/modules/blockchain/blockchain/src/dunp/queries.rs @@ -89,7 +89,7 @@ pub fn request_orphan_previous( _orphan_block_number: BlockNumber, ) -> HashMap<ModuleReqId, OldNetworkRequest> { /*if orphan_block_number.0 - > bc.current_blockstamp.id.0 - 
*durs_blockchain_dal::constants::FORK_WINDOW_SIZE as u32 + > bc.current_blockstamp.id.0 - *durs_bc_db_writer::constants::FORK_WINDOW_SIZE as u32 && orphan_block_number.0 <= bc.current_blockstamp.id.0 + *CHUNK_SIZE { request_blocks_from_to( diff --git a/lib/modules/blockchain/blockchain/src/dunp/receiver.rs b/lib/modules/blockchain/blockchain/src/dunp/receiver.rs index 782820eb14b2cdd40195f329db336fd332f3fa07..5d15e94300c15e81d6f8dc5d5d2a73008f6c17a6 100644 --- a/lib/modules/blockchain/blockchain/src/dunp/receiver.rs +++ b/lib/modules/blockchain/blockchain/src/dunp/receiver.rs @@ -54,14 +54,19 @@ pub fn receive_blocks(bc: &mut BlockchainModule, blocks: Vec<BlockDocument>) { bc_db_query .apply( &bc.db, - &bc.forks_dbs, + &mut bc.fork_tree, unwrap!(bc.currency_params).fork_window_size, None, ) .expect("Fatal error : Fail to apply DBWriteRequest !"); for query in &wot_dbs_queries { query - .apply(&blockstamp, &unwrap!(bc.currency_params), &bc.wot_databases) + .apply( + &blockstamp, + &unwrap!(bc.currency_params), + &bc.wot_databases, + &bc.db, + ) .expect("Fatal error : Fail to apply WotsDBsWriteRequest !"); } for query in &tx_dbs_queries { @@ -83,9 +88,9 @@ pub fn receive_blocks(bc: &mut BlockchainModule, blocks: Vec<BlockDocument>) { } CheckAndApplyBlockReturn::ForkBlock => { info!("blockchain: new fork block(#{})", blockstamp); - bc.forks_dbs.save_dbs(); if let Ok(Some(new_bc_branch)) = fork_algo::fork_resolution_algo( - &bc.forks_dbs, + &bc.db, + &bc.fork_tree, unwrap!(bc.currency_params).fork_window_size, bc.current_blockstamp, &bc.invalid_forks, @@ -111,8 +116,8 @@ pub fn receive_blocks(bc: &mut BlockchainModule, blocks: Vec<BlockDocument>) { error!("ApplyValidBlockError(#{}): {:?}", blockstamp, e2); crate::events::sent::send_event(bc, &BlockchainEvent::RefusedBlock(blockstamp)); } - BlockError::DALError(e2) => { - error!("BlockError::DALError(#{}): {:?}", blockstamp, e2); + BlockError::DbError(e2) => { + error!("BlockError::DbError(#{}): {:?}", blockstamp, e2); crate::events::sent::send_event(bc, &BlockchainEvent::RefusedBlock(blockstamp)); } BlockError::AlreadyHaveBlock => { @@ -129,7 +134,8 @@ pub fn receive_blocks(bc: &mut BlockchainModule, blocks: Vec<BlockDocument>) { bc.db .save() .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); - bc.forks_dbs.save_dbs(); + durs_bc_db_writer::writers::fork_tree::save_fork_tree(&bc.db, &bc.fork_tree) + .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); } if save_wots_dbs { bc.wot_databases.save_dbs(); diff --git a/lib/modules/blockchain/blockchain/src/fork/fork_algo.rs b/lib/modules/blockchain/blockchain/src/fork/fork_algo.rs index 2cc94befa0a316c18b08d350460b393e457a07c3..15a09c4e0a8df001b5c8f0c01ac669ade875eaf5 100644 --- a/lib/modules/blockchain/blockchain/src/fork/fork_algo.rs +++ b/lib/modules/blockchain/blockchain/src/fork/fork_algo.rs @@ -15,8 +15,9 @@ use dubp_block_doc::block::BlockDocumentTrait; use dubp_common_doc::Blockstamp; -use durs_blockchain_dal::entities::fork_tree::ForkTree; -use durs_blockchain_dal::{DALError, ForksDBs}; +use durs_bc_db_reader::entities::fork_tree::ForkTree; +use durs_bc_db_reader::DbReadable; +use durs_bc_db_writer::DbError; use std::collections::HashSet; /// Number of advance blocks required @@ -24,45 +25,39 @@ pub static ADVANCE_BLOCKS: &u32 = &3; /// Advance blockchain time required (in seconds) pub static ADVANCE_TIME: &u64 = &900; -pub fn fork_resolution_algo( - forks_dbs: &ForksDBs, +pub fn fork_resolution_algo<DB: DbReadable>( + db: &DB, + fork_tree: &ForkTree, 
fork_window_size: usize, current_blockstamp: Blockstamp, invalid_blocks: &HashSet<Blockstamp>, -) -> Result<Option<Vec<Blockstamp>>, DALError> { - let current_bc_time = forks_dbs.fork_blocks_db.read(|db| { - db.get(¤t_blockstamp) - .expect("safe unwrap") - .block - .common_time() - })?; +) -> Result<Option<Vec<Blockstamp>>, DbError> { + let current_bc_time = + durs_bc_db_reader::readers::current_meta_datas::get_current_common_time(db)?; debug!( "fork_resolution_algo({}, {})", fork_window_size, current_bc_time ); - let mut sheets = forks_dbs.fork_tree_db.read(ForkTree::get_sheets)?; + let mut sheets = fork_tree.get_sheets(); sheets.sort_unstable_by(|s1, s2| s2.1.id.cmp(&s1.1.id)); for sheet in sheets { if sheet.1 != current_blockstamp { - let branch = forks_dbs - .fork_tree_db - .read(|fork_tree| fork_tree.get_fork_branch(sheet.0))?; + let branch = fork_tree.get_fork_branch(sheet.0); if branch.is_empty() { continue; } let branch_head_blockstamp = branch.last().expect("safe unwrap"); - let branch_head_median_time = forks_dbs.fork_blocks_db.read(|db| { - db.get(&branch_head_blockstamp) + let branch_head_median_time = + durs_bc_db_reader::readers::block::get_fork_block(db, *branch_head_blockstamp)? .expect("safe unwrap") .block - .common_time() - })?; + .common_time(); if branch_head_blockstamp.id.0 >= current_blockstamp.id.0 + *ADVANCE_BLOCKS && branch_head_median_time >= current_bc_time + *ADVANCE_TIME @@ -102,19 +97,17 @@ mod tests { use crate::*; use dubp_block_doc::BlockDocument; use dubp_common_doc::{BlockHash, BlockNumber}; - use durs_blockchain_dal::entities::block::DALBlock; + use durs_bc_db_reader::entities::block::DbBlock; #[test] - fn test_fork_resolution_algo() -> Result<(), DALError> { + fn test_fork_resolution_algo() -> Result<(), DbError> { // Open empty DB in tmp dir let db = crate::tests::open_tmp_db()?; + let mut fork_tree = ForkTree::default(); // Get FORK_WINDOW_SIZE value let fork_window_size = *dubp_currency_params::constants::DEFAULT_FORK_WINDOW_SIZE; - // Open empty databases in memory mode - let forks_dbs = ForksDBs::open(None); - // Begin with no invalid blocks let invalid_blocks: HashSet<Blockstamp> = HashSet::new(); @@ -127,10 +120,10 @@ mod tests { // Insert mock blocks in forks_dbs for block in &main_branch { - durs_blockchain_dal::writers::block::insert_new_head_block( + durs_bc_db_writer::writers::block::insert_new_head_block( &db, - Some(&forks_dbs), - DALBlock { + Some(&mut fork_tree), + DbBlock { block: block.clone(), expire_certs: None, }, @@ -139,7 +132,7 @@ mod tests { // Local blockchain must contain at least `fork_window_size +2` blocks assert!( - durs_blockchain_dal::readers::block::get_block_in_local_blockchain( + durs_bc_db_reader::readers::block::get_block_in_local_blockchain( &db, BlockNumber((fork_window_size + 1) as u32) )? @@ -147,22 +140,10 @@ mod tests { ); // Fork tree must contain at least `fork_window_size +2` blocks - assert_eq!( - fork_window_size, - forks_dbs.fork_tree_db.read(|fork_tree| fork_tree.size())? - ); - assert_eq!( - fork_window_size, - forks_dbs.fork_blocks_db.read(|db| db.len())? - ); + assert_eq!(fork_window_size, fork_tree.size()); // Get current blockstamp - let mut current_blockstamp = forks_dbs - .fork_tree_db - .read(|fork_tree| fork_tree.get_sheets())? 
- .get(0) - .expect("must be one sheet") - .1; + let mut current_blockstamp = fork_tree.get_sheets().get(0).expect("must be one sheet").1; // Generate 3 fork block let fork_point = &main_branch[main_branch.len() - 2]; @@ -186,19 +167,15 @@ mod tests { .collect(); // Add forks blocks into fork tree - insert_fork_blocks(&forks_dbs, &fork_blocks)?; - assert_eq!( - 2, - forks_dbs - .fork_tree_db - .read(|tree| tree.get_sheets().len())? - ); + insert_fork_blocks(&db, &mut fork_tree, &fork_blocks)?; + assert_eq!(2, fork_tree.get_sheets().len()); // Must not fork assert_eq!( None, fork_resolution_algo( - &forks_dbs, + &db, + &fork_tree, fork_window_size, current_blockstamp, &invalid_blocks @@ -212,9 +189,10 @@ mod tests { }; assert_eq!( true, - durs_blockchain_dal::writers::block::insert_new_fork_block( - &forks_dbs, - DALBlock { + durs_bc_db_writer::writers::block::insert_new_fork_block( + &db, + &mut fork_tree, + DbBlock { block: BlockDocument::V10( dubp_user_docs_tests_tools::mocks::gen_empty_timed_block_v10( determining_blockstamp, @@ -236,7 +214,8 @@ mod tests { determining_blockstamp, ]), fork_resolution_algo( - &forks_dbs, + &db, + &mut fork_tree, fork_window_size, current_blockstamp, &invalid_blocks @@ -263,13 +242,14 @@ mod tests { ) }) .collect(); - insert_fork_blocks(&forks_dbs, &new_main_blocks)?; + insert_fork_blocks(&db, &mut fork_tree, &new_main_blocks)?; // Must refork assert_eq!( Some(new_main_blocks.iter().map(|b| b.blockstamp()).collect()), fork_resolution_algo( - &forks_dbs, + &db, + &mut fork_tree, fork_window_size, current_blockstamp, &invalid_blocks @@ -280,13 +260,18 @@ mod tests { Ok(()) } - fn insert_fork_blocks(forks_dbs: &ForksDBs, blocks: &[BlockDocument]) -> Result<(), DALError> { + fn insert_fork_blocks( + db: &Db, + fork_tree: &mut ForkTree, + blocks: &[BlockDocument], + ) -> Result<(), DbError> { for block in blocks { assert_eq!( true, - durs_blockchain_dal::writers::block::insert_new_fork_block( - forks_dbs, - DALBlock { + durs_bc_db_writer::writers::block::insert_new_fork_block( + db, + fork_tree, + DbBlock { block: block.clone(), expire_certs: None, }, diff --git a/lib/modules/blockchain/blockchain/src/fork/revert_block.rs b/lib/modules/blockchain/blockchain/src/fork/revert_block.rs index d6586090a22b13bdc6f9ddc9ba7b6ddcfe927beb..8f57e631e7ee0bacb301a5b1fd86aeea14e72af9 100644 --- a/lib/modules/blockchain/blockchain/src/fork/revert_block.rs +++ b/lib/modules/blockchain/blockchain/src/fork/revert_block.rs @@ -21,11 +21,11 @@ use dubp_common_doc::traits::Document; use dubp_common_doc::{BlockNumber, Blockstamp}; use dubp_user_docs::documents::transaction::{TxAmount, TxBase}; use dup_crypto::keys::*; -use durs_blockchain_dal::entities::block::DALBlock; -use durs_blockchain_dal::entities::sources::SourceAmount; -use durs_blockchain_dal::writers::requests::*; -use durs_blockchain_dal::writers::transaction::DALTxV10; -use durs_blockchain_dal::{BinFreeStructDb, DALError, TxV10Datas}; +use durs_bc_db_reader::entities::block::DbBlock; +use durs_bc_db_reader::entities::sources::SourceAmount; +use durs_bc_db_writer::writers::requests::*; +use durs_bc_db_writer::writers::transaction::DbTxV10; +use durs_bc_db_writer::{BinFreeStructDb, DbError, TxV10Datas}; use durs_common_tools::fatal_error; use durs_wot::data::{NewLinkResult, RemLinkResult}; use durs_wot::{WebOfTrust, WotId}; @@ -46,17 +46,17 @@ pub struct ValidBlockRevertReqs { pub enum RevertValidBlockError { ExcludeUnknowNodeId(), RevokeUnknowNodeId(), - DALError(DALError), + DbError(DbError), } -impl From<DALError> for 
RevertValidBlockError { - fn from(e: DALError) -> Self { - RevertValidBlockError::DALError(e) +impl From<DbError> for RevertValidBlockError { + fn from(e: DbError) -> Self { + RevertValidBlockError::DbError(e) } } pub fn revert_block<W: WebOfTrust>( - dal_block: DALBlock, + dal_block: DbBlock, wot_index: &mut HashMap<PubKey, WotId>, wot_db: &BinFreeStructDb<W>, txs_db: &BinFreeStructDb<TxV10Datas>, @@ -80,7 +80,7 @@ pub fn revert_block_v10<W: WebOfTrust>( txs_db: &BinFreeStructDb<TxV10Datas>, ) -> Result<ValidBlockRevertReqs, RevertValidBlockError> { // Get transactions - let dal_txs: Vec<DALTxV10> = block + let dal_txs: Vec<DbTxV10> = block .transactions .iter() .map(|tx_enum| match *tx_enum { @@ -270,7 +270,7 @@ pub fn revert_block_v10<W: WebOfTrust>( // Return DBs requests Ok(ValidBlockRevertReqs { new_current_blockstamp: block.previous_blockstamp(), - block_query: BlocksDBsWriteQuery::RevertBlock(DALBlock { + block_query: BlocksDBsWriteQuery::RevertBlock(DbBlock { block: BlockDocument::V10(block), expire_certs: Some(expire_certs), }), diff --git a/lib/modules/blockchain/blockchain/src/fork/rollback.rs b/lib/modules/blockchain/blockchain/src/fork/rollback.rs index bac5a71c7b0c74828e0c9f1e4a2fae6fdd782fbc..51afb4a21d5c37ba9e34b07b6936c648bf0506fd 100644 --- a/lib/modules/blockchain/blockchain/src/fork/rollback.rs +++ b/lib/modules/blockchain/blockchain/src/fork/rollback.rs @@ -30,13 +30,11 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>) // Rollback (revert old branch) while bc.current_blockstamp.id.0 > last_common_block_number { - if let Some(dal_block) = bc - .forks_dbs - .fork_blocks_db - .read(|db| db.get(&bc.current_blockstamp).cloned()) - .unwrap_or_else(|_| { - fatal_error!("revert block {} fail !", bc.current_blockstamp); - }) + if let Some(dal_block) = + durs_bc_db_reader::readers::block::get_fork_block(&bc.db, bc.current_blockstamp) + .unwrap_or_else(|_| { + fatal_error!("revert block {} fail !", bc.current_blockstamp); + }) { let blockstamp = dal_block.block.blockstamp(); debug!("try to revert block #{}", blockstamp); @@ -60,14 +58,19 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>) block_query .apply( &bc.db, - &bc.forks_dbs, + &mut bc.fork_tree, unwrap!(bc.currency_params).fork_window_size, None, ) .expect("Fatal error : Fail to apply DBWriteRequest !"); for query in &wot_queries { query - .apply(&blockstamp, &unwrap!(bc.currency_params), &bc.wot_databases) + .apply( + &blockstamp, + &unwrap!(bc.currency_params), + &bc.wot_databases, + &bc.db, + ) .expect("Fatal error : Fail to apply WotsDBsWriteRequest !"); } for query in ¤cy_queries { @@ -83,12 +86,12 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>) // Apply new branch let mut new_branch_is_valid = true; + let mut new_branch_blocks = Vec::with_capacity(new_bc_branch.len()); for blockstamp in &new_bc_branch { - if let Ok(Some(dal_block)) = bc - .forks_dbs - .fork_blocks_db - .read(|db| db.get(&blockstamp).cloned()) + if let Ok(Some(dal_block)) = + durs_bc_db_reader::readers::block::get_fork_block(&bc.db, *blockstamp) { + new_branch_blocks.push(dal_block.clone()); if let Ok(CheckAndApplyBlockReturn::ValidMainBlock(ValidBlockApplyReqs( bc_db_query, wot_dbs_queries, @@ -100,14 +103,19 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>) bc_db_query .apply( &bc.db, - &bc.forks_dbs, + &mut bc.fork_tree, unwrap!(bc.currency_params).fork_window_size, None, ) .expect("Fatal error : Fail to apply 
DBWriteRequest !"); for query in &wot_dbs_queries { query - .apply(&blockstamp, &unwrap!(bc.currency_params), &bc.wot_databases) + .apply( + &blockstamp, + &unwrap!(bc.currency_params), + &bc.wot_databases, + &bc.db, + ) .expect("Fatal error : Fail to apply WotsDBsWriteRequest !"); } for query in &tx_dbs_queries { @@ -130,40 +138,33 @@ pub fn apply_rollback(bc: &mut BlockchainModule, new_bc_branch: Vec<Blockstamp>) if new_branch_is_valid { // update main branch in fork tree - if let Err(err) = durs_blockchain_dal::writers::fork_tree::change_main_branch( - &bc.forks_dbs, + if let Err(err) = durs_bc_db_writer::writers::fork_tree::change_main_branch( + &bc.db, + &mut bc.fork_tree, old_current_blockstamp, bc.current_blockstamp, ) { - fatal_error!("DALError: ForksDB: {:?}", err); + fatal_error!("DbError: ForksDB: {:?}", err); } // save dbs bc.db .save() .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); - bc.forks_dbs.save_dbs(); + durs_bc_db_writer::writers::fork_tree::save_fork_tree(&bc.db, &bc.fork_tree) + .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); bc.wot_databases.save_dbs(); bc.currency_databases.save_dbs(true, true); // Send events stackUpValidBlock - let new_branch_blocks: Vec<BlockDocument> = new_bc_branch - .into_iter() - .map(|blockstamp| { - bc.forks_dbs - .fork_blocks_db - .read(|db| db.get(&blockstamp).cloned()) - .expect("safe unwrap") - .expect("safe unwrap") - .block - }) - .collect(); - for block in new_branch_blocks { - events::sent::send_event(bc, &BlockchainEvent::StackUpValidBlock(Box::new(block))) + for db_block in new_branch_blocks { + events::sent::send_event( + bc, + &BlockchainEvent::StackUpValidBlock(Box::new(db_block.block)), + ) } } else { // reload dbs let dbs_path = durs_conf::get_blockchain_db_path(bc.profile_path.clone()); - bc.forks_dbs = ForksDBs::open(Some(&dbs_path)); bc.wot_databases = WotsV10DBs::open(Some(&dbs_path)); bc.currency_databases = CurrencyV10DBs::open(Some(&dbs_path)); } diff --git a/lib/modules/blockchain/blockchain/src/fork/stackable_blocks.rs b/lib/modules/blockchain/blockchain/src/fork/stackable_blocks.rs index 905b847497e9797c537cd22c48a16457e1908b40..f2904355dc682bb976307a8b953b26f2c0b87864 100644 --- a/lib/modules/blockchain/blockchain/src/fork/stackable_blocks.rs +++ b/lib/modules/blockchain/blockchain/src/fork/stackable_blocks.rs @@ -22,11 +22,9 @@ use unwrap::unwrap; pub fn apply_stackable_blocks(bc: &mut BlockchainModule) { 'blockchain: loop { - let stackable_blocks = durs_blockchain_dal::readers::fork_tree::get_stackables_blocks( - &bc.forks_dbs, - &bc.current_blockstamp, - ) - .expect("Fatal error : Fail to read ForksDB !"); + let stackable_blocks = + durs_bc_db_reader::readers::block::get_stackables_blocks(&bc.db, bc.current_blockstamp) + .expect("Fatal error : Fail to read ForksDB !"); if stackable_blocks.is_empty() { break 'blockchain; } else { @@ -36,50 +34,63 @@ pub fn apply_stackable_blocks(bc: &mut BlockchainModule) { let stackable_block_number = stackable_block.block.number(); let stackable_block_blockstamp = stackable_block.block.blockstamp(); - if let Ok(CheckAndApplyBlockReturn::ValidMainBlock(ValidBlockApplyReqs( - bc_db_query, - wot_dbs_queries, - tx_dbs_queries, - ))) = check_and_apply_block(bc, stackable_block.block) - { - let new_current_block = bc_db_query.get_block_doc_copy(); - let blockstamp = new_current_block.blockstamp(); - // Apply db requests - bc_db_query - .apply( - &bc.db, - &bc.forks_dbs, - unwrap!(bc.currency_params).fork_window_size, - None, - ) - 
.expect("Fatal error : Fail to apply DBWriteRequest !"); - for query in &wot_dbs_queries { - query - .apply(&blockstamp, &unwrap!(bc.currency_params), &bc.wot_databases) - .expect("Fatal error : Fail to apply WotsDBsWriteRequest !"); - } - for query in &tx_dbs_queries { - query - .apply(&blockstamp, &bc.currency_databases) - .expect("Fatal error : Fail to apply CurrencyDBsWriteRequest !"); - } - debug!("success to stackable_block({})", stackable_block_number); + match check_and_apply_block(bc, stackable_block.block) { + Ok(CheckAndApplyBlockReturn::ValidMainBlock(ValidBlockApplyReqs( + bc_db_query, + wot_dbs_queries, + tx_dbs_queries, + ))) => { + let new_current_block = bc_db_query.get_block_doc_copy(); + let blockstamp = new_current_block.blockstamp(); + // Apply db requests + bc_db_query + .apply( + &bc.db, + &mut bc.fork_tree, + unwrap!(bc.currency_params).fork_window_size, + None, + ) + .expect("Fatal error : Fail to apply DBWriteRequest !"); + for query in &wot_dbs_queries { + query + .apply( + &blockstamp, + &unwrap!(bc.currency_params), + &bc.wot_databases, + &bc.db, + ) + .expect("Fatal error : Fail to apply WotsDBsWriteRequest !"); + } + for query in &tx_dbs_queries { + query + .apply(&blockstamp, &bc.currency_databases) + .expect("Fatal error : Fail to apply CurrencyDBsWriteRequest !"); + } + debug!("success to stackable_block({})", stackable_block_number); - bc.current_blockstamp = stackable_block_blockstamp; - events::sent::send_event( - bc, - &BlockchainEvent::StackUpValidBlock(Box::new(new_current_block)), - ); - continue 'blockchain; - } else { - warn!("fail to stackable_block({})", stackable_block_number); + bc.current_blockstamp = stackable_block_blockstamp; + events::sent::send_event( + bc, + &BlockchainEvent::StackUpValidBlock(Box::new(new_current_block)), + ); + continue 'blockchain; + } + Ok(re) => warn!( + "fail to stackable_block({}) : {:?}", + stackable_block_number, re + ), + Err(e) => warn!( + "fail to stackable_block({}) : {:?}", + stackable_block_number, e + ), } } // Save databases bc.db .save() .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); - bc.forks_dbs.save_dbs(); + durs_bc_db_writer::writers::fork_tree::save_fork_tree(&bc.db, &bc.fork_tree) + .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); bc.wot_databases.save_dbs(); bc.currency_databases.save_dbs(true, true); break 'blockchain; diff --git a/lib/modules/blockchain/blockchain/src/lib.rs b/lib/modules/blockchain/blockchain/src/lib.rs index 28485b335ebc1a5e8a51db64e5a0ef339af072d1..b71714b43bd8c7b21c82fabbc582905242d275d1 100644 --- a/lib/modules/blockchain/blockchain/src/lib.rs +++ b/lib/modules/blockchain/blockchain/src/lib.rs @@ -61,7 +61,8 @@ use dubp_common_doc::traits::Document; use dubp_common_doc::Blockstamp; use dubp_currency_params::{CurrencyName, CurrencyParameters}; use dup_crypto::keys::*; -use durs_blockchain_dal::*; +use durs_bc_db_reader::entities::fork_tree::ForkTree; +use durs_bc_db_writer::*; use durs_common_tools::fatal_error; use durs_message::events::*; use durs_message::requests::*; @@ -97,8 +98,8 @@ pub struct BlockchainModule { pub currency: Option<CurrencyName>, /// Database pub db: Db, - /// Forks Databases - pub forks_dbs: ForksDBs, + /// Fork tree + pub fork_tree: ForkTree, /// Wot index pub wot_index: HashMap<PubKey, WotId>, /// Wots Databases @@ -189,13 +190,14 @@ impl BlockchainModule { // Open databases let db = open_db(&dbs_path.as_path()).unwrap_or_else(|_| fatal_error!("Fail to open DB.")); - let forks_dbs = 
ForksDBs::open(Some(&dbs_path)); + let fork_tree = durs_bc_db_reader::readers::current_meta_datas::get_fork_tree(&db) + .unwrap_or_else(|_| fatal_error!("Fail to get fork tree.")); let wot_databases = WotsV10DBs::open(Some(&dbs_path)); let currency_databases = CurrencyV10DBs::open(Some(&dbs_path)); // Get current blockstamp let current_blockstamp = - durs_blockchain_dal::readers::fork_tree::get_current_blockstamp(&forks_dbs) + durs_bc_db_reader::readers::current_meta_datas::get_current_blockstamp(&db) .expect("Fatal error : fail to read Blockchain DB !") .unwrap_or_default(); @@ -213,7 +215,7 @@ impl BlockchainModule { // Get wot index let wot_index: HashMap<PubKey, WotId> = - readers::identity::get_wot_index(&wot_databases.identities_db) + durs_bc_db_reader::readers::identity::get_wot_index(&db) .expect("Fatal eror : get_wot_index : Fail to read blockchain databases"); // Instanciate BlockchainModule @@ -225,7 +227,7 @@ impl BlockchainModule { current_blockstamp, consensus: Blockstamp::default(), db, - forks_dbs, + fork_tree, wot_index, wot_databases, currency_databases, @@ -350,7 +352,7 @@ pub mod tests { #[inline] /// Open database in an arbitrary temporary directory given by OS /// and automatically cleaned when `Db` is dropped - pub fn open_tmp_db() -> Result<Db, DALError> { - open_db(tempdir().map_err(DALError::FileSystemError)?.path()) + pub fn open_tmp_db() -> Result<Db, DbError> { + open_db(tempdir().map_err(DbError::FileSystemError)?.path()) } } diff --git a/lib/modules/blockchain/blockchain/src/requests/received.rs b/lib/modules/blockchain/blockchain/src/requests/received.rs index 03414696dca78cfb8bd708368cfc702df4108259..ec8218093fe5f8268938f6aa71609918d955bb92 100644 --- a/lib/modules/blockchain/blockchain/src/requests/received.rs +++ b/lib/modules/blockchain/blockchain/src/requests/received.rs @@ -16,7 +16,7 @@ //! Sub-module managing the inter-modules requests received. 
use crate::*; -use dubp_user_docs::documents::identity::IdentityDocument; +//use dubp_user_docs::documents::identity::IdentityDocument; use durs_message::requests::*; use durs_module::*; @@ -38,9 +38,12 @@ pub fn receive_req( debug!("BlockchainModule : receive BlockchainRequest::CurrentBlock()"); if let Ok(block_opt) = - readers::block::get_block(&bc.db, None, &bc.current_blockstamp) + durs_bc_db_reader::readers::block::get_block_in_local_blockchain( + &bc.db, + bc.current_blockstamp.id, + ) { - if let Some(dal_block) = block_opt { + if let Some(block) = block_opt { debug!( "BlockchainModule : send_req_response(CurrentBlock({}))", bc.current_blockstamp @@ -50,7 +53,7 @@ pub fn receive_req( req_from, req_id, &BlockchainResponse::CurrentBlock( - Box::new(dal_block.block), + Box::new(block), bc.current_blockstamp, ), ); @@ -70,7 +73,10 @@ pub fn receive_req( ); if let Ok(block_opt) = - readers::block::get_block_in_local_blockchain(&bc.db, block_number) + durs_bc_db_reader::readers::block::get_block_in_local_blockchain( + &bc.db, + block_number, + ) { if let Some(block) = block_opt { debug!( @@ -104,11 +110,13 @@ pub fn receive_req( first_block_number, count ); - if let Ok(blocks) = readers::block::get_blocks_in_local_blockchain( - &bc.db, - first_block_number, - count, - ) { + if let Ok(blocks) = + durs_bc_db_reader::readers::block::get_blocks_in_local_blockchain( + &bc.db, + first_block_number, + count, + ) + { if blocks.is_empty() { debug!( "BlockchainModule : Req : not found chunk (#{}, {}) in bdd !", @@ -144,34 +152,30 @@ pub fn receive_req( .map(|p| { ( p, - durs_blockchain_dal::readers::identity::get_uid( - &bc.wot_databases.identities_db, - p, - ) - .expect("Fatal error : get_uid : Fail to read WotV10DB !"), + durs_bc_db_reader::readers::identity::get_uid(&bc.db, &p) + .expect("Fatal error : get_uid : Fail to read WotV10DB !"), ) }) .collect(), ), ); - } - BlockchainRequest::GetIdentities(filters) => { - let identities = durs_blockchain_dal::readers::identity::get_identities( - &bc.wot_databases.identities_db, - filters, - bc.current_blockstamp.id, - ) - .expect("Fatal error : get_identities: Fail to read IdentitiesDB !") - .into_iter() - .map(|dal_idty| IdentityDocument::V10(dal_idty.idty_doc)) - .collect::<Vec<IdentityDocument>>(); - responses::sent::send_req_response( - bc, - req_from, - req_id, - &BlockchainResponse::Identities(identities), - ); - } + } /*BlockchainRequest::GetIdentities(filters) => { + let identities = durs_bc_db_reader::readers::identity::get_identities( + &db, + filters, + bc.current_blockstamp.id, + ) + .expect("Fatal error : get_identities: Fail to read IdentitiesDB !") + .into_iter() + .map(|dal_idty| IdentityDocument::V10(dal_idty.idty_doc)) + .collect::<Vec<IdentityDocument>>(); + responses::sent::send_req_response( + bc, + req_from, + req_id, + &BlockchainResponse::Identities(identities), + ); + }*/ } } } diff --git a/lib/modules/blockchain/blockchain/src/sync/apply/blocks_worker.rs b/lib/modules/blockchain/blockchain/src/sync/apply/blocks_worker.rs index 985c724d2bcbd48b67e01bac39bdbd14c4e09dbe..a42aa0bc5bf7c436a8e121d0946ac86608a06983 100644 --- a/lib/modules/blockchain/blockchain/src/sync/apply/blocks_worker.rs +++ b/lib/modules/blockchain/blockchain/src/sync/apply/blocks_worker.rs @@ -22,7 +22,6 @@ pub fn execute( sender_sync_thread: mpsc::Sender<MessForSyncThread>, recv: mpsc::Receiver<SyncJobsMess>, db: Db, - forks_db: ForksDBs, target_blockstamp: Blockstamp, mut apply_pb: ProgressBar<std::io::Stdout>, ) { @@ -30,6 +29,10 @@ pub fn execute( 
pool.execute(move || { let blocks_job_begin = SystemTime::now(); + // Get fork tree + let mut fork_tree = durs_bc_db_reader::readers::current_meta_datas::get_fork_tree(&db) + .expect("Fail to read DB."); + // Listen db requets let mut chunk_index = 0; let mut all_wait_duration = Duration::from_millis(0); @@ -46,8 +49,13 @@ all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap(); // Apply db request - req.apply(&db, &forks_db, fork_window_size, Some(target_blockstamp)) - .expect("Fatal error : Fail to apply DBWriteRequest !"); + req.apply( + &db, + &mut fork_tree, + fork_window_size, + Some(target_blockstamp), + ) + .expect("Fatal error : Fail to apply DBWriteRequest !"); chunk_index += 1; if chunk_index == 250 { @@ -76,9 +84,10 @@ println!(); println!("Write indexs in files..."); info!("Save blockchain and forks databases in files..."); + durs_bc_db_writer::writers::fork_tree::save_fork_tree(&db, &fork_tree) + .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); db.save() .unwrap_or_else(|_| fatal_error!("DB corrupted, please reset data.")); - forks_db.save_dbs(); // Send finish signal sender_sync_thread diff --git a/lib/modules/blockchain/blockchain/src/sync/apply/mod.rs b/lib/modules/blockchain/blockchain/src/sync/apply/mod.rs index 976212ca53d0b24aea1f4ad59bf2cf34e383d5ee..4a17edda9f6cf927cafb05d761a9304fd847357e 100644 --- a/lib/modules/blockchain/blockchain/src/sync/apply/mod.rs +++ b/lib/modules/blockchain/blockchain/src/sync/apply/mod.rs @@ -26,8 +26,9 @@ use dubp_common_doc::traits::Document; use dubp_common_doc::{BlockNumber, Blockstamp}; use dubp_currency_params::{CurrencyName, CurrencyParameters}; use dup_crypto::keys::PubKey; -use durs_blockchain_dal::writers::requests::WotsDBsWriteQuery; -use durs_blockchain_dal::{BinFreeStructDb, CertsExpirV10Datas, WotsV10DBs}; +use durs_bc_db_reader::CertsExpirV10Datas; +use durs_bc_db_writer::writers::requests::WotsDBsWriteQuery; +use durs_bc_db_writer::{BinFreeStructDb, WotsV10DBs}; use durs_common_tools::fatal_error; use durs_network_documents::url::Url; use durs_wot::data::rusty::RustyWebOfTrust; @@ -95,8 +96,8 @@ impl BlockApplicator { // Find expire_certs let expire_certs = - durs_blockchain_dal::readers::certs::find_expire_certs(&self.certs_db, blocks_expiring) - .expect("find_expire_certs() : DALError"); + durs_bc_db_reader::readers::certs::find_expire_certs(&self.certs_db, blocks_expiring) + .expect("find_expire_certs() : DbError"); // Get block blockstamp let blockstamp = block_doc.blockstamp(); diff --git a/lib/modules/blockchain/blockchain/src/sync/apply/wot_worker.rs b/lib/modules/blockchain/blockchain/src/sync/apply/wot_worker.rs index 546b143fc60ca747bfbbca5b38046f335cec9b1f..5435ec9a0ebefb171637b25b494a8f0eaef949a0 100644 --- a/lib/modules/blockchain/blockchain/src/sync/apply/wot_worker.rs +++ b/lib/modules/blockchain/blockchain/src/sync/apply/wot_worker.rs @@ -28,6 +28,7 @@ pub fn execute( let wot_job_begin = SystemTime::now(); // Open databases let db_path = durs_conf::get_blockchain_db_path(profile_path); + let db = open_db(&db_path).expect("Fail to open DB."); let databases = WotsV10DBs::open(Some(&db_path)); // Listen db requets @@ -37,7 +38,7 @@ all_wait_duration += SystemTime::now().duration_since(wait_begin).unwrap(); match mess { SyncJobsMess::WotsDBsWriteQuery(blockstamp, currency_params, req) => req - .apply(&blockstamp, &currency_params.deref(), &databases) + .apply(&blockstamp, &currency_params.deref(), &databases, &db) .expect("Fatal error 
: Fail to apply DBWriteRequest !"), SyncJobsMess::End => break, _ => {} diff --git a/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs b/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs index 6fa6caabff79d60402f29e8c179ed0395dcd329d..11328f1cd76ac865d04d0d18512dec7af1d9891a 100644 --- a/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs +++ b/lib/modules/blockchain/blockchain/src/sync/download/json_reader_worker.rs @@ -96,10 +96,10 @@ pub fn json_reader_worker( // Get current local blockstamp debug!("Get local current blockstamp..."); let db_path = durs_conf::get_blockchain_db_path(profile_path); - let forks_dbs = ForksDBs::open(Some(&db_path)); + let db = open_db(&db_path).expect("Fail to open DB."); let current_blockstamp: Blockstamp = - durs_blockchain_dal::readers::fork_tree::get_current_blockstamp(&forks_dbs) - .expect("ForksV10DB : RustBreakError !") + durs_bc_db_reader::readers::current_meta_datas::get_current_blockstamp(&db) + .expect("get_current_blockstamp: Fail to read DB !") .unwrap_or_default(); info!("Local current blockstamp = {}", current_blockstamp); diff --git a/lib/modules/blockchain/blockchain/src/sync/mod.rs b/lib/modules/blockchain/blockchain/src/sync/mod.rs index b8a2c591ebac0c4175cec36e39265eecfb70e8bb..f324aea93a584a400e7eafcb7932016971dbbf43 100644 --- a/lib/modules/blockchain/blockchain/src/sync/mod.rs +++ b/lib/modules/blockchain/blockchain/src/sync/mod.rs @@ -23,8 +23,9 @@ use dubp_common_doc::Blockstamp; use dubp_common_doc::{BlockHash, BlockNumber}; use dubp_currency_params::{CurrencyName, CurrencyParameters}; use dup_crypto::keys::*; -use durs_blockchain_dal::writers::requests::*; -use durs_blockchain_dal::{open_free_struct_memory_db, CertsExpirV10Datas}; +use durs_bc_db_reader::CertsExpirV10Datas; +use durs_bc_db_writer::open_free_struct_memory_db; +use durs_bc_db_writer::writers::requests::*; use durs_common_tools::fatal_error; use durs_wot::WotId; use failure::Fail; @@ -179,20 +180,17 @@ pub fn local_sync<DC: DursConfTrait>( conf_path.push(durs_conf::constants::CONF_FILENAME); durs_conf::write_conf_file(conf_path.as_path(), &conf).expect("Fail to write new conf !"); - // Open blocks databases + // Open database let db = open_db(&db_path.as_path()).map_err(|_| LocalSyncError::FailToOpenDB)?; - // Open forks databases - let forks_dbs = ForksDBs::open(Some(&db_path)); - // Open wot databases let wot_databases = WotsV10DBs::open(Some(&db_path)); // Get local current blockstamp debug!("Get local current blockstamp..."); let current_blockstamp: Blockstamp = - durs_blockchain_dal::readers::fork_tree::get_current_blockstamp(&forks_dbs) - .expect("DALError : fail to get current blockstamp !") + durs_bc_db_reader::readers::current_meta_datas::get_current_blockstamp(&db) + .expect("DbError : fail to get current blockstamp !") .unwrap_or_default(); debug!("Success to get local current blockstamp."); @@ -204,7 +202,7 @@ pub fn local_sync<DC: DursConfTrait>( // Get wot index let wot_index: HashMap<PubKey, WotId> = - readers::identity::get_wot_index(&wot_databases.identities_db) + durs_bc_db_reader::readers::identity::get_wot_index(&db) .expect("Fatal eror : get_wot_index : Fail to read blockchain databases"); // Start sync @@ -241,7 +239,6 @@ pub fn local_sync<DC: DursConfTrait>( sender_sync_thread.clone(), recv_blocks_thread, db, - forks_dbs, target_blockstamp, apply_pb, ); @@ -302,7 +299,7 @@ pub fn local_sync<DC: DursConfTrait>( let datas_path = durs_conf::get_datas_path(profile_path.clone()); 
if block_doc.number() == BlockNumber(0) { block_applicator.currency_params = Some( - durs_blockchain_dal::readers::currency_params::get_and_write_currency_params( + durs_bc_db_reader::readers::currency_params::get_and_write_currency_params( &datas_path, &block_doc, ), diff --git a/lib/modules/ws2p-v1-legacy/src/responses/received.rs b/lib/modules/ws2p-v1-legacy/src/responses/received.rs index 8c7d3e9667a52610155a986152716763a80375a8..71b5b2ce19e4ecb923111de15bbe3b2a549b4a6d 100644 --- a/lib/modules/ws2p-v1-legacy/src/responses/received.rs +++ b/lib/modules/ws2p-v1-legacy/src/responses/received.rs @@ -27,7 +27,7 @@ pub fn receive_response( match *bc_res.deref() { BlockchainResponse::CurrentBlockstamp(ref current_blockstamp_) => { debug!( - "WS2Pv1Module : receive DALResBc::CurrentBlockstamp({})", + "WS2Pv1Module : receive DbResBc::CurrentBlockstamp({})", ws2p_module.current_blockstamp ); ws2p_module.current_blockstamp = *current_blockstamp_; diff --git a/lib/modules/ws2p-v1-legacy/src/ws_connections/meta_datas.rs b/lib/modules/ws2p-v1-legacy/src/ws_connections/meta_datas.rs index 3d8d08dcffbc3174e221c26dd8803130503e4ebf..055ecd42fa02bc88b93537a24957d210451f7037 100644 --- a/lib/modules/ws2p-v1-legacy/src/ws_connections/meta_datas.rs +++ b/lib/modules/ws2p-v1-legacy/src/ws_connections/meta_datas.rs @@ -163,7 +163,7 @@ impl WS2PConnectionMetaDatas { match req_id.as_str() { Some(req_id) => match msg.get("body") { Some(body) => { - trace!("WS2P : Receive DAL Request from {}.", self.node_full_id()); + trace!("WS2P : Receive DB Request from {}.", self.node_full_id()); let req_id = match WS2Pv1ReqId::from_str(req_id) { Ok(req_id) => req_id, diff --git a/lib/tools/dbs-tools/Cargo.toml b/lib/tools/dbs-tools/Cargo.toml index 57de80404f1d64984e2f9680d0c0fadb93b18095..387bf05f8e3e9f856a9e6689b61111e754ae6304 100644 --- a/lib/tools/dbs-tools/Cargo.toml +++ b/lib/tools/dbs-tools/Cargo.toml @@ -17,10 +17,9 @@ log = "0.4.*" rkv = "0.9.7" rustbreak = {version = "2.0.0-rc3", features = ["bin_enc"]} serde = { version = "1.0.*", features = ["derive"] } -serde_json = "1.0.*" unwrap = "1.2.1" [dev-dependencies] - +tempfile = "3.1.0" [features] diff --git a/lib/tools/dbs-tools/src/errors.rs b/lib/tools/dbs-tools/src/errors.rs index 245ce61edf8eef2a12dd20519f82bda4f74af7a7..0f6a07cfc4daf13dbea0dd3e5dabff1be5c2dcf5 100644 --- a/lib/tools/dbs-tools/src/errors.rs +++ b/lib/tools/dbs-tools/src/errors.rs @@ -20,57 +20,59 @@ use rustbreak::error::{RustbreakError, RustbreakErrorKind}; #[derive(Debug)] /// Data Access Layer Error -pub enum DALError { - /// Abort write transaction - WriteAbort { - /// Reason of transaction abort - reason: String, - }, - /// Error in write operation - WriteError, - /// Error in read operation - ReadError, +pub enum DbError { /// A database is corrupted, you have to reset the data completely DBCorrupted, + /// Database does not exist + DBNotExist, + /// Error in read operation + ReadError, /// Error with the file system FileSystemError(std::io::Error), /// Serialization/Deserialization error SerdeError(String), /// Rkv store error StoreError(rkv::error::StoreError), - /// Capturing a panic signal during a write operation - WritePanic, /// Unknown error UnknowError, + /// Abort write transaction + WriteAbort { + /// Reason of transaction abort + reason: String, + }, + /// Error in write operation + WriteError, + /// Capturing a panic signal during a write operation + WritePanic, } -impl From<bincode::Error> for DALError { - fn from(e: bincode::Error) -> DALError { - 
DALError::SerdeError(format!("{}", e)) +impl From<bincode::Error> for DbError { + fn from(e: bincode::Error) -> DbError { + DbError::SerdeError(format!("{}", e)) } } -impl From<rkv::error::StoreError> for DALError { - fn from(e: rkv::error::StoreError) -> DALError { - DALError::StoreError(e) +impl From<rkv::error::StoreError> for DbError { + fn from(e: rkv::error::StoreError) -> DbError { + DbError::StoreError(e) } } -impl<T> From<std::sync::PoisonError<T>> for DALError { - fn from(_: std::sync::PoisonError<T>) -> DALError { - DALError::DBCorrupted +impl<T> From<std::sync::PoisonError<T>> for DbError { + fn from(_: std::sync::PoisonError<T>) -> DbError { + DbError::DBCorrupted } } -impl From<RustbreakError> for DALError { - fn from(rust_break_error: RustbreakError) -> DALError { +impl From<RustbreakError> for DbError { + fn from(rust_break_error: RustbreakError) -> DbError { match rust_break_error.kind() { - RustbreakErrorKind::Serialization => DALError::WriteError, - RustbreakErrorKind::Deserialization => DALError::ReadError, - RustbreakErrorKind::Poison => DALError::DBCorrupted, - RustbreakErrorKind::Backend => DALError::DBCorrupted, - RustbreakErrorKind::WritePanic => DALError::WritePanic, - _ => DALError::UnknowError, + RustbreakErrorKind::Serialization => DbError::WriteError, + RustbreakErrorKind::Deserialization => DbError::ReadError, + RustbreakErrorKind::Poison => DbError::DBCorrupted, + RustbreakErrorKind::Backend => DbError::DBCorrupted, + RustbreakErrorKind::WritePanic => DbError::WritePanic, + _ => DbError::UnknowError, } } } diff --git a/lib/tools/dbs-tools/src/free_struct_db.rs b/lib/tools/dbs-tools/src/free_struct_db.rs index b42fd09c1a0634507e21a0b1f595ef909c48db8e..1f130addd7cb49b2cd35b0f885f1f3c15a9e070b 100644 --- a/lib/tools/dbs-tools/src/free_struct_db.rs +++ b/lib/tools/dbs-tools/src/free_struct_db.rs @@ -15,7 +15,7 @@ //! 
Define free structure database -use crate::errors::DALError; +use crate::errors::DbError; use rustbreak::backend::{FileBackend, MemoryBackend}; use rustbreak::error::RustbreakError; use rustbreak::{deser::Bincode, Database, FileDatabase, MemoryDatabase}; @@ -30,7 +30,7 @@ use std::path::PathBuf; /// Open free structured rustbreak memory database pub fn open_free_struct_memory_db< D: Serialize + DeserializeOwned + Debug + Default + Clone + Send, ->() -> Result<MemoryDatabase<D, Bincode>, DALError> { +>() -> Result<MemoryDatabase<D, Bincode>, DbError> { let backend = MemoryBackend::new(); let db = MemoryDatabase::<D, Bincode>::from_parts(D::default(), backend, Bincode); Ok(db) @@ -42,7 +42,7 @@ pub fn open_free_struct_file_db< >( dbs_folder_path: &PathBuf, db_file_name: &str, -) -> Result<FileDatabase<D, Bincode>, DALError> { +) -> Result<FileDatabase<D, Bincode>, DbError> { let mut db_path = dbs_folder_path.clone(); db_path.push(db_file_name); let file_path = db_path.as_path(); diff --git a/lib/tools/dbs-tools/src/kv_db.rs b/lib/tools/dbs-tools/src/kv_db.rs index 4ccf499b82ad7bd060614be809cb84ba26255a1c..6550fe3871eb8bfd3388b2fc3641b8fb420bc17e 100644 --- a/lib/tools/dbs-tools/src/kv_db.rs +++ b/lib/tools/dbs-tools/src/kv_db.rs @@ -22,6 +22,6 @@ pub use file::{ KvFileDbStoreType, KvFileDbWriter, }; pub use rkv::{ - IntegerStore, MultiIntegerStore, MultiStore, OwnedValue as KvFileDbOwnedValue, SingleStore, - Value as KvFileDbValue, + store::multi::Iter, IntegerStore, MultiIntegerStore, MultiStore, + OwnedValue as KvFileDbOwnedValue, SingleStore, Value as KvFileDbValue, }; diff --git a/lib/tools/dbs-tools/src/kv_db/file.rs b/lib/tools/dbs-tools/src/kv_db/file.rs index 2585c112ff363144aa31e93b3461d43547433ff3..ea96f19be4b205926a440228ae6aa2b727c72d53 100644 --- a/lib/tools/dbs-tools/src/kv_db/file.rs +++ b/lib/tools/dbs-tools/src/kv_db/file.rs @@ -15,7 +15,7 @@ //! Define Key-Value file database -use crate::errors::DALError; +use crate::errors::DbError; use durs_common_tools::fatal_error; use log::error; use rkv::{DatabaseFlags, EnvironmentFlags, Manager, OwnedValue, Rkv, StoreOptions, Value}; @@ -35,6 +35,12 @@ pub struct KvFileDbWriter<'a> { writer: rkv::Writer<'a>, } +impl<'a> AsRef<rkv::Writer<'a>> for KvFileDbWriter<'a> { + fn as_ref(&self) -> &rkv::Writer<'a> { + &self.writer + } +} + impl<'a> AsMut<rkv::Writer<'a>> for KvFileDbWriter<'a> { fn as_mut(&mut self) -> &mut rkv::Writer<'a> { &mut self.writer @@ -52,10 +58,76 @@ pub struct KvFileDbHandler { /// Key-value file Database read-only handler pub struct KvFileDbRoHandler(KvFileDbHandler); +impl KvFileDbRoHandler { + /// Open Key-value file Database in read-only mode + pub fn open_db_ro(path: &Path, schema: &KvFileDbSchema) -> Result<KvFileDbRoHandler, DbError> { + let mut db_main_file = path.to_owned(); + db_main_file.push("data.mdb"); + if !db_main_file.as_path().is_file() { + return Err(DbError::DBNotExist); + } + + let mut manager = Manager::singleton().write()?; + let mut env = Rkv::environment_builder(); + env.set_flags(EnvironmentFlags::READ_ONLY) + .set_max_dbs(64) + .set_map_size(std::u32::MAX as usize); + let arc = manager.get_or_create(path, |path| Rkv::from_env(path, env))?; + + let mut stores = HashMap::new(); + for (store_name, store_type) in &schema.stores { + let store = match store_type { + KvFileDbStoreType::Single => { + KvFileDbStore::Single(arc.clone().read()?.open_single( + store_name.as_str(), + StoreOptions { + create: false, + flags: DatabaseFlags::empty(), + }, + )?) 
+ } + KvFileDbStoreType::SingleIntKey => { + KvFileDbStore::SingleIntKey(arc.clone().read()?.open_integer( + store_name.as_str(), + StoreOptions { + create: false, + flags: DatabaseFlags::INTEGER_KEY, + }, + )?) + } + KvFileDbStoreType::Multi => KvFileDbStore::Multi(arc.clone().read()?.open_multi( + store_name.as_str(), + StoreOptions { + create: false, + flags: DatabaseFlags::empty(), + }, + )?), + KvFileDbStoreType::MultiIntKey => { + KvFileDbStore::MultiIntKey(arc.clone().read()?.open_multi_integer( + store_name.as_str(), + StoreOptions { + create: false, + flags: DatabaseFlags::INTEGER_KEY, + }, + )?) + } + }; + stores.insert(store_name.to_owned(), store); + } + + Ok(KvFileDbRoHandler(KvFileDbHandler { + arc, + path: path.to_owned(), + schema: schema.clone(), + stores, + })) + } +} + /// Key-value file Database read operations pub trait KvFileDbRead: Sized { /// Convert DB value to a rust type - fn from_db_value<T: DeserializeOwned>(v: Value) -> Result<T, DALError>; + fn from_db_value<T: DeserializeOwned>(v: Value) -> Result<T, DbError>; /// get a single store fn get_store(&self, store_name: &str) -> &super::SingleStore; @@ -63,18 +135,18 @@ pub trait KvFileDbRead: Sized { /// Get an integer store fn get_int_store(&self, store_name: &str) -> &super::IntegerStore<u32>; + /// get a multi store + fn get_multi_store(&self, store_name: &str) -> &super::MultiStore; + /// Read datas in transaction database - fn read<F, R>(&self, f: F) -> Result<R, DALError> + fn read<F, R>(&self, f: F) -> Result<R, DbError> where - F: FnOnce(KvFileDbReader) -> Result<R, DALError>; - - /// Try to clone database handler - fn try_clone(&self) -> Result<Self, DALError>; + F: FnOnce(KvFileDbReader) -> Result<R, DbError>; } impl KvFileDbRead for KvFileDbRoHandler { #[inline] - fn from_db_value<T: DeserializeOwned>(v: Value) -> Result<T, DALError> { + fn from_db_value<T: DeserializeOwned>(v: Value) -> Result<T, DbError> { KvFileDbHandler::from_db_value(v) } #[inline] @@ -86,16 +158,16 @@ impl KvFileDbRead for KvFileDbRoHandler { self.0.get_int_store(store_name) } #[inline] - fn read<F, R>(&self, f: F) -> Result<R, DALError> + fn get_multi_store(&self, store_name: &str) -> &super::MultiStore { + self.0.get_multi_store(store_name) + } + #[inline] + fn read<F, R>(&self, f: F) -> Result<R, DbError> where - F: FnOnce(KvFileDbReader) -> Result<R, DALError>, + F: FnOnce(KvFileDbReader) -> Result<R, DbError>, { self.0.read(f) } - #[inline] - fn try_clone(&self) -> Result<Self, DALError> { - Ok(KvFileDbRoHandler(self.0.try_clone()?)) - } } /// Describe Key-Value database schema @@ -132,11 +204,11 @@ pub enum KvFileDbStore { impl KvFileDbRead for KvFileDbHandler { #[inline] - fn from_db_value<T: DeserializeOwned>(v: Value) -> Result<T, DALError> { + fn from_db_value<T: DeserializeOwned>(v: Value) -> Result<T, DbError> { if let Value::Blob(bytes) = v { Ok(bincode::deserialize::<T>(bytes)?) 
} else { - Err(DALError::DBCorrupted) + Err(DbError::DBCorrupted) } } fn get_int_store(&self, store_name: &str) -> &super::IntegerStore<u32> { @@ -161,15 +233,23 @@ impl KvFileDbRead for KvFileDbHandler { fatal_error!("Dev error: store '{}' don't exist in DB.", store_name); } } - fn read<F, R>(&self, f: F) -> Result<R, DALError> + fn get_multi_store(&self, store_name: &str) -> &super::MultiStore { + if let Some(store_enum) = self.stores.get(store_name) { + if let KvFileDbStore::Multi(store) = store_enum { + store + } else { + fatal_error!("Dev error: store '{}' is not a multi store.", store_name); + } + } else { + fatal_error!("Dev error: store '{}' don't exist in DB.", store_name); + } + } + fn read<F, R>(&self, f: F) -> Result<R, DbError> where - F: FnOnce(KvFileDbReader) -> Result<R, DALError>, + F: FnOnce(KvFileDbReader) -> Result<R, DbError>, { Ok(f(&self.arc_clone().read()?.read()?)?) } - fn try_clone(&self) -> Result<KvFileDbHandler, DALError> { - KvFileDbHandler::open_db_inner(&self.path, &self.schema, false) - } } impl KvFileDbHandler { @@ -180,26 +260,24 @@ impl KvFileDbHandler { self.arc().clone() } /// Convert bytes to DB value - pub fn db_value(bytes: &[u8]) -> Result<Value, DALError> { + pub fn db_value(bytes: &[u8]) -> Result<Value, DbError> { Ok(Value::Blob(bytes)) } - /// Get read_only handler - pub fn get_ro_handler(&self) -> Result<KvFileDbRoHandler, DALError> { - Ok(KvFileDbRoHandler(self.try_clone()?)) - } /// Open Key-value file Database #[inline] - pub fn open_db(path: &Path, schema: &KvFileDbSchema) -> Result<KvFileDbHandler, DALError> { + pub fn open_db(path: &Path, schema: &KvFileDbSchema) -> Result<KvFileDbHandler, DbError> { KvFileDbHandler::open_db_inner(path, schema, true) } fn open_db_inner( path: &Path, schema: &KvFileDbSchema, first_open: bool, - ) -> Result<KvFileDbHandler, DALError> { + ) -> Result<KvFileDbHandler, DbError> { + let mut env_flags = EnvironmentFlags::NO_MEM_INIT; + env_flags.insert(EnvironmentFlags::NO_SYNC); let mut manager = Manager::singleton().write()?; let mut env = Rkv::environment_builder(); - env.set_flags(EnvironmentFlags::NO_SYNC) + env.set_flags(env_flags) .set_max_dbs(64) .set_map_size(std::u32::MAX as usize); let arc = manager.get_or_create(path, |path| Rkv::from_env(path, env))?; @@ -253,14 +331,14 @@ impl KvFileDbHandler { }) } /// Persist DB datas on disk - pub fn save(&self) -> Result<(), DALError> { + pub fn save(&self) -> Result<(), DbError> { Ok(self.arc_clone().read()?.sync(true)?) } /// Write datas in database /// /!\ The written data are visible to readers but not persisted on the disk until a save() is performed. - pub fn write<F>(&self, f: F) -> Result<(), DALError> + pub fn write<F>(&self, f: F) -> Result<(), DbError> where - F: FnOnce(KvFileDbWriter) -> Result<KvFileDbWriter, DALError>, + F: FnOnce(KvFileDbWriter) -> Result<KvFileDbWriter, DbError>, { f(KvFileDbWriter { buffer: Vec::with_capacity(0), @@ -272,3 +350,84 @@ impl KvFileDbHandler { Ok(()) } } + +#[cfg(test)] +mod tests { + + use super::*; + use tempfile::tempdir; + + fn get_int_store_str_val( + ro_db: &KvFileDbRoHandler, + store_name: &str, + key: u32, + ) -> Result<Option<String>, DbError> { + ro_db.read(|r| { + if let Some(Value::Str(v)) = ro_db.get_int_store(store_name).get(r, key)? 
{ + Ok(Some(v.to_owned())) + } else { + Ok(None) + } + }) + } + + #[test] + fn test_open_db_wr_and_ro() -> Result<(), DbError> { + let tmp_dir = tempdir().map_err(DbError::FileSystemError)?; + let mut stores = HashMap::new(); + stores.insert("test1".to_owned(), KvFileDbStoreType::SingleIntKey); + let schema = KvFileDbSchema { stores }; + let db = KvFileDbHandler::open_db(tmp_dir.path(), &schema)?; + let store_test1 = db.get_int_store("test1"); + + db.write(|mut w| { + store_test1.put(w.as_mut(), 3, &Value::Str("toto"))?; + Ok(w) + })?; + + let ro_db = KvFileDbRoHandler::open_db_ro(tmp_dir.path(), &schema)?; + + assert_eq!( + Some("toto".to_owned()), + get_int_store_str_val(&ro_db, "test1", 3)? + ); + + db.write(|mut w| { + store_test1.put(w.as_mut(), 3, &Value::Str("titi"))?; + Ok(w) + })?; + + assert_eq!( + Some("titi".to_owned()), + get_int_store_str_val(&ro_db, "test1", 3)? + ); + + db.write(|mut w| { + store_test1.put(w.as_mut(), 3, &Value::Str("tutu"))?; + assert_eq!( + Some("titi".to_owned()), + get_int_store_str_val(&ro_db, "test1", 3)? + ); + Ok(w) + })?; + + let db_path = tmp_dir.path().to_owned(); + let thread = std::thread::spawn(move || { + let ro_db = + KvFileDbRoHandler::open_db_ro(db_path.as_path(), &schema).expect("Fail to open DB"); + assert_eq!( + Some("tutu".to_owned()), + get_int_store_str_val(&ro_db, "test1", 3).expect("Fail to read DB") + ); + }); + + assert_eq!( + Some("tutu".to_owned()), + get_int_store_str_val(&ro_db, "test1", 3).expect("Fail to read DB") + ); + + let _ = thread.join(); + + Ok(()) + } +} diff --git a/lib/tools/dbs-tools/src/lib.rs b/lib/tools/dbs-tools/src/lib.rs index 219f96b73ad97ee26ac8ea56dc2e6d8a4f7f4f5c..b2c5cffd1ccaf26b763218769ba28ce9684c6245 100644 --- a/lib/tools/dbs-tools/src/lib.rs +++ b/lib/tools/dbs-tools/src/lib.rs @@ -31,7 +31,7 @@ mod errors; mod free_struct_db; pub mod kv_db; -pub use errors::DALError; +pub use errors::DbError; pub use free_struct_db::{open_free_struct_file_db, open_free_struct_memory_db, BinFreeStructDb}; use serde::de::DeserializeOwned; @@ -42,7 +42,7 @@ use std::path::PathBuf; /// Convert rust type to bytes #[inline] -pub fn to_bytes<T: Serialize>(t: &T) -> Result<Vec<u8>, DALError> { +pub fn to_bytes<T: Serialize>(t: &T) -> Result<Vec<u8>, DbError> { Ok(bincode::serialize(t)?) } @@ -50,7 +50,7 @@ pub fn to_bytes<T: Serialize>(t: &T) -> Result<Vec<u8>, DALError> { pub fn open_free_struct_db<D: Serialize + DeserializeOwned + Debug + Default + Clone + Send>( dbs_folder_path: Option<&PathBuf>, db_file_name: &str, -) -> Result<BinFreeStructDb<D>, DALError> { +) -> Result<BinFreeStructDb<D>, DbError> { if let Some(dbs_folder_path) = dbs_folder_path { Ok(BinFreeStructDb::File(open_free_struct_file_db::<D>( dbs_folder_path,
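
A minimal usage sketch of the KvFileDbHandler / KvFileDbRoHandler pair introduced in lib/tools/dbs-tools/src/kv_db/file.rs, modelled on the test_open_db_wr_and_ro test added by this patch. It assumes the same scope as that test module (use super::*; plus tempfile as a dev-dependency); the function name kv_db_usage_sketch, the store name "blocks" and the stored value are illustrative only and not part of the patch.

fn kv_db_usage_sketch() -> Result<(), DbError> {
    // Temporary directory, cleaned up when `tmp_dir` is dropped (hypothetical location).
    let tmp_dir = tempdir().map_err(DbError::FileSystemError)?;

    // Schema: one store keyed by u32 integers; the name "blocks" is illustrative.
    let mut stores = HashMap::new();
    stores.insert("blocks".to_owned(), KvFileDbStoreType::SingleIntKey);
    let schema = KvFileDbSchema { stores };

    // Writable handler: opens (and creates) the stores described by the schema.
    let db = KvFileDbHandler::open_db(tmp_dir.path(), &schema)?;
    let blocks_store = db.get_int_store("blocks");

    // Writes are visible to readers immediately, but only persisted by save().
    db.write(|mut w| {
        blocks_store.put(w.as_mut(), 42, &Value::Str("block #42"))?;
        Ok(w)
    })?;
    db.save()?;

    // Read-only handler: returns DbError::DBNotExist if <path>/data.mdb is missing.
    let ro_db = KvFileDbRoHandler::open_db_ro(tmp_dir.path(), &schema)?;
    let read_back: Option<String> = ro_db.read(|r| {
        if let Some(Value::Str(v)) = ro_db.get_int_store("blocks").get(r, 42)? {
            Ok(Some(v.to_owned()))
        } else {
            Ok(None)
        }
    })?;
    assert_eq!(Some("block #42".to_owned()), read_back);

    Ok(())
}

The explicit save() call mirrors the write() documentation above: written data are visible to readers at once but are not persisted to disk until save() is performed.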