Skip to content
Snippets Groups Projects

Compare revisions

Changes are shown as if the source revision was being merged into the target revision. Learn more about comparing revisions.

Source

Select target project
No results found

Target

Select target project
  • nodes/rust/duniter-v2s
  • llaq/lc-core-substrate
  • pini-gh/duniter-v2s
  • vincentux/duniter-v2s
  • mildred/duniter-v2s
  • d0p1/duniter-v2s
  • bgallois/duniter-v2s
  • Nicolas80/duniter-v2s
8 results
Show changes
Commits on Source (26)
Showing
with 390514 additions and 179 deletions
...@@ -125,12 +125,12 @@ tests: ...@@ -125,12 +125,12 @@ tests:
- cargo cucumber - cargo cucumber
.deploy_docker_multiplatform: .deploy_docker_multiplatform:
stage: deploy stage: build
rules: rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/ - if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
when: manual when: manual
changes: # changes:
- node/specs/$CHAIN-raw.json # - node/specs/$CHAIN-raw.json
- when: never - when: never
before_script: before_script:
- sh -c "[ -n '$DUNITERTEAM_PASSWD' ] || ( echo No access to environment variable 'DUNITERTEAM_PASSWD'; exit 1 )" - sh -c "[ -n '$DUNITERTEAM_PASSWD' ] || ( echo No access to environment variable 'DUNITERTEAM_PASSWD'; exit 1 )"
...@@ -141,7 +141,7 @@ tests: ...@@ -141,7 +141,7 @@ tests:
- export MANIFEST=localhost/manifest-$IMAGE_NAME:$MILESTONE - export MANIFEST=localhost/manifest-$IMAGE_NAME:$MILESTONE
- echo $MANIFEST - echo $MANIFEST
- podman manifest rm "$MANIFEST" 2>/dev/null || true - podman manifest rm "$MANIFEST" 2>/dev/null || true
- podman build --layers --platform linux/amd64 --manifest "$MANIFEST" -f docker/Dockerfile $PODMAN_BUILD_OPTIONS . - podman build --layers --platform linux/amd64,linux/arm64 --manifest "$MANIFEST" -f docker/Dockerfile $PODMAN_BUILD_OPTIONS .
- podman manifest push --all "$MANIFEST" "docker://docker.io/$IMAGE_NAME:$MILESTONE" - podman manifest push --all "$MANIFEST" "docker://docker.io/$IMAGE_NAME:$MILESTONE"
- podman manifest push --all "$MANIFEST" "docker://docker.io/$IMAGE_NAME:latest" - podman manifest push --all "$MANIFEST" "docker://docker.io/$IMAGE_NAME:latest"
after_script: after_script:
...@@ -197,7 +197,7 @@ readme_docker_release_tag: ...@@ -197,7 +197,7 @@ readme_docker_release_tag:
# Copy sources to the expected directory of srtool # Copy sources to the expected directory of srtool
- cp -R * /build/ - cp -R * /build/
# Build the runtime # Build the runtime
- /srtool/build --app --json -cM > $SRTOOL_OUTPUT - /srtool/build --app --json -cM | tee -a $SRTOOL_OUTPUT
- mv /build/runtime/$RUNTIME/target/srtool/release/wbuild/$RUNTIME-runtime/${RUNTIME}_runtime.compact.compressed.wasm $CI_PROJECT_DIR/release/ - mv /build/runtime/$RUNTIME/target/srtool/release/wbuild/$RUNTIME-runtime/${RUNTIME}_runtime.compact.compressed.wasm $CI_PROJECT_DIR/release/
artifacts: artifacts:
name: "runtime" name: "runtime"
...@@ -280,7 +280,7 @@ create_g1_data: ...@@ -280,7 +280,7 @@ create_g1_data:
- apt-get install -y clang cmake protobuf-compiler - apt-get install -y clang cmake protobuf-compiler
- cargo run ${FEATURES} -- build-spec --chain=${RUNTIME}_live > release/${RUNTIME}.json - cargo run ${FEATURES} -- build-spec --chain=${RUNTIME}_live > release/${RUNTIME}.json
- cargo run ${FEATURES} -- build-spec --chain=release/${RUNTIME}.json --disable-default-bootnode --raw > release/${RUNTIME}-raw.json - cargo run ${FEATURES} -- build-spec --chain=release/${RUNTIME}.json --disable-default-bootnode --raw > release/${RUNTIME}-raw.json
- cp node/specs/${RUNTIME}_client-specs.json release/ - cp node/specs/${RUNTIME}_client-specs.yaml release/
artifacts: artifacts:
name: "runtime" name: "runtime"
paths: paths:
...@@ -324,10 +324,12 @@ create_release: ...@@ -324,10 +324,12 @@ create_release:
- cargo xtask release-runtime $MILESTONE $CI_COMMIT_BRANCH - cargo xtask release-runtime $MILESTONE $CI_COMMIT_BRANCH
# We always ship runtimes: this is both a proof and a convenience # We always ship runtimes: this is both a proof and a convenience
- cargo xtask create-asset-link $MILESTONE g1-data.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/g1-data.json - cargo xtask create-asset-link $MILESTONE g1-data.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/g1-data.json
- cargo xtask create-asset-link $MILESTONE gdev-indexer.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev-indexer.json
- cargo xtask create-asset-link $MILESTONE gtest-indexer https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest-indexer.json
- cargo xtask create-asset-link $MILESTONE gdev_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_runtime.compact.compressed.wasm - cargo xtask create-asset-link $MILESTONE gdev_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_runtime.compact.compressed.wasm
- cargo xtask create-asset-link $MILESTONE gtest_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_runtime.compact.compressed.wasm - cargo xtask create-asset-link $MILESTONE gtest_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_runtime.compact.compressed.wasm
- cargo xtask create-asset-link $MILESTONE gdev_client-specs.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_client-specs.json - cargo xtask create-asset-link $MILESTONE gdev_client-specs.yaml https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_client-specs.yaml
- cargo xtask create-asset-link $MILESTONE gtest_client-specs.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_client-specs.json - cargo xtask create-asset-link $MILESTONE gtest_client-specs.yaml https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_client-specs.yaml
artifacts: artifacts:
paths: paths:
- $CI_PROJECT_DIR/release/ - $CI_PROJECT_DIR/release/
......
...@@ -210,7 +210,7 @@ version = "0.5.1" ...@@ -210,7 +210,7 @@ version = "0.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6" checksum = "cab112f0a86d568ea0e627cc1d6be74a1e9cd55214684db5561995f6dad897c6"
dependencies = [ dependencies = [
"num-traits", "num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -259,7 +259,7 @@ dependencies = [ ...@@ -259,7 +259,7 @@ dependencies = [
"asn1-rs-impl", "asn1-rs-impl",
"displaydoc", "displaydoc",
"nom", "nom",
"num-traits", "num-traits 0.2.15",
"rusticata-macros", "rusticata-macros",
"thiserror", "thiserror",
"time 0.3.23", "time 0.3.23",
...@@ -275,7 +275,7 @@ dependencies = [ ...@@ -275,7 +275,7 @@ dependencies = [
"asn1-rs-impl", "asn1-rs-impl",
"displaydoc", "displaydoc",
"nom", "nom",
"num-traits", "num-traits 0.2.15",
"rusticata-macros", "rusticata-macros",
"thiserror", "thiserror",
"time 0.3.23", "time 0.3.23",
...@@ -826,7 +826,7 @@ dependencies = [ ...@@ -826,7 +826,7 @@ dependencies = [
"iana-time-zone", "iana-time-zone",
"js-sys", "js-sys",
"num-integer", "num-integer",
"num-traits", "num-traits 0.2.15",
"time 0.1.45", "time 0.1.45",
"wasm-bindgen", "wasm-bindgen",
"winapi 0.3.9", "winapi 0.3.9",
...@@ -1145,6 +1145,15 @@ dependencies = [ ...@@ -1145,6 +1145,15 @@ dependencies = [
"memchr", "memchr",
] ]
[[package]]
name = "countmap"
version = "0.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1ef2a403c4af585607826502480ab6e453f320c230ef67255eee21f0cc72c0a6"
dependencies = [
"num-traits 0.1.43",
]
[[package]] [[package]]
name = "cpp_demangle" name = "cpp_demangle"
version = "0.3.5" version = "0.3.5"
...@@ -1688,7 +1697,7 @@ dependencies = [ ...@@ -1688,7 +1697,7 @@ dependencies = [
"displaydoc", "displaydoc",
"nom", "nom",
"num-bigint", "num-bigint",
"num-traits", "num-traits 0.2.15",
"rusticata-macros", "rusticata-macros",
] ]
...@@ -1702,7 +1711,7 @@ dependencies = [ ...@@ -1702,7 +1711,7 @@ dependencies = [
"displaydoc", "displaydoc",
"nom", "nom",
"num-bigint", "num-bigint",
"num-traits", "num-traits 0.2.15",
"rusticata-macros", "rusticata-macros",
] ]
...@@ -1868,7 +1877,7 @@ dependencies = [ ...@@ -1868,7 +1877,7 @@ dependencies = [
"flate2", "flate2",
"fnv", "fnv",
"log", "log",
"num-traits", "num-traits 0.2.15",
"parity-scale-codec", "parity-scale-codec",
"rayon", "rayon",
"simple_logger", "simple_logger",
...@@ -1914,7 +1923,7 @@ dependencies = [ ...@@ -1914,7 +1923,7 @@ dependencies = [
[[package]] [[package]]
name = "duniter" name = "duniter"
version = "0.3.0" version = "0.7.0"
dependencies = [ dependencies = [
"async-io", "async-io",
"bs58 0.5.0", "bs58 0.5.0",
...@@ -2023,6 +2032,7 @@ name = "duniter-live-tests" ...@@ -2023,6 +2032,7 @@ name = "duniter-live-tests"
version = "3.0.0" version = "3.0.0"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"countmap",
"hex-literal", "hex-literal",
"parity-scale-codec", "parity-scale-codec",
"sp-core", "sp-core",
...@@ -2423,7 +2433,7 @@ dependencies = [ ...@@ -2423,7 +2433,7 @@ dependencies = [
"futures 0.3.29", "futures 0.3.29",
"futures-timer", "futures-timer",
"log", "log",
"num-traits", "num-traits 0.2.15",
"parity-scale-codec", "parity-scale-codec",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"scale-info", "scale-info",
...@@ -2464,7 +2474,7 @@ version = "0.9.0" ...@@ -2464,7 +2474,7 @@ version = "0.9.0"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4" checksum = "98de4bbd547a563b716d8dfa9aad1cb19bfab00f4fa09a6a4ed21dbcf44ce9c4"
dependencies = [ dependencies = [
"num-traits", "num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -3889,7 +3899,7 @@ version = "0.1.5" ...@@ -3889,7 +3899,7 @@ version = "0.1.5"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770" checksum = "276ec31bcb4a9ee45f58bec6f9ec700ae4cf4f4f8f2fa7e06cb406bd5ffdd770"
dependencies = [ dependencies = [
"num-traits", "num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -5194,7 +5204,7 @@ dependencies = [ ...@@ -5194,7 +5204,7 @@ dependencies = [
"nalgebra-macros", "nalgebra-macros",
"num-complex", "num-complex",
"num-rational", "num-rational",
"num-traits", "num-traits 0.2.15",
"simba", "simba",
"typenum", "typenum",
] ]
...@@ -5403,7 +5413,7 @@ checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f" ...@@ -5403,7 +5413,7 @@ checksum = "f93ab6289c7b344a8a9f60f88d80aa20032336fe78da341afc91c8a2341fc75f"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"num-integer", "num-integer",
"num-traits", "num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -5412,7 +5422,7 @@ version = "0.4.3" ...@@ -5412,7 +5422,7 @@ version = "0.4.3"
source = "registry+https://github.com/rust-lang/crates.io-index" source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d" checksum = "02e0d21255c828d6f128a1e41534206671e8c3ea0c62f32291e808dc82cff17d"
dependencies = [ dependencies = [
"num-traits", "num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -5432,7 +5442,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" ...@@ -5432,7 +5442,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9" checksum = "225d3389fb3509a24c93f5c29eb6bde2586b98d9f016636dff58d7c6f7569cd9"
dependencies = [ dependencies = [
"autocfg", "autocfg",
"num-traits", "num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -5444,7 +5454,16 @@ dependencies = [ ...@@ -5444,7 +5454,16 @@ dependencies = [
"autocfg", "autocfg",
"num-bigint", "num-bigint",
"num-integer", "num-integer",
"num-traits", "num-traits 0.2.15",
]
[[package]]
name = "num-traits"
version = "0.1.43"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31"
dependencies = [
"num-traits 0.2.15",
] ]
[[package]] [[package]]
...@@ -7724,7 +7743,7 @@ dependencies = [ ...@@ -7724,7 +7743,7 @@ dependencies = [
"log", "log",
"num-bigint", "num-bigint",
"num-rational", "num-rational",
"num-traits", "num-traits 0.2.15",
"parity-scale-codec", "parity-scale-codec",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"sc-client-api", "sc-client-api",
...@@ -8494,7 +8513,7 @@ dependencies = [ ...@@ -8494,7 +8513,7 @@ dependencies = [
"futures-timer", "futures-timer",
"linked-hash-map", "linked-hash-map",
"log", "log",
"num-traits", "num-traits 0.2.15",
"parity-scale-codec", "parity-scale-codec",
"parking_lot 0.12.1", "parking_lot 0.12.1",
"sc-client-api", "sc-client-api",
...@@ -9038,7 +9057,7 @@ checksum = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae" ...@@ -9038,7 +9057,7 @@ checksum = "061507c94fc6ab4ba1c9a0305018408e312e17c041eb63bef8aa726fa33aceae"
dependencies = [ dependencies = [
"approx", "approx",
"num-complex", "num-complex",
"num-traits", "num-traits 0.2.15",
"paste", "paste",
"wide", "wide",
] ]
...@@ -9195,7 +9214,7 @@ version = "6.0.0" ...@@ -9195,7 +9214,7 @@ version = "6.0.0"
source = "git+https://github.com/duniter/substrate?branch=duniter-substrate-v0.9.42#38b19717f847d3eda654b6465802c244ea6372a6" source = "git+https://github.com/duniter/substrate?branch=duniter-substrate-v0.9.42#38b19717f847d3eda654b6465802c244ea6372a6"
dependencies = [ dependencies = [
"integer-sqrt", "integer-sqrt",
"num-traits", "num-traits 0.2.15",
"parity-scale-codec", "parity-scale-codec",
"scale-info", "scale-info",
"serde", "serde",
...@@ -11280,7 +11299,7 @@ dependencies = [ ...@@ -11280,7 +11299,7 @@ dependencies = [
"libm", "libm",
"memory_units", "memory_units",
"num-rational", "num-rational",
"num-traits", "num-traits 0.2.15",
"region", "region",
] ]
......
...@@ -54,8 +54,10 @@ RUN set -x && \ ...@@ -54,8 +54,10 @@ RUN set -x && \
cat /root/dynenv && \ cat /root/dynenv && \
. /root/dynenv && \ . /root/dynenv && \
cargo build --locked $CARGO_OPTIONS --no-default-features $BENCH_OPTIONS --features $chain --target "$RUST_ARCH_TRIPLET" && \ cargo build --locked $CARGO_OPTIONS --no-default-features $BENCH_OPTIONS --features $chain --target "$RUST_ARCH_TRIPLET" && \
cargo build --locked $CARGO_OPTIONS --target "$RUST_ARCH_TRIPLET" --package distance-oracle && \
mkdir -p build && \ mkdir -p build && \
mv target/$RUST_ARCH_TRIPLET/$TARGET_FOLDER/duniter build/ mv target/$RUST_ARCH_TRIPLET/$TARGET_FOLDER/duniter build/ && \
mv target/$RUST_ARCH_TRIPLET/$TARGET_FOLDER/distance-oracle build/
# Run tests if requested, expted when cross-building # Run tests if requested, expted when cross-building
ARG cucumber=0 ARG cucumber=0
......
...@@ -359,7 +359,7 @@ ask to join the set of validators two sessions after ...@@ -359,7 +359,7 @@ ask to join the set of validators two sessions after
<details><summary><code>set_session_keys(keys)</code></summary> <details><summary><code>set_session_keys(keys)</code></summary>
```rust ```rust
keys: T::KeysWrapper keys: T::Keys
``` ```
</details> </details>
...@@ -750,8 +750,11 @@ Link an account to an identity ...@@ -750,8 +750,11 @@ Link an account to an identity
</details> </details>
claim pending membership to become actual memberhip claim membership
the requested membership must fullfill requirements a pending membership should exist
it must fullfill the requirements (certs, distance)
for main wot claim_membership is called automatically when validating identity
for smith wot, it means joining the authority members
#### renew_membership - 2 #### renew_membership - 2
...@@ -844,10 +847,9 @@ Removes the status if `status` is `None`. ...@@ -844,10 +847,9 @@ Removes the status if `status` is `None`.
#### request_membership - 0 #### request_membership - 0
<details><summary><code>request_membership(metadata)</code></summary> <details><summary><code>request_membership()</code></summary>
```rust ```rust
metadata: T::MetaData
``` ```
</details> </details>
...@@ -864,8 +866,11 @@ submit a membership request (must have a declared identity) ...@@ -864,8 +866,11 @@ submit a membership request (must have a declared identity)
</details> </details>
claim pending membership to become actual memberhip claim membership
the requested membership must fullfill requirements a pending membership should exist
it must fullfill the requirements (certs, distance)
for main wot claim_membership is called automatically when validating identity
for smith wot, it means joining the authority members
#### renew_membership - 2 #### renew_membership - 2
...@@ -2003,10 +2008,9 @@ usually means being a stash account). ...@@ -2003,10 +2008,9 @@ usually means being a stash account).
#### request_membership - 0 #### request_membership - 0
<details><summary><code>request_membership(metadata)</code></summary> <details><summary><code>request_membership()</code></summary>
```rust ```rust
metadata: T::MetaData
``` ```
</details> </details>
......
This diff is collapsed.
This diff is collapsed.
...@@ -81,5 +81,9 @@ ...@@ -81,5 +81,9 @@
], ],
"treasury_funder_pubkey": "FHNpKmJrUtusuvKPGomAygQqeiks98bdV6yD61Stb6vg", "treasury_funder_pubkey": "FHNpKmJrUtusuvKPGomAygQqeiks98bdV6yD61Stb6vg",
"ud": 1000, "ud": 1000,
"initial_monetary_mass": 3000 "initial_monetary_mass": 3000,
"current_block": {
"number": 0,
"medianTime": 1700000000
}
} }
\ No newline at end of file
...@@ -90,5 +90,9 @@ ...@@ -90,5 +90,9 @@
], ],
"treasury_funder_pubkey": "FHNpKmJrUtusuvKPGomAygQqeiks98bdV6yD61Stb6vg", "treasury_funder_pubkey": "FHNpKmJrUtusuvKPGomAygQqeiks98bdV6yD61Stb6vg",
"ud": 1000, "ud": 1000,
"initial_monetary_mass": 4000 "initial_monetary_mass": 4000,
"current_block": {
"number": 0,
"medianTime": 1700000000
}
} }
\ No newline at end of file
...@@ -15,4 +15,5 @@ parity-scale-codec = "3.4.0" ...@@ -15,4 +15,5 @@ parity-scale-codec = "3.4.0"
sp-core = { git = 'https://github.com/duniter/substrate', branch = 'duniter-substrate-v0.9.42', default-features = false } sp-core = { git = 'https://github.com/duniter/substrate', branch = 'duniter-substrate-v0.9.42', default-features = false }
subxt = { git = 'https://github.com/duniter/subxt', branch = 'duniter-substrate-v0.9.42', default-features = false, features = ["jsonrpsee-ws"] } subxt = { git = 'https://github.com/duniter/subxt', branch = 'duniter-substrate-v0.9.42', default-features = false, features = ["jsonrpsee-ws"] }
tokio = { version = "1.28", features = ["macros", "time", "rt-multi-thread"], default-features = false } tokio = { version = "1.28", features = ["macros", "time", "rt-multi-thread"], default-features = false }
sp-runtime = { git = 'https://github.com/duniter/substrate', branch = 'duniter-substrate-v0.9.42', default-features = false , features = ["std"] } # https://github.com/paritytech/subxt/issues/437 sp-runtime = { git = 'https://github.com/duniter/substrate', branch = 'duniter-substrate-v0.9.42', default-features = false , features = ["std"] } # https://github.com/paritytech/subxt/issues/437
\ No newline at end of file countmap = "0.2.0"
...@@ -16,7 +16,7 @@ Test suite that verifies the consistency of the onchain storage. ...@@ -16,7 +16,7 @@ Test suite that verifies the consistency of the onchain storage.
#### Custom RPC endpoint #### Custom RPC endpoint
You can choose to use another RPC endpoint by setting the environment variable `WS_RPC_ENDPOINT`. You can choose to use another RPC endpoint by setting the environment variable `WS_RPC_ENDPOINT`.
This is also the only way to test against a different network that the default one. This is also the only way to test against a different network that the default one which is `ws://localhost:9944`.
#### run against a specific block #### run against a specific block
......
...@@ -17,14 +17,16 @@ ...@@ -17,14 +17,16 @@
#[subxt::subxt(runtime_metadata_path = "../resources/metadata.scale")] #[subxt::subxt(runtime_metadata_path = "../resources/metadata.scale")]
pub mod gdev {} pub mod gdev {}
use countmap::CountMap;
use hex_literal::hex; use hex_literal::hex;
use sp_core::crypto::AccountId32; use sp_core::crypto::AccountId32;
use sp_core::{blake2_128, ByteArray, H256}; use sp_core::{blake2_128, ByteArray, H256};
use std::collections::HashMap; use std::collections::{HashMap, HashSet};
use subxt::config::SubstrateConfig as GdevConfig; use subxt::config::SubstrateConfig as GdevConfig;
const DEFAULT_ENDPOINT: &str = "wss://gdev.librelois.fr:443/ws"; const DEFAULT_ENDPOINT: &str = "ws://localhost:9944";
const EXISTENTIAL_DEPOSIT: u64 = 100;
const TREASURY_ACCOUNT_ID: [u8; 32] = const TREASURY_ACCOUNT_ID: [u8; 32] =
hex!("6d6f646c70792f74727372790000000000000000000000000000000000000000"); hex!("6d6f646c70792f74727372790000000000000000000000000000000000000000");
...@@ -89,7 +91,7 @@ async fn sanity_tests_at(client: Client, _maybe_block_hash: Option<H256>) -> any ...@@ -89,7 +91,7 @@ async fn sanity_tests_at(client: Client, _maybe_block_hash: Option<H256>) -> any
account_id_bytes.copy_from_slice(&key.0[48..]); account_id_bytes.copy_from_slice(&key.0[48..]);
accounts.insert(AccountId32::new(account_id_bytes), account_info); accounts.insert(AccountId32::new(account_id_bytes), account_info);
} }
println!("accounts: {}.", accounts.len()); println!("accounts.len(): {}.", accounts.len());
// Collect identities // Collect identities
let mut identities: HashMap<IdtyIndex, IdtyValue> = HashMap::new(); let mut identities: HashMap<IdtyIndex, IdtyValue> = HashMap::new();
...@@ -113,7 +115,7 @@ async fn sanity_tests_at(client: Client, _maybe_block_hash: Option<H256>) -> any ...@@ -113,7 +115,7 @@ async fn sanity_tests_at(client: Client, _maybe_block_hash: Option<H256>) -> any
}; };
identities.insert(IdtyIndex::from_le_bytes(idty_index_bytes), idty_val); identities.insert(IdtyIndex::from_le_bytes(idty_index_bytes), idty_val);
} }
println!("identities: {}.", identities.len()); println!("identities.len(): {}.", identities.len());
// Collect identity_index_of // Collect identity_index_of
let mut identity_index_of: HashMap<[u8; 16], IdtyIndex> = HashMap::new(); let mut identity_index_of: HashMap<[u8; 16], IdtyIndex> = HashMap::new();
...@@ -126,10 +128,10 @@ async fn sanity_tests_at(client: Client, _maybe_block_hash: Option<H256>) -> any ...@@ -126,10 +128,10 @@ async fn sanity_tests_at(client: Client, _maybe_block_hash: Option<H256>) -> any
.await?; .await?;
while let Some((key, idty_index)) = idty_index_of_iter.next().await? { while let Some((key, idty_index)) = idty_index_of_iter.next().await? {
let mut blake2_128_bytes = [0u8; 16]; let mut blake2_128_bytes = [0u8; 16];
blake2_128_bytes.copy_from_slice(&key.0[32..]); blake2_128_bytes.copy_from_slice(&key.0[32..48]);
identity_index_of.insert(blake2_128_bytes, idty_index); identity_index_of.insert(blake2_128_bytes, idty_index);
} }
println!("identity_index_of: {}.", identities.len()); println!("identity_index_of.len(): {}.", identity_index_of.len());
let storage = Storage { let storage = Storage {
accounts, accounts,
...@@ -154,12 +156,17 @@ mod verifier { ...@@ -154,12 +156,17 @@ mod verifier {
Self { errors: Vec::new() } Self { errors: Vec::new() }
} }
// FIXME why async functions when called with await?
/// method to run all storage tests
pub(super) async fn verify_storage(&mut self, storage: &Storage) -> anyhow::Result<()> { pub(super) async fn verify_storage(&mut self, storage: &Storage) -> anyhow::Result<()> {
self.verify_accounts(&storage.accounts).await; self.verify_accounts(&storage.accounts).await;
self.verify_identities(&storage.accounts, &storage.identities) self.verify_identities(&storage.accounts, &storage.identities)
.await; .await;
self.verify_identity_index_of(&storage.identities, &storage.identity_index_of) self.verify_identity_index_of(&storage.identities, &storage.identity_index_of)
.await; .await;
self.verify_identity_coherence(&storage.identities, &storage.identity_index_of)
.await;
if self.errors.is_empty() { if self.errors.is_empty() {
Ok(()) Ok(())
...@@ -174,12 +181,19 @@ mod verifier { ...@@ -174,12 +181,19 @@ mod verifier {
} }
} }
/// assert method to collect errors
fn assert(&mut self, assertion: bool, error: String) { fn assert(&mut self, assertion: bool, error: String) {
if !assertion { if !assertion {
self.errors.push(error); self.errors.push(error);
} }
} }
/// like assert but just push error
fn error(&mut self, error: String) {
self.errors.push(error);
}
/// check accounts sufficients and consumers (specific to duniter-account pallet)
async fn verify_accounts(&mut self, accounts: &HashMap<AccountId32, AccountInfo>) { async fn verify_accounts(&mut self, accounts: &HashMap<AccountId32, AccountInfo>) {
for (account_id, account_info) in accounts { for (account_id, account_info) in accounts {
if account_info.sufficients == 0 { if account_info.sufficients == 0 {
...@@ -190,7 +204,8 @@ mod verifier { ...@@ -190,7 +204,8 @@ mod verifier {
); );
// Rule 2: If the account is not sufficient, it should comply to the existential deposit // Rule 2: If the account is not sufficient, it should comply to the existential deposit
self.assert( self.assert(
(account_info.data.free + account_info.data.reserved) >= 200, (account_info.data.free + account_info.data.reserved)
>= EXISTENTIAL_DEPOSIT,
format!( format!(
"Account {} not respect existential deposit rule.", "Account {} not respect existential deposit rule.",
account_id account_id
...@@ -198,9 +213,9 @@ mod verifier { ...@@ -198,9 +213,9 @@ mod verifier {
); );
} }
// Rule 3: If the account have consumers, it shoul have at least one provider // Rule 3: If the account have consumers, it should have at least one provider
if account_info.consumers > 0 { if account_info.consumers > 0 {
// Rule 1: If the account is not s // Rule 1: If the account is not sufficient [...]
self.assert( self.assert(
account_info.providers > 0, account_info.providers > 0,
format!("Account {} has no providers nor sufficients.", account_id), format!("Account {} has no providers nor sufficients.", account_id),
...@@ -218,12 +233,31 @@ mod verifier { ...@@ -218,12 +233,31 @@ mod verifier {
} }
} }
/// check list of identities (account existence, sufficient)
async fn verify_identities( async fn verify_identities(
&mut self, &mut self,
accounts: &HashMap<AccountId32, AccountInfo>, accounts: &HashMap<AccountId32, AccountInfo>,
identities: &HashMap<IdtyIndex, IdtyValue>, identities: &HashMap<IdtyIndex, IdtyValue>,
) { ) {
// counts occurence of owner key
let mut countmap = CountMap::<AccountId32, u8>::new();
// list owner key with multiple occurences
let mut duplicates = HashSet::new();
for (idty_index, idty_value) in identities { for (idty_index, idty_value) in identities {
countmap.insert_or_increment(idty_value.owner_key.clone());
if let Some(count) = countmap.get_count(&idty_value.owner_key) {
if count > 1 {
self.error(format!(
"address {} is the owner_key of {count} identities",
idty_value.owner_key
));
if count == 2 {
duplicates.insert(idty_value.owner_key.clone());
}
}
}
// Rule 1: each identity should have an account // Rule 1: each identity should have an account
let maybe_account = accounts.get(&idty_value.owner_key); let maybe_account = accounts.get(&idty_value.owner_key);
self.assert( self.assert(
...@@ -244,7 +278,7 @@ mod verifier { ...@@ -244,7 +278,7 @@ mod verifier {
match idty_value.status { match idty_value.status {
IdtyStatus::Validated => { IdtyStatus::Validated => {
// Rule 3: If the identity is validated, removable_on shoud be zero // Rule 3: If the identity is validated, removable_on should be zero
self.assert( self.assert(
idty_value.removable_on == 0, idty_value.removable_on == 0,
format!( format!(
...@@ -254,7 +288,7 @@ mod verifier { ...@@ -254,7 +288,7 @@ mod verifier {
); );
} }
_ => { _ => {
// Rule 4: If the identity is not validated, next_creatable_identity_on shoud be zero // Rule 4: If the identity is not validated, next_creatable_identity_on should be zero
self.assert( self.assert(
idty_value.next_creatable_identity_on == 0, idty_value.next_creatable_identity_on == 0,
format!("Identity {} is corrupted: next_creatable_identity_on > 0 on non-validated idty", format!("Identity {} is corrupted: next_creatable_identity_on > 0 on non-validated idty",
...@@ -263,8 +297,18 @@ mod verifier { ...@@ -263,8 +297,18 @@ mod verifier {
} }
} }
} }
for (idty_index, idty_value) in identities {
if duplicates.contains(&idty_value.owner_key) {
self.error(format!(
"duplicate key {} at position {idty_index}",
idty_value.owner_key
));
}
}
} }
/// check the identity hashmap (length, identity existence, hash matches owner key)
async fn verify_identity_index_of( async fn verify_identity_index_of(
&mut self, &mut self,
identities: &HashMap<IdtyIndex, IdtyValue>, identities: &HashMap<IdtyIndex, IdtyValue>,
...@@ -273,7 +317,11 @@ mod verifier { ...@@ -273,7 +317,11 @@ mod verifier {
// Rule1: identity_index_of should have the same lenght as identities // Rule1: identity_index_of should have the same lenght as identities
self.assert( self.assert(
identities.len() == identity_index_of.len(), identities.len() == identity_index_of.len(),
"identities.len() != identity_index_of.len().".to_owned(), format!(
"identities.len({}) != identity_index_of.len({}).",
identities.len(),
identity_index_of.len()
),
); );
for (blake2_128_owner_key, idty_index) in identity_index_of { for (blake2_128_owner_key, idty_index) in identity_index_of {
...@@ -301,5 +349,29 @@ mod verifier { ...@@ -301,5 +349,29 @@ mod verifier {
} }
} }
} }
/// check coherence between identity list and identity index hashmap
async fn verify_identity_coherence(
&mut self,
identities: &HashMap<IdtyIndex, IdtyValue>,
identity_index_of: &HashMap<[u8; 16], IdtyIndex>,
) {
// each identity should be correcly referenced in the hashmap
for (idty_index, idty_value) in identities {
// hash owner key to get key
let blake2_128_owner_key = &blake2_128(idty_value.owner_key.as_slice());
// get identity index from hashmap
if let Some(index_of) = identity_index_of.get(blake2_128_owner_key) {
self.assert(idty_index == index_of,
format!("identity number {idty_index} with owner key {0} is mapped to identity index {index_of}", idty_value.owner_key));
} else {
self.error(format!(
"identity with owner key {} is not present in hashmap",
idty_value.owner_key
));
}
}
}
} }
} }
This diff is collapsed.
name: "ĞDev" name: "ĞDev"
id: "gdev" id: "gdev"
chainType: "Live" chainType: "Live"
bootNodes: [] bootNodes:
- "/dns/gdev.cgeek.fr/tcp/30334/p2p/12D3KooWN7QhcPbTZgNMnS7AUZh3ZfnM43VdVKqy4JbAEp5AJh4f"
- "/dns/gdev.coinduf.eu/tcp/30333/p2p/12D3KooWFseA3B66eBzj4NY5ng3Lb2U3VPnKCi3iXYGYUSAahEw7"
telemetryEndpoints: telemetryEndpoints:
- ["/dns/telemetry.polkadot.io/tcp/443/x-parity-wss/%2Fsubmit%2F", 0] - ["/dns/telemetry.polkadot.io/tcp/443/x-parity-wss/%2Fsubmit%2F", 0]
properties: properties:
......
This diff is collapsed.
...@@ -81,6 +81,13 @@ pub struct GenesisData<Parameters: DeserializeOwned, SessionKeys: Decode> { ...@@ -81,6 +81,13 @@ pub struct GenesisData<Parameters: DeserializeOwned, SessionKeys: Decode> {
pub ud: u64, pub ud: u64,
} }
#[derive(Deserialize, Serialize)]
struct BlockV1 {
number: u32,
#[serde(rename = "medianTime")]
median_time: u64,
}
#[derive(Clone)] #[derive(Clone)]
pub struct GenesisIdentity { pub struct GenesisIdentity {
pub idty_index: u32, pub idty_index: u32,
...@@ -139,6 +146,7 @@ struct TransactionV2 { ...@@ -139,6 +146,7 @@ struct TransactionV2 {
#[derive(Deserialize, Serialize)] #[derive(Deserialize, Serialize)]
struct GenesisMigrationData { struct GenesisMigrationData {
initial_monetary_mass: u64, initial_monetary_mass: u64,
current_block: BlockV1,
identities: BTreeMap<String, IdentityV1>, identities: BTreeMap<String, IdentityV1>,
#[serde(default)] #[serde(default)]
wallets: BTreeMap<PubkeyV1, u64>, wallets: BTreeMap<PubkeyV1, u64>,
...@@ -235,6 +243,7 @@ struct SmithWoT<SK: Decode> { ...@@ -235,6 +243,7 @@ struct SmithWoT<SK: Decode> {
} }
struct GenesisInfo<'a> { struct GenesisInfo<'a> {
genesis_timestamp: u64,
accounts: &'a BTreeMap<AccountId32, GenesisAccountData<u64, u32>>, accounts: &'a BTreeMap<AccountId32, GenesisAccountData<u64, u32>>,
genesis_data_wallets_count: &'a usize, genesis_data_wallets_count: &'a usize,
inactive_identities: &'a HashMap<u32, String>, inactive_identities: &'a HashMap<u32, String>,
...@@ -500,6 +509,7 @@ where ...@@ -500,6 +509,7 @@ where
}); });
let genesis_info = GenesisInfo { let genesis_info = GenesisInfo {
genesis_timestamp,
accounts: &accounts, accounts: &accounts,
genesis_data_wallets_count: &genesis_data_wallets_count, genesis_data_wallets_count: &genesis_data_wallets_count,
identities: &identities, identities: &identities,
...@@ -727,6 +737,7 @@ fn dump_genesis_info(info: GenesisInfo) { ...@@ -727,6 +737,7 @@ fn dump_genesis_info(info: GenesisInfo) {
// give genesis info // give genesis info
log::info!( log::info!(
"prepared genesis with: "prepared genesis with:
- {} as genesis timestamp
- {} accounts ({} identities, {} simple wallets) - {} accounts ({} identities, {} simple wallets)
- {} total identities ({} active, {} inactive) - {} total identities ({} active, {} inactive)
- {} smiths - {} smiths
...@@ -734,6 +745,7 @@ fn dump_genesis_info(info: GenesisInfo) { ...@@ -734,6 +745,7 @@ fn dump_genesis_info(info: GenesisInfo) {
- {} certifications - {} certifications
- {} smith certifications - {} smith certifications
- {} members in technical committee", - {} members in technical committee",
info.genesis_timestamp,
info.accounts.len(), info.accounts.len(),
info.identities.len() - info.inactive_identities.len(), info.identities.len() - info.inactive_identities.len(),
info.genesis_data_wallets_count, info.genesis_data_wallets_count,
...@@ -1040,8 +1052,10 @@ fn check_genesis_data_and_filter_expired_certs_since_export( ...@@ -1040,8 +1052,10 @@ fn check_genesis_data_and_filter_expired_certs_since_export(
genesis_data.identities.iter_mut().for_each(|(name, i)| { genesis_data.identities.iter_mut().for_each(|(name, i)| {
if (i.membership_expire_on.0 as u64) < genesis_timestamp { if (i.membership_expire_on.0 as u64) < genesis_timestamp {
if (i.membership_expire_on.0 as u64) >= genesis_data.current_block.median_time {
log::warn!("{} membership expired since export", name);
}
i.membership_expire_on = TimestampV1(0); i.membership_expire_on = TimestampV1(0);
log::warn!("{} membership expired since export", name);
} }
}); });
...@@ -1606,7 +1620,9 @@ where ...@@ -1606,7 +1620,9 @@ where
.map(|x| x.0.clone()) .map(|x| x.0.clone())
.collect::<Vec<_>>(); .collect::<Vec<_>>();
let genesis_timestamp: u64 = get_genesis_timestamp()?;
let genesis_info = GenesisInfo { let genesis_info = GenesisInfo {
genesis_timestamp,
accounts: &accounts, accounts: &accounts,
genesis_data_wallets_count: &genesis_data_wallets_count, genesis_data_wallets_count: &genesis_data_wallets_count,
identities: &identities, identities: &identities,
...@@ -1713,8 +1729,13 @@ fn get_genesis_input<P: Default + DeserializeOwned>( ...@@ -1713,8 +1729,13 @@ fn get_genesis_input<P: Default + DeserializeOwned>(
memmap2::Mmap::map(&file) memmap2::Mmap::map(&file)
.map_err(|e| format!("Error mmaping gen conf file `{}`: {}", config_file_path, e))? .map_err(|e| format!("Error mmaping gen conf file `{}`: {}", config_file_path, e))?
}; };
serde_yaml::from_slice::<GenesisInput<P>>(&bytes) if config_file_path.ends_with(".json") {
.map_err(|e| format!("Error parsing gen conf file: {}", e)) serde_json::from_slice::<GenesisInput<P>>(&bytes)
.map_err(|e| format!("Error parsing JSON gen conf file: {}", e))
} else {
serde_yaml::from_slice::<GenesisInput<P>>(&bytes)
.map_err(|e| format!("Error parsing YAML gen conf file: {}", e))
}
} }
fn get_genesis_migration_data() -> Result<GenesisMigrationData, String> { fn get_genesis_migration_data() -> Result<GenesisMigrationData, String> {
......
...@@ -123,7 +123,7 @@ impl SubstrateCli for Cli { ...@@ -123,7 +123,7 @@ impl SubstrateCli for Cli {
} }
// hardcoded previously generated raw chainspecs // hardcoded previously generated raw chainspecs
// yields a pointlessly heavy binary because of hexadecimal-text-encoded values // yields a pointlessly heavy binary because of hexadecimal-text-encoded values
#[cfg(all(feature = "gdev", feature = "embed"))] #[cfg(feature = "gdev")]
"gdev" => Box::new(chain_spec::gdev::ChainSpec::from_json_bytes( "gdev" => Box::new(chain_spec::gdev::ChainSpec::from_json_bytes(
&include_bytes!("../specs/gdev-raw.json")[..], &include_bytes!("../specs/gdev-raw.json")[..],
)?), )?),
......
...@@ -18,47 +18,138 @@ ...@@ -18,47 +18,138 @@
use super::*; use super::*;
use frame_benchmarking::{account, benchmarks}; use frame_benchmarking::{account, benchmarks};
use sp_runtime::traits::One;
// FIXME this is a naïve implementation of benchmarks: fn assert_has_event<T: Config>(generic_event: <T as Config>::RuntimeEvent) {
// - without properly prepare data frame_system::Pallet::<T>::assert_has_event(generic_event.into());
// - without "verify" blocks }
// - without thinking about worst case scenario
// - without writing complexity in the term of refund queue length
// It's there as a seed for benchmark implementation and to use WeightInfo where needed.
benchmarks! { benchmarks! {
where_clause { where_clause {
where where
IdtyId<T>: From<u32>, IdtyId<T>: From<u32>,
BalanceOf<T>: From<u64>, BalanceOf<T>: From<u64>,
T::AccountId: From<[u8; 32]>,
} }
queue_refund { queue_refund {
let account: T::AccountId = account("Alice", 1, 1); let account: T::AccountId = account("Alice", 1, 1);
let dummy_refund = Refund {
account: account.clone(),
identity: 0u32.into(),
amount: 20u64.into(),
};
let refund = Refund { let refund = Refund {
account, account,
identity: 1u32.into(), identity: 1u32.into(),
amount: 10u64.into(), amount: 10u64.into(),
}; };
}: { Pallet::<T>::queue_refund(refund) } // Complexity is bound to MAX_QUEUED_REFUNDS where an insertion is O(n-1)
for i in 0..MAX_QUEUED_REFUNDS-1 {
Pallet::<T>::queue_refund(dummy_refund.clone())
}
}: { Pallet::<T>::queue_refund(refund.clone()) }
verify {
assert_eq!(RefundQueue::<T>::get().last(), Some(refund).as_ref());
assert_eq!(RefundQueue::<T>::get().len() as u32, MAX_QUEUED_REFUNDS);
}
spend_quota { spend_quota {
let idty_id = 1u32; let idty_id: IdtyId<T> = 1u32.into();
let amount = 1u64; let amount = 2u64;
let quota_amount = 10u64;
IdtyQuota::<T>::insert(
idty_id,
Quota {
last_use: T::BlockNumber::zero(),
amount: quota_amount.into(),
},
);
}: { Pallet::<T>::spend_quota(idty_id.into(), amount.into()) } }: { Pallet::<T>::spend_quota(idty_id.into(), amount.into()) }
verify {
let quota_growth = sp_runtime::Perbill::from_rational(
T::BlockNumber::one(),
T::ReloadRate::get(),
).mul_floor(T::MaxQuota::get());
assert_eq!(IdtyQuota::<T>::get(idty_id).unwrap().amount, quota_growth +quota_amount.into() - amount.into());
}
try_refund { try_refund {
let account: T::AccountId = account("Alice", 1, 1); let account: T::AccountId = account("Alice", 1, 1);
let idty_id: IdtyId<T> = 1u32.into();
IdtyQuota::<T>::insert(
idty_id,
Quota {
last_use: T::BlockNumber::zero(),
amount: 10u64.into(),
},
);
let _ = CurrencyOf::<T>:: make_free_balance_be(
&T::RefundAccount::get(),u32::MAX.into(),
);
// The worst-case scenario is when the refund fails
// and can only be triggered if the account is dead,
// in this case by having no balance in the account.
let refund = Refund { let refund = Refund {
account, account: account.clone(),
identity: 1u32.into(), identity: 1u32.into(),
amount: 10u64.into(), amount: 10u64.into(),
}; };
}: { Pallet::<T>::try_refund(refund) } }: { Pallet::<T>::try_refund(refund) }
verify {
assert_has_event::<T>(Event::<T>::RefundFailed ( account ).into());
}
do_refund { do_refund {
let account: T::AccountId = account("Alice", 1, 1); let account: T::AccountId = account("Alice", 1, 1);
let _ = CurrencyOf::<T>:: make_free_balance_be(
&T::RefundAccount::get(),u32::MAX.into(),
);
// The worst-case scenario is when the refund fails
// and can only be triggered if the account is dead,
// in this case by having no balance in the account.
let refund = Refund { let refund = Refund {
account, account: account.clone(),
identity: 1u32.into(), identity: 1u32.into(),
amount: 10u64.into(), amount: 10u64.into(),
}; };
let amount = 5u64.into(); }: { Pallet::<T>::try_refund(refund) }
}: { Pallet::<T>::do_refund(refund, amount) } verify {
assert_has_event::<T>(Event::<T>::RefundFailed ( account ).into());
}
// The base weight consumed on processing refund queue when empty.
on_process_refund_queue {
assert_eq!(RefundQueue::<T>::get().len() as u32, 0);
}: { Pallet::<T>::process_refund_queue(Weight::MAX) }
// The weight consumed on processing refund queue with one element.
// Can deduce the process_refund_queue overhead by subtracting try_refund weight.
#[pov_mode = Measured]
on_process_refund_queue_elements {
let i in 1..MAX_QUEUED_REFUNDS;
let account: T::AccountId = account("Alice", 1, 1);
let idty_id: IdtyId<T> = 1u32.into();
IdtyQuota::<T>::insert(
idty_id,
Quota {
last_use: T::BlockNumber::zero(),
amount: 10u64.into(),
},
);
let _ = CurrencyOf::<T>:: make_free_balance_be(
&T::RefundAccount::get(),u32::MAX.into(),
);
// The worst-case scenario is when the refund fails
// and can only be triggered if the account is dead,
// in this case by having no balance in the account.
let refund = Refund {
account: account.clone(),
identity: 1u32.into(),
amount: 10u64.into(),
};
for _ in 0..i {
Pallet::<T>::queue_refund(refund.clone());
}
assert_eq!(RefundQueue::<T>::get().len() as u32, i);
}: { Pallet::<T>::process_refund_queue(Weight::MAX) }
verify {
assert_eq!(RefundQueue::<T>::get().len() as u32, 0);
assert_has_event::<T>(Event::<T>::RefundFailed ( account ).into());
}
impl_benchmark_test_suite!(Pallet, crate::mock::new_test_ext(crate::mock::QuotaConfig{identities: vec![1, 2]}), crate::mock::Test);
} }
...@@ -210,19 +210,31 @@ pub mod pallet { ...@@ -210,19 +210,31 @@ pub mod pallet {
/// perform as many refunds as possible within the supplied weight limit /// perform as many refunds as possible within the supplied weight limit
pub fn process_refund_queue(weight_limit: Weight) -> Weight { pub fn process_refund_queue(weight_limit: Weight) -> Weight {
RefundQueue::<T>::mutate(|queue| { RefundQueue::<T>::mutate(|queue| {
// The weight to process an empty queue
let mut total_weight = <T as pallet::Config>::WeightInfo::on_process_refund_queue();
// The weight to process one element without the actual try_refund weight
let overhead =
<T as pallet::Config>::WeightInfo::on_process_refund_queue_elements(2)
.saturating_sub(
<T as pallet::Config>::WeightInfo::on_process_refund_queue_elements(1),
)
.saturating_sub(<T as pallet::Config>::WeightInfo::try_refund());
// make sure that we have at least the time to handle one try_refund call
if queue.is_empty() { if queue.is_empty() {
return Weight::zero(); return total_weight;
} }
let mut total_weight = Weight::zero();
// make sure that we have at least the time to handle one try_refund call while total_weight.any_lt(weight_limit.saturating_sub(
while total_weight.any_lt( <T as pallet::Config>::WeightInfo::try_refund().saturating_add(overhead),
weight_limit.saturating_sub(<T as pallet::Config>::WeightInfo::try_refund()), )) {
) {
let Some(queued_refund) = queue.pop() else { let Some(queued_refund) = queue.pop() else {
break; break;
}; };
let consumed_weight = Self::try_refund(queued_refund); let consumed_weight = Self::try_refund(queued_refund);
total_weight = total_weight.saturating_add(consumed_weight); total_weight = total_weight
.saturating_add(consumed_weight)
.saturating_add(overhead);
} }
total_weight total_weight
}) })
...@@ -309,8 +321,6 @@ pub mod pallet { ...@@ -309,8 +321,6 @@ pub mod pallet {
// process refund queue if space left on block // process refund queue if space left on block
fn on_idle(_block: T::BlockNumber, remaining_weight: Weight) -> Weight { fn on_idle(_block: T::BlockNumber, remaining_weight: Weight) -> Weight {
Self::process_refund_queue(remaining_weight) Self::process_refund_queue(remaining_weight)
// opti: benchmark process_refund_queue overhead and substract this from weight limit
// .saturating_sub(T::WeightInfo::process_refund_queue())
} }
} }
} }
......
...@@ -238,4 +238,133 @@ fn test_not_enough_treasury() { ...@@ -238,4 +238,133 @@ fn test_not_enough_treasury() {
}) })
} }
// TODO implement a mock weight to test if refund queue processing actually stops when reached limit /// test complete scenario with queue and process refund queue weight with available quotas
#[test]
fn test_process_refund_queue_weight_with_quotas() {
new_test_ext(QuotaConfig {
identities: vec![1, 2, 3],
})
.execute_with(|| {
run_to_block(15);
// give enough currency to accounts and treasury and double check
Balances::make_free_balance_be(&account(1), 1000);
Balances::make_free_balance_be(&account(2), 1000);
Balances::make_free_balance_be(&account(3), 1000);
Balances::make_free_balance_be(
&<Test as pallet_quota::Config>::RefundAccount::get(),
10_000,
);
assert_eq!(
Balances::free_balance(<Test as pallet_quota::Config>::RefundAccount::get()),
10_000
);
// fill in the refund queue
Quota::queue_refund(pallet_quota::Refund {
account: account(1),
identity: 10,
amount: 10,
});
Quota::queue_refund(pallet_quota::Refund {
account: account(2),
identity: 2,
amount: 500,
});
Quota::queue_refund(pallet_quota::Refund {
account: account(3),
identity: 3,
amount: 666,
});
// process it with no weight
Quota::process_refund_queue(Weight::from(0));
// after processing, it should be of the same size
assert_eq!(pallet_quota::RefundQueue::<Test>::get().len(), 3);
// process it with only 200 allowed weight
Quota::process_refund_queue(Weight::from_parts(200u64, 0));
// after processing, it should be of size 1 because total_weight += 25*2 by iteration and
// limit is total_weight < 200-100 so 2 elements can be processed
assert_eq!(pallet_quota::RefundQueue::<Test>::get().len(), 1);
// and we should observe the effects of refund
assert_eq!(Balances::free_balance(account(3)), 1666); // 1000 initial + 666 refunded
assert_eq!(Balances::free_balance(account(2)), 1500); // 1000 initial + 500 refunded
assert_eq!(Balances::free_balance(account(1)), 1000); // only initial because no available weight to process
assert_eq!(
Balances::free_balance(<Test as pallet_quota::Config>::RefundAccount::get()),
// initial minus refunds
10_000 - 666 - 500
);
// events
System::assert_has_event(RuntimeEvent::Quota(pallet_quota::Event::Refunded {
who: account(3),
identity: 3,
amount: 666,
}));
System::assert_has_event(RuntimeEvent::Quota(pallet_quota::Event::Refunded {
who: account(2),
identity: 2,
amount: 500,
}));
})
}
/// test complete scenario with queue and process refund queue weight with limited quotas
#[test]
fn test_process_refund_queue_weight_no_quotas() {
new_test_ext(QuotaConfig {
identities: vec![1, 2],
})
.execute_with(|| {
run_to_block(15);
// give enough currency to accounts and treasury and double check
Balances::make_free_balance_be(&account(1), 1000);
Balances::make_free_balance_be(&account(2), 1000);
Balances::make_free_balance_be(&account(3), 1000);
Balances::make_free_balance_be(
&<Test as pallet_quota::Config>::RefundAccount::get(),
10_000,
);
assert_eq!(
Balances::free_balance(<Test as pallet_quota::Config>::RefundAccount::get()),
10_000
);
// fill in the refund queue
Quota::queue_refund(pallet_quota::Refund {
account: account(1),
identity: 10,
amount: 10,
});
Quota::queue_refund(pallet_quota::Refund {
account: account(2),
identity: 2,
amount: 500,
});
Quota::queue_refund(pallet_quota::Refund {
account: account(3),
identity: 3,
amount: 666,
});
// process it with no weight
Quota::process_refund_queue(Weight::from(0));
// after processing, it should be of the same size
assert_eq!(pallet_quota::RefundQueue::<Test>::get().len(), 3);
// process it with only 150 allowed weight
Quota::process_refund_queue(Weight::from_parts(150u64, 0));
// after processing, it should be of size 2 because try_refund weight is 25 (first in the queue with no quota) then 25*2 for the 2 other elements
// limit is total_weight < 150-100 so 2 elements can be processed
assert_eq!(pallet_quota::RefundQueue::<Test>::get().len(), 1);
// and we should observe the effects of refund
assert_eq!(Balances::free_balance(account(3)), 1000); // 1000 initial only because no quota available
assert_eq!(Balances::free_balance(account(2)), 1500); // 1000 initial + 500 refunded
assert_eq!(Balances::free_balance(account(1)), 1000); // only initial because no available weight to process
assert_eq!(
Balances::free_balance(<Test as pallet_quota::Config>::RefundAccount::get()),
// initial minus refunds
10_000 - 500
);
// events
System::assert_has_event(RuntimeEvent::Quota(pallet_quota::Event::Refunded {
who: account(2),
identity: 2,
amount: 500,
}));
})
}
...@@ -7,19 +7,27 @@ pub trait WeightInfo { ...@@ -7,19 +7,27 @@ pub trait WeightInfo {
fn spend_quota() -> Weight; fn spend_quota() -> Weight;
fn try_refund() -> Weight; fn try_refund() -> Weight;
fn do_refund() -> Weight; fn do_refund() -> Weight;
fn on_process_refund_queue() -> Weight;
fn on_process_refund_queue_elements(_i: u32) -> Weight;
} }
impl WeightInfo for () { impl WeightInfo for () {
fn queue_refund() -> Weight { fn queue_refund() -> Weight {
Weight::from_parts(999u64, 0) Weight::from_parts(100u64, 0)
} }
fn spend_quota() -> Weight { fn spend_quota() -> Weight {
Weight::from_parts(999u64, 0) Weight::from_parts(25u64, 0)
} }
fn try_refund() -> Weight { fn try_refund() -> Weight {
Weight::from_parts(999u64, 0) Weight::from_parts(100u64, 0)
} }
fn do_refund() -> Weight { fn do_refund() -> Weight {
Weight::from_parts(999u64, 0) Weight::from_parts(25u64, 0)
}
fn on_process_refund_queue() -> Weight {
Weight::from_parts(1u64, 0)
}
fn on_process_refund_queue_elements(_i: u32) -> Weight {
Weight::from_parts(1u64, 0)
} }
} }