
Compare revisions

Changes are shown as if the source revision was being merged into the target revision.

Commits on Source (3)
    Resolve "E2E tests fail but the CI succeeds" (!189) · 62a49be9
    Cédric Moreau authored
    * fix(#132): fix: always run `tests` job by default
    
    * fix(#132): fix: change `tests` job
    
    * fix(#132): E2E tests must fail if some hooks have failed
  • Cédric Moreau authored · 9e3659a2
  •
    implement quotas and refund transaction fees (!183) · b793b455
    Hugo Trentesaux authored
    * implement quotas
    
    implement weights "à l'arrache"
    
    benchmarks duniter-account "à l'arrache"
    
    implement benchmark logic (not proper benchmarks)
    
    fix live tests :man_facepalming:
    
    and clippy :man_facepalming::man_facepalming:
    
    replace quotas by quota everywhere
    
    comment unused sections of template
    
    remove quota treasury dependency
    
    give treasury address as argument
    
    typo
    
    review tuxmain
    
    doc readme
    
    rename error DistanceKO to DistanceNotOK
    
    merge new owner key and revocation signature
    
    merge signature error types
    
    rename NewOwnerKeyPayload
    
    fix comment
    
    make eligibility more explicit
    
    update metadata
    
    fix
    
    fix fee multiplier update
    
    prevent network discovery + connecting other nodes
Showing changed files with 1729 additions and 1341 deletions
[alias]
cucumber = "test -p duniter-end2end-tests --test cucumber_tests --"
sanity-gdev = "test -p duniter-live-tests --test sanity_gdev -- --nocapture"
tu = "test --workspace --exclude duniter-end2end-tests --exclude duniter-live-tests"
# `te` and `cucumber` are synonyms
te = "test -p duniter-end2end-tests --test cucumber_tests --"
cucumber = "test -p duniter-end2end-tests --test cucumber_tests --"
ta = "test --workspace --exclude duniter-live-tests"
tb = "test --features runtime-benchmarks -p"
rbp = "run --release --features runtime-benchmarks -- benchmark pallet --chain=dev --steps=50 --repeat=20 --extrinsic=* --execution=wasm --wasm-execution=compiled --heap-pages=4096 --header=./file_header.txt --output=. --pallet"
xtask = "run --package xtask --"
cucumber-node = "run -- --chain=gdev_dev --execution=Native --sealing=manual --force-authoring --rpc-cors=all --tmp --ws-port 9944 --alice"
......@@ -7,6 +7,7 @@ stages:
- quality
- build
- tests
- release
- deploy
- deploy_readme
......@@ -43,7 +44,7 @@ check_labels:
.env:
image: paritytech/ci-linux:production
tags:
- podman
- kepler
fmt_and_clippy:
extends: .env
......@@ -52,6 +53,8 @@ fmt_and_clippy:
when: manual
- if: '$CI_COMMIT_TAG || $CI_COMMIT_BRANCH == "master"'
when: never
- if: '$CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/'
when: never
- if: $CI_PIPELINE_SOURCE == "merge_request_event"
- when: manual
stage: quality
......@@ -69,6 +72,61 @@ fmt_and_clippy:
tags:
- podman
.docker_release_build:
stage: build
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
when: manual
changes:
- node/specs/$CHAIN-raw.json
variables:
IMAGE_NAME: "duniter/duniter-v2s-$CHAIN"
IMAGE_TAG: "latest"
PODMAN_BUILD_OPTIONS: "--platform linux/amd64 --build-arg chain=$CHAIN"
script:
- echo podman build --layers --tag "$IMAGE_NAME:$IMAGE_TAG" -f docker/Dockerfile $PODMAN_BUILD_OPTIONS .
- podman build --layers --tag "$IMAGE_NAME:$IMAGE_TAG" -f docker/Dockerfile $PODMAN_BUILD_OPTIONS .
tags:
- podman
.docker_release_deploy:
stage: deploy
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
when: manual
changes:
- node/specs/$CHAIN-raw.json
variables:
IMAGE_NAME: "duniter/duniter-v2s-$CHAIN"
IMAGE_TAG: "latest"
script:
- echo podman push "localhost/$IMAGE_NAME:$IMAGE_TAG" "docker://docker.io/$IMAGE_NAME:$IMAGE_TAG"
- podman push "localhost/$IMAGE_NAME:$IMAGE_TAG" "docker://docker.io/$IMAGE_NAME:$IMAGE_TAG"
tags:
- podman
gdev_docker_release_build:
extends: .docker_release_build
variables:
CHAIN: gdev
gtest_docker_release_build:
extends: .docker_release_build
variables:
CHAIN: gtest
gdev_docker_release_deploy:
extends: .docker_release_deploy
needs: ['gdev_docker_release_build']
variables:
CHAIN: gdev
gtest_docker_release_deploy:
extends: .docker_release_deploy
needs: ['gtest_docker_release_build']
variables:
CHAIN: gtest
.docker_deploy:
stage: deploy
before_script:
......@@ -127,20 +185,28 @@ build_release_tag:
IMAGE_TAG: "$CI_COMMIT_TAG"
PODMAN_BUILD_OPTIONS: "--platform linux/amd64"
test_debug:
tests:
stage: tests
extends: .docker_build
image: rust:1-bullseye
rules:
- if: $CI_COMMIT_REF_NAME =~ /^wip*$/
when: manual
- if: $CI_COMMIT_TAG
when: never
- if: $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
when: never
- if: '$CI_MERGE_REQUEST_ID || $CI_COMMIT_BRANCH == "master"'
- when: manual
when: manual
- when: always
variables:
IMAGE_NAME: "duniter/duniter-v2s-test"
IMAGE_TAG: "debug-sha-$CI_COMMIT_SHORT_SHA"
PODMAN_BUILD_OPTIONS: "--target build --build-arg debug=1 --build-arg cucumber=1"
DEBIAN_FRONTEND: noninteractive
script:
- apt-get update
- apt-get install -y clang cmake protobuf-compiler
# cargo build is required for cucumber tests anyway
- cargo build
- cargo tu
- cargo cucumber
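The same sequence can be reproduced locally; a minimal sketch, assuming a Debian-like environment and the cargo aliases from `.cargo/config`:
```
apt-get update && apt-get install -y clang cmake protobuf-compiler
cargo build      # required by the cucumber tests anyway
cargo tu         # unit tests
cargo cucumber   # end-to-end tests
```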
test_release:
stage: tests
......@@ -229,3 +295,191 @@ readme_docker_release_tag:
PUSHRM_FILE: "$CI_PROJECT_DIR/docker/README.md"
script: "/bin/true"
############## SRTOOL ##############
.srtool:
stage: build
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
image: paritytech/srtool:1.73.0-0.12.0
variables:
PACKAGE: $RUNTIME-runtime
RUNTIME_DIR: runtime/$RUNTIME
SRTOOL_OUTPUT: $CI_PROJECT_DIR/release/srtool_output_$RUNTIME.json
script:
- echo "Building runtime for $RUNTIME"
- echo release/runtime-700 | sed -e "s/release\///g"
- mkdir -p $CI_PROJECT_DIR/release
# Copy sources to the expected directory of srtool
- cp -R * /build/
# Build the runtime
- /srtool/build --app --json -cM > $SRTOOL_OUTPUT
- mv /build/runtime/$RUNTIME/target/srtool/release/wbuild/$RUNTIME-runtime/${RUNTIME}_runtime.compact.compressed.wasm $CI_PROJECT_DIR/release/
artifacts:
name: "runtime"
paths:
- $CI_PROJECT_DIR/release
tags:
- kepler
gdev_srtool:
extends: .srtool
variables:
RUNTIME: gdev
gtest_srtool:
extends: .srtool
variables:
RUNTIME: gtest
############## SPECS ##############
create_g1_data:
stage: build
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
image: python:3.9.18
variables:
DEBIAN_FRONTEND: noninteractive
LEVELDB_PATH: /dump/duniter_default/data/leveldb
script:
# Duniter 1.8.7 dump
- mkdir /dump
- cd /dump
# Export file constructed using Duniter 1.8.7:
# - bin/duniter sync g1.cgeek.fr --store-txs --nointeractive --mdb 1.8.7
# - mkdir -p /tmp/backup-g1-duniter-1.8.7
# - cp -R $HOME/.config/duniter/1.8.7/data /tmp/backup-g1-duniter-1.8.7
# - cp -R $HOME/.config/duniter/1.8.7/g1 /tmp/backup-g1-duniter-1.8.7
# - cp -R $HOME/.config/duniter/1.8.7/txs.db /tmp/backup-g1-duniter-1.8.7
# - tar -cvzf /tmp/backup-g1-duniter-1.8.7.tgz /tmp/backup-g1-duniter-1.8.7
# Then the file is uploaded to dl.cgeek.fr manually
- curl https://dl.cgeek.fr/public/backup-g1-duniter-1.8.7.tgz -o g1-dump.tgz
- tar xvzf g1-dump.tgz
- rm g1-dump.tgz
- mv tmp/backup-g1-duniter-1.8.7 duniter_default
# py-g1-migrator conversion
- git clone https://git.duniter.org/tools/py-g1-migrator.git -b import_identities_from_leveldb /py-g1-migrator
- cd /py-g1-migrator
- rm -rf inputs/*
- apt-get update
- apt-get install -y sqlite3 libleveldb-dev jq
- pip install -r requirements.txt
# Export identities and wallets
- ./main.py
# Export transaction history
- sqlite3 /dump/duniter_default/txs.db --json "select time,comment,issuers,outputs from txs;" > inputs/transactions_history.json 2>> inputs/txs.err
- ./generate_transactions_history.py
# Merge into a single file
- 'jq -s "{ identities: .[0].identities, wallets: .[0].wallets, initial_monetary_mass: .[0].initial_monetary_mass, transactions_history: .[1] }" output/gtest_genesis.json output/history.json > output/g1-data.json'
# Make the exported file available for next jobs
- mkdir -p $CI_PROJECT_DIR/release/
- cp output/g1-data.json $CI_PROJECT_DIR/release/
artifacts:
paths:
- $CI_PROJECT_DIR/release/
tags:
- kepler
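A quick sanity check of the exported file can be done with jq; a sketch, assuming the key names produced by the merge command above:
```
jq '.identities | length' release/g1-data.json     # number of migrated identities
jq '.wallets | length' release/g1-data.json        # number of migrated simple wallets
jq '.initial_monetary_mass' release/g1-data.json   # monetary mass taken from the Ğ1 dump
```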
.build_specs:
stage: build
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
image: rust:1-bullseye
variables:
WASM_FILE: $CI_PROJECT_DIR/release/${RUNTIME}_runtime.compact.compressed.wasm
DUNITER_GENESIS_DATA: $CI_PROJECT_DIR/release/g1-data.json
DUNITER_GENESIS_EXPORT: $CI_PROJECT_DIR/release/${RUNTIME}-indexer.json
DEBIAN_FRONTEND: noninteractive
script:
- apt-get update
- apt-get install -y clang cmake protobuf-compiler
- cargo run ${FEATURES} -- build-spec --chain=${RUNTIME}_live > release/${RUNTIME}.json
- cargo run ${FEATURES} -- build-spec --chain=release/${RUNTIME}.json --disable-default-bootnode --raw > release/${RUNTIME}-raw.json
- cp node/specs/${RUNTIME}_client-specs.json release/
artifacts:
name: "runtime"
paths:
- $CI_PROJECT_DIR/release
tags:
- kepler
gdev_specs:
extends: .build_specs
needs:
- gdev_srtool
- create_g1_data
variables:
RUNTIME: gdev
gtest_specs:
extends: .build_specs
needs:
- gtest_srtool
- create_g1_data
variables:
RUNTIME: gtest
FEATURES: --features gtest --no-default-features
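Outside the CI, the same spec files can be generated locally; a sketch for the gdev runtime, assuming the g1-data.json export and the srtool wasm are already present in ./release and that the node reads the same environment variables as the job above:
```
export DUNITER_GENESIS_DATA=$PWD/release/g1-data.json
export WASM_FILE=$PWD/release/gdev_runtime.compact.compressed.wasm
cargo run -- build-spec --chain=gdev_live > release/gdev.json
cargo run -- build-spec --chain=release/gdev.json --disable-default-bootnode --raw > release/gdev-raw.json
```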
############## RELEASE ##############
create_release:
stage: release
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
needs: ['create_g1_data', 'gdev_srtool', 'gtest_srtool']
when: manual
image: rust:1-bullseye
variables:
SRTOOL_OUTPUT_GDEV: $CI_PROJECT_DIR/release/srtool_output_gdev.json
SRTOOL_OUTPUT_GTEST: $CI_PROJECT_DIR/release/srtool_output_gtest.json
SRTOOL_OUTPUT_G1: $CI_PROJECT_DIR/release/srtool_output_g1.json
script:
# Release creation
- export MILESTONE=$(echo release/runtime-700 | sed -e "s/release\///g")
- cargo xtask release-runtime $MILESTONE $CI_COMMIT_BRANCH
# We always ship runtimes: this is both a proof and a convenience
- cargo xtask create-asset-link $MILESTONE g1-data.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/g1-data.json
- cargo xtask create-asset-link $MILESTONE gdev_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_runtime.compact.compressed.wasm
- cargo xtask create-asset-link $MILESTONE gtest_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_runtime.compact.compressed.wasm
- cargo xtask create-asset-link $MILESTONE gdev_client-specs.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_client-specs.json
- cargo xtask create-asset-link $MILESTONE gtest_client-specs.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_client-specs.json
artifacts:
paths:
- $CI_PROJECT_DIR/release/
tags:
- kepler
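For reference, the milestone name is simply the release branch name without its release/ prefix (the script above hardcodes release/runtime-700); an illustrative, generalized form using the branch variable:
```
export MILESTONE=$(echo "$CI_COMMIT_BRANCH" | sed -e "s/release\///g")   # release/runtime-700 -> runtime-700
cargo xtask release-runtime "$MILESTONE" "$CI_COMMIT_BRANCH"
```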
# ------ RELEASE: ADD SPECS ------
.release_specs:
stage: release
rules:
- if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
image: rust:1-bullseye
script:
- export MILESTONE=$(echo release/runtime-700 | sed -e "s/release\///g")
- cargo xtask create-asset-link $MILESTONE ${RUNTIME}.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/${RUNTIME}.json
- cargo xtask create-asset-link $MILESTONE ${RUNTIME}-raw.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/${RUNTIME}-raw.json
- echo "Release Docker file..."
artifacts:
paths:
- $CI_PROJECT_DIR/release/
tags:
- kepler
release_gdev_6_specs:
extends: .release_specs
needs:
- create_release
- gdev_specs
variables:
RUNTIME: gdev
release_gtest_specs:
extends: .release_specs
needs:
- create_release
- gtest_specs
variables:
RUNTIME: gtest
This diff is collapsed.
......@@ -69,6 +69,7 @@ sp-membership = { path = 'primitives/membership' }
# crates.io dependencies
async-io = { version = "1.6.0", default-features = false }
bs58 = "0.5.0"
clap = { version = "4.0.9", default-features = false, features = ["derive"] }
clap_complete = { version = "4", default-features = false }
futures = { version = "0.3.28", default-features = false, features = ["compat"] }
......@@ -89,6 +90,7 @@ frame-benchmarking = { git = "https://github.com/duniter/substrate", branch = "d
frame-benchmarking-cli = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
frame-system = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
pallet-grandpa = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
pallet-im-online = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
pallet-transaction-payment = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
pallet-transaction-payment-rpc = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
pallet-transaction-payment-rpc-runtime-api = { git = "https://github.com/duniter/substrate", branch = "duniter-substrate-v0.9.42", default-features = false }
......@@ -143,6 +145,7 @@ members = [
'end2end-tests',
'live-tests',
'pallets/certification',
'pallets/quota',
'pallets/distance',
'pallets/duniter-test-parameters',
'pallets/duniter-test-parameters/macro',
......
......@@ -44,23 +44,18 @@ RUN set -x && \
fi
# Build
ARG chain="gdev"
RUN set -x && \
cat /root/dynenv && \
. /root/dynenv && \
cargo build --locked $CARGO_OPTIONS --target "$RUST_ARCH_TRIPLET" && \
cargo build --locked $CARGO_OPTIONS --no-default-features --features $chain --target "$RUST_ARCH_TRIPLET" && \
mkdir -p build && \
mv target/$RUST_ARCH_TRIPLET/$TARGET_FOLDER/duniter build/
# Run tests if requested, except when cross-building
ARG cucumber=0
RUN if [ "$cucumber" != 0 ] && [ "$TARGETPLATFORM" = "$BUILDPLATFORM" ]; then \
cargo test --workspace --exclude duniter-end2end-tests --exclude duniter-live-tests && \
cargo cucumber -i account_creation* && \
cargo cucumber -i certification* && \
cargo cucumber -i identity_creation* && \
cargo cucumber -i monetary_mass* && \
cargo cucumber -i oneshot_account* && \
cargo cucumber -i transfer_all* && \
cargo ta && \
cd target/debug/deps/ && \
rm cucumber_tests-*.d && \
mv cucumber_tests* ../../../build/duniter-cucumber; \
......
This diff is collapsed.
......@@ -9,13 +9,13 @@ You can use `try-runtime` subcommand to replay a block against a real state from
5. Replay the block a first time to get the state:
```
duniter try-runtime --exectuion=Native execute-block --block-at 0x2633026e3e428b010cfe08d215b6253843a9fe54db28748ca56de37e6a83c644 live -s tmp/snapshot1 -u ws://localhost:9944
duniter try-runtime --execution=Native execute-block --block-at 0x2633026e3e428b010cfe08d215b6253843a9fe54db28748ca56de37e6a83c644 live -s tmp/snapshot1 -u ws://localhost:9944
```
6. Then, replay the block as many times as you need against your local snapshot:
```
duniter try-runtime --exectuion=Native execute-block --block-at 0x2633026e3e428b010cfe08d215b6253843a9fe54db28748ca56de37e6a83c644 --block-ws-uri ws://localhost:9944 snap -s tmp/snapshot1
duniter try-runtime --execution=Native execute-block --block-at 0x2633026e3e428b010cfe08d215b6253843a9fe54db28748ca56de37e6a83c644 --block-ws-uri ws://localhost:9944 snap -s tmp/snapshot1
```
For now, try-runtime does not allow storing the block locally; only the storage can be stored.
Feature: Balance transfer
Feature: Account creation
Scenario: Create a new account with enough funds
When alice sends 5 ĞD to dave
......@@ -29,8 +29,10 @@ Feature: Balance transfer
@ignoreErrors
Scenario: Create a new account without any funds
Then eve should have 0 ĞD
# Alice is treasury funder for 1 ĞD
Then alice should have 9 ĞD
When eve send 0 ĞD to alice
Then alice should have 10 ĞD
Then alice should have 9 ĞD
When alice send 5 ĞD to eve
Then eve should have 5 ĞD
When 1 block later
......
......@@ -3,9 +3,11 @@ Feature: Identity creation
Scenario: alice invites a new member to join the web of trust
# 6 ĞD covers:
# - account creation fees (3 ĞD)
# - existential deposit (2 ĞD)
# - existential deposit (1 ĞD)
# - transaction fees (below 1 ĞD)
When alice sends 7 ĞD to dave
# Alice is treasury funder for 1 ĞD => 10-1-7 = 2 (minus fees)
Then alice should have 199 cĞD
When bob sends 750 cĞD to dave
When charlie sends 6 ĞD to eve
# alice last certification is counted from block zero
......
Feature: Balance transfer
Feature: Monetary mass
Scenario: After 10 blocks, the monetary mass should be 60 ĞD
Then Monetary mass should be 30.00 ĞD
......
......@@ -2,20 +2,22 @@ Feature: Oneshot account
Scenario: Simple oneshot consumption
When alice sends 7 ĞD to oneshot dave
Then alice should have 3 ĞD
# Alice is treasury funder for 1 ĞD and pays fees
Then alice should have 199 cĞD
Then dave should have oneshot 7 ĞD
When oneshot dave consumes into account bob
Then dave should have oneshot 0 ĞD
Then bob should have 1699 cĞD
Then bob should have 1698 cĞD
Then bob should have oneshot 0 ĞD
Scenario: Double oneshot consumption
When alice sends 7 ĞD to oneshot dave
Then alice should have 3 ĞD
# Alice is treasury funder for 1 ĞD and pays fees
Then alice should have 199 cĞD
Then dave should have oneshot 7 ĞD
When oneshot dave consumes 4 ĞD into account bob and the rest into oneshot charlie
Then dave should have oneshot 0 ĞD
Then bob should have 14 ĞD
Then bob should have oneshot 0 ĞD
Then charlie should have 10 ĞD
Then charlie should have oneshot 299 cĞD
Then charlie should have oneshot 298 cĞD
......@@ -5,10 +5,13 @@ Feature: Balance transfer all
When bob sends all his ĞDs to dave
"""
Bob is a member, as such he is not allowed to empty his account completely,
if he tries to do so, the existence deposit (2 ĞD) must remain.
if he tries to do so, the existence deposit (1 ĞD) must remain.
Bob is a member, so transaction fees are refunded to him
101 = existential deposit (100) + fees refunded using quota (001)
"""
Then bob should have 2 ĞD
Then bob should have 101 cĞD
"""
10 ĞD (initial Bob balance) - 2 ĞD (Existential deposit) - 0.02 ĞD (transaction fees)
10 ĞD (initial Bob balance) - 1 ĞD (Existential deposit) - 0.02 ĞD (transaction fees)
"""
Then dave should have 798 cĞD
Then dave should have 898 cĞD
# TODO check that the missing cent went to treasury
......@@ -11,28 +11,37 @@
},
"identities": {
"Alice": {
"index": 1,
"balance": 1000,
"certs": [
"Bob",
"Charlie"
],
"pubkey": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"
"certs_received": {
"Bob": 2700000000,
"Charlie": 2700000000
},
"owner_address": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
},
"Bob": {
"index": 2,
"balance": 1000,
"certs": [
"Alice",
"Charlie"
],
"pubkey": "5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty"
"certs_received": {
"Alice": 2700000000,
"Charlie": 2700000000
},
"owner_address": "5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
},
"Charlie": {
"index": 3,
"balance": 1000,
"certs": [
"Alice",
"Bob"
],
"pubkey": "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y"
"certs_received": {
"Alice": 2700000000,
"Bob": 2700000000
},
"owner_address": "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
}
},
"parameters": {
......@@ -59,31 +68,18 @@
"wot_min_cert_for_create_idty_right": 2,
"wot_min_cert_for_membership": 2
},
"smiths": {
"Alice": {
"certs": [
"Bob",
"Charlie"
]
},
"Bob": {
"certs": [
"Alice",
"Charlie"
]
},
"Charlie": {
"certs": [
"Alice",
"Bob"
]
}
},
"clique_smiths": [
{ "name": "Alice" },
{ "name": "Bob" },
{ "name": "Charlie" }
],
"sudo_key": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY",
"technical_committee": [
"Alice",
"Bob",
"Charlie"
],
"ud": 1000
"treasury_funder_pubkey": "FHNpKmJrUtusuvKPGomAygQqeiks98bdV6yD61Stb6vg",
"ud": 1000,
"initial_monetary_mass": 3000
}
\ No newline at end of file
......@@ -9,36 +9,48 @@
},
"identities": {
"Alice": {
"index": 1,
"balance": 1000,
"certs": [
"Bob",
"Charlie"
],
"pubkey": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY"
"certs_received": {
"Bob": 2700000000,
"Charlie": 2700000000
},
"owner_address": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
},
"Bob": {
"index": 2,
"balance": 1000,
"certs": [
"Alice",
"Charlie"
],
"pubkey": "5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty"
"certs_received": {
"Alice": 2700000000,
"Charlie": 2700000000
},
"owner_address": "5FHneW46xGXgs5mUiveU4sbTyGBzmstUspZC92UhjJM694ty",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
},
"Charlie": {
"index": 3,
"balance": 1000,
"certs": [
"Alice",
"Bob"
],
"pubkey": "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y"
"certs_received": {
"Alice": 2700000000,
"Bob": 2700000000
},
"owner_address": "5FLSigC9HGRKVhB9FiEo4Y3koPsNmBmLJbpXg2mp1hXcS59Y",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
},
"Dave": {
"index": 4,
"balance": 1000,
"certs": [
"Alice",
"Bob"
],
"pubkey": "5DAAnrj7VHTznn2AWBemMuyBwZWs6FNFjdyVXUeYum3PTXFy"
"certs_received": {
"Alice": 2700000000,
"Bob": 2700000000
},
"owner_address": "5DAAnrj7VHTznn2AWBemMuyBwZWs6FNFjdyVXUeYum3PTXFy",
"membership_expire_on": 2700000000,
"next_cert_issuable_on": 0
}
},
"parameters": {
......@@ -65,31 +77,18 @@
"wot_min_cert_for_create_idty_right": 2,
"wot_min_cert_for_membership": 2
},
"smiths": {
"Alice": {
"certs": [
"Bob",
"Charlie"
]
},
"Bob": {
"certs": [
"Alice",
"Charlie"
]
},
"Charlie": {
"certs": [
"Alice",
"Bob"
]
}
},
"clique_smiths": [
{ "name": "Alice" },
{ "name": "Bob" },
{ "name": "Charlie" }
],
"sudo_key": "5GrwvaEF5zXb26Fz9rcQpDWS57CtERHpNehXCPcNoHGKutQY",
"technical_committee": [
"Alice",
"Bob",
"Charlie"
],
"ud": 1000
"treasury_funder_pubkey": "FHNpKmJrUtusuvKPGomAygQqeiks98bdV6yD61Stb6vg",
"ud": 1000,
"initial_monetary_mass": 4000
}
\ No newline at end of file
......@@ -30,9 +30,13 @@ pub async fn certify(client: &Client, from: AccountKeyring, to: AccountKeyring)
.at_latest()
.await
.unwrap()
.fetch(&gdev::storage().identity().identity_index_of(&from.into()))
.fetch(
&gdev::storage()
.identity()
.identity_index_of(&from.clone().into()),
)
.await?
.unwrap();
.unwrap_or_else(|| panic!("{} issuer must exist", from));
let receiver_index = client
.storage()
.at_latest()
......@@ -40,7 +44,7 @@ pub async fn certify(client: &Client, from: AccountKeyring, to: AccountKeyring)
.unwrap()
.fetch(&gdev::storage().identity().identity_index_of(&to.into()))
.await?
.unwrap();
.unwrap_or_else(|| panic!("{} issuer must exist", from));
let _events = create_block_with_extrinsic(
client,
......
......@@ -87,6 +87,13 @@ impl Process {
}
}
// Do not let the process keep running after the tests have ended
impl Drop for Process {
fn drop(&mut self) {
self.kill()
}
}
pub const DISTANCE_ORACLE_LOCAL_PATH: &str = "../target/debug/distance-oracle";
const DUNITER_DOCKER_PATH: &str = "/usr/local/bin/duniter";
const DUNITER_LOCAL_PATH: &str = "../target/debug/duniter";
......@@ -97,7 +104,10 @@ struct FullNode {
ws_port: u16,
}
pub async fn spawn_node(maybe_genesis_conf_file: Option<PathBuf>) -> (Client, Process, u16) {
pub async fn spawn_node(
maybe_genesis_conf_file: Option<PathBuf>,
no_spawn: bool,
) -> (Client, Option<Process>, u16) {
println!("maybe_genesis_conf_file={:?}", maybe_genesis_conf_file);
let duniter_binary_path = std::env::var("DUNITER_BINARY_PATH").unwrap_or_else(|_| {
if std::path::Path::new(DUNITER_DOCKER_PATH).exists() {
......@@ -107,20 +117,40 @@ pub async fn spawn_node(maybe_genesis_conf_file: Option<PathBuf>) -> (Client, Pr
}
});
let FullNode {
process,
p2p_port: _,
ws_port,
} = spawn_full_node(
&["--dev", "--execution=Native", "--sealing=manual"],
&duniter_binary_path,
maybe_genesis_conf_file,
);
let client = Client::from_url(format!("ws://127.0.0.1:{}", ws_port))
let mut the_ws_port = 9944;
let mut opt_process = None;
// Possibly spawn a node (we usually do, unless the --no-spawn option is used)
if !no_spawn {
let FullNode {
process,
p2p_port: _,
ws_port,
} = spawn_full_node(
&[
"--chain=gdev_dev",
"--execution=Native",
"--sealing=manual",
// Necessary options which were previously set by --dev option:
"--force-authoring",
"--rpc-cors=all",
"--alice",
"--tmp",
// Fix: End2End tests may fail due to network discovery. This option disables automatic peer discovery.
"--reserved-only",
// prevent local network discovery (even though it does not connect, due to the above flag)
"--no-mdns",
],
&duniter_binary_path,
maybe_genesis_conf_file,
);
opt_process = Some(process);
the_ws_port = ws_port;
}
let client = Client::from_url(format!("ws://127.0.0.1:{}", the_ws_port))
.await
.expect("fail to connect to node");
(client, process, ws_port)
(client, opt_process, the_ws_port)
}
pub async fn create_empty_block(client: &Client) -> Result<()> {
......@@ -169,16 +199,14 @@ fn spawn_full_node(
// Env vars
let mut envs = Vec::new();
if let Some(genesis_conf_file) = maybe_genesis_conf_file {
envs.push(("DUNITER_GENESIS_CONFIG", genesis_conf_file));
envs.push(("DUNITER_GENESIS_CONFIG", genesis_conf_file.clone()));
envs.push(("DUNITER_GENESIS_DATA", genesis_conf_file));
}
// Logs
let log_file_path = format!("duniter-v2s-{}.log", ws_port);
let log_file = std::fs::File::create(&log_file_path).expect("fail to create log file");
// Clean previous data
std::fs::remove_dir_all("/tmp/duniter-cucumber").ok();
// Command
let process = Process(
Command::new(duniter_binary_path)
......@@ -192,8 +220,6 @@ fn spawn_full_node(
&rpc_port.to_string(),
"--ws-port",
&ws_port.to_string(),
"--base-path",
"/tmp/duniter-cucumber",
]
.iter()
.chain(args),
......
......@@ -18,7 +18,7 @@ mod common;
use async_trait::async_trait;
use common::*;
use cucumber::{given, then, when, World, WorldInit};
use cucumber::{given, then, when, FailureWriter, World, WorldInit};
use sp_keyring::AccountKeyring;
use std::convert::Infallible;
use std::path::PathBuf;
......@@ -38,11 +38,11 @@ pub struct DuniterWorld {
impl DuniterWorld {
// Write methods
async fn init(&mut self, maybe_genesis_conf_file: Option<PathBuf>) {
async fn init(&mut self, maybe_genesis_conf_file: Option<PathBuf>, no_spawn: bool) {
if let Some(ref mut inner) = self.inner {
inner.kill();
}
self.inner = Some(DuniterWorldInner::new(maybe_genesis_conf_file).await);
self.inner = Some(DuniterWorldInner::new(maybe_genesis_conf_file, no_spawn).await);
}
fn kill(&mut self) {
if let Some(ref mut inner) = self.inner {
......@@ -121,13 +121,13 @@ impl World for DuniterWorld {
struct DuniterWorldInner {
client: Client,
process: Process,
process: Option<Process>,
ws_port: u16,
}
impl DuniterWorldInner {
async fn new(maybe_genesis_conf_file: Option<PathBuf>) -> Self {
let (client, process, ws_port) = spawn_node(maybe_genesis_conf_file).await;
async fn new(maybe_genesis_conf_file: Option<PathBuf>, no_spawn: bool) -> Self {
let (client, process, ws_port) = spawn_node(maybe_genesis_conf_file, no_spawn).await;
DuniterWorldInner {
client,
process,
......@@ -135,7 +135,9 @@ impl DuniterWorldInner {
}
}
fn kill(&mut self) {
self.process.kill();
if let Some(p) = &mut self.process {
p.kill();
}
}
}
......@@ -599,6 +601,9 @@ struct CustomOpts {
/// Keep running
#[clap(short, long)]
keep_running: bool,
/// Do not spawn a node; reuse the node expected to be listening on port 9944
#[clap(long)]
no_spawn: bool,
/// For compliance with Jetbrains IDE which pushes extra args.
/// https://youtrack.jetbrains.com/issue/CPP-33071/cargo-test-adds-extra-options-which-conflict-with-Cucumber
......@@ -627,6 +632,7 @@ async fn main() {
let opts = cucumber::cli::Opts::<_, _, _, CustomOpts>::parsed();
let keep_running = opts.custom.keep_running;
let no_spawn = opts.custom.no_spawn;
// Handle crtl+C
let running = Arc::new(AtomicBool::new(true));
......@@ -636,10 +642,10 @@ async fn main() {
})
.expect("Error setting Ctrl-C handler");
DuniterWorld::cucumber()
let summarize = DuniterWorld::cucumber()
//.fail_on_skipped()
.max_concurrent_scenarios(4)
.before(|feature, _rule, scenario, world| {
.before(move |feature, _rule, scenario, world| {
let mut genesis_conf_file_path = PathBuf::new();
genesis_conf_file_path.push("cucumber-genesis");
genesis_conf_file_path.push(&format!(
......@@ -647,21 +653,25 @@ async fn main() {
genesis_conf_name(&feature.tags, &scenario.tags)
));
world.set_ignore_errors(ignore_errors(&scenario.tags));
Box::pin(world.init(Some(genesis_conf_file_path)))
Box::pin(world.init(Some(genesis_conf_file_path), no_spawn))
})
.after(move |_feature, _rule, _scenario, maybe_world| {
if keep_running {
while running.load(Ordering::SeqCst) {}
}
// Kill early (without waiting for the destructor) to save CPU/memory
if let Some(world) = maybe_world {
world.kill();
}
Box::pin(std::future::ready(()))
})
.with_cli(opts)
.run_and_exit(features_path)
.run(features_path)
.await;
if summarize.hook_errors() > 0 {
panic!("Could not run tests correctly (hook errors)");
}
}
fn genesis_conf_name(feature_tags: &[String], scenario_tags: &[String]) -> String {
......
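The new --no-spawn flag, combined with the cucumber-node alias added in .cargo/config, allows running the scenarios against a node started by hand on the default port 9944; a possible workflow (the exact forwarding of the flag through the cargo alias is an assumption):
```
# terminal 1: manually-sealed ĞDev node listening on ws port 9944
cargo cucumber-node

# terminal 2: run the scenarios against that node instead of letting the tests spawn one
cargo cucumber --no-spawn
```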
......@@ -38,7 +38,7 @@ type Index = u32;
// Define gdev types
type AccountInfo = gdev::runtime_types::frame_system::AccountInfo<
Index,
gdev::runtime_types::pallet_duniter_account::types::AccountData<Balance>,
gdev::runtime_types::pallet_duniter_account::types::AccountData<Balance, IdtyIndex>,
>;
type IdtyData = gdev::runtime_types::common_runtime::entities::IdtyData;
type IdtyIndex = u32;
......
{
"name": "ĞDev",
"id": "gdev",
"chainType": "Live",
"bootNodes": [],
"telemetryEndpoints": [
[
"/dns/telemetry.polkadot.io/tcp/443/x-parity-wss/%2Fsubmit%2F",
0
]
],
"properties": {
"tokenDecimals": 2,
"tokenSymbol": "ĞD"
}
}
\ No newline at end of file
......@@ -14,7 +14,7 @@
// You should have received a copy of the GNU Affero General Public License
// along with Duniter-v2S. If not, see <https://www.gnu.org/licenses/>.
#[cfg(feature = "gdev")]
// Common to all Duniter blockchains
pub mod gen_genesis_data;
#[cfg(feature = "g1")]
......@@ -23,8 +23,6 @@ pub mod g1;
pub mod gdev;
#[cfg(feature = "gtest")]
pub mod gtest;
#[cfg(feature = "gtest")]
pub mod gtest_genesis;
use common_runtime::{AccountId, IdtyIndex, Signature};
use sp_core::{Pair, Public};
......@@ -42,14 +40,6 @@ pub fn get_from_seed<TPublic: Public>(seed: &str) -> <TPublic::Pair as Pair>::Pu
.public()
}
/*/// Generate an account ID from pair.
pub fn get_account_id_from_pair<TPublic: Public>(pair: TPublic::Pair) -> AccountId
where
AccountPublic: From<<TPublic::Pair as Pair>::Public>,
{
AccountPublic::from(pair.public()).into_account()
}*/
/// Generate an account ID from seed.
pub fn get_account_id_from_seed<TPublic: Public>(seed: &str) -> AccountId
where
......@@ -60,9 +50,14 @@ where
fn clique_wot(
initial_identities_len: usize,
) -> BTreeMap<IdtyIndex, BTreeMap<IdtyIndex, Option<common_runtime::BlockNumber>>> {
) -> (
BTreeMap<IdtyIndex, BTreeMap<IdtyIndex, Option<common_runtime::BlockNumber>>>,
u32,
) {
let mut certs_by_issuer = BTreeMap::new();
let mut count: u32 = 0;
for i in 1..=initial_identities_len {
count += initial_identities_len as u32;
certs_by_issuer.insert(
i as IdtyIndex,
(1..=initial_identities_len)
......@@ -76,5 +71,5 @@ fn clique_wot(
.collect(),
);
}
certs_by_issuer
(certs_by_issuer, count)
}