Commit ffbfd143 authored by Hugo Trentesaux, committed by Cédric Moreau

adapt ci to export new py-g1-migrator history files (!266)

* remove indexer-specific code

* Revert "make it manual for testing"

This reverts commit 01a935fe.

* make it manual for testing

* adapt ci
parent 1ea24193
Merge request !266: adapt ci to export new py-g1-migrator history files
Pipeline #37372 passed
.gitlab-ci.yml
@@ -41,7 +41,6 @@ check_labels:
   script:
     - ./scripts/check_labels.sh $CI_MERGE_REQUEST_LABELS $CI_MERGE_REQUEST_MILESTONE
 
 check_metadata:
   extends: .env
   stage: tests
@@ -260,7 +259,7 @@ create_g1_data:
   stage: build
   rules:
     - if: $CI_PIPELINE_SOURCE != "merge_request_event" && $CI_COMMIT_BRANCH =~ /^(release\/runtime-)[0-9].*/
-  image: python:3.9.18
+  image: h30x/py-g1-migrator # this image already has plyvel python requirement and dependency
   variables:
     DEBIAN_FRONTEND: noninteractive
     LEVELDB_PATH: /dump/duniter_default/data/leveldb
@@ -272,8 +271,6 @@
     # - bin/duniter sync g1.cgeek.fr --store-txs --nointeractive --mdb 1.8.7
     # - mkdir -p /tmp/backup-g1-duniter-1.8.7
     # - cp -R $HOME/.config/duniter/1.8.7/data /tmp/backup-g1-duniter-1.8.7
-    # - cp -R $HOME/.config/duniter/1.8.7/g1 /tmp/backup-g1-duniter-1.8.7
-    # - cp -R $HOME/.config/duniter/1.8.7/txs.db /tmp/backup-g1-duniter-1.8.7
     # - tar -cvzf /tmp/backup-g1-duniter-1.8.7.tgz /tmp/backup-g1-duniter-1.8.7
     # Then the file is uploaded to dl.cgeek.fr manually
     - curl https://dl.cgeek.fr/public/backup-g1-duniter-1.8.7.tgz -o g1-dump.tgz
@@ -281,22 +278,20 @@
     - rm g1-dump.tgz
     - mv tmp/backup-g1-duniter-1.8.7 duniter_default
     # py-g1-migrator conversion
-    - git clone https://git.duniter.org/tools/py-g1-migrator.git -b import_identities_from_leveldb /py-g1-migrator
+    - git clone https://git.duniter.org/tools/py-g1-migrator.git --depth 1 --branch hugo/docker /py-g1-migrator
     - cd /py-g1-migrator
     - rm -rf inputs/*
-    - apt-get update
-    - apt-get install -y sqlite3 libleveldb-dev jq
-    - pip install -r requirements.txt
-    # Export identities and wallets
-    - ./main.py
-    # Export transaction history
-    - sqlite3 /dump/duniter_default/txs.db --json "select time,comment,issuers,outputs from txs;" > inputs/transactions_history.json 2>> inputs/txs.err
-    - ./generate_transactions_history.py
-    # Merge in one file
-    - 'jq -s "{ identities: .[0].identities, wallets: .[0].wallets, initial_monetary_mass: .[0].initial_monetary_mass, current_block: .[0].current_block, transactions_history: .[1] }" output/gtest_genesis.json output/history.json > output/g1-data.json'
+    # Export genesis file
+    - ./main.py # ./output/genesis.json
+    # Export history files for squid
+    - ./squid-block.py # ./output/block_hist.json
+    - ./squid-cert.py # ./output/cert_hist.json
+    - ./squid-tx.py # ./output/tx_hist.json
     # Make the exported file available for next jobs
     - mkdir -p $CI_PROJECT_DIR/release/
-    - cp output/g1-data.json $CI_PROJECT_DIR/release/
+    - cp output/genesis.json $CI_PROJECT_DIR/release/
+    - cp output/block_hist.json $CI_PROJECT_DIR/release/
+    - cp output/cert_hist.json $CI_PROJECT_DIR/release/
+    - cp output/tx_hist.json $CI_PROJECT_DIR/release/
   artifacts:
     expire_in: never
     paths:
@@ -311,8 +306,7 @@
   image: rust:1-bullseye
   variables:
     WASM_FILE: $CI_PROJECT_DIR/release/${RUNTIME}_runtime.compact.compressed.wasm
-    DUNITER_GENESIS_DATA: $CI_PROJECT_DIR/release/g1-data.json
-    DUNITER_GENESIS_EXPORT: $CI_PROJECT_DIR/release/${RUNTIME}-indexer.json
+    DUNITER_GENESIS_DATA: $CI_PROJECT_DIR/release/genesis.json # py-g1-migrator outputs this file with `./main.py`
     DEBIAN_FRONTEND: noninteractive
   script:
     - apt-get update
@@ -363,7 +357,7 @@ create_release:
     - export MILESTONE=$(echo $CI_COMMIT_BRANCH | sed -e "s/release\///g")
     - cargo xtask release-runtime $MILESTONE $CI_COMMIT_BRANCH
     # We always ship runtimes: this is both a proof and a convenience
-    - cargo xtask create-asset-link $MILESTONE g1-data.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/g1-data.json
+    - cargo xtask create-asset-link $MILESTONE genesis.json https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/genesis.json
     - cargo xtask create-asset-link $MILESTONE gdev_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_runtime.compact.compressed.wasm
     - cargo xtask create-asset-link $MILESTONE gtest_runtime.compact.compressed.wasm https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gtest_runtime.compact.compressed.wasm
     - cargo xtask create-asset-link $MILESTONE gdev_client-specs.yaml https://nodes.pages.duniter.org/-/rust/duniter-v2s/-/jobs/$CI_JOB_ID/artifacts/release/gdev_client-specs.yaml
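Taken together, `create_g1_data` now reduces to: download the Duniter v1.8.7 dump, clone py-g1-migrator, run `./main.py` for the genesis file, then the three `squid-*.py` scripts for the indexer history files. Below is a minimal sketch of the same sequence run locally through Docker; the image name, branch, mount point and `LEVELDB_PATH` value come from the job above, while the `docker run` invocation itself is an assumption (the CI needs none, since the job already executes inside that image):

# Fetch and unpack the Duniter 1.8.7 dump, as the job does
curl https://dl.cgeek.fr/public/backup-g1-duniter-1.8.7.tgz -o g1-dump.tgz
mkdir -p dump && tar xvzf g1-dump.tgz --directory dump/
rm g1-dump.tgz
mv dump/tmp/backup-g1-duniter-1.8.7 dump/duniter_default

# Clone the migrator branch pinned by the CI
git clone https://git.duniter.org/tools/py-g1-migrator.git --depth 1 --branch hugo/docker

# Run the four exports inside the py-g1-migrator image (assumed to provide a plain shell)
docker run --rm \
  -v "$PWD/dump:/dump" \
  -v "$PWD/py-g1-migrator:/py-g1-migrator" \
  -w /py-g1-migrator \
  -e LEVELDB_PATH=/dump/duniter_default/data/leveldb \
  h30x/py-g1-migrator \
  sh -c "rm -rf inputs/* && ./main.py && ./squid-block.py && ./squid-cert.py && ./squid-tx.py"

# Expected results, per the comments in the job:
#   output/genesis.json, output/block_hist.json, output/cert_hist.json, output/tx_hist.json

The second diff below is the Rust side of the same cleanup, in the genesis-generation code (apparently gen_genesis_data.rs in duniter-v2s).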
@@ -34,7 +34,6 @@ use sp_runtime::{
 use std::{
     collections::{BTreeMap, HashMap},
     fmt::{Display, Formatter},
-    fs,
     ops::{Add, Sub},
 };
@@ -129,7 +128,6 @@ pub struct GenesisIndexerExport {
     technical_committee: Vec<String>,
     ud: u64,
     wallets: BTreeMap<AccountId, u64>,
-    transactions_history: Option<BTreeMap<AccountId, Vec<TransactionV2>>>,
 }
 
 #[derive(Deserialize, Serialize)]
@@ -155,7 +153,6 @@ struct GenesisMigrationData {
     identities: BTreeMap<String, IdentityV1>,
     #[serde(default)]
     wallets: BTreeMap<PubkeyV1, u64>,
-    transactions_history: Option<BTreeMap<PubkeyV1, Vec<TransactionV1>>>,
 }
 
 // Base58 encoded Ed25519 public key
@@ -667,68 +664,7 @@
     }
 
     // Indexer output
-    if let Ok(path) = std::env::var("DUNITER_GENESIS_EXPORT") {
-        // genesis_certs_min_received => min_cert
-        // genesis_memberships_expire_on => membership_period
-        // genesis_smith_certs_min_received => smith_min_cert
-        let export = GenesisIndexerExport {
-            first_ud,
-            first_ud_reeval,
-            genesis_parameters: common_parameters.clone(),
-            identities: identities_v2,
-            sudo_key: sudo_key.clone(),
-            technical_committee,
-            ud,
-            wallets: accounts
-                .iter()
-                .map(|(account_id, data)| (account_id.clone(), data.balance))
-                .collect(),
-            smiths: (smiths)
-                .iter()
-                .map(|smith| {
-                    (
-                        smith.name.clone(),
-                        SmithData {
-                            idty_index: smith.idty_index,
-                            name: smith.name.clone(),
-                            account: smith.account.clone(),
-                            session_keys: smith.session_keys.clone(),
-                            certs_received: smith.certs_received.clone(),
-                        },
-                    )
-                })
-                .collect::<BTreeMap<String, SmithData>>(),
-            transactions_history: genesis_data.transactions_history.map(|history| {
-                history
-                    .iter()
-                    // Avoid wrong pubkeys in tx history
-                    .filter(|(pubkey, _)| v1_pubkey_to_account_id((*pubkey).clone()).is_ok())
-                    .map(|(pubkey, txs)| {
-                        (
-                            v1_pubkey_to_account_id(pubkey.clone())
-                                .expect("already checked account"),
-                            txs.iter()
-                                // Avoid wrong pubkeys in tx history
-                                .filter(|tx| v1_pubkey_to_account_id(tx.issuer.clone()).is_ok())
-                                .map(|tx| TransactionV2 {
-                                    issuer: v1_pubkey_to_account_id(tx.issuer.clone())
-                                        .expect("already checked tx.issuer"),
-                                    amount: tx.amount.clone(),
-                                    written_time: tx.written_time,
-                                    comment: tx.comment.clone(),
-                                })
-                                .collect::<Vec<TransactionV2>>(),
-                        )
-                    })
-                    .collect::<BTreeMap<AccountId, Vec<TransactionV2>>>()
-            }),
-        };
-        fs::write(
-            &path,
-            serde_json::to_string_pretty(&export).expect("should be serializable"),
-        )
-        .unwrap_or_else(|_| panic!("Could not export genesis data to {}", &path));
-    }
+    // handled by indexer directly from py-g1-migrator output
 
     let genesis_data = GenesisData {
         accounts,
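With the export branch gone, the node reads only `DUNITER_GENESIS_DATA` (now `genesis.json`), and the V1-pubkey-to-AccountId conversion of the transaction history no longer happens at genesis time: the indexer takes the squid history files straight from py-g1-migrator. A quick sanity check of the job artifacts, assuming `genesis.json` keeps the top-level fields the removed `jq` merge step read from main.py's old output (`identities`, `wallets`, `initial_monetary_mass`, `current_block`; this field list is an assumption to verify against the actual py-g1-migrator output):

# Summarize the genesis export (field names assumed, see above)
jq '{identities: (.identities | length), wallets: (.wallets | length), initial_monetary_mass, current_block}' release/genesis.json

# Each squid history file should at least parse and be non-empty
for f in release/block_hist.json release/cert_hist.json release/tx_hist.json; do
  jq -e 'length > 0' "$f" > /dev/null && echo "$f OK"
done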