diff --git a/.gitignore b/.gitignore
index 326fe8ae9fe43fa153c989e3be69fa5f3783cd42..023ba0662bc71abbe2595b34c32e940e36b97565 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,6 +1,5 @@
 __pycache__
+inputs/*
 output/*
-!output/.gitkeep
-boot_gdev/tmp/
-boot_gdev/resources/
-*.json
\ No newline at end of file
+env/*
+!output/.gitkeep
\ No newline at end of file
diff --git a/README.md b/README.md
index d069d983f058161f09265f3074f99bcd807fc5e1..5d052e7ad0f165913083ebff5514ac2f6a65df43 100644
--- a/README.md
+++ b/README.md
@@ -8,15 +8,9 @@ Edit `.env` file to suit your needs, then export environment variables:
 
 ## Execution
 
-TL;DR
-```
-time (./main.py && ./generate_transactions_history.py && ./boot_gdev.sh poka gdevX && ./push-gdev.sh)
-```
+This is used in Duniter CI:
 
-or just:
- ```
-./orchestra.sh poka gdevX
-```
+https://git.duniter.org/nodes/rust/duniter-v2s/-/blob/56998122e42afd2c2c1642a72a6772a82490ccda/.gitlab-ci.yml#L270-L298
 
 ---
 
@@ -33,18 +27,6 @@ or
 
     pip install substrate-interface
 
-## 0. Add optional custom data
-
-**You can add link your new Ğ1 v2 address with your actual Ğ1 v1 pubkey by editing `custom/identities.json` file.**
-
-You can add your owns identities by editing `custom/identities.json` file.
-
-You can add your identity to smiths wot by editing `custom/smiths.json` file.
-
-You can add your identity to tech commitee by editing `custom/technical_committee.json` file.
-
-You can edit currency parameters in `custom/parameters.py` file.
-
 ## 1. Generate your up-to-date v2s Ğ1data genesis
 
     ./main.py
diff --git a/boot_gdev.sh b/boot_gdev.sh
deleted file mode 100755
index 8858195cf86c4801ffcfcc411a73f76ded829e7b..0000000000000000000000000000000000000000
--- a/boot_gdev.sh
+++ /dev/null
@@ -1,76 +0,0 @@
-#!/bin/bash
-
-# This script is going to help you to boot a new ĞDev bootstrap node
-# This is alpha
-
-# sanity startup
-set -e
-MY_PATH="`dirname \"$0\"`"
-MY_PATH="`( cd \"$MY_PATH\" && pwd )`"
-BOOT_GDEV="$MY_PATH/boot_gdev"
-[[ -d tmp ]] && rm -r tmp/
-[[ ! -d resources ]] && mkdir -p resources/
-rm -f *.json
-
-# helper to execute a duniter subcommand in docker
-function duniter_exec () {
-  OUTPUT=$(docker run --rm -it --entrypoint duniter duniter/duniter-v2s:$TAG "$@")
-  echo "${OUTPUT::-1}"
-}
-
-# get smith username
-smith_idty=$1
-[[ ! $smith_idty ]] && echo "Please pass your smith username identity in argument (ex: ./boot_gdev.sh poka)" && exit 1
-
-# get smith username
-PROTOCOL_VERSION=$2
-[[ ! $PROTOCOL_VERSION ]] && echo "Please pass the protocol version (ex: ./boot_gdev.sh poka gdev5)" && exit 1
-
-# get latest Duniter image tag
-TAG=$(curl -sL --fail "https://hub.docker.com/v2/repositories/duniter/duniter-v2s/tags/?page_size=1000" | jq -r '.results | .[] | .name' | grep '^sha-' | head -n1)
-[[ ! $TAG ]] && echo "Can't get latest Duniter v2s Docker image tag" && exit 1
-
-echo "Generate final raw ChainSpec"
-echo "    Duniter v2s tag: $TAG"
-echo "    Smith username: $smith_idty"
-
-# generate temporary secret key
-initKey=$(duniter_exec key generate)
-tmpSecret=$(echo -n "$initKey" | grep "Secret phrase:" | awk -F ':       ' '{print $2}' | sed 's/^[ \t]*//;s/[ \t]*$//' | tr -d '\r')
-tmpAddress=$(echo -n "$initKey" | grep "SS58 Address:" | awk -F ':      ' '{print $2}' | sed 's/^[ \t]*//;s/[ \t]*$//' | tr -d '\r')
-
-echo "    address: $tmpAddress"
-
-# generate temporary sessions_keys
-initSessionsKeysBloc=$(duniter_exec key generate-session-keys --chain gdev_local --suri "$tmpSecret")
-initSessionsKeys=$(echo -n "$initSessionsKeysBloc" | grep "Session Keys:" | awk -F ': ' '{print $2}' | sed 's/^[ \t]*//;s/[ \t]*$//' | tr -d '\r')
-
-echo "    sessions_keys: $initSessionsKeys"
-echo
-
-# copy genesis json to validator folder
-mkdir -p $BOOT_GDEV/resources/
-cp $MY_PATH/output/gdev.json $BOOT_GDEV/resources/
-
-# add session_keys to smith identity
-sed -i "0,/  \"$smith_idty\": {/s//  \"$smith_idty\": {\n    \"session_keys\": \"$initSessionsKeys\",/" $BOOT_GDEV/resources/gdev.json
-
-# generate initials raw chainSpec
-docker run -v $BOOT_GDEV/resources/:/var/lib/duniter/resources \
-    -e DUNITER_GENESIS_CONFIG=/var/lib/duniter/resources/gdev.json \
-    --rm -it \
-    --entrypoint duniter duniter/duniter-v2s:$TAG build-spec -lerror \
-    --chain=gdev-gl --raw \
-    > $BOOT_GDEV/resources/gdev-raw.json
-
-# Set good protocolId in chainspec
-sed -i "s/gdev2/$PROTOCOL_VERSION/g" $BOOT_GDEV/resources/gdev-raw.json
-
-echo "Done"
-echo
-
-# create a local live network to test
-echo "Generate final Docker compose ready for production"
-cd $BOOT_GDEV
-$BOOT_GDEV/create-live-network.sh "$tmpSecret" "$TAG" "$smith_idty"
-
diff --git a/boot_gdev/create-live-network.sh b/boot_gdev/create-live-network.sh
deleted file mode 100755
index 66b46e42271bc4e6d4f1e2daad273f6feb3d100a..0000000000000000000000000000000000000000
--- a/boot_gdev/create-live-network.sh
+++ /dev/null
@@ -1,78 +0,0 @@
-#!/usr/bin/env bash
-#
-# USAGE (these steps are deprecated since boot_gdev.sh borned)
-#
-# 1. Generate genesis authorities session keys.
-# 2. Create the json file that contains the genesis configuration and verify carefully that the
-#    declared session keys correspond to the one you have generated in the first step.
-# 3. Generate raw chain specs with script `gen-live-network-raw-spec.sh`.
-# 4. Share the generated raw spec with other genesis authorities.
-# 5. Each genesis authority should run this script with its session keys seed.
-#
-
-# This script is meant to be run on Unix/Linux based systems
-set -e
-
-# params
-VALIDATOR_SECRET_KEY=$1
-DUNITER_IMAGE_TAG=$2
-SMITHNAME=$3
-CURRENCY="gdev"
-WORK_DIR="tmp/$CURRENCY"
-SPEC_DIR="resources"
-
-echo "    CURRENCY=$CURRENCY"
-
-# constants
-
-# clean and (re-)create working forders
-rm -rf $WORK_DIR
-mkdir -p $WORK_DIR/duniter-rpc
-mkdir -p $WORK_DIR/duniter-validator/chains/$CURRENCY
-WORK_DIR_ABSO=$(realpath $WORK_DIR)
-
-# helper to execute a duniter subcommand in docker
-function duniter_tmp () {
-  docker rm duniter-tmp > /dev/null
-  OUTPUT=$(docker run --name duniter-tmp -it --entrypoint duniter duniter/duniter-v2s:$DUNITER_IMAGE_TAG "$@")
-  echo "${OUTPUT::-1}"
-}
-
-# copy raw chain spec
-cp $SPEC_DIR/$CURRENCY-raw.json $WORK_DIR/duniter-rpc/$CURRENCY-raw.json
-cp $SPEC_DIR/$CURRENCY-raw.json $WORK_DIR/duniter-validator/$CURRENCY-raw.json
-
-# generate rpc node key
-echo "    generate rpc node key…"
-RPC_NODE_KEY=$(duniter_tmp key generate-node-key --file /var/lib/duniter/node-key.txt)
-docker cp duniter-tmp:/var/lib/duniter/node-key.txt $WORK_DIR/duniter-rpc/node-key
-echo "    RPC_NODE_KEY=$RPC_NODE_KEY"
-
-# generate validator node key
-echo "    generate validator node key…"
-VALIDATOR_NODE_KEY=$(duniter_tmp key generate-node-key --file /var/lib/duniter/node-key.txt)
-docker cp duniter-tmp:/var/lib/duniter/node-key.txt $WORK_DIR/duniter-validator/node-key
-echo "    VALIDATOR_NODE_KEY=$VALIDATOR_NODE_KEY"
-
-# generate docker-compose file
-echo "    generate docker-compose file…"
-cp live-template.docker-compose.yml $WORK_DIR/docker-compose.yml
-sed -i -e "s/DUNITER_IMAGE_TAG/$DUNITER_IMAGE_TAG/g" $WORK_DIR/docker-compose.yml
-sed -i -e "s/CURRENCY/$CURRENCY/g" $WORK_DIR/docker-compose.yml
-sed -i -e "s/RPC_NODE_KEY/$RPC_NODE_KEY/g" $WORK_DIR/docker-compose.yml
-sed -i -e "s/VALIDATOR_NODE_KEY/$VALIDATOR_NODE_KEY/g" $WORK_DIR/docker-compose.yml
-sed -i -e "s/SMITHNAME/$SMITHNAME/g" $WORK_DIR/docker-compose.yml
-
-# inject validator session keys in validator node keystore
-echo "    Inject validator session keys in validator node keystore…"
-duniter_tmp key generate-session-keys --chain "${CURRENCY}_local" --suri "$VALIDATOR_SECRET_KEY" -d /var/lib/duniter
-docker cp duniter-tmp:/var/lib/duniter/chains/${CURRENCY}_local/keystore $WORK_DIR/duniter-validator/chains/$CURRENCY
-
-# launch the network
-echo
-echo "This is your new Docker compose: '$WORK_DIR_ABSO'"
-cd $WORK_DIR
-cat docker-compose.yml
-
-echo
-echo -e "TODO: cd boot_gdev/gdev/tmp/ && docker compose up -d"
diff --git a/boot_gdev/live-template.docker-compose.yml b/boot_gdev/live-template.docker-compose.yml
deleted file mode 100644
index 5eb01d2b2cccd45e76b32d14ef324633e2594219..0000000000000000000000000000000000000000
--- a/boot_gdev/live-template.docker-compose.yml
+++ /dev/null
@@ -1,47 +0,0 @@
-version: "3.4"
-
-services:
-  duniter-rpc:
-    image: duniter/duniter-v2s:DUNITER_IMAGE_TAG
-    restart: unless-stopped
-    ports:
-      - "9933:9933"
-      - "9944:9944"
-      - "30333:30333"
-      - "9615:9615"
-    volumes:
-      - ./duniter-rpc/:/var/lib/duniter/
-    environment:
-      - DUNITER_CHAIN_NAME=/var/lib/duniter/CURRENCY-raw.json
-    command:
-      - "--bootnodes"
-      - "/dns/duniter-validator/tcp/30333/p2p/VALIDATOR_NODE_KEY"
-      - "--node-key-file"
-      - "/var/lib/duniter/node-key"
-      - "--rpc-cors"
-      - "all"
-      - "--name"
-      - "SMITHNAME-rpc"
-
-  duniter-validator:
-    image: duniter/duniter-v2s:DUNITER_IMAGE_TAG
-    restart: unless-stopped
-    ports:
-      - "127.0.0.1:9945:9944"
-      - "30334:30333"
-      - "9616:9615"
-    volumes:
-      - ./duniter-validator/:/var/lib/duniter/
-    environment:
-      - DUNITER_CHAIN_NAME=/var/lib/duniter/CURRENCY-raw.json
-    command:
-      - "--bootnodes"
-      - "/dns/duniter-rpc/tcp/30333/p2p/RPC_NODE_KEY"
-      - "--node-key-file"
-      - "/var/lib/duniter/node-key"
-      - "--rpc-methods=Unsafe"
-      - "--validator"
-      - "--rpc-cors"
-      - "all"
-      - "--name"
-      - "SMITHNAME-smith"
diff --git a/custom/addresses_switches.json b/custom/addresses_switches.json
deleted file mode 100644
index 4cb8fc7460b2abc28908509a23b3f5691e9e1726..0000000000000000000000000000000000000000
--- a/custom/addresses_switches.json
+++ /dev/null
@@ -1,26 +0,0 @@
-{
-    "poka": {
-        "v1": "Do99s6wQR2JLfhirPdpAERSjNbmjjECzGxHNJMiNKT3P",
-        "v2": "5CQ8T4qpbYJq7uVsxGPQ5q2df7x3Wa4aRY6HUWMBYjfLZhnn"
-    },
-    "tuxmain": {
-        "v1": "45GfjkWCWQhJ3epJVGC2NSg1Rcu4Ue1vDD3kk9eLs5TQ",
-        "v2": "5D2DnScFpxoEUXDwZbJH18tRsQMygBSh1F6YCcWvTYzKY2W7"
-    },
-    "elois": {
-        "v1": "D9D2zaJoWYWveii1JRYLVK3J4Z7ZH3QczoKrnQeiM6mx",
-        "v2": "5CtmAELWQ6FGDrAfsfVbGe4CXW4YrpNRTVU27eWzSE6J8ocU"
-    },
-    "1000i100": {
-        "v1": "2sZF6j2PkxBDNAqUde7Dgo5x3crkerZpQ4rBqqJGn8QT",
-        "v2": "5CCrBS67BrpBx3ihGHc72HZp3eHHbETxWFuNfwbbdoGSJFN8"
-    },
-    "HugoTrentesaux": {
-        "v1": "55oM6F9ZE2MGi642GGjhCzHhdDdWwU6KchTjPzW7g3bp",
-        "v2": "5Dq8xjvkmbz7q4g2LbZgyExD26VSCutfEc6n4W4AfQeVHZqz"
-    },
-    "ManUtopiK": {
-        "v1": "2JggyyUn2puL5PG6jsMYFC2y9KwjjMmy2adnx3c5fUf8",
-        "v2": "5DUjwHRqPayt3tAZk1fqEgU99xZB9jzBHKy2sMSTNcc7m9D1"
-    }
-}
\ No newline at end of file
diff --git a/custom/identities.json b/custom/identities.json
deleted file mode 100644
index 9e26dfeeb6e641a33dae4961196235bdb965b21b..0000000000000000000000000000000000000000
--- a/custom/identities.json
+++ /dev/null
@@ -1 +0,0 @@
-{}
\ No newline at end of file
diff --git a/custom/parameters.py b/custom/parameters.py
deleted file mode 100644
index 4e286192c2ad08861baf24140e975b3265090a00..0000000000000000000000000000000000000000
--- a/custom/parameters.py
+++ /dev/null
@@ -1,33 +0,0 @@
-from lib.utility_param import *
-
-# Genesis parameters
-GENESIS_CERTS_EXPIRE_ON = 15
-GENESIS_CERTS_MIN_RECEIVED = 0
-GENESIS_MEMBERSHIPS_EXPIRE_ON = b_months(12)
-GENESIS_SMITH_CERTS_EXPIRE_ON = b_months(24)
-GENESIS_SMITH_CERTS_MIN_RECEIVED = 1
-GENESIS_SMITH_MEMBERSHIPS_EXPIRE_ON = b_months(12)
-
-# Parameters
-BABE_EPOCH_DURATION = 600
-CERT_PERIOD = b_days(5) # 5 days
-CERT_MAX_BY_ISSUER = 100
-CERT_MIN_RECEIVED_CERT_TO_ISSUE_CERT = 5
-CERT_VALIDITY_PERIOD = b_months(24)
-IDTY_CONFIRM_PERIOD = b_days(7)
-IDTY_CREATION_PERIOD = b_days(7)
-MEMBERSHIP_PERIOD = b_months(12)
-PENDING_MEMBERSHIP_PERIOD = b_months(1)
-UD_CREATION_PERIOD = b_days(1)
-UD_REEVAL_PERIOD = b_months(6)
-SMITH_CERT_PERIOD = b_days(5)
-SMITH_CERT_MAX_BY_ISSUER = 15
-SMITH_CERT_MIN_RECEIVED_CERT_TO_ISSUE_CERT = 3
-SMITH_CERT_VALIDITY_PERIOD = b_months(24)
-SMITH_MEMBERSHIP_PERIOD = b_months(12)
-SMITH_PENDING_MEMBERSHIP_PERIOD = b_months(1)
-SMITHS_WOT_FIRST_CERT_ISSUABLE_ON = b_days(1)
-SMITHS_WOT_MIN_CERT_FOR_MEMBERSHIP = 3
-WOT_FIRST_CERT_ISSUABLE_ON = b_minutes(5) # 5 minutes
-WOT_MIN_CERT_FOR_CREATE_IDTY_RIGHT = 5
-WOT_MIN_CERT_FOR_MEMBERSHIP = 5
diff --git a/custom/smiths.json b/custom/smiths.json
deleted file mode 100644
index 4fa85a31f36c1037d421c6aa4e7c1ea129c3ed89..0000000000000000000000000000000000000000
--- a/custom/smiths.json
+++ /dev/null
@@ -1,28 +0,0 @@
-[
-  {
-    "poka": {
-      "certs_received": ["HugoTrentesaux", "tuxmain", "ManUtopiK"]
-    }
-  },
-  {
-    "HugoTrentesaux": {
-      "session_keys": "0xe59c3a385df1e64678fef4afdec9bbf1949f244ff48bc9a0a0a09ae0a3c9b36d16f1b93ee972a77dbae3ab9c107eb25864c4684c583cf95d825e620f8c7ff10416f1b93ee972a77dbae3ab9c107eb25864c4684c583cf95d825e620f8c7ff10416f1b93ee972a77dbae3ab9c107eb25864c4684c583cf95d825e620f8c7ff104",
-      "certs_received": ["tuxmain", "poka", "ManUtopiK"]
-    }
-  },
-  {
-    "tuxmain": {
-      "certs_received": ["HugoTrentesaux", "poka", "ManUtopiK"]
-    }
-  },
-  {
-    "1000i100": {
-      "certs_received": ["HugoTrentesaux", "poka", "ManUtopiK"]
-    }
-  },
-  {
-    "ManUtopiK": {
-      "certs_received": ["HugoTrentesaux", "poka", "1000i100"]
-    }
-  }
-]
diff --git a/custom/technical_committee.json b/custom/technical_committee.json
deleted file mode 100644
index 27c1328e104fc323efa47650c92fd0d4fd0fb175..0000000000000000000000000000000000000000
--- a/custom/technical_committee.json
+++ /dev/null
@@ -1 +0,0 @@
-["poka", "HugoTrentesaux", "tuxmain", "1000i100"]
\ No newline at end of file
diff --git a/docker-compose.yml b/docker-compose.yml
deleted file mode 100644
index 6ce5881624e746f45ec52e4d45a201072a13b86a..0000000000000000000000000000000000000000
--- a/docker-compose.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-# This is a minimal docker-compose.yml template for running a Duniter instance
-# For more detailed examples, look at docker/compose folder
-
-version: "3.5"
-
-services:
-  duniter-v2s:
-    container_name: duniter-v2s-gtest
-    # choose the version of the image here
-    image: duniter/duniter-v2s:latest
-    ports:
-      # telemetry
-      - 9615:9615
-      # rpc
-      - 9933:9933
-      # rpc-ws
-      - 9944:9944
-      # p2p
-      - 30333:30333
-    environment:
-      DUNITER_INSTANCE_NAME: "duniter_local"
-      DUNITER_CHAIN_NAME: "dev"
-      #DUNITER_DISABLE_PROMETHEUS: "false"
-      DUNITER_GENESIS_CONFIG: "/var/lib/duniter/gdev.json"
-    volumes:
-      - ./output:/var/lib/duniter
diff --git a/docs/generate_inputs_data.sh b/docs/generate_inputs_data.sh
deleted file mode 100755
index bb06a6420b1854e3f7fe1224e33bd3d4080a99b3..0000000000000000000000000000000000000000
--- a/docs/generate_inputs_data.sh
+++ /dev/null
@@ -1,44 +0,0 @@
-#!/bin/bash
-
-# This doc-script allow you to generate the input data required by py-g1-migrator if you don't want to pull it from the network.
-# You need a Duniter 1.8.x node to use it, with `--store-txs` option to get the `txs.db` required for the last sqlite export.
-# You also need `dex` tool. To get it, in you Duniter source code folder: `cargo bdex`. Then dex is there: `target/release/dex`
-# You can find more informations in this issue: https://git.duniter.org/tools/py-g1-migrator/-/issues/1
-
-# Set the export folder you want
-folder=~/dev/py-g1-migrator/inputs/
-# Set the path of dex binary
-shopt -s expand_aliases
-alias dex="~/dev/duniter/target/release/dex"
-
-# Stop Duniter to use dex tool...
-# duniter stop
-
-echo
-echo "Export wallets data in progress..."
-yes|dex find wallet -p balance -o json -f $folder/wallets.json > /dev/null || exit 1
-
-echo "Export certs data in progress..."
-yes|dex find cindex -p issued -o json -f $folder/certs.json > /dev/null
-
-echo "Export identities data in progress..."
-yes|dex find iindex -p uid member writtenOn wotb_id created_on -o json -f $folder/idty.json > /dev/null
-
-echo "Export membership data in progress..."
-yes|dex find mindex -p expires_on -o json -f $folder/membership.json > /dev/null
-
-echo "Export blocs dates data in progress..."
-yes|dex find main_blocks -p medianTime -o json -l 10000000 -f $folder/blocs.json > /dev/null
-
-echo "Export UD value in progress..."
-yes|dex find bindex -r -l 1 -p dividend mass medianTime udTime udReevalTime membersCount -o json -f $folder/ud_value.json > /dev/null
-
-# echo "Export all history transactions in progress..."
-# cd $HOME/.config/duniter/duniter_default
-# sqlite3 txs.db --json "select time,comment,issuers,outputs from txs;" > $folder/transactions_history.json 2>>/tmp/duniter-monit.err && echo "Done"
-
-echo "Starting python script to generate genesis data"
-./main.py
-
-echo "Copying genesis data to bootstrap server"
-scp output/gtest_genesis.json trentesaux:~/bootstrap_gtest/
diff --git a/export-from-v1.8.sh b/export-from-v1.8.sh
deleted file mode 100755
index 7bca29f6f29f5b3d6e8e79b4dff5acbcf1c881d8..0000000000000000000000000000000000000000
--- a/export-from-v1.8.sh
+++ /dev/null
@@ -1,231 +0,0 @@
-#!/bin/bash
-set -ueo pipefail
-
-me="${0##*/}"
-
-usage () {
-  cat <<EOF
-Usage: $me -l [-p <path>] [<dest_folder>]
-       $me -p <path> [<dest_folder>]
-       $me -v <name> [<dest_folder>]
-       $me -s [<dest_folder>]
-Export duniter v1.8 data to the specified folder. Use current directory
-when <dest_folder> is not set.
-The actual exported data will appear into an 'inputs' subfolder which
-must not pre-exist.
-
-Options:
-  -h		Show this message
-  -l       	Export using local dex installation
-                When this option is not set export is done via a duniter
-                Docker image
-                Can be combined with -p
-  -p <path>	Export from a local duniter home folder designed by <path>
-  -v <name> 	Export from a duniter named volume <name>
-  -s		Sync a new duniter instance via docker then export from there
-		The container and related volumes will be dropped afterwards
-
-When using -p or -v you have to ensure that the corresponding duniter instance
-was properly stopped before proceeding.
-EOF
-}
-
-error () {
-  errcode="$1"
-  errmsg="$2"
-  echo "$errmsg" >&2
-  if [ "$1" -lt 10 ]; then
-    echo "" >&2
-    usage >&2
-  fi
-  exit "$errcode"
-}
-
-check_docker () {
-  type -p docker || error 14 "Cannot find the 'docker' command."
-  id -nG | tr ' ' '\n' | grep '^docker$' || error 14 "User not in the 'docker' group."
-}
-
-check_sqlite3 () {
-  type -p sqlite3 || error 13 "Cannot find 'sqlite3' executable. Is it in your PATH?"
-}
-
-extract_from_volume () {
-  docker_volume="$1"
-  mkdir "$dest_folder/inputs"
-  docker run --rm -v "$docker_volume":/var/lib/duniter -v "$dest_folder":/data --entrypoint "" -u root duniter/duniter:dev sh -c '
-    set -ex
-    rm -fr /data/inputs
-    mkdir /data/inputs
-    chmod a+rwx /data/inputs
-    echo "Export wallets data in progress..."
-    yes | dex find wallet -p balance -o json -f /data/inputs/wallets.json > /dev/null
-    echo "Export certs data in progress..."
-    yes | dex find cindex -p issued -o json -f /data/inputs/certs.json > /dev/null
-    echo "Export identities data in progress..."
-    yes | dex find iindex -p uid member writtenOn -o json -f /data/inputs/idty.json > /dev/null
-    echo "Export blocs dates data in progress..."
-    yes | dex find main_blocks -p medianTime -o json -l 10000000 -f /data/inputs/blocs.json > /dev/null
-    echo "Export UD value in progress..."
-    yes | dex find bindex -r -l 1 -p dividend -o json -f /data/inputs/ud_value.json > /dev/null
-    cp /var/lib/duniter/duniter_default/txs.db /data/inputs
-    chown -R $(stat -c "%U:%G" /data) /data/inputs
-    chmod og-w /data/inputs
-  '
-  echo "Export transaction history in progress..."
-  sqlite3 "$dest_folder/inputs/txs.db" --json "select time,comment,issuers,outputs from txs;" >"$dest_folder/inputs/transactions_history.json" && echo Done
-  rm "$dest_folder/inputs/txs.db"
-}
-
-check_path () {
-  [ -d "$path" ] || error 2 "No folder named '$path'."
-  path="$(readlink -f "$path")"
-  [ -d "$path/duniter_default" ] || error 2 "Cannot acces folder 'duniter_default' into '$path'."
-}
-
-check_volume () {
-  docker volume ls -q | grep "^$volume$"
-}
-
-check_dex () {
-  type -p dex || error 15 "Cannot find 'dex' executable. Is it in your PATH?"
-}
-
-# ==========
-# Local mode
-# ==========
-
-do_local () {
-  check_path
-  check_dex
-  check_sqlite3
-  mkdir "$dest_folder/inputs"
-  echo "Export wallets data in progress..."
-  dex -h "$path" find wallet -p balance -o json -f "$dest_folder/inputs/wallets.json" >/dev/null <<<y
-  echo "Export certs data in progress..."
-  dex -h "$path" find cindex -p issued -o json -f "$dest_folder/inputs/certs.json" >/dev/null <<<y
-  echo "Export identities data in progress..."
-  dex -h "$path" find iindex -p uid member writtenOn -o json -f "$dest_folder/inputs/idty.json" >/dev/null <<<y
-  echo "Export blocs dates data in progress..."
-  dex -h "$path" find main_blocks -p medianTime -o json -f "$dest_folder/inputs/blocs.json" >/dev/null <<<y
-  echo "Export UD value in progress..."
-  dex -h "$path" find bindex -r -l 1 -p dividend -o json -f "$dest_folder/inputs/ud_value.json" >/dev/null <<<y
-  echo "Export transaction history in progress..."
-  sqlite3 "$path/duniter_default/txs.db" --json "select time,comment,issuers,outputs from txs;" >"$dest_folder/inputs/transactions_history.json" && echo Done
-}
-
-# =========
-# Path mode
-# =========
-
-do_path () {
-  check_path
-  check_docker
-  check_sqlite3
-  extract_from_volume "$path"
-}
-
-# ===========
-# Volume mode
-# ===========
-
-do_volume () {
-  check_docker
-  check_volume
-  check_sqlite3
-  extract_from_volume "$volume"
-}
-
-# =========
-# Sync mode
-# =========
-
-do_sync () {
-  check_docker
-  check_sqlite3
-  docker_image="duniter/duniter:v1.8.6"
-  docker_volume="duniter_data_$$"
-  docker volume create "$docker_volume"
-  docker run --rm -it \
-    --entrypoint "" \
-    --memory 4g \
-    -v "$docker_volume":/var/lib/duniter \
-    "$docker_image" \
-    /bin/sh -c "
-      set -ex
-      duniter config --store-txs
-      duniter sync g1.duniter.org:443
-    "
-  extract_from_volume "$docker_volume"
-  docker volume rm "$docker_volume"
-}
-
-# ====
-# Main
-# ====
-
-# Handle command line options 
-TEMP=$(getopt -o 'hlp:v:s' -n "$me" -- "$@") || {
-  error 1 "Syntax error."
-}
-eval set -- "$TEMP"
-unset TEMP
-
-unset mode volume
-path=/var/lib/duniter
-
-while true; do
-  case "$1" in
-    -h)
-      usage
-      exit
-      ;;
-    -l)
-      mode=local
-      ;;
-    -p)
-      [ "$mode" = local ] || mode=path
-      path="$2"
-      shift
-      ;;
-    -v)
-      mode=volume
-      volume="$2"
-      shift
-      ;;
-    -s)
-      mode=sync
-      ;;
-    --)
-      shift
-      break
-      ;;
-    *)
-      error 1 "Syntax error."
-      ;;
-  esac
-  shift
-done
-dest_folder="${1:-.}"
-[ -d "$dest_folder/." ] || error 13 "Cannot access folder '$dest_folder'."
-[ -w "$dest_folder/." ] || error 13 "Folder '$dest_folder' is not writable."
-[ ! -e "$dest_folder/inputs" ] || error 13 "An item named 'inputs' exists into '$dest_folder'. It is in our way. Please remove it before proceeding."
-dest_folder="$(readlink -f "$dest_folder")"
-
-case "$mode" in
-  local)
-    do_local
-    ;;
-  path)
-    do_path
-    ;;
-  volume)
-    do_volume
-    ;;
-  sync)
-    do_sync
-    ;;
-  *)
-    error 1 "You must set one of these options: -l, -p, -v, -s"
-    ;;
-esac
diff --git a/lib/functions.py b/lib/functions.py
index 509c594ba2a584631a1e4a65f64651fa3b55dad4..c3bb4b84d586b3dcfb4d6098f512e6ba3040fc8e 100644
--- a/lib/functions.py
+++ b/lib/functions.py
@@ -1,4 +1,7 @@
 import base58
+import json
+import math
+import collections
 
 from adapters.duniter_v18.blocks import LevelDBBlocksRepository
 from adapters.duniter_v18.certifications import LevelDBCertificationsRepository
@@ -6,8 +9,14 @@ from adapters.duniter_v18.identities import LevelDBIdentitiesRepository
 from adapters.duniter_v18.memberships import LevelDBMembershipsRepository
 from adapters.duniter_v18.wallets import LevelDBWalletsRepository
 from adapters.duniter_v18.ud_value import LevelDBUDValueRepository
-from custom.parameters import CERT_PERIOD, CERT_MIN_RECEIVED_CERT_TO_ISSUE_CERT
 from lib.utility import load_json
+from lib.utility_param import *
+
+# Constant to estimate cert interval
+CERT_PERIOD = b_days(5)  # 5 days
+
+# when iterating on blocks, log current block every NOTIF_INTERVAL
+NOTIF_INTERVAL = 100000
 
 
 def get_wallets_data(leveldb_path: str) -> tuple:
@@ -68,9 +77,6 @@ def get_identities_and_wallets(start_timestamp: int, leveldb_path: str) -> tuple
     certifications_repository = LevelDBCertificationsRepository(leveldb_path)
     blocks_repository = LevelDBBlocksRepository(leveldb_path)
 
-    # Get custom identities
-    custom_identities = load_json("custom/identities.json")
-
     # Get wallets data
     print("    parse Wallets...")
     (wallets, total_money, ignored_money) = get_wallets_data(leveldb_path)
@@ -79,7 +85,9 @@ def get_identities_and_wallets(start_timestamp: int, leveldb_path: str) -> tuple
     wallet_sum = total_money + ignored_money
     missing_money = initial_monetary_mass - wallet_sum
     if missing_money != 0:
-        print(f"⚠️ initial monetary mass {initial_monetary_mass:,} does not equal wallet sum {wallet_sum:,}")
+        print(
+            f"⚠️ initial monetary mass {initial_monetary_mass:,} does not equal wallet sum {wallet_sum:,}"
+        )
         print(f"money on the wallets: {total_money:,}")
         print(f"money from ignored sources: {ignored_money:,}")
         print(f"missing money (added to treasury): {missing_money:,}")
@@ -97,8 +105,6 @@ def get_identities_and_wallets(start_timestamp: int, leveldb_path: str) -> tuple
         identity_names[pubkey] = uid
         mindex_entry = memberships_repository.get(pubkey)
         membership_expire_on = mindex_entry["expires_on"]
-        if membership_expire_on < 0:
-            membership_expire_on = 0  # forget old expiry date
 
         # add address and balance to identity
         if pubkey not in wallets:
@@ -121,9 +127,6 @@ def get_identities_and_wallets(start_timestamp: int, leveldb_path: str) -> tuple
             "certs_received": {},
         }
 
-    # Add custom identities
-    identities.update(custom_identities)
-
     # Generate identities Ğ1v2 genesis json bloc
     # certs are stored per issuer in input file
     # certs are stored per receiver in output file
@@ -145,7 +148,7 @@ def get_identities_and_wallets(start_timestamp: int, leveldb_path: str) -> tuple
             # timestamp of cert creation
             created_at = blocks_repository.get(cert["created_on"])["medianTime"]
             # block of next issuable cert
-            next_issuable_on = created_at + CERT_PERIOD
+            next_issuable_on = created_at + CERT_PERIOD # TODO check if Duniter expects timestamp or block number
             # timestamp of cert expiration
             cert_expire_at = cert["expires_on"]
             cert_expire_on = cert_expire_at
@@ -159,14 +162,169 @@ def get_identities_and_wallets(start_timestamp: int, leveldb_path: str) -> tuple
     return identities, wallets
 
 
-def get_smiths():
-    final_smiths = {}
-    smiths_brut = load_json("custom/smiths.json")
-    for smith in smiths_brut:
-        final_smiths.update(smith)
+def get_blocks(leveldb_path: str) -> list:
+    """
+    Get blocks,
+    return a list of blocks
+    """
+    # Get wallets balances data
+    blocks_repo = LevelDBBlocksRepository(leveldb_path)
+    blocks = []
+    for num, block in blocks_repo:
+        if num % NOTIF_INTERVAL == 0:
+            print(num)
+        noEvent = (
+            True
+            and not block.get("certifications")
+            and not block.get("transactions")
+            and not block.get("joiners") # TODO membership events
+            and not block.get("leavers") # TODO membership events
+            and not block.get("revoked") # TODO membership events
+            and not block.get("actives") # TODO membership events
+            and not block.get("excluded") # TODO membership events
+        )
+        sample = {
+            "height": block.get("number"),
+            "timestamp": block.get("medianTime"),
+            "hash": block.get("hash"),
+            "parentHash": block.get("previousHash"),
+            "validator": block.get("issuer"),
+            "version": block.get("version"),
+            "hasEvent": not noEvent,
+        }
+        blocks.append(sample)
+
+    return blocks
 
-    return final_smiths
 
+def get_tx(leveldb_path: str) -> list:
+    """
+    Get tx,
+    return a list of tx
+    """
+    # Get wallets balances data
+    blocks_repo = LevelDBBlocksRepository(leveldb_path)
+    txs = []
+    for num, block in blocks_repo:
+        if num % NOTIF_INTERVAL == 0:
+            print(num)
+        for tx in block.get("transactions"):
+            outputs = tx["outputs"]
+            issuers = tx["issuers"]
+            comment = tx["comment"]
+            timestamp = block["medianTime"]
+            issuers_count = len(issuers)
+            # loop on issuers. If multiple issuers, approximate amount
+            for issuer in issuers:
+                # loop on outputs
+                for output in outputs:
+                    outputparts = output.split(":")
+                    amount = int(outputparts[0])
+                    receiver = outputparts[2]
+                    if issuers_count > 1:
+                        amount = math.floor(amount / issuers_count)  # approximation
+                    # ignore non trivial unlock sources
+                    # https://git.duniter.org/tools/py-g1-migrator/-/issues/3
+                    if "&&" in receiver or "||" in receiver:
+                        print(num)
+                        print("ignoring " + receiver)
+                        continue
+                    receiver = receiver.split("SIG(")[1].split(")")[0]
+                    sample = {
+                        "blockNumber": num,
+                        "timestamp": timestamp,
+                        "from": issuer,
+                        "to": receiver,
+                        "amount": amount,
+                        "comment": comment,
+                    }
+                    # do not include outputs that go back to sender
+                    if sample["from"] != sample["to"]:
+                        txs.append(sample)
+    return txs
+
+
+def get_cert(leveldb_path: str) -> list:
+    """
+    Get certification history
+    Creation, Renewal, AND Removal
+    For this, re-index the blockchain v1
+    """
+    # initialize
+    CERTVALIDITY = 63115200 # 3600 * 24 * 365.25 * 2 validity of certification in seconds (2 years)
+    cert_should_expire = {} # maps (issuer, receiver) to expiration timestamp
+    may_expire = collections.deque() # queue of (expire, (issuer, receiver)), should be ordered by expire timestamp
+    cert_events = [] # cert events returned by this function (Creation, Renewal, Removal)
+    identity_id = {} # maps pubkey to identity index
+    blockMedianTime = [] # blockMedianTime[n] holds the medianTime of block n (list index == block height)
+    # repos
+    blocks_repo = LevelDBBlocksRepository(leveldb_path)
+    identities_repository = LevelDBIdentitiesRepository(leveldb_path)
+
+    # Get identities index by pubkey
+    for pubkey, identity in identities_repository:
+        index = identity["wotb_id"] + 1
+        identity_id[pubkey] = index
 
-def get_technical_committee():
-    return load_json("custom/technical_committee.json")
+    for num, block in blocks_repo:
+        if num % NOTIF_INTERVAL == 0:
+            print(num)
+        certs = block.get("certifications")
+        medianTime = block.get("medianTime")
+        blockMedianTime.append(medianTime)
+        # expire certs
+        while may_expire:
+            expi, (src, tgt) = may_expire.popleft()
+            # the certification could expire at this block
+            if expi < medianTime:
+                # check if it was not renewed meanwhile
+                expi = cert_should_expire[(src, tgt)]
+                if expi < medianTime:
+                    optype = "Removal"
+                    sample = {
+                        "blockNumber": num,
+                        "issuer": src,
+                        "receiver": tgt,
+                        "type": optype,
+                    }
+                    cert_events.append(sample)
+                    del cert_should_expire[(src, tgt)]
+                # cert has been renewed meanwhile, ignore this
+                else:
+                    pass
+            # since certs are sorted by expiry, the others won't expire either
+            else:
+                may_expire.appendleft((expi, (src, tgt)))
+                break
+        # loop over added certs of this block
+        for cert in certs:
+            # read certification
+            parts = cert.split(":")
+            source = identity_id[parts[0]]
+            target = identity_id[parts[1]]
+            blockNum = int(parts[2])
+            # first time we see this cert, this is a creation (or re-creation)
+            if (source, target) not in cert_should_expire:
+                optype = "Creation"
+            # if cert is already there, this is a renewal
+            else:
+                optype = "Renewal"
+            # set the new expiration time in both cases and schedule cert removal
+            # cert validity is counted from cert emission, not write time
+            start_time = blockMedianTime[blockNum]
+            new_expire = start_time + CERTVALIDITY
+            cert_should_expire[(source, target)] = new_expire
+            may_expire.append((new_expire, (source, target)))
+            # add sample
+            sample = {
+                # # block number of the document creation
+                # "blockNumberCreated": blockNum,
+                # # block in which the certification is written
+                # "blockNumberWritten": num,
+                "blockNumber": num,
+                "issuer": source,
+                "receiver": target,
+                "type": optype,
+            }
+            cert_events.append(sample)
+    return cert_events
\ No newline at end of file
diff --git a/lib/get_parameters.py b/lib/get_parameters.py
deleted file mode 100644
index bdf5c576b53a84e11d130b0e4e8d66209893dedf..0000000000000000000000000000000000000000
--- a/lib/get_parameters.py
+++ /dev/null
@@ -1,37 +0,0 @@
-from custom.parameters import *
-
-def get_genesis_parameters():
-    return {
-        'genesis_certs_expire_on': GENESIS_CERTS_EXPIRE_ON,
-        'genesis_certs_min_received': GENESIS_CERTS_MIN_RECEIVED,
-        'genesis_memberships_expire_on': GENESIS_MEMBERSHIPS_EXPIRE_ON,
-        'genesis_smith_certs_expire_on': GENESIS_SMITH_CERTS_EXPIRE_ON,
-        'genesis_smith_certs_min_received': GENESIS_SMITH_CERTS_MIN_RECEIVED,
-        'genesis_smith_memberships_expire_on': GENESIS_SMITH_MEMBERSHIPS_EXPIRE_ON,
-    }
-
-def get_parameters():
-    return {
-        'babe_epoch_duration': BABE_EPOCH_DURATION,
-        'cert_period': CERT_PERIOD,
-        'cert_max_by_issuer': CERT_MAX_BY_ISSUER,
-        'cert_min_received_cert_to_issue_cert': CERT_MIN_RECEIVED_CERT_TO_ISSUE_CERT,
-        'cert_validity_period': CERT_VALIDITY_PERIOD,
-        'idty_confirm_period': IDTY_CONFIRM_PERIOD,
-        'idty_creation_period': IDTY_CREATION_PERIOD,
-        'membership_period': MEMBERSHIP_PERIOD,
-        'pending_membership_period': PENDING_MEMBERSHIP_PERIOD,
-        'ud_creation_period': UD_CREATION_PERIOD,
-        'ud_reeval_period': UD_REEVAL_PERIOD,
-        'smith_cert_period': SMITH_CERT_PERIOD,
-        'smith_cert_max_by_issuer': SMITH_CERT_MAX_BY_ISSUER,
-        'smith_cert_min_received_cert_to_issue_cert': SMITH_CERT_MIN_RECEIVED_CERT_TO_ISSUE_CERT,
-        'smith_cert_validity_period': SMITH_CERT_VALIDITY_PERIOD,
-        'smith_membership_period': SMITH_MEMBERSHIP_PERIOD,
-        'smith_pending_membership_period': SMITH_PENDING_MEMBERSHIP_PERIOD,
-        'smiths_wot_first_cert_issuable_on': SMITHS_WOT_FIRST_CERT_ISSUABLE_ON,
-        'smiths_wot_min_cert_for_membership': SMITHS_WOT_MIN_CERT_FOR_MEMBERSHIP,
-        'wot_first_cert_issuable_on': WOT_FIRST_CERT_ISSUABLE_ON,
-        'wot_min_cert_for_create_idty_right': WOT_MIN_CERT_FOR_CREATE_IDTY_RIGHT,
-        'wot_min_cert_for_membership': WOT_MIN_CERT_FOR_MEMBERSHIP,
-    }
diff --git a/lib/utility.py b/lib/utility.py
index 6d5619b58e49df57e24755922a6e92ff6b6d54b0..abd1bd8477ef23c3f6289dba29f20847a46dfe33 100644
--- a/lib/utility.py
+++ b/lib/utility.py
@@ -1,6 +1,5 @@
 import json, base58
 from urllib.request import urlopen
-from custom.parameters import *
 from substrateinterface import Keypair, KeypairType
 
 
@@ -22,12 +21,6 @@ def v1_pubkey_to_v2_address(pubkey):
     return keypair.ss58_address
 
 
-def date_to_bloc_number(date_timestamp: int, start_timestamp: int):
-    "converts a unix timestamp to a block number in blockchain v2 based on estimated start timestamp"
-    timestamp = date_timestamp - start_timestamp  # can be negative
-    return b_seconds(timestamp) # lower approximation
-
-
 def load_json(data):
     get_data = open(data)
     return json.load(get_data)
diff --git a/lib/utility_param.py b/lib/utility_param.py
index ffb45b263ac9a4086692d72a299d1f8eaf9a684d..62f4ac9c4405f85b6ebeae276d24b2ab8383f973 100644
--- a/lib/utility_param.py
+++ b/lib/utility_param.py
@@ -1,6 +1,5 @@
 # Utility params
 
-
 def b_seconds(seconds: int) -> int:
     """converts a number of seconds to a number of 6-seconds blocs
     use lower approximation
@@ -9,18 +8,11 @@ def b_seconds(seconds: int) -> int:
     """
     return int(seconds / 6)
 
-
 def b_minutes(minutes: int) -> int:
     return b_seconds(minutes * 60)
 
-
 def b_hours(hours: int) -> int:
     return b_minutes(hours) * 60
 
-
 def b_days(days: int) -> int:
-    return b_hours(days) * 24
-
-
-def b_months(months: int) -> int:
-    return b_days(months) * 30
+    return b_hours(days) * 24
\ No newline at end of file
diff --git a/main.py b/main.py
index 2e618564888c384c5d26fa341412a6a3e98ea3f0..8cd397f555284169e1401a7b24288d953283bf42 100755
--- a/main.py
+++ b/main.py
@@ -20,7 +20,7 @@ import sys
 from time import time
 
 from adapters.duniter_v18.ud_value import LevelDBUDValueRepository
-from lib.functions import get_identities_and_wallets, get_smiths, get_technical_committee
+from lib.functions import get_identities_and_wallets
 
 DEFAULT_LEVELDB_PATH = "./leveldb"
 LEVELDB_PATH = os.getenv("LEVELDB_PATH", DEFAULT_LEVELDB_PATH)
@@ -38,12 +38,6 @@ def main():
     if start_timestamp == "":
         start_timestamp = int(time())
 
-    # Get ĞTest parameters
-    print("    get ĞTest parameters...")
-    smiths = get_smiths()
-    technical_committee = get_technical_committee()
-    sudo_key = "5Dq8xjvkmbz7q4g2LbZgyExD26VSCutfEc6n4W4AfQeVHZqz"
-
     # Dump ĞTest parameters
     print("    dump ĞTest parameters...")
     # Get last block info
@@ -65,7 +59,7 @@ def main():
     print("    add simple wallets...")
 
     # Final ĞTest genesis JSON
-    gtest_genesis = {
+    genesis = {
         "first_ud_value": first_ud_value,
         "first_ud_reeval": first_ud_reeval,
         "current_block": {
@@ -74,18 +68,15 @@ def main():
         },
         # "first_ud_time": FIRST_UD_TIME, # this field does not exist in Duniter
         "initial_monetary_mass": inital_monetary_mass,
-        "smiths": smiths,
-        "technical_committee": technical_committee,
-        "sudo_key": sudo_key,
         "identities": identities,
         "wallets": other_wallets,
         # "treasury": treasury, # would need to modify pallet treasury, adding it as an account instead
     }
 
     # Dump JSON to file
-    gtest_genesis_json = json.dumps(gtest_genesis, indent=2).encode()
-    gtest_json = open("output/gtest_genesis.json", "wb")
-    gtest_json.write(gtest_genesis_json)
+    genesis_json = json.dumps(genesis, indent=2).encode()
+    genesis_fid = open("output/genesis.json", "wb")
+    genesis_fid.write(genesis_json)
 
 
 if __name__ == '__main__':
diff --git a/note.md b/note.md
new file mode 100644
index 0000000000000000000000000000000000000000..ae76887705e5e9cccb9647f3db0fd2912ecef98a
--- /dev/null
+++ b/note.md
@@ -0,0 +1,42 @@
+
+Note Hugo pour dev en local.
+
+## Sur mon noeud Duniter
+
+```sh
+mkdir /tmp/backup-g1-duniter-1.8.7
+cp -R $HOME/.config/duniter/duniter_default/data /tmp/backup-g1-duniter-1.8.7/
+# cp -R $HOME/.config/duniter/duniter_default/g1 /tmp/backup-g1-duniter-1.8.7/
+cp -R $HOME/.config/duniter/duniter_default/txs.db /tmp/backup-g1-duniter-1.8.7/
+tar -cvzf /tmp/backup-g1-duniter-1.8.7.tgz /tmp/backup-g1-duniter-1.8.7
+mv /tmp/backup-g1-duniter-1.8.7.tgz /var/www/files.coinduf.eu
+```
+
+## Sur ma machine de dev dans py-g1-migrator
+
+```sh
+# récupérer le dump
+curl https://files.coinduf.eu/backup-g1-duniter-1.8.7.tgz -o inputs/g1-dump.tgz
+tar xvzf inputs/g1-dump.tgz  -C inputs
+mv inputs/tmp/backup-g1-duniter-1.8.7 inputs/duniter_default
+
+# exec
+python -m venv env
+source ./env/bin/activate
+pip install -r requirements.txt
+export LEVELDB_PATH="./inputs/duniter_default/data/leveldb" 
+# --- MAIN ---
+# main script outputs ./output/genesis.json which is used to build Duniter genesis state
+./main.py
+
+# --- SQUID ---
+# squid scripts are used by Duniter-Squid to provide seamless history for client users
+./squid-block.py # ./output/block_hist.json
+./squid-cert.py # ./output/cert_hist.json
+./squid-tx.py # ./output/tx_hist.json
+
+# copy to artifacts
+scp ./output/block_hist.json wolf:/var/www/files.coinduf.eu/
+scp ./output/tx_hist.json wolf:/var/www/files.coinduf.eu/
+scp ./output/cert_hist.json wolf:/var/www/files.coinduf.eu/
+```
diff --git a/orchestra.sh b/orchestra.sh
deleted file mode 100755
index 2057f0fc4854f73565b2c1de95c9e2e3033f08c8..0000000000000000000000000000000000000000
--- a/orchestra.sh
+++ /dev/null
@@ -1,23 +0,0 @@
-#!/bin/bash
-
-set -e
-MY_PATH="`dirname \"$0\"`"
-MY_PATH="`( cd \"$MY_PATH\" && pwd )`"
-cd $MY_PATH
-
-# get smith username
-smith_idty=$1
-[[ ! $smith_idty ]] && echo "Please pass your smith username identity in argument (ex: ./boot_gdev.sh poka)" && exit 1
-
-# get smith username
-PROTOCOL_VERSION=$2
-[[ ! $PROTOCOL_VERSION ]] && echo "Please pass the protocol version (ex: ./boot_gdev.sh poka gdev5)" && exit 1
-
-
-$MY_PATH/main.py
-$MY_PATH/generate_transactions_history.py
-$MY_PATH/boot_gdev.sh $smith_idty $PROTOCOL_VERSION
-$MY_PATH/push-gdev.sh
-
-echo "g1-migrator orchestra success !"
-echo
diff --git a/push-gdev.sh b/push-gdev.sh
deleted file mode 100755
index 79baadedf55888cac87fdb0b67ace03591be6764..0000000000000000000000000000000000000000
--- a/push-gdev.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-set -e
-MY_PATH="`dirname \"$0\"`"
-MY_PATH="`( cd \"$MY_PATH\" && pwd )`"
-
-# create a local live network to test
-echo "Push new ĞDev to production"
-
-rs $MY_PATH/boot_gdev/tmp/gdev/ poka@gdev.p2p.legal:/home/poka/gdev-latest/ 13322 && echo "Push success =)" || echo "Push failed :/"
-echo
diff --git a/scripts/custom b/scripts/custom
deleted file mode 120000
index 768f2ec8d6646a03e7fb74125fc4589dab4784a2..0000000000000000000000000000000000000000
--- a/scripts/custom
+++ /dev/null
@@ -1 +0,0 @@
-../custom/
\ No newline at end of file
diff --git a/scripts/lib b/scripts/lib
deleted file mode 120000
index 5bf80bf1392c96e6b9afe5d8f0b7b64fd4814c33..0000000000000000000000000000000000000000
--- a/scripts/lib
+++ /dev/null
@@ -1 +0,0 @@
-../lib/
\ No newline at end of file
diff --git a/scripts/scan_derivations.py b/scripts/scan_derivations.py
deleted file mode 100644
index 8125078754acdb42aff7d0d22517ef92055bd8c4..0000000000000000000000000000000000000000
--- a/scripts/scan_derivations.py
+++ /dev/null
@@ -1,39 +0,0 @@
-#!/usr/bin/env python3
-
-import sys
-from substrateinterface import Keypair, SubstrateInterface
-
-duniter = SubstrateInterface(url="wss://gdev.p2p.legal/ws")
-rangeNbr = 50
-
-if len(sys.argv) < 2:
-    print("Please give your mnemonic as argument")
-    sys.exit(1)
-
-mnemonic: str = sys.argv[1]
-if len(sys.argv) > 2:
-    rangeNbr = int(sys.argv[2])
-
-# scan root address
-keypair = Keypair.create_from_mnemonic(mnemonic=mnemonic)
-address = keypair.ss58_address
-balance = duniter.query('System', 'Account', [
-                        address]).value['data']['free'] / 100
-if balance != 0:
-    print("root: " + address + " - " +
-          str(balance) + " ĞDev")
-
-# scan range derivations
-for i in range(0, rangeNbr + 1):
-    derivation = str(i)
-    keypair = Keypair.create_from_uri(
-        mnemonic + "//" + derivation)
-    address = keypair.ss58_address
-    balance = duniter.query('System', 'Account', [
-                            address]).value['data']['free'] / 100
-
-    if balance == 0:
-        continue
-
-    print(derivation + ": " + address + " - " +
-          str(balance) + " ĞDev")
diff --git a/squid-block.py b/squid-block.py
new file mode 100755
index 0000000000000000000000000000000000000000..4710aa4d7d47095f669fe0092cd4ceb5f1fee3e0
--- /dev/null
+++ b/squid-block.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+import json
+import os
+
+from lib.functions import get_blocks
+
+DEFAULT_LEVELDB_PATH = "./leveldb"
+LEVELDB_PATH = os.getenv("LEVELDB_PATH", DEFAULT_LEVELDB_PATH)
+
+
+def main():
+    # get blocks
+    block_hist = get_blocks(LEVELDB_PATH)
+
+    # Dump JSON to file
+    print("Exporting...")
+    block_hist_json = json.dumps(block_hist, indent=2).encode()
+    gtest_json = open("output/block_hist.json", "wb")
+    gtest_json.write(block_hist_json)
+
+
+if __name__ == "__main__":
+    print("Prepare blocks for squid")
+    main()
+    print("Done\n")
diff --git a/squid-cert.py b/squid-cert.py
new file mode 100755
index 0000000000000000000000000000000000000000..bb428b9a18df3078ae36b233f8dca738c2f54c09
--- /dev/null
+++ b/squid-cert.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+import json
+import os
+
+from lib.functions import get_cert
+
+DEFAULT_LEVELDB_PATH = "./leveldb"
+LEVELDB_PATH = os.getenv("LEVELDB_PATH", DEFAULT_LEVELDB_PATH)
+
+
+def main():
+    # get certs
+    cert_hist = get_cert(LEVELDB_PATH)
+
+    # Dump JSON to file
+    print("Exporting...")
+    cert_hist_json = json.dumps(cert_hist, indent=2).encode()
+    gtest_json = open("output/cert_hist.json", "wb")
+    gtest_json.write(cert_hist_json)
+
+
+if __name__ == "__main__":
+    print("Prepare cert for squid")
+    main()
+    print("Done\n")
diff --git a/squid-tx.py b/squid-tx.py
new file mode 100755
index 0000000000000000000000000000000000000000..77e379d8f588989e45ecaf10b68fb3c2aa6d19b8
--- /dev/null
+++ b/squid-tx.py
@@ -0,0 +1,26 @@
+#!/usr/bin/env python3
+
+import json
+import os
+
+from lib.functions import get_tx
+
+DEFAULT_LEVELDB_PATH = "./leveldb"
+LEVELDB_PATH = os.getenv("LEVELDB_PATH", DEFAULT_LEVELDB_PATH)
+
+
+def main():
+    # get txs
+    tx_hist = get_tx(LEVELDB_PATH)
+
+    # Dump JSON to file
+    print("Exporting...")
+    tx_hist_json = json.dumps(tx_hist, indent=2).encode()
+    gtest_json = open("output/tx_hist.json", "wb")
+    gtest_json.write(tx_hist_json)
+
+
+if __name__ == "__main__":
+    print("Prepare tx for squid")
+    main()
+    print("Done\n")