From cdf5c5165ac4cc6670bd7c030070eeb32620a9a0 Mon Sep 17 00:00:00 2001
From: inso <insomniak.fr@gmaiL.com>
Date: Sat, 19 Nov 2016 12:37:24 +0100
Subject: [PATCH] Working on new architecture
- Add transactions data and tables
- Fix duniterpy syntax
- Fix multiple bugs
---
requirements.txt | 2 +-
src/sakia/app.py | 25 +-
src/sakia/core/__init__.py | 2 -
src/sakia/core/registry/__init__.py | 1 -
src/sakia/core/registry/identity.py | 549 ------------------
src/sakia/core/transfer.py | 374 ------------
src/sakia/core/txhistory.py | 425 --------------
src/sakia/core/wallet.py | 420 --------------
src/sakia/data/connectors/bma.py | 11 +-
src/sakia/data/connectors/node.py | 41 +-
src/sakia/data/entities/identity.py | 6 +-
src/sakia/data/entities/transaction.py | 14 +
src/sakia/data/processors/__init__.py | 1 +
src/sakia/data/processors/certifications.py | 4 +-
src/sakia/data/processors/identities.py | 23 +-
src/sakia/data/processors/transactions.py | 114 ++++
src/sakia/data/processors/tx_lifecycle.py | 197 +++++++
src/sakia/data/repositories/meta.sql | 1 +
src/sakia/data/repositories/transactions.py | 3 +-
src/sakia/gui/navigation/graphs/base/model.py | 10 -
.../gui/navigation/identities/controller.py | 4 +-
src/sakia/gui/navigation/model.py | 15 +-
.../gui/navigation/txhistory/table_model.py | 21 +-
src/sakia/gui/widgets/context_menu.py | 1 -
src/sakia/options.py | 4 +-
src/sakia/services/__init__.py | 1 +
src/sakia/services/blockchain.py | 7 +-
src/sakia/services/documents.py | 4 +-
src/sakia/services/identities.py | 31 +-
src/sakia/services/network.py | 4 +-
src/sakia/services/transactions.py | 125 ++++
31 files changed, 565 insertions(+), 1875 deletions(-)
delete mode 100644 src/sakia/core/__init__.py
delete mode 100644 src/sakia/core/registry/__init__.py
delete mode 100644 src/sakia/core/registry/identity.py
delete mode 100644 src/sakia/core/transfer.py
delete mode 100644 src/sakia/core/txhistory.py
delete mode 100644 src/sakia/core/wallet.py
create mode 100644 src/sakia/data/processors/transactions.py
create mode 100644 src/sakia/data/processors/tx_lifecycle.py
create mode 100644 src/sakia/services/transactions.py
diff --git a/requirements.txt b/requirements.txt
index 0fadd5f3..ffecd9a6 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,4 +2,4 @@ git+https://github.com/Insoleet/quamash.git@master
asynctest
networkx
git+https://github.com/hynek/attrs.git@master
-git+https://github.com/duniter/duniter-python-api.git@master
+git+https://github.com/duniter/duniter-python-api.git@dev
diff --git a/src/sakia/app.py b/src/sakia/app.py
index adbbb0bc..86ab4ee7 100644
--- a/src/sakia/app.py
+++ b/src/sakia/app.py
@@ -15,10 +15,10 @@ from duniterpy.api.bma import API
from . import __version__
from .options import SakiaOptions
from sakia.data.connectors import BmaConnector
-from sakia.services import NetworkService, BlockchainService, IdentitiesService, SourcesServices
+from sakia.services import NetworkService, BlockchainService, IdentitiesService, SourcesServices, TransactionsService
from sakia.data.repositories import SakiaDatabase
from sakia.data.processors import BlockchainProcessor, NodesProcessor, IdentitiesProcessor, \
- CertificationsProcessor, SourcesProcessor
+ CertificationsProcessor, SourcesProcessor, TransactionsProcessor
from sakia.data.files import AppDataFile, UserParametersFile
from sakia.decorators import asyncify
from sakia.money import Relative
@@ -33,7 +33,8 @@ class Application(QObject):
"""
def __init__(self, qapp, loop, options, app_data, parameters, db,
- network_services, blockchain_services, identities_services):
+ network_services, blockchain_services, identities_services,
+ sources_services, transactions_services):
"""
Init a new "sakia" application
:param QCoreApplication qapp: Qt Application
@@ -43,8 +44,10 @@ class Application(QObject):
:param sakia.data.entities.UserParameters parameters: the application current user parameters
:param sakia.data.repositories.SakiaDatabase db: The database
:param dict network_services: All network services for current currency
- :param dict blockchain_services: All network services for current currency
- :param dict identities_services: All network services for current currency
+ :param dict blockchain_services: All blockchain services for current currency
+ :param dict identities_services: All identities services for current currency
+ :param dict sources_services: All sources services for current currency
+ :param dict transactions_services: All transactions services for current currency
:return:
"""
super().__init__()
@@ -61,12 +64,14 @@ class Application(QObject):
self.network_services = network_services
self.blockchain_services = blockchain_services
self.identities_services = identities_services
+ self.sources_services = sources_services
+ self.transactions_services = transactions_services
@classmethod
def startup(cls, argv, qapp, loop):
options = SakiaOptions.from_arguments(argv)
app_data = AppDataFile.in_config_path(options.config_path).load_or_init()
- app = cls(qapp, loop, options, app_data, None, None, {}, {}, {})
+ app = cls(qapp, loop, options, app_data, None, None, {}, {}, {}, {}, {})
#app.set_proxy()
#app.get_last_version()
app.load_profile(app_data.default)
@@ -89,18 +94,24 @@ class Application(QObject):
certs_processor = CertificationsProcessor(self.db.certifications_repo, self.db.identities_repo, bma_connector)
blockchain_processor = BlockchainProcessor.instanciate(self)
sources_processor = SourcesProcessor.instanciate(self)
+ transactions_processor = TransactionsProcessor.instanciate(self)
self.blockchain_services = {}
self.network_services = {}
self.identities_services = {}
self.sources_services = {}
+ self.transactions_services = {}
for currency in self.db.connections_repo.get_currencies():
self.identities_services[currency] = IdentitiesService(currency, identities_processor,
certs_processor, blockchain_processor,
bma_connector)
+ self.transactions_services[currency] = TransactionsService(currency, transactions_processor,
+ identities_processor, bma_connector)
+
self.blockchain_services[currency] = BlockchainService(currency, blockchain_processor, bma_connector,
- self.identities_services[currency])
+ self.identities_services[currency],
+ self.transactions_services[currency])
self.network_services[currency] = NetworkService.load(currency, nodes_processor,
self.blockchain_services[currency])
self.sources_services[currency] = SourcesServices(currency, sources_processor, bma_connector)
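
Note (illustrative aside, not part of the patch): the app.py hunk above builds one service object per currency, keeps them in dicts keyed by currency, and hands the new TransactionsService to the BlockchainService that depends on it. A minimal sketch of that wiring pattern, using simplified stand-in classes rather than the real sakia constructors:

# Stand-in classes only: the real services also take processors and a BMA connector.
class TransactionsService:
    def __init__(self, currency):
        self.currency = currency

class BlockchainService:
    def __init__(self, currency, transactions_service):
        self.currency = currency
        self.transactions_service = transactions_service

def build_services(currencies):
    transactions_services = {}
    blockchain_services = {}
    for currency in currencies:
        # One instance per currency, then inject the dependency.
        transactions_services[currency] = TransactionsService(currency)
        blockchain_services[currency] = BlockchainService(currency, transactions_services[currency])
    return blockchain_services, transactions_services

blockchain_services, transactions_services = build_services(["test_currency"])
assert blockchain_services["test_currency"].transactions_service is transactions_services["test_currency"]
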
diff --git a/src/sakia/core/__init__.py b/src/sakia/core/__init__.py
deleted file mode 100644
index 4dffa313..00000000
--- a/src/sakia/core/__init__.py
+++ /dev/null
@@ -1,2 +0,0 @@
-from .wallet import Wallet
-
diff --git a/src/sakia/core/registry/__init__.py b/src/sakia/core/registry/__init__.py
deleted file mode 100644
index 5f9b05a8..00000000
--- a/src/sakia/core/registry/__init__.py
+++ /dev/null
@@ -1 +0,0 @@
-from .identity import Identity, LocalState, BlockchainState
\ No newline at end of file
diff --git a/src/sakia/core/registry/identity.py b/src/sakia/core/registry/identity.py
deleted file mode 100644
index 21c8788f..00000000
--- a/src/sakia/core/registry/identity.py
+++ /dev/null
@@ -1,549 +0,0 @@
-"""
-Created on 11 Feb. 2014
-
-@author: inso
-"""
-
-import logging
-import time
-from enum import Enum
-from pkg_resources import parse_version
-
-from duniterpy.documents import BlockUID, SelfCertification, MalformedDocumentError
-from duniterpy.api import bma, errors
-from duniterpy.api.bma import PROTOCOL_VERSION
-
-from sakia.errors import NoPeerAvailable
-from PyQt5.QtCore import QObject, pyqtSignal
-
-
-class LocalState(Enum):
- """
- The local state describes how the identity exists locally :
-    COMPLETED means all its related data (certifiers, certified...)
-    were successfully downloaded
- PARTIAL means not all data are present locally
- NOT_FOUND means it could not be found anywhere
- """
- NOT_FOUND = 0
- PARTIAL = 1
- COMPLETED = 2
-
-
-class BlockchainState(Enum):
- """
- The blockchain state describes how the identity
- was found :
- VALIDATED means it was found in the blockchain
- BUFFERED means it was found via a lookup but not in the
- blockchain
- NOT_FOUND means it could not be found anywhere
- """
- NOT_FOUND = 0
- BUFFERED = 1
- VALIDATED = 2
-
-
-class Identity(QObject):
- """
- A person with a uid and a pubkey
- """
- def __init__(self, uid, pubkey, sigdate, local_state, blockchain_state):
- """
- Initializing a person object.
-
- :param str uid: The identity uid, also known as its uid on the network
- :param str pubkey: The identity pubkey
-        :param BlockUID sigdate: The date of signature of the self certification
- :param LocalState local_state: The local status of the identity
- :param BlockchainState blockchain_state: The blockchain status of the identity
- """
- if sigdate:
- assert type(sigdate) is BlockUID
- super().__init__()
- self.uid = uid
- self.pubkey = pubkey
- self._sigdate = sigdate
- self.local_state = local_state
- self.blockchain_state = blockchain_state
-
- @classmethod
- def empty(cls, pubkey):
- return cls("", pubkey, None, LocalState.NOT_FOUND, BlockchainState.NOT_FOUND)
-
- @classmethod
- def from_handled_data(cls, uid, pubkey, sigdate, blockchain_state):
- return cls(uid, pubkey, sigdate, LocalState.COMPLETED, blockchain_state)
-
- @classmethod
- def from_json(cls, json_data, version):
- """
- Create a person from json data
-
- :param dict json_data: The person as a dict in json format
- :return: A new person if pubkey wasn't known, else a new person instance.
- """
- pubkey = json_data['pubkey']
- uid = json_data['uid']
- local_state = LocalState[json_data['local_state']]
- blockchain_state = BlockchainState[json_data['blockchain_state']]
- if version >= parse_version("0.20.0dev0") and json_data['sigdate']:
- sigdate = BlockUID.from_str(json_data['sigdate'])
- else:
- sigdate = BlockUID.empty()
-
- return cls(uid, pubkey, sigdate, local_state, blockchain_state)
-
- @property
- def sigdate(self):
- return self._sigdate
-
- @sigdate.setter
- def sigdate(self, sigdate):
- assert type(sigdate) is BlockUID
- self._sigdate = sigdate
-
- async def selfcert(self, community):
- """
- Get the identity self certification.
- This request is not cached in the person object.
-
- :param sakia.core.community.Community community: The community target to request the self certification
- :return: A SelfCertification duniterpy object
- :rtype: duniterpy.documents.certification.SelfCertification
- """
- try:
- timestamp = BlockUID.empty()
- lookup_data = await community.bma_access.future_request(bma.wot.Lookup,
- req_args={'search': self.pubkey})
-
- for result in lookup_data['results']:
- if result["pubkey"] == self.pubkey:
- uids = result['uids']
- for uid_data in uids:
- # If the sigDate was written in the blockchain
- if self._sigdate and BlockUID.from_str(uid_data["meta"]["timestamp"]) == self._sigdate:
- timestamp = BlockUID.from_str(uid_data["meta"]["timestamp"])
- uid = uid_data["uid"]
- signature = uid_data["self"]
- # Else we choose the latest one found
- elif BlockUID.from_str(uid_data["meta"]["timestamp"]) >= timestamp:
- timestamp = BlockUID.from_str(uid_data["meta"]["timestamp"])
- uid = uid_data["uid"]
- signature = uid_data["self"]
-
- if not self.sigdate:
- self.sigdate = timestamp
-
- return SelfCertification(PROTOCOL_VERSION,
- community.currency,
- self.pubkey,
- uid,
- timestamp,
- signature)
- except errors.DuniterError as e:
- if e.ucode == errors.NO_MATCHING_IDENTITY:
- raise LookupFailureError(self.pubkey, community)
- except MalformedDocumentError:
- raise LookupFailureError(self.pubkey, community)
-
- async def get_join_date(self, community):
- """
- Get the person join date.
- This request is not cached in the person object.
-
- :param sakia.core.community.Community community: The community target to request the join date
- :return: A datetime object
- """
- try:
- search = await community.bma_access.future_request(bma.blockchain.Membership,
- {'search': self.pubkey})
- if len(search['memberships']) > 0:
- membership_data = search['memberships'][0]
- block = await community.bma_access.future_request(bma.blockchain.Block,
- req_args={'number': membership_data['blockNumber']})
- return block['medianTime']
- except errors.DuniterError as e:
- if e.ucode == errors.NO_MEMBER_MATCHING_PUB_OR_UID:
- raise MembershipNotFoundError(self.pubkey, community.name)
- except NoPeerAvailable as e:
- logging.debug(str(e))
- raise MembershipNotFoundError(self.pubkey, community.name)
-
- async def get_expiration_date(self, community):
- try:
- membership = await self.membership(community)
- join_block_number = membership['blockNumber']
- try:
- join_block = await community.bma_access.future_request(bma.blockchain.Block,
- req_args={'number': join_block_number})
-
- parameters = await community.bma_access.future_request(bma.blockchain.Parameters)
- join_date = join_block['medianTime']
- expiration_date = join_date + parameters['sigValidity']
- except NoPeerAvailable:
- expiration_date = None
- except errors.DuniterError as e:
- logging.debug("Expiration date not found")
- expiration_date = None
- except MembershipNotFoundError:
- expiration_date = None
- return expiration_date
-
-
-#TODO: Manage 'OUT' memberships ? Maybe ?
- async def membership(self, community):
- """
- Get the person last membership document.
-
- :param sakia.core.community.Community community: The community target to request the join date
- :return: The membership data in BMA json format
- :rtype: dict
- """
- try:
- search = await community.bma_access.future_request(bma.blockchain.Membership,
- {'search': self.pubkey})
- block_number = -1
- membership_data = None
-
- for ms in search['memberships']:
- if ms['blockNumber'] > block_number:
- block_number = ms['blockNumber']
- if 'type' in ms:
- if ms['type'] is 'IN':
- membership_data = ms
- else:
- membership_data = ms
- if membership_data:
- return membership_data
- else:
- raise MembershipNotFoundError(self.pubkey, community.name)
-
- except errors.DuniterError as e:
- if e.ucode == errors.NO_MEMBER_MATCHING_PUB_OR_UID:
- raise MembershipNotFoundError(self.pubkey, community.name)
- else:
- logging.debug(str(e))
- raise MembershipNotFoundError(self.pubkey, community.name)
- except NoPeerAvailable as e:
- logging.debug(str(e))
- raise MembershipNotFoundError(self.pubkey, community.name)
-
- async def published_uid(self, community):
- try:
- data = await community.bma_access.future_request(bma.wot.Lookup,
- req_args={'search': self.pubkey})
- timestamp = BlockUID.empty()
-
- for result in data['results']:
- if result["pubkey"] == self.pubkey:
- uids = result['uids']
- person_uid = ""
- for uid_data in uids:
- if BlockUID.from_str(uid_data["meta"]["timestamp"]) >= timestamp:
- timestamp = uid_data["meta"]["timestamp"]
- person_uid = uid_data["uid"]
- if person_uid == self.uid:
- return True
- except errors.DuniterError as e:
- logging.debug("Lookup error : {0}".format(str(e)))
- except NoPeerAvailable as e:
- logging.debug(str(e))
- return False
-
- async def uid_is_revokable(self, community):
- published = await self.published_uid(community)
- if published:
- try:
- await community.bma_access.future_request(bma.wot.CertifiersOf,
- {'search': self.pubkey})
- except errors.DuniterError as e:
- if e.ucode in (errors.NO_MATCHING_IDENTITY, errors.NO_MEMBER_MATCHING_PUB_OR_UID):
- logging.debug("Certifiers of error : {0}".format(str(e)))
- except NoPeerAvailable as e:
- logging.debug(str(e))
- return False
-
- async def is_member(self, community):
- """
- Check if the person is a member of a community
-
- :param sakia.core.community.Community community: The community target to request the join date
- :return: True if the person is a member of a community
- """
- try:
- certifiers = await community.bma_access.future_request(bma.wot.CertifiersOf,
- {'search': self.pubkey})
- return certifiers['isMember']
- except errors.DuniterError as e:
- if e.ucode in (errors.NO_MATCHING_IDENTITY, errors.NO_MEMBER_MATCHING_PUB_OR_UID):
- pass
- except NoPeerAvailable as e:
- logging.debug(str(e))
- return False
-
- async def certifiers_of(self, identities_registry, community):
- """
- Get the list of this person certifiers
-
- :param sakia.core.registry.identities.IdentitiesRegistry identities_registry: The identities registry
- :param sakia.core.community.Community community: The community target
- :return: The list of the certifiers of this community
- :rtype: list
- """
- certifiers = list()
- try:
- data = await community.bma_access.future_request(bma.wot.CertifiersOf,
- {'search': self.pubkey})
-
- for certifier_data in data['certifications']:
- certifier = {}
- certifier['identity'] = identities_registry.from_handled_data(certifier_data['uid'],
- certifier_data['pubkey'],
- None,
- BlockchainState.VALIDATED,
- community)
- certifier['cert_time'] = certifier_data['cert_time']['medianTime']
- if certifier_data['written']:
- certifier['block_number'] = certifier_data['written']['number']
- else:
- certifier['block_number'] = None
-
- certifiers.append(certifier)
- except errors.DuniterError as e:
- if e.ucode in (errors.NO_MATCHING_IDENTITY, errors.NO_MEMBER_MATCHING_PUB_OR_UID):
- logging.debug("Certifiers of error : {0}".format(str(e)))
- else:
- logging.debug(str(e))
- except NoPeerAvailable as e:
- logging.debug(str(e))
-
- try:
- data = await community.bma_access.future_request(bma.wot.Lookup, {'search': self.pubkey})
- for result in data['results']:
- if result["pubkey"] == self.pubkey:
- self._refresh_uid(result['uids'])
- for uid_data in result['uids']:
- for certifier_data in uid_data['others']:
- for uid in certifier_data['uids']:
- # add a certifier
- certifier = {}
- certifier['identity'] = identities_registry.\
- from_handled_data(uid,
- certifier_data['pubkey'],
- None,
- BlockchainState.BUFFERED,
- community)
- certifier['cert_time'] = await community.time(certifier_data['meta']['block_number'])
- certifier['block_number'] = None
-
- certifiers.append(certifier)
- except errors.DuniterError as e:
- if e.ucode in (errors.NO_MATCHING_IDENTITY, errors.NO_MEMBER_MATCHING_PUB_OR_UID):
- logging.debug("Lookup error : {0}".format(str(e)))
- except NoPeerAvailable as e:
- logging.debug(str(e))
- return certifiers
-
- async def certified_by(self, identities_registry, community):
- """
- Get the list of persons certified by this person
- :param sakia.core.registry.IdentitiesRegistry identities_registry: The registry
- :param sakia.core.community.Community community: The community target
- :return: The list of the certified persons of this community in BMA json format
- :rtype: list
- """
- certified_list = list()
- try:
- data = await community.bma_access.future_request(bma.wot.CertifiedBy, {'search': self.pubkey})
- for certified_data in data['certifications']:
- certified = {}
- certified['identity'] = identities_registry.from_handled_data(certified_data['uid'],
- certified_data['pubkey'],
- None,
- BlockchainState.VALIDATED,
- community)
- certified['cert_time'] = certified_data['cert_time']['medianTime']
- if certified_data['written']:
- certified['block_number'] = certified_data['written']['number']
- else:
- certified['block_number'] = None
- certified_list.append(certified)
- except errors.DuniterError as e:
- if e.ucode in (errors.NO_MATCHING_IDENTITY, errors.NO_MEMBER_MATCHING_PUB_OR_UID):
- logging.debug("Certified by error : {0}".format(str(e)))
- except NoPeerAvailable as e:
- logging.debug(str(e))
-
- try:
- data = await community.bma_access.future_request(bma.wot.Lookup, {'search': self.pubkey})
- for result in data['results']:
- if result["pubkey"] == self.pubkey:
- self._refresh_uid(result['uids'])
- for certified_data in result['signed']:
- certified = {}
- certified['identity'] = identities_registry.from_handled_data(certified_data['uid'],
- certified_data['pubkey'],
- None,
- BlockchainState.BUFFERED,
- community)
- timestamp = BlockUID.from_str(certified_data['meta']['timestamp'])
- certified['cert_time'] = await community.time(timestamp.number)
- certified['block_number'] = None
- certified_list.append(certified)
- except errors.DuniterError as e:
- if e.ucode in (errors.NO_MATCHING_IDENTITY, errors.NO_MEMBER_MATCHING_PUB_OR_UID):
- logging.debug("Lookup error : {0}".format(str(e)))
- except NoPeerAvailable as e:
- logging.debug(str(e))
- return certified_list
-
- async def _unique_valid(self, cert_list, community):
- """
- Get the certifications in the blockchain and in the pools
- Get only unique and last certification for each pubkey
- :param list cert_list: The certifications list to filter
- :param sakia.core.community.Community community: The community target
- :return: The list of the certifiers of this community
- :rtype: list
- """
- unique_valid = []
- # add certifiers of uid
- for certifier in tuple(cert_list):
- # add only valid certification...
- try:
- cert_expired = await community.certification_expired(certifier['cert_time'])
- except NoPeerAvailable:
- logging.debug("No peer available")
- cert_expired = True
-
- if not certifier['block_number']:
- # add only valid certification...
- try:
- cert_writable = await community.certification_writable(certifier['cert_time'])
- except NoPeerAvailable:
- logging.debug("No peer available")
- cert_writable = False
- else:
- cert_writable = True
-
- if not cert_expired and cert_writable:
- # keep only the latest certification
- already_found = [c['identity'].pubkey for c in unique_valid]
- if certifier['identity'].pubkey in already_found:
- index = already_found.index(certifier['identity'].pubkey)
- if certifier['cert_time'] > unique_valid[index]['cert_time']:
- unique_valid[index] = certifier
- else:
- unique_valid.append(certifier)
- return unique_valid
-
- async def unique_valid_certifiers_of(self, identities_registry, community):
- """
- Get the certifications in the blockchain and in the pools
- Get only unique and last certification for each pubkey
- :param sakia.core.registry.identities.IdentitiesRegistry identities_registry: The identities registry
- :param sakia.core.community.Community community: The community target
- :return: The list of the certifiers of this community
- :rtype: list
- """
- certifier_list = await self.certifiers_of(identities_registry, community)
- return await self._unique_valid(certifier_list, community)
-
- async def unique_valid_certified_by(self, identities_registry, community):
- """
- Get the list of persons certified by this person, filtered to get only unique
- and valid certifications.
- :param sakia.core.registry.IdentitiesRegistry identities_registry: The registry
- :param sakia.core.community.Community community: The community target
- :return: The list of the certified persons of this community in BMA json format
- :rtype: list
- """
- certified_list = await self.certified_by(identities_registry, community)
- return await self._unique_valid(certified_list, community)
-
- async def identity_revocation_time(self, community):
- """
- Get the remaining time before identity implicit revocation
- :param sakia.core.Community community: the community
- :return: the remaining time
- :rtype: int
- """
- membership = await self.membership(community)
- join_block = membership['blockNumber']
- block = await community.get_block(join_block)
- join_date = block['medianTime']
- parameters = await community.parameters()
- # revocation date is join_date + 1 sigvalidity (expiration date) + 2*sigvalidity
- revocation_date = join_date + 3*parameters['sigValidity']
- current_time = time.time()
- return revocation_date - current_time
-
- async def membership_expiration_time(self, community):
- """
- Get the remaining time before membership expiration
- :param sakia.core.Community community: the community
- :return: the remaining time
- :rtype: int
- """
- membership = await self.membership(community)
- join_block = membership['blockNumber']
- block = await community.get_block(join_block)
- join_date = block['medianTime']
- parameters = await community.parameters()
- expiration_date = join_date + parameters['sigValidity']
- current_time = time.time()
- return expiration_date - current_time
-
- async def requirements(self, community):
- """
- Get the current requirements data.
- :param sakia.core.Community community: the community
- :return: the requirements
- :rtype: dict
- """
- try:
- requirements = await community.bma_access.future_request(bma.wot.Requirements,
- {'search': self.pubkey})
- for req in requirements['identities']:
- if req['pubkey'] == self.pubkey and req['uid'] == self.uid and \
- self._sigdate and \
- BlockUID.from_str(req['meta']['timestamp']) == self._sigdate:
- return req
- except errors.DuniterError as e:
- logging.debug(str(e))
- return None
-
- def _refresh_uid(self, uids):
- """
- Refresh UID from uids list, got from a successful lookup request
- :param list uids: UIDs got from a lookup request
- """
- timestamp = BlockUID.empty()
- if self.local_state == LocalState.NOT_FOUND:
- for uid_data in uids:
- if BlockUID.from_str(uid_data["meta"]["timestamp"]) >= timestamp:
- timestamp = BlockUID.from_str(uid_data["meta"]["timestamp"])
- identity_uid = uid_data["uid"]
- self.uid = identity_uid
- self.blockchain_state = BlockchainState.BUFFERED
- self.local_state = LocalState.PARTIAL
-
- def jsonify(self):
- """
-        Get the identity as a dict in json format.
-        :return: The identity as a dict in json format
- """
- data = {'uid': self.uid,
- 'pubkey': self.pubkey,
- 'sigdate': str(self._sigdate) if self._sigdate else None,
- 'local_state': self.local_state.name,
- 'blockchain_state': self.blockchain_state.name}
- return data
-
- def __str__(self):
- return "{uid} - {pubkey} - {sigdate} - {local} - {blockchain}".format(uid=self.uid,
- pubkey=self.pubkey,
- sigdate=self._sigdate,
- local=self.local_state,
- blockchain=self.blockchain_state)
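
Aside (illustrative sketch, not from the patch): the removed Identity._unique_valid helper reduces a certification list to at most one entry per certifier pubkey, dropping expired or non-writable certifications and keeping the most recent cert_time when a pubkey appears twice. The de-duplication step alone looks roughly like this, assuming each certification is a plain dict with 'pubkey' and 'cert_time' keys (expiry and writability checks omitted):

def latest_cert_per_pubkey(certifications):
    """Keep only the most recent certification for each certifier pubkey."""
    latest = {}
    for cert in certifications:
        current = latest.get(cert['pubkey'])
        if current is None or cert['cert_time'] > current['cert_time']:
            latest[cert['pubkey']] = cert
    return list(latest.values())

# Example: the second certification from "AAA" supersedes the first one.
certs = [{'pubkey': 'AAA', 'cert_time': 100},
         {'pubkey': 'BBB', 'cert_time': 120},
         {'pubkey': 'AAA', 'cert_time': 150}]
assert sorted(c['cert_time'] for c in latest_cert_per_pubkey(certs)) == [120, 150]
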
diff --git a/src/sakia/core/transfer.py b/src/sakia/core/transfer.py
deleted file mode 100644
index b1cd3048..00000000
--- a/src/sakia/core/transfer.py
+++ /dev/null
@@ -1,374 +0,0 @@
-"""
-Created on 31 Jan. 2015
-
-@author: inso
-"""
-import logging
-import time
-from duniterpy.api import bma
-from duniterpy.documents import Block, BlockUID
-from PyQt5.QtCore import pyqtSignal, QObject
-from enum import Enum
-
-
-class TransferState(Enum):
- """
- TO_SEND means the transaction wasn't sent yet
-    AWAITING means the transaction is waiting to reach K blockchain confirmations
- VALIDATED means the transaction was validated locally and is considered present in the blockchain
- REFUSED means the transaction took too long to be registered in the blockchain,
- therefore it is considered as refused
- DROPPED means the transaction was canceled locally. It can still be validated
- in the blockchain if it was sent, if the guy is unlucky ;)
- """
- TO_SEND = 0
- AWAITING = 1
- VALIDATING = 4
- VALIDATED = 2
- REFUSED = 3
- DROPPED = 5
-
-
-class Transfer(QObject):
- """
- A transfer is the lifecycle of a transaction.
- """
- transfer_broadcasted = pyqtSignal(str)
- broadcast_error = pyqtSignal(int, str)
-
- def __init__(self, sha_hash, state, blockUID, metadata, locally_created):
- """
- The constructor of a transfer.
- Check for metadata keys which must be present :
- - receiver
- - block
- - time
- - issuer
- - amount
- - comment
-
- :param str sha_hash: The hash of the transaction
- :param TransferState state: The state of the Transfer
- :param duniterpy.documents.BlockUID blockUID: The blockUID of the transaction in the blockchain
- :param dict metadata: The transfer metadata
- """
- assert('receiver' in metadata)
- assert('time' in metadata)
- assert('issuer' in metadata)
- assert('amount' in metadata)
- assert('comment' in metadata)
- assert('issuer_uid' in metadata)
- assert('receiver_uid' in metadata)
- assert('txid' in metadata)
- super().__init__()
-
- self.sha_hash = sha_hash
- self.state = state
- self.blockUID = blockUID
- self._locally_created = locally_created
- self._metadata = metadata
-
- # Dict containing states of a transfer :
-        # keys are a tuple containing (current_state, transition_parameters)
- # values are tuples containing (transition_test, transition_success, new_state)
- self._table_states = {
- (TransferState.TO_SEND, (list, Block)):
- (
- (self._broadcast_success, lambda l, b: self._wait(b), TransferState.AWAITING),
- (lambda l,b: self._broadcast_failure(l), None, TransferState.REFUSED),
- ),
- (TransferState.TO_SEND, ()):
- ((self._is_locally_created, self._drop, TransferState.DROPPED),),
-
- (TransferState.AWAITING, (bool, Block)):
- ((self._found_in_block, lambda r, b: self._be_validating(b), TransferState.VALIDATING),),
- (TransferState.AWAITING, (bool, Block, int, int)):
- ((self._not_found_in_blockchain, None, TransferState.REFUSED),),
-
- (TransferState.VALIDATING, (bool, Block, int)):
- ((self._reached_enough_confrmation, None, TransferState.VALIDATED),),
- (TransferState.VALIDATING, (bool, Block)):
- ((self._rollback_and_removed, lambda r, b: self._drop(), TransferState.DROPPED),),
-
- (TransferState.VALIDATED, (bool, Block, int)):
- ((self._rollback_in_fork_window, lambda r, b, i: self._be_validating(b), TransferState.VALIDATING),),
-
- (TransferState.VALIDATED, (bool, Block)):
- (
- (self._rollback_and_removed, lambda r, b: self._drop(), TransferState.DROPPED),
- (self._rollback_and_local, lambda r, b: self._wait(b), TransferState.AWAITING),
- ),
-
- (TransferState.REFUSED, ()):
- ((self._is_locally_created, self._drop, TransferState.DROPPED),)
- }
-
- @classmethod
- def initiate(cls, metadata):
- """
- Create a new transfer in a "TO_SEND" state.
- :param dict metadata: The computed metadata of the transfer
- :return: A new transfer
- :rtype: Transfer
- """
- return cls(None, TransferState.TO_SEND, None, metadata, True)
-
- @classmethod
- def create_from_blockchain(cls, hash, blockUID, metadata):
- """
- Create a new transfer sent from another sakia instance
- :param str hash: The transaction hash
- :param duniterpy.documents.BlockUID blockUID: The block id were we found the tx
- :param dict metadata: The computed metadata of the transaction
- :return: A new transfer
- :rtype: Transfer
- """
- return cls(hash, TransferState.VALIDATING, blockUID, metadata, False)
-
- @classmethod
- def load(cls, data):
- """
- Create a new transfer from a dict in json format.
- :param dict data: The loaded data
- :return: A new transfer
- :rtype: Transfer
- """
- return cls(data['hash'],
- TransferState[data['state']],
- BlockUID.from_str(data['blockUID']) if data['blockUID'] else None,
- data['metadata'], data['local'])
-
- def jsonify(self):
- """
- :return: The transfer as a dict in json format
- """
- return {'hash': self.sha_hash,
- 'state': self.state.name,
- 'blockUID': str(self.blockUID) if self.blockUID else None,
- 'metadata': self._metadata,
- 'local': self._locally_created}
-
- @property
- def metadata(self):
- """
- :return: this transfer metadata
- """
- return self._metadata
-
- def _not_found_in_blockchain(self, rollback, block, mediantime_target, mediantime_blocks):
- """
- Check if the transaction could not be found in the blockchain
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block block: The block to look for the tx
- :param int mediantime_target: The mediantime to mine a block in the community parameters
- :param int mediantime_blocks: The number of block used to derive the mediantime
- :return: True if the transaction could not be found in a given time
- :rtype: bool
- """
- if not rollback:
- for tx in block.transactions:
- if tx.sha_hash == self.sha_hash:
- return False
- if block.time > self.metadata['time'] + mediantime_target*mediantime_blocks:
- return True
- return False
-
- def _found_in_block(self, rollback, block):
- """
- Check if the transaction can be found in the blockchain
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block block: The block to check for the transaction
- :return: True if the transaction was found
- :rtype: bool
- """
- if not rollback:
- for tx in block.transactions:
- if tx.sha_hash == self.sha_hash:
- return True
- return False
-
- def _broadcast_success(self, ret_codes, block):
- """
- Check if the retcode is 200 after a POST
- :param list ret_codes: The POST return codes of the broadcast
- :param duniterpy.documents.Block block: The current block used for transition.
- :return: True if the post was successful
- :rtype: bool
- """
- return 200 in ret_codes
-
- def _broadcast_failure(self, ret_codes):
- """
- Check if no retcode is 200 after a POST
- :param list ret_codes: The POST return codes of the broadcast
- :return: True if the post was failed
- :rtype: bool
- """
- return 200 not in ret_codes
-
- def _reached_enough_confrmation(self, rollback, current_block, fork_window):
- """
-        Check if the transfer reached enough confirmations in the blockchain
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block current_block: The current block of the main blockchain
-        :param int fork_window: The number of confirmations needed on the network
-        :return: True if the transfer reached enough confirmations
- :rtype: bool
- """
- return not rollback and self.blockUID.number + fork_window <= current_block.number
-
- def _rollback_and_removed(self, rollback, block):
- """
- Check if the transfer is not in the block anymore
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block block: The block to check for the transaction
- :return: True if the transfer is not found in the block
- """
- if rollback:
- if not block or block.blockUID != self.blockUID:
- return True
- else:
- return self.sha_hash not in [t.sha_hash for t in block.transactions]
- return False
-
- def _rollback_in_fork_window(self, rollback, current_block, fork_window):
- """
- Check if the transfer is not in the block anymore
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block current_block: The block to check for the transaction
- :return: True if the transfer is found in the block
- """
- if rollback:
- return self.blockUID.number + fork_window > current_block.number
- return False
-
- def _rollback_and_local(self, rollback, block):
- """
- Check if the transfer is not in the block anymore
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block block: The block to check for the transaction
- :return: True if the transfer is found in the block
- """
- if rollback and self._locally_created and block.blockUID == self.blockUID:
- return self.sha_hash not in [t.sha_hash for t in block.transactions]
- return False
-
- def _is_locally_created(self):
- """
- Check if we can send back the transaction if it was locally created
- :return: True if the transaction was locally created
- """
- return self._locally_created
-
- def _wait(self, current_block):
- """
-        Set the transfer as AWAITING confirmation.
- :param duniterpy.documents.Block current_block: Current block of the main blockchain
- """
- self.blockUID = current_block.blockUID
- self._metadata['time'] = int(time.time())
-
- def _be_validating(self, block):
- """
-        Action when the transfer is found in a block
-
- :param bool rollback: True if we are in a rollback procedure
- :param duniterpy.documents.Block block: The block checked
- """
- self.blockUID = block.blockUID
- self._metadata['time'] = block.mediantime
-
- def _drop(self):
- """
- Cancel the transfer locally.
- The transfer state becomes TransferState.DROPPED.
- """
- self.blockUID = None
-
- def _try_transition(self, transition_key, inputs):
- """
- Try the transition defined by the given transition_key
- with inputs
- :param tuple transition_key: The transition key in the table states
- :param tuple inputs: The inputs
- :return: True if the transition was applied
- :rtype: bool
- """
- if len(inputs) == len(transition_key[1]):
- for i, input in enumerate(inputs):
- if type(input) is not transition_key[1][i]:
- return False
- for transition in self._table_states[transition_key]:
- if transition[0](*inputs):
- if self.sha_hash:
- logging.debug("{0} : {1} --> {2}".format(self.sha_hash[:5], self.state.name,
- transition[2].name))
- else:
- logging.debug("Unsent transfer : {0} --> {1}".format(self.state.name,
- transition[2].name))
-
- # If the transition changes data, apply changes
- if transition[1]:
- transition[1](*inputs)
- self.state = transition[2]
- return True
- return False
-
- def run_state_transitions(self, inputs):
- """
- Try all current state transitions with inputs
- :param tuple inputs: The inputs passed to the transitions
- :return: True if the transaction changed state
- :rtype: bool
- """
- transition_keys = [k for k in self._table_states.keys() if k[0] == self.state]
- for key in transition_keys:
- if self._try_transition(key, inputs):
- return True
- return False
-
- def cancel(self):
- """
- Cancel a local transaction
- """
- self.run_state_transitions(())
-
- async def send(self, txdoc, community):
- """
- Send a transaction and update the transfer state to AWAITING if accepted.
- If the transaction was refused (return code != 200), state becomes REFUSED
- The txdoc is saved as the transfer txdoc.
-
- :param txdoc: A transaction duniterpy object
- :param community: The community target of the transaction
- """
- self.sha_hash = txdoc.sha_hash
- responses = await community.bma_access.broadcast(bma.tx.Process,
- post_args={'transaction': txdoc.signed_raw()})
- blockUID = community.network.current_blockUID
- block = await community.bma_access.future_request(bma.blockchain.Block,
- req_args={'number': blockUID.number})
- signed_raw = "{0}{1}\n".format(block['raw'], block['signature'])
- block_doc = Block.from_signed_raw(signed_raw)
- result = (False, "")
- for r in responses:
- if r.status == 200:
- result = (True, (await r.json()))
- elif not result[0]:
- result = (False, (await r.text()))
- else:
- await r.text()
- self.run_state_transitions(([r.status for r in responses], block_doc))
- self.run_state_transitions(([r.status for r in responses], ))
- return result
-
- async def get_raw_document(self, community):
- """
- Get the raw documents of this transfer
- """
- block = await community.get_block(self.blockUID.number)
- if block:
- block_doc = Block.from_signed_raw("{0}{1}\n".format(block['raw'], block['signature']))
- for tx in block_doc.transactions:
- if tx.sha_hash == self.sha_hash:
- return tx
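
Aside (illustrative sketch, not from the patch): the removed Transfer class drives its lifecycle from a transition table keyed by (current state, expected input types); each entry holds (test, action, next_state) tuples, and run_state_transitions applies the first transition whose test passes for the given inputs. A self-contained miniature of that pattern, with made-up states and only a broadcast-result transition:

from enum import Enum

class State(Enum):
    TO_SEND = 0
    AWAITING = 1
    REFUSED = 2

class TinyTransfer:
    def __init__(self):
        self.state = State.TO_SEND
        # keys: (current state, expected input types)
        # values: tuples of (test, action, next state)
        self._table = {
            (State.TO_SEND, (list,)): (
                (lambda codes: 200 in codes, None, State.AWAITING),
                (lambda codes: 200 not in codes, None, State.REFUSED),
            ),
        }

    def run_state_transitions(self, inputs):
        for (state, types), transitions in self._table.items():
            if state is not self.state or len(inputs) != len(types):
                continue
            if any(not isinstance(value, expected) for value, expected in zip(inputs, types)):
                continue
            for test, action, next_state in transitions:
                if test(*inputs):
                    if action:
                        action(*inputs)
                    self.state = next_state
                    return True
        return False

transfer = TinyTransfer()
transfer.run_state_transitions(([200, 404],))  # at least one node accepted the broadcast
assert transfer.state is State.AWAITING
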
diff --git a/src/sakia/core/txhistory.py b/src/sakia/core/txhistory.py
deleted file mode 100644
index 7b8674e9..00000000
--- a/src/sakia/core/txhistory.py
+++ /dev/null
@@ -1,425 +0,0 @@
-import asyncio
-import logging
-import hashlib
-import math
-from duniterpy.documents import SimpleTransaction, Block, MalformedDocumentError
-from duniterpy.api import bma, errors
-from .transfer import Transfer, TransferState
-from sakia.errors import NoPeerAvailable
-
-
-class TxHistory:
- def __init__(self, app, wallet):
- self._latest_block = 0
- self.wallet = wallet
- self.app = app
- self._stop_coroutines = False
- self._running_refresh = []
- self._transfers = []
- self.available_sources = []
- self._dividends = []
-
- @property
- def latest_block(self):
- return self._latest_block
-
- @latest_block.setter
- def latest_block(self, value):
- self._latest_block = value
-
- def load_from_json(self, data, version):
- """
- Load the tx history cache from json data
-
- :param dict data: the data
- :param version: the version parsed with pkg_resources.parse_version
- :return:
- """
- self._transfers = []
-
- data_sent = data['transfers']
- for s in data_sent:
- self._transfers.append(Transfer.load(s))
-
- for s in data['sources']:
- self.available_sources.append(s.copy())
-
- for d in data['dividends']:
- d['state'] = TransferState[d['state']]
- self._dividends.append(d)
-
- self.latest_block = data['latest_block']
-
- def jsonify(self):
- data_transfer = []
- for s in self.transfers:
- data_transfer.append(s.jsonify())
-
- data_sources = []
- for s in self.available_sources:
- data_sources.append(s)
-
- data_dividends = []
- for d in self._dividends:
- dividend = {
- 'block_number': d['block_number'],
- "consumed": d['consumed'],
- 'time': d['time'],
- 'id': d['id'],
- 'amount': d['amount'],
- 'base': d['base'],
- 'state': d['state'].name
- }
- data_dividends.append(dividend)
-
- return {'latest_block': self.latest_block,
- 'transfers': data_transfer,
- 'sources': data_sources,
- 'dividends': data_dividends}
-
- @property
- def transfers(self):
- return [t for t in self._transfers if t.state != TransferState.DROPPED]
-
- @property
- def dividends(self):
- return self._dividends.copy()
-
- def stop_coroutines(self, closing=False):
- self._stop_coroutines = True
-
- async def _get_block_doc(self, community, number):
- """
- Retrieve the current block document
- :param sakia.core.Community community: The community we look for a block
- :param int number: The block number to retrieve
- :return: the block doc or None if no block was found
- """
- tries = 0
- block_doc = None
- block = None
- while block is None and tries < 3:
- try:
- block = await community.bma_access.future_request(bma.blockchain.Block,
- req_args={'number': number})
- signed_raw = "{0}{1}\n".format(block['raw'],
- block['signature'])
- try:
- block_doc = Block.from_signed_raw(signed_raw)
- except TypeError:
- logging.debug("Error in {0}".format(number))
- block = None
- tries += 1
- except errors.DuniterError as e:
- if e.ucode == errors.BLOCK_NOT_FOUND:
- block = None
- tries += 1
- return block_doc
-
- async def _parse_transaction(self, community, tx, blockUID,
- mediantime, received_list, txid):
- """
- Parse a transaction
- :param sakia.core.Community community: The community
- :param duniterpy.documents.Transaction tx: The tx json data
- :param duniterpy.documents.BlockUID blockUID: The block id where we found the tx
- :param int mediantime: Median time on the network
- :param list received_list: The list of received transactions
- :param int txid: The latest txid
- :return: the found transaction
- """
- receivers = [o.conditions.left.pubkey for o in tx.outputs
- if o.conditions.left.pubkey != tx.issuers[0]]
-
- if len(receivers) == 0:
- receivers = [tx.issuers[0]]
-
- try:
- issuer = await self.wallet._identities_registry.future_find(tx.issuers[0], community)
- issuer_uid = issuer.uid
- except LookupFailureError:
- issuer_uid = ""
-
- try:
- receiver = await self.wallet._identities_registry.future_find(receivers[0], community)
- receiver_uid = receiver.uid
- except LookupFailureError:
- receiver_uid = ""
-
- metadata = {
- 'time': mediantime,
- 'comment': tx.comment,
- 'issuer': tx.issuers[0],
- 'issuer_uid': issuer_uid,
- 'receiver': receivers[0],
- 'receiver_uid': receiver_uid,
- 'txid': txid
- }
-
- in_issuers = len([i for i in tx.issuers
- if i == self.wallet.pubkey]) > 0
- in_outputs = len([o for o in tx.outputs
- if o.conditions.left.pubkey == self.wallet.pubkey]) > 0
-
- tx_hash = hashlib.sha256(tx.signed_raw().encode("ascii")).hexdigest().upper()
- # If the wallet pubkey is in the issuers we sent this transaction
- if in_issuers:
- outputs = [o for o in tx.outputs
- if o.conditions.left.pubkey != self.wallet.pubkey]
- amount = 0
- for o in outputs:
- amount += o.amount * math.pow(10, o.base)
- metadata['amount'] = amount
- transfer = Transfer.create_from_blockchain(tx_hash,
- blockUID,
- metadata.copy())
- return transfer
- # If we are not in the issuers,
- # maybe we are in the recipients of this transaction
- elif in_outputs:
- outputs = [o for o in tx.outputs
- if o.conditions.left.pubkey == self.wallet.pubkey]
- amount = 0
- for o in outputs:
- amount += o.amount * math.pow(10, o.base)
- metadata['amount'] = amount
-
- transfer = Transfer.create_from_blockchain(tx_hash,
- blockUID,
- metadata.copy())
- received_list.append(transfer)
- return transfer
- return None
-
- async def _parse_block(self, community, block_number, received_list, txmax):
- """
- Parse a block
- :param sakia.core.Community community: The community
- :param int block_number: The block to request
- :param list received_list: The list where we are appending transactions
- :param int txmax: Latest tx id
- :return: The list of transfers sent
- """
- block_doc = await self._get_block_doc(community, block_number)
- transfers = []
- if block_doc:
- for transfer in [t for t in self._transfers if t.state == TransferState.AWAITING]:
- transfer.run_state_transitions((False, block_doc))
-
- new_tx = [t for t in block_doc.transactions
- if t.sha_hash not in [trans.sha_hash for trans in self._transfers]
- and SimpleTransaction.is_simple(t)]
-
- for (txid, tx) in enumerate(new_tx):
- transfer = await self._parse_transaction(community, tx, block_doc.blockUID,
- block_doc.mediantime, received_list, txid+txmax)
- if transfer:
- #logging.debug("Transfer amount : {0}".format(transfer.metadata['amount']))
- transfers.append(transfer)
- else:
- pass
- #logging.debug("None transfer")
- else:
- logging.debug("Could not find or parse block {0}".format(block_number))
- return transfers
-
- async def request_dividends(self, community, parsed_block):
- for i in range(0, 6):
- try:
- dividends_data = await community.bma_access.future_request(bma.ud.History,
- req_args={'pubkey': self.wallet.pubkey})
-
- dividends = dividends_data['history']['history'].copy()
-
- for d in dividends:
- if d['block_number'] < parsed_block:
- dividends.remove(d)
- return dividends
- except errors.DuniterError as e:
- if e.ucode == errors.BLOCK_NOT_FOUND:
- pass
- return {}
-
- async def _refresh(self, community, block_number_from, block_to, received_list):
- """
- Refresh last transactions
-
- :param sakia.core.Community community: The community
- :param list received_list: List of transactions received
- """
- new_transfers = []
- new_dividends = []
- try:
- logging.debug("Refresh from : {0} to {1}".format(block_number_from, block_to['number']))
- dividends = await self.request_dividends(community, block_number_from)
- with_tx_data = await community.bma_access.future_request(bma.blockchain.TX)
- blocks_with_tx = with_tx_data['result']['blocks']
- while block_number_from <= block_to['number']:
- udid = 0
- for d in [ud for ud in dividends if ud['block_number'] == block_number_from]:
- state = TransferState.VALIDATED if block_number_from + MAX_CONFIRMATIONS <= block_to['number'] \
- else TransferState.VALIDATING
-
- if d['block_number'] not in [ud['block_number'] for ud in self._dividends]:
- d['id'] = udid
- d['state'] = state
- new_dividends.append(d)
-
- udid += 1
- else:
- known_dividend = [ud for ud in self._dividends
- if ud['block_number'] == d['block_number']][0]
- known_dividend['state'] = state
-
- # We parse only blocks with transactions
- if block_number_from in blocks_with_tx:
- transfers = await self._parse_block(community, block_number_from,
- received_list,
- udid + len(new_transfers))
- new_transfers += transfers
-
- self.wallet.refresh_progressed.emit(block_number_from, block_to['number'], self.wallet.pubkey)
- block_number_from += 1
-
- signed_raw = "{0}{1}\n".format(block_to['raw'],
- block_to['signature'])
- block_to = Block.from_signed_raw(signed_raw)
- for transfer in [t for t in self._transfers + new_transfers if t.state == TransferState.VALIDATING]:
- transfer.run_state_transitions((False, block_to, MAX_CONFIRMATIONS))
-
- # We check if latest parsed block_number is a new high number
- if block_number_from > self.latest_block:
- self.available_sources = await self.wallet.sources(community)
- if self._stop_coroutines:
- return
- self.latest_block = block_number_from
-
- parameters = await community.parameters()
- for transfer in [t for t in self._transfers if t.state == TransferState.AWAITING]:
- transfer.run_state_transitions((False, block_to,
- parameters['avgGenTime'], parameters['medianTimeBlocks']))
- except (MalformedDocumentError, NoPeerAvailable) as e:
- logging.debug(str(e))
- self.wallet.refresh_finished.emit([])
- return
-
- self._transfers = self._transfers + new_transfers
- self._dividends = self._dividends + new_dividends
-
- self.wallet.refresh_finished.emit(received_list)
-
- async def _check_block(self, community, block_number):
- """
- Parse a block
- :param sakia.core.Community community: The community
- :param int block_number: The block to check for transfers
- """
- block_doc = await self._get_block_doc(community, block_number)
- if block_doc:
- # We check the block dividend state
- match = [d for d in self._dividends if d['block_number'] == block_number]
- if len(match) > 0:
- if block_doc.ud:
- match[0]['amount'] = block_doc.ud
- match[0]['base'] = block_doc.unit_base
- else:
- self._dividends.remove(match[0])
-
- # We check if transactions are still present
- for transfer in [t for t in self._transfers
- if t.state in (TransferState.VALIDATING, TransferState.VALIDATED) and
- t.blockUID.number == block_number]:
- if transfer.blockUID.sha_hash == block_doc.blockUID.sha_hash:
- return True
- transfer.run_state_transitions((True, block_doc))
- else:
- logging.debug("Could not get block document")
- return False
-
- async def _rollback(self, community):
- """
- Rollback last transactions until we find one still present
- in the main blockchain
-
- :param sakia.core.Community community: The community
- """
- try:
- logging.debug("Rollback from : {0}".format(self.latest_block))
- # We look for the block goal to check for rollback,
- # depending on validating and validated transfers...
- tx_blocks = [tx.blockUID.number for tx in self._transfers
- if tx.state in (TransferState.VALIDATED, TransferState.VALIDATING) and
- tx.blockUID is not None]
- ud_blocks = [ud['block_number'] for ud in self._dividends
- if ud['state'] in (TransferState.AWAITING, TransferState.VALIDATING)]
- blocks = tx_blocks + ud_blocks
- blocks.reverse()
- for i, block_number in enumerate(blocks):
- self.wallet.refresh_progressed.emit(i, len(blocks), self.wallet.pubkey)
- if await self._check_block(community, block_number):
- break
-
- current_block = await self._get_block_doc(community, community.network.current_blockUID.number)
- if current_block:
- members_pubkeys = await community.members_pubkeys()
- for transfer in [t for t in self._transfers
- if t.state == TransferState.VALIDATED]:
- transfer.run_state_transitions((True, current_block, MAX_CONFIRMATIONS))
- except NoPeerAvailable:
- logging.debug("No peer available")
-
- async def refresh(self, community, received_list):
- # We update the block goal
- try:
- current_block_number = community.network.current_blockUID.number
- if current_block_number:
- current_block = await community.bma_access.future_request(bma.blockchain.Block,
- req_args={'number': current_block_number})
- members_pubkeys = await community.members_pubkeys()
- # We look for the first block to parse, depending on awaiting and validating transfers and ud...
- tx_blocks = [tx.blockUID.number for tx in self._transfers
- if tx.state in (TransferState.AWAITING, TransferState.VALIDATING) \
- and tx.blockUID is not None]
- ud_blocks = [ud['block_number'] for ud in self._dividends
- if ud['state'] in (TransferState.AWAITING, TransferState.VALIDATING)]
- blocks = tx_blocks + ud_blocks + \
- [max(0, self.latest_block - MAX_CONFIRMATIONS)]
- block_from = min(set(blocks))
-
- await self._wait_for_previous_refresh()
- if block_from < current_block["number"]:
- # Then we start a new one
- logging.debug("Starts a new refresh")
- task = asyncio.ensure_future(self._refresh(community, block_from, current_block, received_list))
- self._running_refresh.append(task)
- except errors.DuniterError as e:
- if e.ucode == errors.BLOCK_NOT_FOUND:
- logging.debug("Block not found")
- except NoPeerAvailable:
- logging.debug("No peer available")
-
- async def rollback(self, community, received_list):
- await self._wait_for_previous_refresh()
- # Then we start a new one
- logging.debug("Starts a new rollback")
- task = asyncio.ensure_future(self._rollback(community))
- self._running_refresh.append(task)
-
- # Then we start a refresh to check for new transactions
- await self.refresh(community, received_list)
-
- async def _wait_for_previous_refresh(self):
- # We wait for current refresh coroutines
- if len(self._running_refresh) > 0:
- logging.debug("Wait for the end of previous refresh")
- done, pending = await asyncio.wait(self._running_refresh)
- for cor in done:
- try:
- self._running_refresh.remove(cor)
- except ValueError:
- logging.debug("Task already removed.")
- for p in pending:
- logging.debug("Still waiting for : {0}".format(p))
- logging.debug("Previous refresh finished")
- else:
- logging.debug("No previous refresh")
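
Aside (illustrative, not from the patch): both the removed txhistory refresh loop and the Transfer state machine treat a transaction as definitively validated once it sits deep enough in the chain, i.e. once the current head is at least a full fork window (MAX_CONFIRMATIONS in the removed code) beyond the block that contains it. The rule itself is a single comparison:

def has_enough_confirmations(tx_block_number, head_block_number, fork_window):
    """True once fork_window further blocks exist on top of the transaction's block."""
    return tx_block_number + fork_window <= head_block_number

assert has_enough_confirmations(100, 106, fork_window=6)
assert not has_enough_confirmations(100, 105, fork_window=6)
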
diff --git a/src/sakia/core/wallet.py b/src/sakia/core/wallet.py
deleted file mode 100644
index 19fa06c1..00000000
--- a/src/sakia/core/wallet.py
+++ /dev/null
@@ -1,420 +0,0 @@
-"""
-Created on 1 Feb. 2014
-
-@author: inso
-"""
-
-from duniterpy.documents.transaction import InputSource, OutputSource, Unlock, SIGParameter, Transaction, reduce_base
-from duniterpy.grammars import output
-from duniterpy.key import SigningKey
-
-from duniterpy.api import bma
-from duniterpy.api.bma import PROTOCOL_VERSION
-from sakia.errors import NoPeerAvailable
-from .transfer import Transfer
-from .txhistory import TxHistory
-from .. import __version__
-
-from pkg_resources import parse_version
-from PyQt5.QtCore import QObject, pyqtSignal
-
-import logging
-import asyncio
-
-
-class Wallet(QObject):
- """
- A wallet is used to manage money with a unique key.
- """
- refresh_progressed = pyqtSignal(int, int, str)
- refresh_finished = pyqtSignal(list)
-
- def __init__(self, walletid, pubkey, name, identities_registry):
- """
- Constructor of a wallet object
-
- :param int walletid: The wallet number, unique between all wallets
- :param str pubkey: The wallet pubkey
- :param str name: The wallet name
- """
- super().__init__()
- self.coins = []
- self.walletid = walletid
- self.pubkey = pubkey
- self.name = name
- self._identities_registry = identities_registry
- self.caches = {}
-
- @classmethod
- def create(cls, walletid, salt, password, name, identities_registry):
- """
- Factory method to create a new wallet
-
- :param int walletid: The wallet number, unique between all wallets
- :param str salt: The account salt
- :param str password: The account password
- :param str name: The account name
- """
- if walletid == 0:
- key = SigningKey(salt, password)
- else:
- key = SigningKey(b"{0}{1}".format(salt, walletid), password)
- return cls(walletid, key.pubkey, name, identities_registry)
-
- @classmethod
- def load(cls, json_data, identities_registry):
- """
- Factory method to load a saved wallet.
-
- :param dict json_data: The wallet as a dict in json format
- """
- walletid = json_data['walletid']
- pubkey = json_data['pubkey']
- name = json_data['name']
- return cls(walletid, pubkey, name, identities_registry)
-
- def load_caches(self, app, json_data):
- """
- Load this wallet caches.
- Each cache correspond to one different community.
-
- :param dict json_data: The caches as a dict in json format
- """
- version = parse_version(json_data['version'])
- for currency in json_data:
- if currency != 'version':
- self.caches[currency] = TxHistory(app, self)
- if version >= parse_version("0.20.dev0"):
- self.caches[currency].load_from_json(json_data[currency], version)
-
- def jsonify_caches(self):
- """
- Get this wallet caches as json.
-
- :return: The wallet caches as a dict in json format
- """
- data = {}
- for currency in self.caches:
- data[currency] = self.caches[currency].jsonify()
- return data
-
- def init_cache(self, app, community):
- """
- Init the cache of this wallet for the specified community.
-
- :param community: The community to refresh its cache
- """
- if community.currency not in self.caches:
- self.caches[community.currency] = TxHistory(app, self)
-
- def refresh_transactions(self, community, received_list):
- """
- Refresh the cache of this wallet for the specified community.
-
- :param community: The community to refresh its cache
- """
- logging.debug("Refresh transactions for {0}".format(self.pubkey))
- asyncio.ensure_future(self.caches[community.currency].refresh(community, received_list))
-
- def rollback_transactions(self, community, received_list):
- """
- Rollback the transactions of this wallet for the specified community.
-
- :param community: The community to refresh its cache
- """
- logging.debug("Refresh transactions for {0}".format(self.pubkey))
- asyncio.ensure_future(self.caches[community.currency].rollback(community, received_list))
-
- def check_password(self, salt, password):
- """
- Check if wallet password is ok.
-
- :param salt: The account salt
- :param password: The given password
- :return: True if (salt, password) generates the good public key
- .. warning:: Generates a new temporary SigningKey from salt and password
- """
- key = None
- if self.walletid == 0:
- key = SigningKey(salt, password)
- else:
- key = SigningKey("{0}{1}".format(salt, self.walletid), password)
- return (key.pubkey == self.pubkey)
-
- async def relative_value(self, community):
- """
- Get wallet value relative to last generated UD
-
- :param community: The community to get value
- :return: The wallet relative value
- """
- value = await self.value(community)
- ud = community.dividend
- relative_value = value / float(ud)
- return relative_value
-
- async def value(self, community):
- """
- Get wallet absolute value
-
- :param community: The community to get value
- :return: The wallet absolute value
- """
- value = 0
- sources = await self.sources(community)
- for s in sources:
- value += s['amount'] * pow(10, s['base'])
- return value
-
- def tx_sources(self, amount, community):
- """
- Get inputs to generate a transaction with a given amount of money
-
- :param int amount: The amount target value
- :param community: The community target of the transaction
-
- :return: The list of inputs to use in the transaction document
- """
-
- # such a dirty algorithmm
- # everything should be done again from scratch
- # in future versions
-
- def current_value(inputs, overhs):
- i = 0
- for s in inputs:
- i += s['amount'] * (10**s['base'])
- for o in overhs:
- i -= o[0] * (10**o[1])
- return i
-
- amount, amount_base = reduce_base(amount, 0)
- cache = self.caches[community.currency]
- current_base = max([src['base'] for src in cache.available_sources])
- value = 0
- sources = []
- outputs = []
- overheads = []
- buf_sources = list(cache.available_sources)
- while current_base >= 0:
- for s in [src for src in cache.available_sources if src['base'] == current_base]:
- test_sources = sources + [s]
- val = current_value(test_sources, overheads)
- # if we have to compute an overhead
- if current_value(test_sources, overheads) > amount * (10**amount_base):
- overhead = current_value(test_sources, overheads) - int(amount) * (10**amount_base)
- # we round the overhead in the current base
- # exemple : 12 in base 1 -> 1*10^1
- overhead = int(round(float(overhead) / (10**current_base)))
- source_value = s['amount'] * (10**s['base'])
- out = int((source_value - (overhead * (10**current_base)))/(10**current_base))
- if out * (10**current_base) <= amount * (10**amount_base):
- sources.append(s)
- buf_sources.remove(s)
- overheads.append((overhead, current_base))
- outputs.append((out, current_base))
- # else just add the output
- else:
- sources.append(s)
- buf_sources.remove(s)
- outputs.append((s['amount'] , s['base']))
- if current_value(sources, overheads) == amount * (10 ** amount_base):
- return sources, outputs, overheads, buf_sources
-
- current_base -= 1
-
- raise NotEnoughMoneyError(value, community.currency,
- len(sources), amount * pow(10, amount_base))
-
- def tx_inputs(self, sources):
- """
- Get inputs to generate a transaction with a given amount of money
-
- :param list sources: The sources used to send the given amount of money
-
- :return: The list of inputs to use in the transaction document
- """
- inputs = []
- for s in sources:
- inputs.append(InputSource(s['amount'], s['base'], s['type'], s['identifier'], s['noffset']))
- return inputs
-
- def tx_unlocks(self, sources):
- """
- Get unlocks to generate a transaction with a given amount of money
-
- :param list sources: The sources used to send the given amount of money
-
- :return: The list of unlocks to use in the transaction document
- """
- unlocks = []
- for i, s in enumerate(sources):
- unlocks.append(Unlock(i, [SIGParameter(0)]))
- return unlocks
-
- def tx_outputs(self, pubkey, outputs, overheads):
- """
- Get outputs to generate a transaction with a given amount of money
-
- :param str pubkey: The target pubkey of the transaction
- :param list outputs: The amount to send
- :param list inputs: The inputs used to send the given amount of money
- :param list overheads: The overheads used to send the given amount of money
-
- :return: The list of outputs to use in the transaction document
- """
- total = []
- outputs_bases = set(o[1] for o in outputs)
- for base in outputs_bases:
- output_sum = 0
- for o in outputs:
- if o[1] == base:
- output_sum += o[0]
- total.append(OutputSource(output_sum, base, output.Condition.token(output.SIG.token(pubkey))))
-
- overheads_bases = set(o[1] for o in overheads)
- for base in overheads_bases:
- overheads_sum = 0
- for o in overheads:
- if o[1] == base:
- overheads_sum += o[0]
- total.append(OutputSource(overheads_sum, base, output.Condition.token(output.SIG.token(self.pubkey))))
-
- return total
-
- def prepare_tx(self, pubkey, blockstamp, amount, message, community):
- """
- Prepare a simple Transaction document
- :param str pubkey: the target of the transaction
- :param duniterpy.documents.BlockUID blockstamp: the blockstamp
- :param int amount: the amount sent to the receiver
- :param Community community: the target community
- :return: the transaction document
- :rtype: duniterpy.documents.Transaction
- """
- result = self.tx_sources(int(amount), community)
- sources = result[0]
- computed_outputs = result[1]
- overheads = result[2]
- self.caches[community.currency].available_sources = result[3][1:]
- logging.debug("Inputs : {0}".format(sources))
-
- inputs = self.tx_inputs(sources)
- unlocks = self.tx_unlocks(sources)
- outputs = self.tx_outputs(pubkey, computed_outputs, overheads)
- logging.debug("Outputs : {0}".format(outputs))
- tx = Transaction(PROTOCOL_VERSION, community.currency, blockstamp, 0,
- [self.pubkey], inputs, unlocks,
- outputs, message, None)
- return tx
-
- async def send_money(self, salt, password, community,
- recipient, amount, message):
- """
- Send money to a given recipient in a specified community
-
- :param str salt: The account salt
- :param str password: The account password
- :param community: The community target of the transfer
- :param str recipient: The pubkey of the recipient
- :param int amount: The amount of money to transfer
- :param str message: The message to send with the transfer
- """
- try:
- blockUID = community.network.current_blockUID
- block = await community.bma_access.future_request(bma.blockchain.Block,
- req_args={'number': blockUID.number})
- except ValueError as e:
- if '404' in str(e):
- return False, "Could not send transfer with null blockchain"
-
- time = block['medianTime']
- txid = len(block['transactions'])
- if self.walletid == 0:
- key = SigningKey(salt, password)
- else:
- key = SigningKey("{0}{1}".format(salt, self.walletid), password)
- logging.debug("Sender pubkey:{0}".format(key.pubkey))
-
- issuer = await self._identities_registry.future_find(key.pubkey, community)
- issuer_uid = issuer.uid
-
- receiver = await self._identities_registry.future_find(recipient, community)
- receiver_uid = receiver.uid
-
- metadata = {'block': None,
- 'time': time,
- 'amount': amount,
- 'issuer': key.pubkey,
- 'issuer_uid': issuer_uid,
- 'receiver': recipient,
- 'receiver_uid': receiver_uid,
- 'comment': message,
- 'txid': txid
- }
- transfer = Transfer.initiate(metadata)
- self.caches[community.currency]._transfers.append(transfer)
- try:
- tx = self.prepare_tx(recipient, blockUID, amount, message, community)
- logging.debug("TX : {0}".format(tx.raw()))
-
- tx.sign([key])
- logging.debug("Transaction : [{0}]".format(tx.signed_raw()))
- return await transfer.send(tx, community)
- except NotEnoughMoneyError as e:
- return (False, str(e))
-
- async def sources(self, community):
- """
- Get available sources in a given community
-
- :param sakia.core.community.Community community: The community where we want available sources
- :return: List of bma sources
- """
- sources = []
- try:
- data = await community.bma_access.future_request(bma.tx.Sources,
- req_args={'pubkey': self.pubkey})
- return data['sources'].copy()
- except NoPeerAvailable as e:
- logging.debug(str(e))
- return sources
-
- def transfers(self, community):
- """
- Get all transfers objects of this wallet
-
- :param community: The community we want to get the executed transfers
- :return: A list of Transfer objects
- """
- if community.currency in self.caches:
- return self.caches[community.currency].transfers
- else:
- return []
-
- def dividends(self, community):
- """
- Get all the dividends received by this wallet
-
- :param community: The community we want to get received dividends
- :return: Result of udhistory request
- """
- if community.currency in self.caches:
- return self.caches[community.currency].dividends
- else:
- return []
-
- def stop_coroutines(self, closing=False):
- for c in self.caches.values():
- c.stop_coroutines(closing)
-
- def jsonify(self):
- """
- Get the wallet as json format.
-
- :return: The wallet as a dict in json format.
- """
- return {'walletid': self.walletid,
- 'pubkey': self.pubkey,
- 'name': self.name,
- 'version': __version__}
diff --git a/src/sakia/data/connectors/bma.py b/src/sakia/data/connectors/bma.py
index 8030fc67..10eba5d6 100644
--- a/src/sakia/data/connectors/bma.py
+++ b/src/sakia/data/connectors/bma.py
@@ -29,9 +29,9 @@ class BmaConnector:
else:
return True
filters = {
- bma.ud.History: lambda n: compare_versions(n, "0.11.0"),
- bma.tx.History: lambda n: compare_versions(n, "0.11.0"),
- bma.blockchain.Membership: lambda n: compare_versions(n, "0.14")
+ bma.ud.history: lambda n: compare_versions(n, "0.11.0"),
+ bma.tx.history: lambda n: compare_versions(n, "0.11.0"),
+ bma.blockchain.membership: lambda n: compare_versions(n, "0.14")
}
if request in filters:
nodes = [n for n in nodes if filters[request](n)]
@@ -55,11 +55,10 @@ class BmaConnector:
tries = 0
while tries < 3:
endpoint = random.choice(endpoints)
- req = request(endpoint.conn_handler(), **req_args)
try:
- self._logger.debug("Requesting {0} on endpoint {1}".format(str(req), str(endpoint)))
+ self._logger.debug("Requesting {0} on endpoint {1}".format(str(request.__name__), str(endpoint)))
with aiohttp.ClientSession() as session:
- json_data = await req.get(**get_args, session=session)
+ json_data = await request(endpoint.conn_handler(session), **req_args)
return json_data
except (ClientError, ServerDisconnectedError, gaierror,
asyncio.TimeoutError, ValueError, jsonschema.ValidationError) as e:
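
The hunk above switches to the duniterpy dev calling convention: BMA requests are module-level coroutines (bma.blockchain.block, bma.tx.history, ...) called with a connection handler that already carries the aiohttp session, instead of request classes with a separate .get() call. A minimal caller sketch under that assumption, mirroring only the calls used in this patch (illustrative, not part of the patch):

    async def fetch_block(endpoint, number):
        # endpoint is a duniterpy BMAEndpoint; the handler is bound to the session
        with aiohttp.ClientSession() as session:
            return await bma.blockchain.block(endpoint.conn_handler(session), number=number)
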
diff --git a/src/sakia/data/connectors/node.py b/src/sakia/data/connectors/node.py
index 006cbc89..1dde86ca 100644
--- a/src/sakia/data/connectors/node.py
+++ b/src/sakia/data/connectors/node.py
@@ -61,7 +61,7 @@ class NodeConnector(QObject):
:return: A new node
:rtype: sakia.core.net.Node
"""
- peer_data = await bma.network.Peering(ConnectionHandler(address, port)).get(session)
+ peer_data = await bma.network.peering(ConnectionHandler(address, port, session))
peer = Peer.from_signed_raw("{0}{1}\n".format(peer_data['raw'],
peer_data['signature']))
@@ -92,10 +92,10 @@ class NodeConnector(QObject):
return cls(node, session)
- async def safe_request(self, endpoint, request, req_args={}, get_args={}):
+ async def safe_request(self, endpoint, request, req_args={}):
try:
- conn_handler = endpoint.conn_handler()
- data = await request(conn_handler, **req_args).get(self._session, **get_args)
+ conn_handler = endpoint.conn_handler(self._session)
+ data = await request(conn_handler, **req_args)
return data
except (ClientError, gaierror, TimeoutError, ConnectionRefusedError, DisconnectedError, ValueError) as e:
self._logger.debug("{0} : {1}".format(str(e), self.node.pubkey[:5]))
@@ -150,9 +150,8 @@ class NodeConnector(QObject):
for endpoint in [e for e in self.node.endpoints if isinstance(e, BMAEndpoint)]:
if not self._connected['block']:
try:
- conn_handler = endpoint.conn_handler()
- block_websocket = bma.ws.Block(conn_handler)
- ws_connection = block_websocket.connect(self._session)
+ conn_handler = endpoint.conn_handler(self._session)
+ ws_connection = bma.ws.block(conn_handler)
async with ws_connection as ws:
self._connected['block'] = True
self._logger.debug("Connected successfully to block ws : {0}"
@@ -160,7 +159,7 @@ class NodeConnector(QObject):
async for msg in ws:
if msg.tp == aiohttp.MsgType.text:
self._logger.debug("Received a block : {0}".format(self.node.pubkey[:5]))
- block_data = block_websocket.parse_text(msg.data)
+ block_data = bma.parse_text(msg.data)
await self.refresh_block(block_data)
elif msg.tp == aiohttp.MsgType.closed:
break
@@ -191,7 +190,7 @@ class NodeConnector(QObject):
"""
for endpoint in [e for e in self.node.endpoints if isinstance(e, BMAEndpoint)]:
try:
- block_data = await self.safe_request(endpoint, bma.blockchain.Current, self._session)
+ block_data = await self.safe_request(endpoint, bma.blockchain.current)
if not block_data:
continue
await self.refresh_block(block_data)
@@ -220,7 +219,7 @@ class NodeConnector(QObject):
conn_handler = endpoint.conn_handler()
self._logger.debug("Requesting {0}".format(conn_handler))
try:
- previous_block = await self.safe_request(endpoint, bma.blockchain.Block,
+ previous_block = await self.safe_request(endpoint, bma.blockchain.block,
req_args={'number': self.node.current_buid.number})
if not previous_block:
continue
@@ -252,7 +251,7 @@ class NodeConnector(QObject):
"""
for endpoint in [e for e in self.node.endpoints if isinstance(e, BMAEndpoint)]:
try:
- summary_data = await self.safe_request(endpoint, bma.node.Summary)
+ summary_data = await self.safe_request(endpoint, bma.node.summary)
if not summary_data:
continue
self.node.software = summary_data["duniter"]["software"]
@@ -276,9 +275,8 @@ class NodeConnector(QObject):
"""
for endpoint in [e for e in self.node.endpoints if isinstance(e, BMAEndpoint)]:
try:
- data = await self.safe_request(endpoint, bma.wot.Lookup,
- req_args={'search':self.node.pubkey},
- get_args={})
+ data = await self.safe_request(endpoint, bma.wot.lookup,
+ req_args={'search':self.node.pubkey})
if not data:
continue
self.node.state = Node.ONLINE
@@ -314,16 +312,15 @@ class NodeConnector(QObject):
for endpoint in [e for e in self.node.endpoints if isinstance(e, BMAEndpoint)]:
if not self._connected['peer']:
try:
- conn_handler = endpoint.conn_handler()
- peer_websocket = bma.ws.Peer(conn_handler)
- ws_connection = peer_websocket.connect(self._session)
+ conn_handler = endpoint.conn_handler(self._session)
+ ws_connection = bma.ws.peer(conn_handler)
async with ws_connection as ws:
self._connected['peer'] = True
self._logger.debug("Connected successfully to peer ws : {0}".format(self.node.pubkey[:5]))
async for msg in ws:
if msg.tp == aiohttp.MsgType.text:
self._logger.debug("Received a peer : {0}".format(self.node.pubkey[:5]))
- peer_data = peer_websocket.parse_text(msg.data)
+ peer_data = bma.parse_text(msg.data)
self.refresh_peer_data(peer_data)
elif msg.tp == aiohttp.MsgType.closed:
break
@@ -357,8 +354,8 @@ class NodeConnector(QObject):
"""
for endpoint in [e for e in self.node.endpoints if isinstance(e, BMAEndpoint)]:
try:
- peers_data = await self.safe_request(endpoint, bma.network.peering.Peers,
- get_args={'leaves': 'true'})
+ peers_data = await self.safe_request(endpoint, bma.network.peers,
+ req_args={'leaves': 'true'})
if not peers_data:
continue
self.node.state = Node.ONLINE
@@ -368,8 +365,8 @@ class NodeConnector(QObject):
for leaf_hash in leaves:
try:
leaf_data = await self.safe_request(endpoint,
- bma.network.peering.Peers,
- get_args={'leaf': leaf_hash})
+ bma.network.peers,
+ req_args={'leaf': leaf_hash})
if not leaf_data:
break
self.refresh_peer_data(leaf_data['leaf']['value'])
diff --git a/src/sakia/data/entities/identity.py b/src/sakia/data/entities/identity.py
index e22e337d..a3ef5a2f 100644
--- a/src/sakia/data/entities/identity.py
+++ b/src/sakia/data/entities/identity.py
@@ -1,5 +1,5 @@
import attr
-from duniterpy.documents import block_uid, BlockUID, SelfCertification
+from duniterpy.documents import block_uid, BlockUID, Identity
from duniterpy import PROTOCOL_VERSION
@@ -24,7 +24,7 @@ class Identity:
Creates a self cert document for a given identity
:param sakia.data.entities.Identity identity:
:return: the document
- :rtype: duniterpy.documents.SelfCertification
+ :rtype: duniterpy.documents.Identity
"""
- return SelfCertification(PROTOCOL_VERSION, self.currency, self.pubkey,
+ return Identity(PROTOCOL_VERSION, self.currency, self.pubkey,
self.uid, self.blockstamp, self.signature)
diff --git a/src/sakia/data/entities/transaction.py b/src/sakia/data/entities/transaction.py
index 0b59ffc8..1cdde841 100644
--- a/src/sakia/data/entities/transaction.py
+++ b/src/sakia/data/entities/transaction.py
@@ -4,6 +4,15 @@ from duniterpy.documents import block_uid
@attr.s()
class Transaction:
+
+ TO_SEND = 0
+ AWAITING = 1
+ VALIDATING = 2
+ VALIDATED = 4
+ REFUSED = 8
+ DROPPED = 16
+ LOCAL = 128
+
currency = attr.ib(convert=str, cmp=False)
sha_hash = attr.ib(convert=str)
written_on = attr.ib(convert=block_uid, cmp=False)
@@ -16,3 +25,8 @@ class Transaction:
amount_base = attr.ib(convert=int, cmp=False)
comment = attr.ib(convert=str, cmp=False)
txid = attr.ib(convert=int, cmp=False)
+ state = attr.ib(convert=int, cmp=False)
+
+ @property
+ def local(self):
+ return self.state & Transaction.LOCAL == Transaction.LOCAL
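
The new state field is a bitmask: the lifecycle value (TO_SEND through DROPPED) sits in the low bits and LOCAL (128) is OR'ed on top for transfers created by this client, which is what the local property masks back out. An illustrative sketch (not part of the patch):

    state = Transaction.AWAITING | Transaction.LOCAL            # 1 | 128 == 129
    assert state & Transaction.LOCAL == Transaction.LOCAL       # LOCAL flag is set
    assert state & ~Transaction.LOCAL == Transaction.AWAITING   # lifecycle state without the flag
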
diff --git a/src/sakia/data/processors/__init__.py b/src/sakia/data/processors/__init__.py
index 3e7dd0f5..d4559613 100644
--- a/src/sakia/data/processors/__init__.py
+++ b/src/sakia/data/processors/__init__.py
@@ -4,3 +4,4 @@ from .certifications import CertificationsProcessor
from .blockchain import BlockchainProcessor
from .connections import ConnectionsProcessor
from .sources import SourcesProcessor
+from .transactions import TransactionsProcessor
diff --git a/src/sakia/data/processors/certifications.py b/src/sakia/data/processors/certifications.py
index a1670f95..c49d9253 100644
--- a/src/sakia/data/processors/certifications.py
+++ b/src/sakia/data/processors/certifications.py
@@ -34,9 +34,9 @@ class CertificationsProcessor:
:return: the instanciated certification
:rtype: sakia.data.entities.Certification
"""
- cert = Certification(currency, cert.pubkey_from, cert.pubkey_to, cert.timestamp,
+ cert = Certification(currency, cert.pubkey_from, cert.pubkey_to, cert.timestamp.number,
0, cert.signatures[0], blockstamp)
- self._repo.insert(cert)
+ self._certifications_repo.insert(cert)
return cert
def commit_certification(self, cert):
diff --git a/src/sakia/data/processors/identities.py b/src/sakia/data/processors/identities.py
index 96efad15..f87543bc 100644
--- a/src/sakia/data/processors/identities.py
+++ b/src/sakia/data/processors/identities.py
@@ -7,7 +7,7 @@ from ..connectors import BmaConnector
from ..processors import NodesProcessor
from duniterpy.api import bma, errors
from duniterpy.key import SigningKey
-from duniterpy.documents import SelfCertification, BlockUID, block_uid
+from duniterpy.documents import Identity, BlockUID, block_uid
from aiohttp.errors import ClientError
from sakia.errors import NoPeerAvailable
@@ -97,23 +97,26 @@ class IdentitiesProcessor:
"""
return self._identities_repo.get_written(currency=currency, pubkey=pubkey)
- def get_identity(self, currency, pubkey, uid):
+ def get_identity(self, currency, pubkey, uid=""):
"""
Return the identity corresponding to a given pubkey, uid and currency
:param str currency:
:param str pubkey:
- :param str uid:
+        :param str uid: optionally, specify a uid to look up
:rtype: sakia.data.entities.Identity
"""
written = self.get_written(currency=currency, pubkey=pubkey)
if not written:
- identities = self._identities_repo.get_all(currency=currency, pubkey=pubkey, uid=uid)
- recent = identities[0]
- for i in identities:
- if i.blockstamp > recent.blockstamp:
- recent = i
- return recent
+ identities = self._identities_repo.get_all(currency=currency, pubkey=pubkey)
+ if identities:
+ recent = identities[0]
+ for i in identities:
+ if i.blockstamp > recent.blockstamp and i.uid == uid:
+ recent = i
+ return recent
+ else:
+ return written[0]
def commit_identity(self, identity):
"""
@@ -166,7 +169,7 @@ class IdentitiesProcessor:
blockchain = self._blockchain_repo.get_one(currency=currency)
block_uid = blockchain.current_buid
timestamp = blockchain.median_time
- selfcert = SelfCertification(2,
+ selfcert = Identity(2,
currency,
identity.pubkey,
identity.uid,
diff --git a/src/sakia/data/processors/transactions.py b/src/sakia/data/processors/transactions.py
new file mode 100644
index 00000000..277bba41
--- /dev/null
+++ b/src/sakia/data/processors/transactions.py
@@ -0,0 +1,114 @@
+import attr
+import logging
+from ..entities import Transaction
+from .nodes import NodesProcessor
+from . import tx_lifecycle
+from ..connectors import BmaConnector
+from duniterpy.api import bma, errors
+from duniterpy.documents import Block, BMAEndpoint
+import asyncio
+import time
+
+
+@attr.s
+class TransactionsProcessor:
+    _repo = attr.ib()  # :type sakia.data.repositories.TransactionsRepo
+ _bma_connector = attr.ib() # :type sakia.data.connectors.bma.BmaConnector
+    _logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
+
+ @classmethod
+ def instanciate(cls, app):
+ """
+        Instantiate a transactions processor
+ :param sakia.app.Application app: the app
+ """
+ return cls(app.db.transactions_repo,
+ BmaConnector(NodesProcessor(app.db.nodes_repo)))
+
+ def _try_transition(self, tx, transition_key, inputs):
+ """
+ Try the transition defined by the given transition_key
+ with inputs
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param tuple transition_key: The transition key in the table states
+ :param tuple inputs: The inputs
+ :return: True if the transition was applied
+ :rtype: bool
+ """
+ if len(inputs) == len(transition_key[1]):
+ for i, input in enumerate(inputs):
+ if type(input) is not transition_key[1][i]:
+ return False
+ for transition in tx_lifecycle.states[transition_key]:
+            if transition[0](tx, *inputs):
+ if tx.sha_hash:
+ self._logger.debug("{0} : {1} --> {2}".format(tx.sha_hash[:5], tx.state,
+                                                                       transition[2]))
+ else:
+ self._logger.debug("Unsent transfer : {0} --> {1}".format(tx.state,
+                                                                               transition[2]))
+
+ # If the transition changes data, apply changes
+ if transition[1]:
+ transition[1](tx, *inputs)
+                    tx.state = transition[2] | (Transaction.LOCAL if tx.local else 0)
+ return True
+ return False
+
+ def find_by_hash(self, sha_hash):
+ return self._repo.find_one(sha_hash=sha_hash)
+
+ def awaiting(self):
+ return self._repo.find_all(state=Transaction.AWAITING) + \
+ self._repo.find_all(state=Transaction.AWAITING | Transaction.LOCAL)
+
+ def run_state_transitions(self, tx, *inputs):
+ """
+ Try all current state transitions with inputs
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param tuple inputs: The inputs passed to the transitions
+ :return: True if the transaction changed state
+ :rtype: bool
+ """
+        transition_keys = [k for k in tx_lifecycle.states.keys() if k[0] == tx.state & ~Transaction.LOCAL]
+ for key in transition_keys:
+ if self._try_transition(tx, key, inputs):
+ return True
+ return False
+
+ def cancel(self, tx):
+ """
+ Cancel a local transaction
+ :param sakia.data.entities.Transaction tx: the transaction
+ """
+        self.run_state_transitions(tx)
+
+ async def send(self, tx, txdoc, community):
+ """
+ Send a transaction and update the transfer state to AWAITING if accepted.
+ If the transaction was refused (return code != 200), state becomes REFUSED
+ The txdoc is saved as the transfer txdoc.
+
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param txdoc: A transaction duniterpy object
+ :param community: The community target of the transaction
+ """
+ tx.sha_hash = txdoc.sha_hash
+ responses = await community.bma_access.broadcast(bma.tx.Process,
+ post_args={'transaction': txdoc.signed_raw()})
+ blockUID = community.network.current_blockUID
+ block = await community.bma_access.future_request(bma.blockchain.Block,
+ req_args={'number': blockUID.number})
+ signed_raw = "{0}{1}\n".format(block['raw'], block['signature'])
+ block_doc = Block.from_signed_raw(signed_raw)
+ result = (False, "")
+ for r in responses:
+ if r.status == 200:
+ result = (True, (await r.json()))
+ elif not result[0]:
+ result = (False, (await r.text()))
+ else:
+ await r.text()
+        self.run_state_transitions(tx, [r.status for r in responses], block_doc)
+        self.run_state_transitions(tx, [r.status for r in responses])
+ return result
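
TransactionsProcessor drives the transition table defined in tx_lifecycle below: run_state_transitions filters the table keys on the current state (ignoring the LOCAL bit) and on the types of the inputs, then applies the first transition whose test passes. A hypothetical usage sketch, assuming app, tx (a Transaction in the TO_SEND state) and block_doc (a duniterpy Block) already exist:

    processor = TransactionsProcessor.instanciate(app)
    ret_codes = [200]                                           # HTTP codes returned by the broadcast
    processor.run_state_transitions(tx, ret_codes, block_doc)   # TO_SEND  -> AWAITING
    processor.run_state_transitions(tx, False, block_doc)       # AWAITING -> VALIDATING once tx is in block_doc
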
diff --git a/src/sakia/data/processors/tx_lifecycle.py b/src/sakia/data/processors/tx_lifecycle.py
new file mode 100644
index 00000000..49b10afe
--- /dev/null
+++ b/src/sakia/data/processors/tx_lifecycle.py
@@ -0,0 +1,197 @@
+import time
+from sakia.data.entities import Transaction
+from duniterpy.documents import Block
+
+def _not_found_in_blockchain(tx, rollback, block, mediantime_target, mediantime_blocks):
+ """
+ Check if the transaction could not be found in the blockchain
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param bool rollback: True if we are in a rollback procedure
+ :param duniterpy.documents.Block block: The block to look for the tx
+    :param int mediantime_target: The target median time between blocks, from the community parameters
+    :param int mediantime_blocks: The number of blocks used to compute the median time
+ :return: True if the transaction could not be found in a given time
+ :rtype: bool
+ """
+ if not rollback:
+ for block_tx in block.transactions:
+ if block_tx.sha_hash == tx.sha_hash:
+ return False
+ if block.time > tx.timestamp + mediantime_target * mediantime_blocks:
+ return True
+ return False
+
+
+def _found_in_block(tx, rollback, block):
+ """
+    Check if the transaction can be found in the given block
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param bool rollback: True if we are in a rollback procedure
+ :param duniterpy.documents.Block block: The block to check for the transaction
+ :return: True if the transaction was found
+ :rtype: bool
+ """
+ if not rollback:
+ for block_tx in block.transactions:
+ if block_tx.sha_hash == tx.sha_hash:
+ return True
+ return False
+
+
+def _broadcast_success(tx, ret_codes, block):
+ """
+ Check if the retcode is 200 after a POST
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param list ret_codes: The POST return codes of the broadcast
+ :param duniterpy.documents.Block block: The current block used for transition.
+ :return: True if the post was successful
+ :rtype: bool
+ """
+ return 200 in ret_codes
+
+
+def _broadcast_failure(tx, ret_codes):
+ """
+ Check if no retcode is 200 after a POST
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param list ret_codes: The POST return codes of the broadcast
+    :return: True if the post failed
+ :rtype: bool
+ """
+ return 200 not in ret_codes
+
+
+def _reached_enough_confirmation(tx, rollback, current_block, fork_window):
+    """
+    Check if the transfer reached enough confirmations in the blockchain
+    :param sakia.data.entities.Transaction tx: the transaction
+    :param bool rollback: True if we are in a rollback procedure
+    :param duniterpy.documents.Block current_block: The current block of the main blockchain
+    :param int fork_window: The number of confirmations needed on the network
+    :return: True if the transfer reached enough confirmations
+ :rtype: bool
+ """
+ return not rollback and tx.blockstamp.number + fork_window <= current_block.number
+
+
+def _rollback_and_removed(tx, rollback, block):
+ """
+ Check if the transfer is not in the block anymore
+
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param bool rollback: True if we are in a rollback procedure
+ :param duniterpy.documents.Block block: The block to check for the transaction
+ :return: True if the transfer is not found in the block
+ """
+ if rollback:
+ if not block or block.blockUID != tx.blockstamp:
+ return True
+ else:
+ return tx.sha_hash not in [t.sha_hash for t in block.transactions]
+ return False
+
+
+def _rollback_in_fork_window(tx, rollback, current_block, fork_window):
+ """
+    Check if, during a rollback, the transfer is still inside the fork window
+
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param bool rollback: True if we are in a rollback procedure
+    :param duniterpy.documents.Block current_block: The current block of the main blockchain
+    :return: True if the transfer blockstamp is still within the fork window
+ """
+ if rollback:
+ return tx.blockstamp.number + fork_window > current_block.number
+ return False
+
+
+def _rollback_and_local(tx, rollback, block):
+ """
+    Check if a local transfer is no longer in its block during a rollback
+
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param bool rollback: True if we are in a rollback procedure
+ :param duniterpy.documents.Block block: The block to check for the transaction
+    :return: True if the local transfer is not found in the block anymore
+ """
+ if rollback and tx.local and block.blockUID == tx.blockstamp:
+ return tx.sha_hash not in [t.sha_hash for t in block.transactions]
+ return False
+
+
+def _is_locally_created(tx):
+ """
+    Check if the transaction was locally created
+
+ :param sakia.data.entities.Transaction tx: the transaction
+ :return: True if the transaction was locally created
+ """
+ return tx.local
+
+
+def _wait(tx, current_block):
+ """
+    Set the transfer as AWAITING confirmation.
+
+ :param sakia.data.entities.Transaction tx: the transaction
+ :param duniterpy.documents.Block current_block: Current block of the main blockchain
+ """
+ tx.blockstamp = current_block.blockUID
+ tx.timestamp = int(time.time())
+
+
+def _be_validating(tx, block):
+ """
+    Action applied when the transfer is found in a block.
+    Sets the transfer blockstamp and timestamp from that block.
+
+    :param sakia.data.entities.Transaction tx: the transaction
+    :param duniterpy.documents.Block block: The block where the transfer was found
+ """
+ tx.blockstamp = block.blockUID
+ tx.timestamp = block.mediantime
+
+
+def _drop(tx):
+ """
+ Cancel the transfer locally.
+    The transfer state becomes Transaction.DROPPED.
+ :param sakia.data.entities.Transaction tx: the transaction
+ """
+ tx.blockstamp = None
+
+
+# Dict containing the states of a transfer:
+# keys are tuples containing (current_state, expected input types)
+# values are tuples of (transition_test, transition_action, new_state) candidates, tried in order
+states = {
+ (Transaction.TO_SEND, (list, Block)):
+ (
+ (_broadcast_success, lambda tx, l, b: _wait(tx, b), Transaction.AWAITING),
+ (lambda tx, l, b: _broadcast_failure(tx, l), None, Transaction.REFUSED),
+ ),
+ (Transaction.TO_SEND, ()):
+ ((_is_locally_created, _drop, Transaction.DROPPED),),
+
+ (Transaction.AWAITING, (bool, Block)):
+ ((_found_in_block, lambda tx, r, b: _be_validating(tx, b), Transaction.VALIDATING),),
+ (Transaction.AWAITING, (bool, Block, int, int)):
+ ((_not_found_in_blockchain, None, Transaction.REFUSED),),
+
+ (Transaction.VALIDATING, (bool, Block, int)):
+        ((_reached_enough_confirmation, None, Transaction.VALIDATED),),
+ (Transaction.VALIDATING, (bool, Block)):
+ ((_rollback_and_removed, lambda tx, r, b: _drop(tx), Transaction.DROPPED),),
+
+ (Transaction.VALIDATED, (bool, Block, int)):
+ ((_rollback_in_fork_window, lambda tx, r, b, i: _be_validating(tx, b), Transaction.VALIDATING),),
+
+ (Transaction.VALIDATED, (bool, Block)):
+ (
+ (_rollback_and_removed, lambda tx, r, b: _drop(tx), Transaction.DROPPED),
+ (_rollback_and_local, lambda tx, r, b: _wait(tx, b), Transaction.AWAITING),
+ ),
+
+ (Transaction.REFUSED, ()):
+ ((_is_locally_created, _drop, Transaction.DROPPED),)
+ }
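
Each key of states is (current_state, tuple of expected input types) and each value is a tuple of (test, action or None, new_state) candidates tried in order. A rough sketch of how a single entry is consumed, assuming tx, ret_codes and block are supplied by the caller (illustrative only):

    for test, action, new_state in states[(Transaction.TO_SEND, (list, Block))]:
        if test(tx, ret_codes, block):       # e.g. _broadcast_success
            if action:                       # optional side effect, e.g. _wait
                action(tx, ret_codes, block)
            tx.state = new_state | (Transaction.LOCAL if tx.local else 0)
            break
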
diff --git a/src/sakia/data/repositories/meta.sql b/src/sakia/data/repositories/meta.sql
index e62a22c5..95513f11 100644
--- a/src/sakia/data/repositories/meta.sql
+++ b/src/sakia/data/repositories/meta.sql
@@ -80,6 +80,7 @@ CREATE TABLE IF NOT EXISTS transactions(
amountbase INT,
comment VARCHAR(255),
txid INT,
+ state INT,
PRIMARY KEY (sha_hash)
);
diff --git a/src/sakia/data/repositories/transactions.py b/src/sakia/data/repositories/transactions.py
index 39e42db0..041aec29 100644
--- a/src/sakia/data/repositories/transactions.py
+++ b/src/sakia/data/repositories/transactions.py
@@ -39,7 +39,8 @@ class TransactionsRepo:
amount = ?,
amountbase = ?,
comment = ?,
- txid = ?
+ txid = ?,
+ state = ?
WHERE
sha_hash=?""",
updated_fields + where_fields)
diff --git a/src/sakia/gui/navigation/graphs/base/model.py b/src/sakia/gui/navigation/graphs/base/model.py
index aa9bb087..8e3abc81 100644
--- a/src/sakia/gui/navigation/graphs/base/model.py
+++ b/src/sakia/gui/navigation/graphs/base/model.py
@@ -1,5 +1,4 @@
from sakia.gui.component.model import ComponentModel
-from sakia.core.registry import BlockchainState
class BaseGraphModel(ComponentModel):
@@ -20,12 +19,3 @@ class BaseGraphModel(ComponentModel):
:rtype: sakia.core.registry.Identity
"""
return await self.app.identities_registry.future_find(pubkey, self.community)
-
- def get_identity_from_data(self, metadata, pubkey):
- return self.app.identities_registry.from_handled_data(
- metadata['text'],
- pubkey,
- None,
- BlockchainState.VALIDATED,
- self.community
- )
diff --git a/src/sakia/gui/navigation/identities/controller.py b/src/sakia/gui/navigation/identities/controller.py
index 370fa23e..6eef79a5 100644
--- a/src/sakia/gui/navigation/identities/controller.py
+++ b/src/sakia/gui/navigation/identities/controller.py
@@ -3,9 +3,7 @@ import logging
from PyQt5.QtGui import QCursor
from sakia.errors import NoPeerAvailable
-from duniterpy.api import bma, errors
-from duniterpy.documents.block import BlockUID
-from sakia.core.registry import Identity, BlockchainState
+from duniterpy.api import errors
from sakia.decorators import once_at_a_time, asyncify
from sakia.gui.component.controller import ComponentController
from sakia.gui.widgets.context_menu import ContextMenu
diff --git a/src/sakia/gui/navigation/model.py b/src/sakia/gui/navigation/model.py
index 883b5201..38ec512f 100644
--- a/src/sakia/gui/navigation/model.py
+++ b/src/sakia/gui/navigation/model.py
@@ -43,13 +43,14 @@ class NavigationModel(ComponentModel):
'connection': connection,
},
'children': [
- # {
- # 'node': {
- # 'title': self.tr('Transfers'),
- # 'icon': ':/icons/tx_icon',
- # 'component': "TxHistory",
- # }
- # },
+ # {
+ # 'node': {
+ # 'title': self.tr('Transfers'),
+ # 'icon': ':/icons/tx_icon',
+ # 'component': "TxHistory",
+ # 'transactions_service': self.app.transactions_services[connection.currency],
+ # }
+ # },
{
'node': {
'title': self.tr('Network'),
diff --git a/src/sakia/gui/navigation/txhistory/table_model.py b/src/sakia/gui/navigation/txhistory/table_model.py
index 001a3e98..4fcad39c 100644
--- a/src/sakia/gui/navigation/txhistory/table_model.py
+++ b/src/sakia/gui/navigation/txhistory/table_model.py
@@ -13,8 +13,7 @@ from PyQt5.QtCore import QAbstractTableModel, Qt, QVariant, QSortFilterProxyMode
QDateTime, QLocale
from PyQt5.QtGui import QFont, QColor, QIcon
from sakia.errors import NoPeerAvailable
-
-from sakia.core.transfer import Transfer, TransferState
+from sakia.data.entities import Transaction
from sakia.decorators import asyncify, once_at_a_time, cancel_once_task
@@ -121,20 +120,20 @@ class TxFilterProxyModel(QSortFilterProxyModel):
if role == Qt.FontRole:
font = QFont()
- if state_data == TransferState.AWAITING or state_data == TransferState.VALIDATING:
+ if state_data == Transaction.AWAITING or state_data == Transaction.VALIDATING:
font.setItalic(True)
- elif state_data == TransferState.REFUSED:
+ elif state_data == Transaction.REFUSED:
font.setItalic(True)
- elif state_data == TransferState.TO_SEND:
+ elif state_data == Transaction.TO_SEND:
font.setBold(True)
else:
font.setItalic(False)
return font
if role == Qt.ForegroundRole:
- if state_data == TransferState.REFUSED:
+ if state_data == Transaction.REFUSED:
return QColor(Qt.red)
- elif state_data == TransferState.TO_SEND:
+ elif state_data == Transaction.TO_SEND:
return QColor(Qt.blue)
if role == Qt.TextAlignmentRole:
@@ -148,17 +147,17 @@ class TxFilterProxyModel(QSortFilterProxyModel):
if source_index.column() == self.sourceModel().columns_types.index('date'):
return QDateTime.fromTime_t(source_data).toString(Qt.SystemLocaleLongDate)
- if state_data == TransferState.VALIDATING or state_data == TransferState.AWAITING:
+ if state_data == Transaction.VALIDATING or state_data == Transaction.AWAITING:
block_col = model.columns_types.index('block_number')
block_index = model.index(source_index.row(), block_col)
block_data = model.data(block_index, Qt.DisplayRole)
current_confirmations = 0
- if state_data == TransferState.VALIDATING:
+ if state_data == Transaction.VALIDATING:
current_blockUID_number = self.community.network.current_blockUID.number
if current_blockUID_number:
current_confirmations = current_blockUID_number - block_data
- elif state_data == TransferState.AWAITING:
+ elif state_data == Transaction.AWAITING:
current_confirmations = 0
max_confirmations = self.sourceModel().max_confirmations()
@@ -317,7 +316,7 @@ class HistoryTableModel(QAbstractTableModel):
for transfer in transfers:
coro = None
count += 1
- if type(transfer) is Transfer:
+ if type(transfer) is Transaction:
if transfer.metadata['issuer'] == self.account.pubkey:
coro = asyncio.ensure_future(self.data_sent(transfer))
else:
diff --git a/src/sakia/gui/widgets/context_menu.py b/src/sakia/gui/widgets/context_menu.py
index 8cf30569..6da8e57b 100644
--- a/src/sakia/gui/widgets/context_menu.py
+++ b/src/sakia/gui/widgets/context_menu.py
@@ -5,7 +5,6 @@ from PyQt5.QtWidgets import QMenu, QAction, QApplication, QMessageBox
from duniterpy.documents import Block
from sakia.data.entities import Identity
-from sakia.core.transfer import Transfer, TransferState
from sakia.decorators import asyncify
from sakia.gui.contact import ConfigureContactDialog
from sakia.gui.dialogs.certification.controller import CertificationController
diff --git a/src/sakia/options.py b/src/sakia/options.py
index 35ed0cb1..b85c7f72 100644
--- a/src/sakia/options.py
+++ b/src/sakia/options.py
@@ -62,7 +62,7 @@ class SakiaOptions:
logging.getLogger('quamash').setLevel(logging.INFO)
file_handler = RotatingFileHandler(path.join(self.config_path, 'sakia.log'), 'a', 1000000, 10)
file_handler.setFormatter(formatter)
- self._logger.addHandler(file_handler)
stream_handler = StreamHandler()
stream_handler.setFormatter(formatter)
- self._logger.addHandler(stream_handler)
+ self._logger.handlers = [file_handler, stream_handler]
+ self._logger.propagate = False
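
Assigning the handlers list, instead of calling addHandler twice, and disabling propagation means that repeated logging setups cannot stack duplicate handlers and that records do not reach the root logger a second time. A minimal standalone sketch of the same idiom (illustrative):

    import logging

    logger = logging.getLogger('sakia')
    logger.handlers = [logging.StreamHandler()]   # replaces any previously attached handlers
    logger.propagate = False                      # keep records out of the root logger
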
diff --git a/src/sakia/services/__init__.py b/src/sakia/services/__init__.py
index d98cc6ea..62b82310 100644
--- a/src/sakia/services/__init__.py
+++ b/src/sakia/services/__init__.py
@@ -3,3 +3,4 @@ from .identities import IdentitiesService
from .blockchain import BlockchainService
from .documents import DocumentsService
from .sources import SourcesServices
+from .transactions import TransactionsService
diff --git a/src/sakia/services/blockchain.py b/src/sakia/services/blockchain.py
index dec471ab..28c01041 100644
--- a/src/sakia/services/blockchain.py
+++ b/src/sakia/services/blockchain.py
@@ -9,7 +9,7 @@ class BlockchainService(QObject):
Blockchain service is managing new blocks received
to update data locally
"""
- def __init__(self, currency, blockchain_processor, bma_connector, identities_service):
+ def __init__(self, currency, blockchain_processor, bma_connector, identities_service, transactions_service):
"""
Constructor the identities service
@@ -17,12 +17,14 @@ class BlockchainService(QObject):
:param sakia.data.processors.BlockchainProcessor blockchain_processor: the blockchain processor for given currency
:param sakia.data.connectors.BmaConnector bma_connector: The connector to BMA API
:param sakia.services.IdentitiesService identities_service: The identities service
+ :param sakia.services.TransactionsService transactions_service: The transactions service
"""
super().__init__()
self._blockchain_processor = blockchain_processor
self._bma_connector = bma_connector
self.currency = currency
self._identities_service = identities_service
+ self._transactions_service = transactions_service
self._logger = logging.getLogger('sakia')
async def handle_blockchain_progress(self):
@@ -32,7 +34,8 @@ class BlockchainService(QObject):
with_identities = await self._blockchain_processor.new_blocks_with_identities(self.currency)
with_money = await self._blockchain_processor.new_blocks_with_money(self.currency)
blocks = await self._blockchain_processor.blocks(with_identities + with_money, self.currency)
- self._identities_service.handle_new_blocks(blocks)
+ await self._identities_service.handle_new_blocks(blocks)
+ await self._transactions_service.handle_new_blocks(blocks)
def parameters(self):
return self._blockchain_processor.parameters(self.currency)
diff --git a/src/sakia/services/documents.py b/src/sakia/services/documents.py
index 2c1533a1..1cdbdad8 100644
--- a/src/sakia/services/documents.py
+++ b/src/sakia/services/documents.py
@@ -6,7 +6,7 @@ from collections import Counter
from duniterpy.key import SigningKey
from duniterpy import PROTOCOL_VERSION
-from duniterpy.documents import BlockUID, Block, SelfCertification, Certification, Membership, Revocation
+from duniterpy.documents import BlockUID, Block, Identity, Certification, Membership, Revocation
from duniterpy.api import bma, errors
from sakia.data.entities import Node
from aiohttp.errors import ClientError, DisconnectedError
@@ -41,7 +41,7 @@ class DocumentsService:
block_uid = BlockUID.empty()
else:
raise
- selfcert = SelfCertification(PROTOCOL_VERSION,
+ selfcert = Identity(PROTOCOL_VERSION,
currency,
self.pubkey,
self.name,
diff --git a/src/sakia/services/identities.py b/src/sakia/services/identities.py
index fde264f0..e6d173b5 100644
--- a/src/sakia/services/identities.py
+++ b/src/sakia/services/identities.py
@@ -104,7 +104,7 @@ class IdentitiesService(QObject):
for certified_data in data['certifications']:
cert = Certification(self.currency, data["pubkey"],
certified_data["pubkey"], certified_data["sigDate"])
- cert.block_number = certified_data["cert_time"]["number"]
+ cert.block = certified_data["cert_time"]["number"]
cert.timestamp = certified_data["cert_time"]["medianTime"]
if certified_data['written']:
cert.written_on = BlockUID(certified_data['written']['number'],
@@ -144,9 +144,10 @@ class IdentitiesService(QObject):
"""
need_refresh = []
for ms in block.joiners + block.actives:
- written = self._identities_processor.get_written(self.currency, ms.issuer)
+ written_list = self._identities_processor.get_written(self.currency, ms.issuer)
# we update every written identities known locally
- if written:
+ if written_list:
+ written = written_list[0]
written.membership_written_on = block.blockUID
written.membership_type = "IN"
written.membership_buid = ms.membership_ts
@@ -157,9 +158,10 @@ class IdentitiesService(QObject):
need_refresh.append(written)
for ms in block.leavers:
- written = self._identities_processor.get_written(self.currency, ms.issuer)
+ written_list = self._identities_processor.get_written(self.currency, ms.issuer)
# we update every written identities known locally
- if written:
+ if written_list:
+ written = written_list[0]
written.membership_written_on = block.blockUID
written.membership_type = "OUT"
written.membership_buid = ms.membership_ts
@@ -182,15 +184,15 @@ class IdentitiesService(QObject):
"""
need_refresh = set([])
for cert in block.certifications:
- written = self._identities_processor.get_written(self.currency, cert.pubkey_to)
+ written_list = self._identities_processor.get_written(self.currency, cert.pubkey_to)
# if we have locally a written identity matching the certification
- if written or self._identities_processor.get_written(self.currency, cert.pubkey_from):
+ if written_list or self._identities_processor.get_written(self.currency, cert.pubkey_from):
self._certs_processor.create_certification(self.currency, cert, block.blockUID)
# we update every written identities known locally
- if written:
+ if written_list:
# A certification can change the requirements state
# of an identity
- need_refresh.add(written)
+                    need_refresh.update(written_list)
return need_refresh
async def refresh_requirements(self, identity):
@@ -246,10 +248,13 @@ class IdentitiesService(QObject):
:rtype: dict
"""
- requirements_data = await self._bma_connector.get(currency, bma.wot.Requirements, req_args={'search': pubkey})
- for req in requirements_data['identities']:
- if req['uid'] == uid:
- return req
+ try:
+ requirements_data = await self._bma_connector.get(currency, bma.wot.requirements, req_args={'search': pubkey})
+ for req in requirements_data['identities']:
+ if req['uid'] == uid:
+ return req
+ except NoPeerAvailable as e:
+ self._logger.debug(str(e))
async def lookup(self, text):
"""
diff --git a/src/sakia/services/network.py b/src/sakia/services/network.py
index 9ceb864f..9725b3c3 100644
--- a/src/sakia/services/network.py
+++ b/src/sakia/services/network.py
@@ -257,10 +257,12 @@ class NetworkService(QObject):
# If new latest block is lower than the previously found one
# or if the previously found block is different locally
# than in the main chain, we declare a rollback
- if current_buid <= self._block_found \
+ if current_buid < self._block_found \
or node_connector.node.previous_buid != self._block_found:
+ self._logger.debug("Start rollback")
self._block_found = current_buid
#TODO: self._blockchain_service.rollback()
else:
+ self._logger.debug("Start refresh")
self._block_found = current_buid
asyncio.ensure_future(self._blockchain_service.handle_blockchain_progress())
diff --git a/src/sakia/services/transactions.py b/src/sakia/services/transactions.py
new file mode 100644
index 00000000..76c76e96
--- /dev/null
+++ b/src/sakia/services/transactions.py
@@ -0,0 +1,125 @@
+from PyQt5.QtCore import QObject
+from duniterpy.api import bma
+from duniterpy.grammars.output import Condition
+from duniterpy.documents.transaction import reduce_base
+from duniterpy.documents import SimpleTransaction
+from duniterpy.api import errors
+from sakia.data.entities import Transaction
+import math
+import logging
+import hashlib
+
+
+class TransactionsService(QObject):
+ """
+ Transaction service is managing sources received
+ to update data locally
+ """
+ def __init__(self, currency, transactions_processor, identities_processor, bma_connector):
+ """
+        Constructor of the transactions service
+
+ :param str currency: The currency name of the community
+ :param sakia.data.processors.IdentitiesProcessor identities_processor: the identities processor for given currency
+        :param sakia.data.processors.TransactionsProcessor transactions_processor: the transactions processor for given currency
+ :param sakia.data.connectors.BmaConnector bma_connector: The connector to BMA API
+ """
+ super().__init__()
+ self._transactions_processor = transactions_processor
+ self._identities_processor = identities_processor
+ self._bma_connector = bma_connector
+ self.currency = currency
+ self._logger = logging.getLogger('sakia')
+
+ async def _parse_transaction(self, tx_doc, blockUID, mediantime, txid):
+ """
+ Parse a transaction
+        :param duniterpy.documents.Transaction tx_doc: The transaction document
+            parsed from the block
+ :param duniterpy.documents.BlockUID blockUID: The block id where we found the tx
+ :param int mediantime: Median time on the network
+ :param int txid: The latest txid
+ :return: the found transaction
+ """
+ receivers = [o.conditions.left.pubkey for o in tx_doc.outputs
+ if o.conditions.left.pubkey != tx_doc.issuers[0]]
+
+ if len(receivers) == 0:
+ receivers = [tx_doc.issuers[0]]
+
+ in_issuers = len([i for i in tx_doc.issuers
+ if i == self.wallet.pubkey]) > 0
+ in_outputs = len([o for o in tx_doc.outputs
+ if o.conditions.left.pubkey == self.wallet.pubkey]) > 0
+
+ tx_hash = hashlib.sha256(tx_doc.signed_raw().encode("ascii")).hexdigest().upper()
+ if in_issuers or in_outputs:
+ # If the wallet pubkey is in the issuers we sent this transaction
+ if in_issuers:
+ outputs = [o for o in tx_doc.outputs
+ if o.conditions.left.pubkey != self.wallet.pubkey]
+ amount = 0
+ for o in outputs:
+ amount += o.amount * math.pow(10, o.base)
+ # If we are not in the issuers,
+ # maybe we are in the recipients of this transaction
+ else:
+ outputs = [o for o in tx_doc.outputs
+ if o.conditions.left.pubkey == self.wallet.pubkey]
+ amount = 0
+ for o in outputs:
+ amount += o.amount * math.pow(10, o.base)
+ amount, amount_base = reduce_base(amount, 0)
+
+ transaction = Transaction(currency=self.currency,
+ sha_hash=tx_hash,
+                                  written_on=blockUID,
+ blockstamp=tx_doc.blockstamp,
+ timestamp=mediantime,
+ signature=tx_doc.signatures[0],
+ issuer=tx_doc.issuers[0],
+ receiver=receivers[0],
+ amount=amount,
+ amount_base=amount_base,
+ comment=tx_doc.comment,
+                                  txid=txid, state=Transaction.VALIDATING)
+ return transaction
+ return None
+
+ async def _parse_block(self, block_doc, txid):
+ """
+ Parse a block
+ :param duniterpy.documents.Block block_doc: The block
+ :param int txid: Latest tx id
+        :return: The list of new transfers found in the block
+ """
+ transfers = []
+ for tx in [t for t in self._transactions_processor.awaiting()]:
+            self._transactions_processor.run_state_transitions(tx, False, block_doc)
+
+ new_transactions = [t for t in block_doc.transactions
+ if not self._transactions_processor.find_by_hash(t.sha_hash)
+ and SimpleTransaction.is_simple(t)]
+
+ for (i, tx_doc) in enumerate(new_transactions):
+ tx = await self._parse_transaction(tx_doc, block_doc.blockUID,
+ block_doc.mediantime, txid+i)
+ if tx:
+ #logging.debug("Transfer amount : {0}".format(transfer.metadata['amount']))
+ transfers.append(tx)
+ else:
+ pass
+ #logging.debug("None transfer")
+ return transfers
+
+ async def handle_new_blocks(self, blocks):
+ """
+ Refresh last transactions
+
+ :param list[duniterpy.documents.Block] blocks: The blocks containing data to parse
+ """
+ self._logger.debug("Refresh transactions")
+ txid = 0
+ for block in blocks:
+ transfers = await self._parse_block(block, txid)
+ txid += len(transfers)
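
TransactionsService is meant to be fed by BlockchainService.handle_blockchain_progress (see the blockchain.py hunk above). A hypothetical wiring sketch, to be run from within a coroutine and assuming the currency string, processors, connector and parsed blocks are already available:

    txs_processor = TransactionsProcessor.instanciate(app)
    txs_service = TransactionsService(currency, txs_processor, identities_processor, bma_connector)
    await txs_service.handle_new_blocks(blocks)   # advance awaiting transfers and parse new ones
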
--
GitLab