Compare revisions

Showing 720 additions and 317 deletions
......@@ -15,7 +15,7 @@ class CertificationsProcessor:
_certifications_repo = attr.ib() # :type sakia.data.repositories.CertificationsRepo
_identities_repo = attr.ib() # :type sakia.data.repositories.IdentitiesRepo
_bma_connector = attr.ib() # :type sakia.data.connectors.bma.BmaConnector
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......@@ -23,8 +23,11 @@ class CertificationsProcessor:
Instanciate a blockchain processor
:param sakia.app.Application app: the app
"""
return cls(app.db.certifications_repo, app.db.identities_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters))
return cls(
app.db.certifications_repo,
app.db.identities_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters),
)
def drop_expired(self, identity, current_ts, sig_validity, sig_window):
"""
......@@ -32,11 +35,13 @@ class CertificationsProcessor:
:param sakia.data.Identity identity:
:rtype: List[sakia.data.entities.Certification]
"""
expired = self._certifications_repo.expired(currency=identity.currency,
expired = self._certifications_repo.expired(
currency=identity.currency,
pubkey=identity.pubkey,
current_ts=current_ts,
sig_validity=sig_validity,
sig_window=sig_window)
sig_window=sig_window,
)
for cert in expired:
self._certifications_repo.drop(cert)
return expired
......@@ -69,7 +74,9 @@ class CertificationsProcessor:
:return: the remaining time
:rtype: int
"""
certified = self._certifications_repo.get_latest_sent(currency=currency, pubkey=pubkey)
certified = self._certifications_repo.get_latest_sent(
currency=currency, pubkey=pubkey
)
if certified and blockchain_time - certified.timestamp < parameters.sig_period:
return parameters.sig_period - (blockchain_time - certified.timestamp)
return 0
......@@ -83,13 +90,15 @@ class CertificationsProcessor:
:return: the instanciated certification
:rtype: sakia.data.entities.Certification
"""
cert = Certification(currency=currency,
cert = Certification(
currency=currency,
certifier=cert.pubkey_from,
certified=cert.pubkey_to,
block=cert.timestamp.number,
timestamp=timestamp,
signature=cert.signatures[0],
written_on=blockstamp.number if blockstamp else -1)
written_on=blockstamp.number if blockstamp else -1,
)
try:
self._certifications_repo.insert(cert)
except sqlite3.IntegrityError:
......@@ -114,12 +123,16 @@ class CertificationsProcessor:
:param List[str] connections_pubkeys: pubkeys of existing connections
:return:
"""
certifiers = self._certifications_repo.get_all(currency=connection.currency, certifier=connection.pubkey)
certifiers = self._certifications_repo.get_all(
currency=connection.currency, certifier=connection.pubkey
)
for c in certifiers:
if c.certified not in connections_pubkeys:
self._certifications_repo.drop(c)
certified = self._certifications_repo.get_all(currency=connection.currency, certified=connection.pubkey)
certified = self._certifications_repo.get_all(
currency=connection.currency, certified=connection.pubkey
)
for c in certified:
if c.certifier not in connections_pubkeys:
self._certifications_repo.drop(c)
......@@ -12,7 +12,7 @@ class ConnectionsProcessor:
"""
_connections_repo = attr.ib() # :type
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......@@ -43,7 +43,9 @@ class ConnectionsProcessor:
def connections_with_uids(self, currency=""):
if currency:
return [r for r in self._connections_repo.get_all(currency=currency) if r.uid]
return [
r for r in self._connections_repo.get_all(currency=currency) if r.uid
]
else:
return [r for r in self._connections_repo.get_all() if r.uid]
......
......@@ -10,8 +10,9 @@ class ContactsProcessor:
:param sakia.data.repositories.ContactsRepo _contacts_repo: the repository of the contacts
"""
_contacts_repo = attr.ib()
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......
import attr
import logging
from ..entities import Dividend
from ..entities import Dividend, Source
from .nodes import NodesProcessor
from ..connectors import BmaConnector
from duniterpy.api import bma
......@@ -16,10 +16,11 @@ class DividendsProcessor:
:param sakia.data.repositories.BlockchainsRepo _blockchain_repo: the repository of the sources
:param sakia.data.connectors.bma.BmaConnector _bma_connector: the bma connector
"""
_repo = attr.ib()
_blockchain_repo = attr.ib()
_bma_connector = attr.ib()
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......@@ -27,8 +28,11 @@ class DividendsProcessor:
Instanciate a blockchain processor
:param sakia.app.Application app: the app
"""
return cls(app.db.dividends_repo, app.db.blockchains_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters))
return cls(
app.db.dividends_repo,
app.db.blockchains_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters),
)
def commit(self, dividend):
try:
......@@ -38,7 +42,9 @@ class DividendsProcessor:
self._logger.debug("Dividend already in db")
return False
async def initialize_dividends(self, connection, transactions, log_stream, progress):
async def initialize_dividends(
self, connection, transactions, log_stream, progress
):
"""
Request transactions from the network to initialize data for a given pubkey
:param sakia.data.entities.Connection connection:
......@@ -47,21 +53,24 @@ class DividendsProcessor:
:param function progress: progress callback
"""
blockchain = self._blockchain_repo.get_one(currency=connection.currency)
history_data = await self._bma_connector.get(connection.currency, bma.ud.history,
req_args={'pubkey': connection.pubkey})
log_stream("Found {0} available dividends".format(len(history_data["history"]["history"])))
avg_blocks_per_month = int(30 * 24 * 3600 / blockchain.parameters.avg_gen_time)
start = blockchain.current_buid.number - avg_blocks_per_month
history_data = await self._bma_connector.get(
connection.currency, bma.ud.history, req_args={"pubkey": connection.pubkey}
)
block_numbers = []
dividends = []
nb_ud_tx = len(history_data["history"]["history"]) + len(transactions)
for ud_data in history_data["history"]["history"]:
dividend = Dividend(currency=connection.currency,
if ud_data["block_number"] > start:
dividend = Dividend(
currency=connection.currency,
pubkey=connection.pubkey,
block_number=ud_data["block_number"],
timestamp=ud_data["time"],
amount=ud_data["amount"],
base=ud_data["base"])
base=ud_data["base"],
)
log_stream("Dividend of block {0}".format(dividend.block_number))
progress(1/nb_ud_tx)
block_numbers.append(dividend.block_number)
try:
dividends.append(dividend)
......@@ -72,17 +81,26 @@ class DividendsProcessor:
for tx in transactions:
txdoc = Transaction.from_signed_raw(tx.raw)
for input in txdoc.inputs:
if input.source == "D" and input.origin_id == connection.pubkey and input.index not in block_numbers:
if (
input.source == Source.TYPE_DIVIDEND
and input.origin_id == connection.pubkey
and input.index not in block_numbers
and input.index > start
):
diff_blocks = blockchain.current_buid.number - input.index
ud_mediantime = blockchain.median_time - diff_blocks*blockchain.parameters.avg_gen_time
dividend = Dividend(currency=connection.currency,
ud_mediantime = (
blockchain.median_time
- diff_blocks * blockchain.parameters.avg_gen_time
)
dividend = Dividend(
currency=connection.currency,
pubkey=connection.pubkey,
block_number=input.index,
timestamp=ud_mediantime,
amount=input.amount,
base=input.base)
base=input.base,
)
log_stream("Dividend of block {0}".format(dividend.block_number))
progress(1/nb_ud_tx)
try:
dividends.append(dividend)
self._repo.insert(dividend)
......@@ -99,6 +117,8 @@ class DividendsProcessor:
:param sakia.data.entities.Connection connection:
:return:
"""
dividends = self._repo.get_all(currency=connection.currency, pubkey=connection.pubkey)
dividends = self._repo.get_all(
currency=connection.currency, pubkey=connection.pubkey
)
for d in dividends:
self._repo.drop(d)
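
The initialization now only walks roughly the last month of blocks: avg_blocks_per_month is derived from the chain's avg_gen_time, and dividends or inputs older than start are skipped. A small standalone illustration of that arithmetic (the 300-second avg_gen_time is just an example value, not a network constant):

def dividend_window(current_block: int, avg_gen_time: int = 300) -> int:
    """Return the first block number whose dividends are still fetched."""
    avg_blocks_per_month = int(30 * 24 * 3600 / avg_gen_time)  # 8640 blocks at 300s
    return current_block - avg_blocks_per_month


start = dividend_window(current_block=150_000)
print(start)  # 141360 -> dividends written before this block are ignored
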
......@@ -23,11 +23,12 @@ class IdentitiesProcessor:
:param _bma_connector: sakia.data.connectors.bma.BmaConnector
:param _logger:
"""
_identities_repo = attr.ib() # :type sakia.data.repositories.IdentitiesRepo
_certifications_repo = attr.ib() # :type sakia.data.repositories.IdentitiesRepo
_blockchain_repo = attr.ib() # :type sakia.data.repositories.BlockchainRepo
_bma_connector = attr.ib() # :type sakia.data.connectors.bma.BmaConnector
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......@@ -35,8 +36,12 @@ class IdentitiesProcessor:
Instanciate a blockchain processor
:param sakia.app.Application app: the app
"""
return cls(app.db.identities_repo, app.db.certifications_repo, app.db.blockchains_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters))
return cls(
app.db.identities_repo,
app.db.certifications_repo,
app.db.blockchains_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters),
)
async def find_from_pubkey(self, currency, pubkey):
"""
......@@ -53,15 +58,19 @@ class IdentitiesProcessor:
found_identity = idty
if not found_identity.uid:
try:
data = await self._bma_connector.get(currency, bma.wot.lookup, req_args={'search': pubkey})
for result in data['results']:
data = await self._bma_connector.get(
currency, bma.wot.lookup, req_args={"search": pubkey}
)
for result in data["results"]:
if result["pubkey"] == pubkey:
uids = result['uids']
uids = result["uids"]
for uid_data in uids:
identity = Identity(currency, pubkey)
identity.uid = uid_data['uid']
identity.blockstamp = block_uid(uid_data['meta']['timestamp'])
identity.signature = uid_data['self']
identity.uid = uid_data["uid"]
identity.blockstamp = block_uid(
uid_data["meta"]["timestamp"]
)
identity.signature = uid_data["self"]
if identity.blockstamp >= found_identity.blockstamp:
found_identity = identity
except (errors.DuniterError, asyncio.TimeoutError, ClientError) as e:
......@@ -82,17 +91,34 @@ class IdentitiesProcessor:
tries = 0
while tries < 3:
try:
data = await self._bma_connector.get(currency, bma.wot.lookup, req_args={'search': text})
for result in data['results']:
pubkey = result['pubkey']
for uid_data in result['uids']:
if not uid_data['revoked']:
identity = Identity(currency=currency,
data = await self._bma_connector.get(
currency, bma.wot.lookup, req_args={"search": text}
)
for result in data["results"]:
pubkey = result["pubkey"]
for uid_data in result["uids"]:
if not uid_data["revoked"]:
identity = Identity(
currency=currency,
pubkey=pubkey,
uid=uid_data['uid'],
blockstamp=uid_data['meta']['timestamp'],
signature=uid_data['self'])
uid=uid_data["uid"],
blockstamp=uid_data["meta"]["timestamp"],
signature=uid_data["self"],
)
if identity not in identities:
# Search same identity with empty blockstamp (incomplete)
same_with_empty_blockstamp = (
self._identities_repo.get_one(
currency=identity.currency,
uid=identity.uid,
pubkey=identity.pubkey,
blockstamp=BlockUID.empty(),
)
)
# Same identity with empty blockstamp (incomplete) should not appear as a duplicate
# Beware that identities in block 0 have empty blockstamps!
if same_with_empty_blockstamp in identities:
identities.remove(same_with_empty_blockstamp)
identities.append(identity)
break
except (errors.DuniterError, asyncio.TimeoutError, ClientError) as e:
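
Identities registered in block 0, or stored before their blockstamp is known, carry an empty blockstamp; the lookup above swaps such an incomplete entry for the complete one instead of keeping both. A minimal standalone sketch of that de-duplication rule, using a namedtuple as a simplified stand-in for sakia.data.entities.Identity:

from collections import namedtuple

# Simplified stand-in for sakia.data.entities.Identity, for illustration only
Identity = namedtuple("Identity", "currency pubkey uid blockstamp")
EMPTY_BLOCKSTAMP = ""  # placeholder for BlockUID.empty()


def merge_identity(identities, new):
    """Replace an incomplete duplicate (empty blockstamp) with the complete identity."""
    if new in identities:
        return identities
    incomplete = Identity(new.currency, new.pubkey, new.uid, EMPTY_BLOCKSTAMP)
    if incomplete in identities:
        identities.remove(incomplete)
    identities.append(new)
    return identities


known = [Identity("g1", "ABC", "alice", EMPTY_BLOCKSTAMP)]
merge_identity(known, Identity("g1", "ABC", "alice", "42-DEADBEEF"))
print(known)  # only the complete identity remains
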
......@@ -141,39 +167,61 @@ class IdentitiesProcessor:
log_stream("Requesting membership data")
progress(1 / 3)
try:
memberships_data = await self._bma_connector.get(identity.currency, bma.blockchain.memberships,
req_args={'search': identity.pubkey})
if block_uid(memberships_data['sigDate']) == identity.blockstamp \
and memberships_data['uid'] == identity.uid:
# Return a 1002 - MEMBER_NOT_FOUND if identity has no memberships or not written in blockchain
memberships_data = await self._bma_connector.get(
identity.currency,
bma.blockchain.memberships,
req_args={"search": identity.pubkey},
)
if (
block_uid(memberships_data["sigDate"]) == identity.blockstamp
and memberships_data["uid"] == identity.uid
):
identity.written = True
for ms in memberships_data['memberships']:
if ms['written'] and ms['written'] > identity.membership_written_on:
identity.membership_buid = BlockUID(ms['blockNumber'], ms['blockHash'])
identity.membership_type = ms['membership']
identity.membership_written_on = ms['written']
for ms in memberships_data["memberships"]:
if ms["written"] and ms["written"] > identity.membership_written_on:
identity.membership_buid = BlockUID(
ms["blockNumber"], ms["blockHash"]
)
identity.membership_type = ms["membership"]
identity.membership_written_on = ms["written"]
progress(1 / 3)
if identity.membership_buid:
log_stream("Requesting membership timestamp")
ms_block_data = await self._bma_connector.get(identity.currency, bma.blockchain.block,
req_args={'number': identity.membership_buid.number})
ms_block_data = await self._bma_connector.get(
identity.currency,
bma.blockchain.block,
req_args={"number": identity.membership_buid.number},
)
if ms_block_data:
identity.membership_timestamp = ms_block_data['medianTime']
identity.membership_timestamp = ms_block_data["medianTime"]
log_stream("Requesting identity requirements status")
progress(1 / 3)
requirements_data = await self._bma_connector.get(identity.currency, bma.wot.requirements,
req_args={'search': identity.pubkey})
identity_data = next((data for data in requirements_data["identities"]
if data["pubkey"] == identity.pubkey))
identity.member = identity_data['membershipExpiresIn'] > 0
identity.written = identity_data['wasMember']
requirements_data = await self._bma_connector.get(
identity.currency,
bma.wot.requirements,
req_args={"search": identity.pubkey},
)
identity_data = next(
(
data
for data in requirements_data["identities"]
if data["pubkey"] == identity.pubkey
)
)
identity.member = identity_data["membershipExpiresIn"] > 0
identity.written = identity_data["wasMember"]
identity.sentry = identity_data["isSentry"]
identity.outdistanced = identity_data['outdistanced']
identity.outdistanced = identity_data["outdistanced"]
self.insert_or_update_identity(identity)
except errors.DuniterError as e:
if e.ucode == errors.NO_MEMBER_MATCHING_PUB_OR_UID:
if (
e.ucode == errors.NO_MEMBER_MATCHING_PUB_OR_UID
or e.message == "MEMBER_NOT_FOUND"
):
identity.written = False
self.insert_or_update_identity(identity)
else:
......@@ -185,16 +233,21 @@ class IdentitiesProcessor:
:return: (True if found, local value, network value)
"""
identity = Identity(connection.currency, connection.pubkey, connection.uid)
found_identity = Identity(connection.currency, connection.pubkey, connection.uid)
found_identity = Identity(
connection.currency, connection.pubkey, connection.uid
)
def _parse_uid_lookup(data):
timestamp = BlockUID.empty()
found_uid = ""
for result in data['results']:
for result in data["results"]:
if result["pubkey"] == identity.pubkey:
uids = result['uids']
uids = result["uids"]
for uid_data in uids:
if BlockUID.from_str(uid_data["meta"]["timestamp"]) >= timestamp:
if (
BlockUID.from_str(uid_data["meta"]["timestamp"])
>= timestamp
):
timestamp = BlockUID.from_str(uid_data["meta"]["timestamp"])
found_identity.blockstamp = timestamp
found_uid = uid_data["uid"]
......@@ -205,8 +258,8 @@ class IdentitiesProcessor:
timestamp = BlockUID.empty()
found_uid = ""
found_result = ["", ""]
for result in data['results']:
uids = result['uids']
for result in data["results"]:
uids = result["uids"]
for uid_data in uids:
if BlockUID.from_str(uid_data["meta"]["timestamp"]) >= timestamp:
timestamp = BlockUID.from_str(uid_data["meta"]["timestamp"])
......@@ -214,23 +267,32 @@ class IdentitiesProcessor:
found_uid = uid_data["uid"]
found_identity.signature = uid_data["self"]
if found_uid == identity.uid:
found_result = result['pubkey'], found_uid
found_result = result["pubkey"], found_uid
if found_result[1] == identity.uid:
return identity.pubkey == found_result[0], identity.pubkey, found_result[0]
return (
identity.pubkey == found_result[0],
identity.pubkey,
found_result[0],
)
else:
return False, identity.pubkey, None
async def execute_requests(parser, search):
nonlocal registered
try:
data = await self._bma_connector.get(connection.currency, bma.wot.lookup,
req_args={'search': search})
data = await self._bma_connector.get(
connection.currency, bma.wot.lookup, req_args={"search": search}
)
if data:
registered = parser(data)
except errors.DuniterError as e:
self._logger.debug(e.ucode)
if e.ucode not in (errors.NO_MEMBER_MATCHING_PUB_OR_UID, errors.NO_MATCHING_IDENTITY):
if e.ucode not in (
errors.NO_MEMBER_MATCHING_PUB_OR_UID,
errors.NO_MATCHING_IDENTITY,
):
raise
# cell 0 contains True if the user is already registered
# cell 1 contains the uid/pubkey selected locally
# cell 2 contains the uid/pubkey found on the network
......@@ -255,9 +317,11 @@ class IdentitiesProcessor:
"""
identities = self._identities_repo.get_all(currency=connection.currency)
for idty in identities:
others_certs = self._certifications_repo.get_all(currency=connection.currency,
certifier=idty.pubkey)
others_certs += self._certifications_repo.get_all(currency=connection.currency,
certified=idty.pubkey)
others_certs = self._certifications_repo.get_all(
currency=connection.currency, certifier=idty.pubkey
)
others_certs += self._certifications_repo.get_all(
currency=connection.currency, certified=idty.pubkey
)
if not others_certs:
self._identities_repo.drop(idty)
import attr
import sqlite3
from sakia.constants import ROOT_SERVERS
from ..entities import Node
from duniterpy.documents import BlockUID, endpoint
from duniterpy.documents import BlockUID
import logging
import time
@attr.s
class NodesProcessor:
_repo = attr.ib() # :type sakia.data.repositories.NodesRepo
@classmethod
def instanciate(cls, app):
return cls(app.db.nodes_repo)
def initialize_root_nodes(self, currency):
def initialize_root_nodes(self, currency, root_servers):
if not self.nodes(currency):
for pubkey in ROOT_SERVERS[currency]["nodes"]:
node = Node(currency=currency,
for pubkey in root_servers[currency]["nodes"]:
node = Node(
currency=currency,
pubkey=pubkey,
endpoints=ROOT_SERVERS[currency]["nodes"][pubkey],
endpoints=root_servers[currency]["nodes"][pubkey],
peer_blockstamp=BlockUID.empty(),
state=Node.ONLINE)
state=0,
)
self._repo.insert(node)
def current_buid(self, currency):
"""
Get current buid
Get the latest block considered valid
It is the most frequent last block among all known nodes
:param str currency:
"""
current_buid = self._repo.current_buid(currency=currency)
......@@ -37,20 +40,27 @@ class NodesProcessor:
Get nodes which are in the ONLINE state.
"""
current_buid = self._repo.current_buid(currency=currency)
return self._repo.get_all(currency=currency, state=Node.ONLINE, current_buid=current_buid)
return self._repo.get_synced_nodes(currency, current_buid)
def offline_synced_nodes(self, currency):
"""
Get synced nodes which are currently offline.
"""
current_buid = self._repo.current_buid(currency=currency)
return self._repo.get_offline_synced_nodes(currency, current_buid)
def synced_members_nodes(self, currency):
"""
Get nodes which are in the ONLINE state.
"""
current_buid = self._repo.current_buid(currency=currency)
return self._repo.get_all(currency=currency, state=Node.ONLINE, member=True, current_buid=current_buid)
return self._repo.get_synced_members_nodes(currency, current_buid)
def online_nodes(self, currency):
"""
Get nodes which are in the ONLINE state.
"""
return self._repo.get_all(currency=currency, state=Node.ONLINE)
return self._repo.get_online_nodes(currency)
def delete_node(self, node):
self._repo.drop(node)
......@@ -91,6 +101,8 @@ class NodesProcessor:
def unknown_node(self, currency, pubkey):
"""
Search for pubkey in the repository.
:param str currency: Name of currency
:param str pubkey: the pubkey to lookup
"""
other_node = self._repo.get_one(currency=currency, pubkey=pubkey)
......@@ -108,17 +120,6 @@ class NodesProcessor:
"""
return self._repo.get_all(currency=currency, root=True)
def current_buid(self, currency):
"""
Get the latest block considered valid
It is the most frequent last block of every known nodes
"""
blocks_uids = [n.current_buid for n in self.synced_nodes(currency)]
if len(blocks_uids) > 0:
return blocks_uids[0]
else:
return BlockUID.empty()
def quality(self, currency):
"""
Get a ratio of the synced nodes vs the rest
......@@ -131,6 +132,27 @@ class NodesProcessor:
ratio_synced = synced / total
return ratio_synced
def update_ws2p(self, currency, head):
"""
Update a node from a WS2P HEAD message
:param str currency: the currency of the node
:param head: the WS2P HEAD received from the network
:return:
"""
node = self._repo.get_one(pubkey=head.pubkey, currency=currency)
if node:
if node.current_buid < head.blockstamp:
logging.debug("Update node: {0}".format(head.pubkey[:5]))
node.previous_buid = node.current_buid
node.current_buid = head.blockstamp
# capture software and version
node.software = head.software
node.version = head.software_version
self._repo.update(node)
return node, True
return node, False
def update_peer(self, currency, peer):
"""
Update the peer of a node
......@@ -147,8 +169,14 @@ class NodesProcessor:
return node, True
return node, False
def drop_all(self, currency):
nodes = self._repo.get_all()
for n in nodes:
if n.pubkey not in ROOT_SERVERS[currency].keys():
self._repo.drop(n)
\ No newline at end of file
def handle_success(self, node):
if not node.online():
node.last_state_change = time.time()
node.state = max(0, node.state - 1)
self.update_node(node)
def handle_failure(self, node, weight=1):
if node.state + weight > Node.FAILURE_THRESHOLD and node.online():
node.last_state_change = time.time()
node.state = min(5, node.state + weight)
self.update_node(node)
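
The numeric node state replaces the old ONLINE flag with a small failure counter: every failed request pushes it up (capped at 5), every success pulls it back towards zero, and last_state_change is stamped when the node crosses the threshold in either direction. A self-contained sketch of that behaviour, assuming FAILURE_THRESHOLD = 3 and an online() test on the counter purely for illustration:

import time

FAILURE_THRESHOLD = 3  # assumed value; the real one lives on sakia's Node entity


class Node:
    """Tiny stand-in keeping only the failure-counter part of sakia's Node."""

    def __init__(self):
        self.state = 0
        self.last_state_change = 0.0

    def online(self):
        # Assumption: a node counts as online while the counter is at or below the threshold
        return self.state <= FAILURE_THRESHOLD


def handle_failure(node, weight=1):
    if node.state + weight > FAILURE_THRESHOLD and node.online():
        node.last_state_change = time.time()  # it just went offline
    node.state = min(5, node.state + weight)


def handle_success(node):
    if not node.online():
        node.last_state_change = time.time()  # it just came back online
    node.state = max(0, node.state - 1)


n = Node()
for _ in range(5):
    handle_failure(n)
print(n.state, n.online())  # 5 False: too many failures in a row
handle_success(n)
print(n.state, n.online())  # 4 False: needs a few successes to recover
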
......@@ -13,9 +13,10 @@ class SourcesProcessor:
:param sakia.data.repositories.SourcesRepo _repo: the repository of the sources
:param sakia.data.connectors.bma.BmaConnector _bma_connector: the bma connector
"""
_repo = attr.ib()
_bma_connector = attr.ib()
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......@@ -23,8 +24,10 @@ class SourcesProcessor:
Instanciate a blockchain processor
:param sakia.app.Application app: the app
"""
return cls(app.db.sources_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters))
return cls(
app.db.sources_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters),
)
def commit(self, source):
try:
......@@ -32,6 +35,9 @@ class SourcesProcessor:
except sqlite3.IntegrityError:
self._logger.debug("Source already known: {0}".format(source.identifier))
def get_one(self, **search):
return self._repo.get_one(**search)
def amount(self, currency, pubkey):
"""
Get the amount value of the sources for a given pubkey
......@@ -50,15 +56,27 @@ class SourcesProcessor:
"""
return self._repo.get_all(currency=currency, pubkey=pubkey)
def consume(self, sources):
def consume(self, sources, tx_sha_hash):
"""
Consume sources in db by setting the used_by column to tx hash
:param List(Source) sources: Source instances list
:param str tx_sha_hash: Hash of tx
:param currency:
:param sources:
:return:
"""
for s in sources:
self._repo.drop(s)
self._repo.consume(s, tx_sha_hash)
def restore_all(self, tx_sha_hash):
"""
Restore sources in db by setting the used_by column to null
:param str tx_sha_hash: Hash of tx
:return:
"""
self._repo.restore_all(tx_sha_hash)
def insert(self, source):
try:
......
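
consume no longer drops sources: it stamps them with the hash of the transaction that spends them, so restore_all can hand them back if that transaction ends up refused. A minimal sqlite3 sketch of the repository side, with the sources table reduced to the columns relevant here (the real schema lives in meta.sql plus migration 007):

import sqlite3

# Reduced sources table for illustration; the real schema is defined in meta.sql
conn = sqlite3.connect(":memory:")
conn.execute(
    "CREATE TABLE sources("
    "identifier VARCHAR(100), noffset INTEGER, amount INTEGER, "
    "used_by VARCHAR(255) DEFAULT NULL)"
)
conn.execute("INSERT INTO sources VALUES ('SRC1', 0, 100, NULL)")


def consume(identifier, noffset, tx_sha_hash):
    """Mark a source as spent by a pending transaction."""
    conn.execute(
        "UPDATE sources SET used_by=? WHERE identifier=? AND noffset=?",
        (tx_sha_hash, identifier, noffset),
    )


def restore_all(tx_sha_hash):
    """Give back every source locked by a refused transaction."""
    conn.execute("UPDATE sources SET used_by=NULL WHERE used_by=?", (tx_sha_hash,))


consume("SRC1", 0, "ABCD1234")
print(conn.execute("SELECT used_by FROM sources").fetchone())  # ('ABCD1234',)
restore_all("ABCD1234")
print(conn.execute("SELECT used_by FROM sources").fetchone())  # (None,)
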
......@@ -13,10 +13,11 @@ from duniterpy.documents import Transaction as TransactionDoc
@attr.s
class TransactionsProcessor:
_repo = attr.ib() # :type sakia.data.repositories.SourcesRepo
_repo = attr.ib() # :type sakia.data.repositories.TransactionsRepo
_blockchain_repo = attr.ib()
_bma_connector = attr.ib() # :type sakia.data.connectors.bma.BmaConnector
_table_states = attr.ib(default=attr.Factory(dict))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def instanciate(cls, app):
......@@ -24,8 +25,11 @@ class TransactionsProcessor:
Instanciate a blockchain processor
:param sakia.app.Application app: the app
"""
return cls(app.db.transactions_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters))
return cls(
app.db.transactions_repo,
app.db.blockchains_repo,
BmaConnector(NodesProcessor(app.db.nodes_repo), app.parameters),
)
def next_txid(self, currency, block_number):
"""
......@@ -63,11 +67,17 @@ class TransactionsProcessor:
for transition in tx_lifecycle.states[transition_key]:
if transition[0](tx, *inputs):
if tx.sha_hash:
self._logger.debug("{0} : {1} --> {2}".format(tx.sha_hash[:5], tx.state,
transition[2]))
self._logger.debug(
"{0}: {1} --> {2}".format(
tx.sha_hash[:5], tx.state, transition[2]
)
)
else:
self._logger.debug("Unsent transfer : {0} --> {1}".format(tx.state,
transition[2]))
self._logger.debug(
"Unsent transfer: {0} --> {1}".format(
tx.state, transition[2]
)
)
# If the transition changes data, apply changes
if transition[1]:
......@@ -82,9 +92,12 @@ class TransactionsProcessor:
except sqlite3.IntegrityError:
self._repo.update(tx)
def find_by_hash(self, pubkey, sha_hash):
def find_by_pubkey_and_hash(self, pubkey: str, sha_hash: str) -> Transaction:
return self._repo.get_one(pubkey=pubkey, sha_hash=sha_hash)
def find_one_by_hash(self, sha_hash: str) -> Transaction:
return self._repo.get_one(sha_hash=sha_hash)
def awaiting(self, currency):
return self._repo.get_all(currency=currency, state=Transaction.AWAITING)
......@@ -120,9 +133,13 @@ class TransactionsProcessor:
:param currency: The community target of the transaction
"""
self._repo.insert(tx)
responses = await self._bma_connector.broadcast(currency, bma.tx.process, req_args={'transaction': tx.raw})
responses = await self._bma_connector.broadcast(
currency, bma.tx.process, req_args={"transaction_signed_raw": tx.raw}
)
result = await parse_bma_responses(responses)
self.run_state_transitions(tx, [r.status for r in responses if not isinstance(r, BaseException)])
self.run_state_transitions(
tx, [r.status for r in responses if not isinstance(r, BaseException)]
)
return result, tx
async def initialize_transactions(self, connection, log_stream, progress):
......@@ -132,19 +149,35 @@ class TransactionsProcessor:
:param function log_stream:
:param function progress: progress callback
"""
history_data = await self._bma_connector.get(connection.currency, bma.tx.history,
req_args={'pubkey': connection.pubkey})
blockchain = self._blockchain_repo.get_one(currency=connection.currency)
avg_blocks_per_month = int(30 * 24 * 3600 / blockchain.parameters.avg_gen_time)
start = blockchain.current_buid.number - avg_blocks_per_month
end = blockchain.current_buid.number
history_data = await self._bma_connector.get(
connection.currency,
bma.tx.blocks,
req_args={"pubkey": connection.pubkey, "start": start, "end": end},
)
txid = 0
nb_tx = len(history_data["history"]["sent"]) + len(history_data["history"]["received"])
nb_tx = len(history_data["history"]["sent"]) + len(
history_data["history"]["received"]
)
log_stream("Found {0} transactions".format(nb_tx))
transactions = []
for sent_data in history_data["history"]["sent"] + history_data["history"]["received"]:
for sent_data in (
history_data["history"]["sent"] + history_data["history"]["received"]
):
sent = TransactionDoc.from_bma_history(history_data["currency"], sent_data)
log_stream("{0}/{1} transactions".format(txid, nb_tx))
progress(1 / nb_tx)
try:
tx = parse_transaction_doc(sent, connection.pubkey, sent_data["block_number"],
sent_data["time"], txid)
tx = parse_transaction_doc(
sent,
connection.pubkey,
sent_data["block_number"],
sent_data["time"],
txid,
)
if tx:
transactions.append(tx)
self._repo.insert(tx)
......@@ -163,6 +196,8 @@ class TransactionsProcessor:
:param List[str] connections_pubkeys: pubkeys of existing connections
:return:
"""
transactions = self._repo.get_all(currency=connection.currency, pubkey=connection.pubkey)
transactions = self._repo.get_all(
currency=connection.currency, pubkey=connection.pubkey
)
for tx in transactions:
self._repo.drop(tx)
......@@ -3,16 +3,16 @@ from sakia.data.entities import Transaction
from duniterpy.documents import Block
def _found_in_block(tx, block):
def _found_in_block(tx, sha_hash, block_number):
"""
Check if the transaction can be found in the blockchain
:param sakia.data.entities.Transaction tx: the transaction
:param duniterpy.documents.Block block: The block to check for the transaction
:param str sha_hash: The transaction sha_hash found in history
:param int block_number: The block_number where the tx was found
:return: True if the transaction was found
:rtype: bool
"""
for block_tx in block.transactions:
if block_tx.sha_hash == tx.sha_hash:
if sha_hash == tx.sha_hash:
return True
......@@ -49,15 +49,16 @@ def _is_locally_created(tx):
return tx.local
def _be_validated(tx, block):
def _be_validated(tx, sha_hash, block_number):
"""
Action when the transfer is found in a block
:param sakia.data.entities.Transaction tx: the transaction
:param bool rollback: True if we are in a rollback procedure
:param duniterpy.documents.Block block: The block checked
:param str sha_hash: The tx sha_hash found in history
:param int block_number: The block_number where the tx was found
"""
tx.written_block = block.number
tx.written_block = block_number
def _drop(tx):
......@@ -73,17 +74,17 @@ def _drop(tx):
# keys are a tuple containing (current_state, transition_parameters)
# values are tuples containing (transition_test, transition_success, new_state)
states = {
(Transaction.TO_SEND, (list,)):
(
(Transaction.TO_SEND, (list,)): (
(_broadcast_success, None, Transaction.AWAITING),
(lambda tx, l: _broadcast_failure(tx, l), None, Transaction.REFUSED),
),
(Transaction.TO_SEND, ()):
((_is_locally_created, _drop, Transaction.DROPPED),),
(Transaction.AWAITING, (Block,)):
((_found_in_block, _be_validated, Transaction.VALIDATED),),
(Transaction.REFUSED, ()):
((_is_locally_created, _drop, Transaction.DROPPED),)
(Transaction.TO_SEND, ()): ((_is_locally_created, _drop, Transaction.DROPPED),),
(
Transaction.AWAITING,
(
str,
int,
),
): ((_found_in_block, _be_validated, Transaction.VALIDATED),),
(Transaction.REFUSED, ()): ((_is_locally_created, _drop, Transaction.DROPPED),),
}
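
The AWAITING entry now keys its transition on a (str, int) pair, the sha_hash and block number reported by the transaction history, instead of a full Block document. A condensed sketch of how such a table is driven, keeping only the validation transition and using a plain class as a stand-in for the Transaction entity:

class Tx:
    AWAITING, VALIDATED = "awaiting", "validated"

    def __init__(self, sha_hash):
        self.sha_hash = sha_hash
        self.state = Tx.AWAITING
        self.written_block = -1


def _found_in_block(tx, sha_hash, block_number):
    return sha_hash == tx.sha_hash


def _be_validated(tx, sha_hash, block_number):
    tx.written_block = block_number


# (current_state, input types) -> ((test, action, next_state), ...)
STATES = {
    (Tx.AWAITING, (str, int)): ((_found_in_block, _be_validated, Tx.VALIDATED),),
}


def run_state_transitions(tx, *inputs):
    key = (tx.state, tuple(type(i) for i in inputs))
    for test, action, next_state in STATES.get(key, ()):
        if test(tx, *inputs):
            if action:
                action(tx, *inputs)
            tx.state = next_state
            return True
    return False


tx = Tx(sha_hash="F00D")
run_state_transitions(tx, "F00D", 128)  # history reports the tx in block 128
print(tx.state, tx.written_block)       # validated 128
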
BEGIN TRANSACTION;
ALTER TABLE sources ADD COLUMN conditions VARCHAR(255);
COMMIT;
BEGIN TRANSACTION;
ALTER TABLE sources ADD COLUMN used_by VARCHAR(255) default null;
COMMIT;
......@@ -7,8 +7,8 @@ from ..entities import Blockchain, BlockchainParameters
@attr.s(frozen=True)
class BlockchainsRepo:
"""The repository for Blockchain entities.
"""
"""The repository for Blockchain entities."""
_conn = attr.ib() # :type sqlite3.Connection
_primary_keys = (attr.fields(Blockchain).currency,)
......@@ -17,20 +17,30 @@ class BlockchainsRepo:
Commit a blockchain to the database
:param sakia.data.entities.Blockchain blockchain: the blockchain to commit
"""
blockchain_tuple = attr.astuple(blockchain.parameters) \
+ attr.astuple(blockchain, filter=attr.filters.exclude(attr.fields(Blockchain).parameters))
values = ",".join(['?'] * len(blockchain_tuple))
self._conn.execute("INSERT INTO blockchains VALUES ({0})".format(values), blockchain_tuple)
blockchain_tuple = attr.astuple(blockchain.parameters) + attr.astuple(
blockchain, filter=attr.filters.exclude(attr.fields(Blockchain).parameters)
)
values = ",".join(["?"] * len(blockchain_tuple))
self._conn.execute(
"INSERT INTO blockchains VALUES ({0})".format(values), blockchain_tuple
)
def update(self, blockchain):
"""
Update an existing blockchain in the database
:param sakia.data.entities.Blockchain blockchain: the blockchain to update
"""
updated_fields = attr.astuple(blockchain, filter=attr.filters.exclude(
attr.fields(Blockchain).parameters, *BlockchainsRepo._primary_keys))
where_fields = attr.astuple(blockchain, filter=attr.filters.include(*BlockchainsRepo._primary_keys))
self._conn.execute("""UPDATE blockchains SET
updated_fields = attr.astuple(
blockchain,
filter=attr.filters.exclude(
attr.fields(Blockchain).parameters, *BlockchainsRepo._primary_keys
),
)
where_fields = attr.astuple(
blockchain, filter=attr.filters.include(*BlockchainsRepo._primary_keys)
)
self._conn.execute(
"""UPDATE blockchains SET
current_buid=?,
current_members_count=?,
current_mass=?,
......@@ -47,7 +57,8 @@ class BlockchainsRepo:
previous_ud_time=?
WHERE
currency=?""",
updated_fields + where_fields)
updated_fields + where_fields,
)
def get_one(self, **search):
"""
......@@ -62,7 +73,9 @@ class BlockchainsRepo:
values.append(v)
if filters:
request = "SELECT * FROM blockchains WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM blockchains WHERE {filters}".format(
filters=" AND ".join(filters)
)
else:
request = "SELECT * FROM blockchains"
......@@ -71,7 +84,9 @@ class BlockchainsRepo:
if data:
return Blockchain(BlockchainParameters(*data[:20]), *data[20:])
def get_all(self, offset=0, limit=1000, sort_by="currency", sort_order="ASC", **search) -> List[Blockchain]:
def get_all(
self, offset=0, limit=1000, sort_by="currency", sort_order="ASC", **search
) -> List[Blockchain]:
"""
Get all existing blockchain in the database corresponding to the search
:param int offset: offset in results to paginate
......@@ -95,22 +110,22 @@ class BlockchainsRepo:
offset=offset,
limit=limit,
sort_by=sort_by,
sort_order=sort_order
sort_order=sort_order,
)
c = self._conn.execute(request, tuple(values))
else:
request = """SELECT * FROM blockchains
ORDER BY {sort_by} {sort_order}
LIMIT {limit} OFFSET {offset}""".format(
offset=offset,
limit=limit,
sort_by=sort_by,
sort_order=sort_order
offset=offset, limit=limit, sort_by=sort_by, sort_order=sort_order
)
c = self._conn.execute(request)
datas = c.fetchall()
if datas:
return [Blockchain(BlockchainParameters(*data[:19]), *data[20:]) for data in datas]
return [
Blockchain(BlockchainParameters(*data[:19]), *data[20:])
for data in datas
]
return []
def drop(self, blockchain):
......@@ -118,5 +133,7 @@ class BlockchainsRepo:
Drop an existing blockchain from the database
:param sakia.data.entities.Blockchain blockchain: the blockchain to update
"""
where_fields = attr.astuple(blockchain, filter=attr.filters.include(*BlockchainsRepo._primary_keys))
where_fields = attr.astuple(
blockchain, filter=attr.filters.include(*BlockchainsRepo._primary_keys)
)
self._conn.execute("DELETE FROM blockchains WHERE currency=?", where_fields)
......@@ -5,11 +5,15 @@ from ..entities import Certification
@attr.s(frozen=True)
class CertificationsRepo:
"""The repository for Communities entities.
"""
"""The repository for Communities entities."""
_conn = attr.ib() # :type sqlite3.Connection
_primary_keys = (attr.fields(Certification).currency, attr.fields(Certification).certified,
attr.fields(Certification).certifier, attr.fields(Certification).block,)
_primary_keys = (
attr.fields(Certification).currency,
attr.fields(Certification).certified,
attr.fields(Certification).certifier,
attr.fields(Certification).block,
)
def insert(self, certification):
"""
......@@ -17,17 +21,27 @@ class CertificationsRepo:
:param sakia.data.entities.Certification certification: the certification to commit
"""
certification_tuple = attr.astuple(certification)
values = ",".join(['?'] * len(certification_tuple))
self._conn.execute("INSERT INTO certifications VALUES ({0})".format(values), certification_tuple)
values = ",".join(["?"] * len(certification_tuple))
self._conn.execute(
"INSERT INTO certifications VALUES ({0})".format(values),
certification_tuple,
)
def update(self, certification):
"""
Update an existing certification in the database
:param sakia.data.entities.Certification certification: the certification to update
"""
updated_fields = attr.astuple(certification, filter=attr.filters.exclude(*CertificationsRepo._primary_keys))
where_fields = attr.astuple(certification, filter=attr.filters.include(*CertificationsRepo._primary_keys))
self._conn.execute("""UPDATE certifications SET
updated_fields = attr.astuple(
certification,
filter=attr.filters.exclude(*CertificationsRepo._primary_keys),
)
where_fields = attr.astuple(
certification,
filter=attr.filters.include(*CertificationsRepo._primary_keys),
)
self._conn.execute(
"""UPDATE certifications SET
ts=?,
signature=?,
written_on=?
......@@ -36,7 +50,8 @@ class CertificationsRepo:
certifier=? AND
certified=? AND
block=?""",
updated_fields + where_fields)
updated_fields + where_fields,
)
def get_one(self, **search):
"""
......@@ -50,7 +65,9 @@ class CertificationsRepo:
filters.append("{k}=?".format(k=k))
values.append(v)
request = "SELECT * FROM certifications WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM certifications WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
data = c.fetchone()
......@@ -70,7 +87,9 @@ class CertificationsRepo:
filters.append("{key} = ?".format(key=k))
values.append(value)
request = "SELECT * FROM certifications WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM certifications WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
datas = c.fetchall()
......@@ -92,9 +111,18 @@ class CertificationsRepo:
WHERE currency=? AND (certifier=? or certified=?)
AND ((ts + ? < ?) or (written_on == -1 and ts + ? < ?))
"""
c = self._conn.execute(request, (currency, pubkey, pubkey,
sig_validity, current_ts,
sig_window, current_ts))
c = self._conn.execute(
request,
(
currency,
pubkey,
pubkey,
sig_validity,
current_ts,
sig_window,
current_ts,
),
)
datas = c.fetchall()
if datas:
return [Certification(*data) for data in datas]
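
The expired query encodes two rules: a written certification is dropped once its timestamp plus sig_validity is in the past, and one that never reached the blockchain (written_on == -1) is also dropped once the shorter sig_window has passed. The same predicate in plain Python, with example parameter values (the real ones come from the blockchain parameters):

# Example durations only; the real values come from the blockchain parameters
SIG_VALIDITY = 365 * 86400  # lifetime of a written certification
SIG_WINDOW = 30 * 86400     # grace period for an unwritten certification


def is_expired(ts, written_on, current_ts,
               sig_validity=SIG_VALIDITY, sig_window=SIG_WINDOW):
    """Python mirror of the SQL predicate used by expired()."""
    return (ts + sig_validity < current_ts) or (
        written_on == -1 and ts + sig_window < current_ts
    )


now = 1_000_000_000
print(is_expired(ts=now - 31 * 86400, written_on=-1, current_ts=now))    # True
print(is_expired(ts=now - 31 * 86400, written_on=1234, current_ts=now))  # False
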
......@@ -123,10 +151,16 @@ class CertificationsRepo:
Drop an existing certification from the database
:param sakia.data.entities.Certification certification: the certification to update
"""
where_fields = attr.astuple(certification, filter=attr.filters.include(*CertificationsRepo._primary_keys))
self._conn.execute("""DELETE FROM certifications
where_fields = attr.astuple(
certification,
filter=attr.filters.include(*CertificationsRepo._primary_keys),
)
self._conn.execute(
"""DELETE FROM certifications
WHERE
currency=? AND
certifier=? AND
certified=? AND
block=?""", where_fields)
block=?""",
where_fields,
)
......@@ -8,6 +8,7 @@ class ConnectionsRepo:
"""
The repository for Connections entities.
"""
_conn = attr.ib() # :type sqlite3.Connection
_primary_keys = (attr.fields(Connection).currency, attr.fields(Connection).pubkey)
......@@ -16,22 +17,36 @@ class ConnectionsRepo:
Commit a connection to the database
:param sakia.data.entities.Connection connection: the connection to commit
"""
connection_tuple = attr.astuple(connection, filter=attr.filters.exclude(attr.fields(Connection).password,
attr.fields(Connection).salt))
values = ",".join(['?'] * len(connection_tuple))
self._conn.execute("INSERT INTO connections VALUES ({0})".format(values), connection_tuple)
connection_tuple = attr.astuple(
connection,
filter=attr.filters.exclude(
attr.fields(Connection).password, attr.fields(Connection).salt
),
)
values = ",".join(["?"] * len(connection_tuple))
self._conn.execute(
"INSERT INTO connections VALUES ({0})".format(values), connection_tuple
)
def update(self, connection):
"""
Update an existing connection in the database
:param sakia.data.entities.Connection connection: the certification to update
"""
updated_fields = attr.astuple(connection, filter=attr.filters.exclude(attr.fields(Connection).password,
updated_fields = attr.astuple(
connection,
filter=attr.filters.exclude(
attr.fields(Connection).password,
attr.fields(Connection).salt,
*ConnectionsRepo._primary_keys))
where_fields = attr.astuple(connection, filter=attr.filters.include(*ConnectionsRepo._primary_keys))
*ConnectionsRepo._primary_keys
),
)
where_fields = attr.astuple(
connection, filter=attr.filters.include(*ConnectionsRepo._primary_keys)
)
self._conn.execute("""UPDATE connections SET
self._conn.execute(
"""UPDATE connections SET
uid=?,
scrypt_N=?,
scrypt_p=?,
......@@ -40,7 +55,9 @@ class ConnectionsRepo:
WHERE
currency=? AND
pubkey=?
""", updated_fields + where_fields)
""",
updated_fields + where_fields,
)
def get_one(self, **search):
"""
......@@ -54,7 +71,9 @@ class ConnectionsRepo:
filters.append("{k}=?".format(k=k))
values.append(v)
request = "SELECT * FROM connections WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM connections WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
data = c.fetchone()
......@@ -115,8 +134,13 @@ class ConnectionsRepo:
Drop an existing connection from the database
:param sakia.data.entities.Connection connection: the connection to update
"""
where_fields = attr.astuple(connection, filter=attr.filters.include(*ConnectionsRepo._primary_keys))
self._conn.execute("""DELETE FROM connections
where_fields = attr.astuple(
connection, filter=attr.filters.include(*ConnectionsRepo._primary_keys)
)
self._conn.execute(
"""DELETE FROM connections
WHERE
currency=? AND
pubkey=?""", where_fields)
pubkey=?""",
where_fields,
)
......@@ -8,6 +8,7 @@ class ContactsRepo:
"""
The repository for Contacts entities.
"""
_conn = attr.ib() # :type sqlite3.Contact
_primary_keys = (attr.fields(Contact).contact_id,)
......@@ -23,33 +24,44 @@ class ContactsRepo:
contacts_list = contacts_list[:-1]
else:
col_names = ",".join([a.name for a in attr.fields(Contact)])
values = ",".join(['?'] * len(contacts_list))
values = ",".join(["?"] * len(contacts_list))
cursor = self._conn.cursor()
cursor.execute("INSERT INTO contacts ({:}) VALUES ({:})".format(col_names, values), contacts_list)
cursor.execute(
"INSERT INTO contacts ({:}) VALUES ({:})".format(col_names, values),
contacts_list,
)
contact.contact_id = cursor.lastrowid
def update(self, contact):
"""
Update an existing contact in the database
:param sakia.data.entities.Contact contact: the certification to update
"""
updated_fields = attr.astuple(contact, tuple_factory=list,
filter=attr.filters.exclude(*ContactsRepo._primary_keys))
updated_fields = attr.astuple(
contact,
tuple_factory=list,
filter=attr.filters.exclude(*ContactsRepo._primary_keys),
)
updated_fields[3] = "\n".join([str(n) for n in updated_fields[3]])
where_fields = attr.astuple(contact, tuple_factory=list,
filter=attr.filters.include(*ContactsRepo._primary_keys))
where_fields = attr.astuple(
contact,
tuple_factory=list,
filter=attr.filters.include(*ContactsRepo._primary_keys),
)
self._conn.execute("""UPDATE contacts SET
self._conn.execute(
"""UPDATE contacts SET
currency=?,
name=?,
pubkey=?,
fields=?
WHERE
contact_id=?
""", updated_fields + where_fields)
""",
updated_fields + where_fields,
)
def get_one(self, **search):
"""
......@@ -63,7 +75,9 @@ class ContactsRepo:
filters.append("{k}=?".format(k=k))
values.append(v)
request = "SELECT * FROM contacts WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM contacts WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
data = c.fetchone()
......@@ -98,7 +112,12 @@ class ContactsRepo:
Drop an existing contact from the database
:param sakia.data.entities.Contact contact: the contact to update
"""
where_fields = attr.astuple(contact, filter=attr.filters.include(*ContactsRepo._primary_keys))
self._conn.execute("""DELETE FROM contacts
where_fields = attr.astuple(
contact, filter=attr.filters.include(*ContactsRepo._primary_keys)
)
self._conn.execute(
"""DELETE FROM contacts
WHERE
contact_id=?""", where_fields)
contact_id=?""",
where_fields,
)
......@@ -5,10 +5,14 @@ from ..entities import Dividend
@attr.s(frozen=True)
class DividendsRepo:
"""The repository for Communities entities.
"""
"""The repository for Communities entities."""
_conn = attr.ib() # :type sqlite3.Connection
_primary_keys = (attr.fields(Dividend).currency, attr.fields(Dividend).pubkey, attr.fields(Dividend).block_number)
_primary_keys = (
attr.fields(Dividend).currency,
attr.fields(Dividend).pubkey,
attr.fields(Dividend).block_number,
)
def insert(self, dividend):
"""
......@@ -16,8 +20,10 @@ class DividendsRepo:
:param sakia.data.entities.Dividend dividend: the dividend to commit
"""
dividend_tuple = attr.astuple(dividend)
values = ",".join(['?'] * len(dividend_tuple))
self._conn.execute("INSERT INTO dividends VALUES ({0})".format(values), dividend_tuple)
values = ",".join(["?"] * len(dividend_tuple))
self._conn.execute(
"INSERT INTO dividends VALUES ({0})".format(values), dividend_tuple
)
def get_one(self, **search):
"""
......@@ -31,7 +37,9 @@ class DividendsRepo:
filters.append("{k}=?".format(k=k))
values.append(v)
request = "SELECT * FROM dividends WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM dividends WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
data = c.fetchone()
......@@ -51,7 +59,9 @@ class DividendsRepo:
filters.append("{key} = ?".format(key=k))
values.append(value)
request = "SELECT * FROM dividends WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM dividends WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
datas = c.fetchall()
......@@ -59,7 +69,15 @@ class DividendsRepo:
return [Dividend(*data) for data in datas]
return []
def get_dividends(self, currency, pubkey, offset=0, limit=1000, sort_by="currency", sort_order="ASC"):
def get_dividends(
self,
currency,
pubkey,
offset=0,
limit=1000,
sort_by="currency",
sort_order="ASC",
):
"""
Get all dividends in the database for a given currency and pubkey
......@@ -69,11 +87,8 @@ class DividendsRepo:
request = """SELECT * FROM dividends
WHERE currency=? AND pubkey=?
ORDER BY {sort_by} {sort_order}
LIMIT {limit} OFFSET {offset}""" \
.format(offset=offset,
limit=limit,
sort_by=sort_by,
sort_order=sort_order
LIMIT {limit} OFFSET {offset}""".format(
offset=offset, limit=limit, sort_by=sort_by, sort_order=sort_order
)
c = self._conn.execute(request, (currency, pubkey, pubkey))
datas = c.fetchall()
......@@ -86,7 +101,12 @@ class DividendsRepo:
Drop an existing dividend from the database
:param sakia.data.entities.Dividend dividend: the dividend to update
"""
where_fields = attr.astuple(dividend, filter=attr.filters.include(*DividendsRepo._primary_keys))
self._conn.execute("""DELETE FROM dividends
where_fields = attr.astuple(
dividend, filter=attr.filters.include(*DividendsRepo._primary_keys)
)
self._conn.execute(
"""DELETE FROM dividends
WHERE
currency=? AND pubkey=? AND block_number=? """, where_fields)
currency=? AND pubkey=? AND block_number=? """,
where_fields,
)
......@@ -7,28 +7,52 @@ from ..entities import Identity
@attr.s(frozen=True)
class IdentitiesRepo:
"""The repository for Identities entities.
"""
"""The repository for Identities entities."""
_conn = attr.ib() # :type sqlite3.Connection
_primary_keys = (attr.fields(Identity).currency, attr.fields(Identity).pubkey, attr.fields(Identity).uid, attr.fields(Identity).blockstamp)
_primary_keys = (
attr.fields(Identity).currency,
attr.fields(Identity).pubkey,
attr.fields(Identity).uid,
attr.fields(Identity).blockstamp,
)
def insert(self, identity):
"""
Commit an identity to the database
:param sakia.data.entities.Identity identity: the identity to commit
"""
if identity.blockstamp != BlockUID.empty():
# Search same identity with empty blockstamp (incomplete)
same_with_empty_blockstamp = self.get_one(
currency=identity.currency,
uid=identity.uid,
pubkey=identity.pubkey,
blockstamp=BlockUID.empty(),
)
# if same identity with empty blockstamp...
if same_with_empty_blockstamp:
# remove it to avoid duplicates
self.drop(same_with_empty_blockstamp)
identity_tuple = attr.astuple(identity)
values = ",".join(['?'] * len(identity_tuple))
self._conn.execute("INSERT INTO identities VALUES ({0})".format(values), identity_tuple)
values = ",".join(["?"] * len(identity_tuple))
self._conn.execute(
"INSERT INTO identities VALUES ({0})".format(values), identity_tuple
)
def update(self, identity):
"""
Update an existing identity in the database
:param sakia.data.entities.Identity identity: the identity to update
"""
updated_fields = attr.astuple(identity, filter=attr.filters.exclude(*IdentitiesRepo._primary_keys))
where_fields = attr.astuple(identity, filter=attr.filters.include(*IdentitiesRepo._primary_keys))
self._conn.execute("""UPDATE identities SET
updated_fields = attr.astuple(
identity, filter=attr.filters.exclude(*IdentitiesRepo._primary_keys)
)
where_fields = attr.astuple(
identity, filter=attr.filters.include(*IdentitiesRepo._primary_keys)
)
self._conn.execute(
"""UPDATE identities SET
signature=?,
timestamp=?,
written=?,
......@@ -44,13 +68,14 @@ class IdentitiesRepo:
currency=? AND
pubkey=? AND
uid=? AND
blockstamp=?""", updated_fields + where_fields
blockstamp=?""",
updated_fields + where_fields,
)
def get_one(self, **search):
"""
Get an existing identity in the database
:param dict search: the criterions of the lookup
:param **kwargs search: the criterions of the lookup
:rtype: sakia.data.entities.Identity
"""
filters = []
......@@ -59,7 +84,9 @@ class IdentitiesRepo:
filters.append("{k}=?".format(k=k))
values.append(v)
request = "SELECT * FROM identities WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM identities WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
data = c.fetchone()
......@@ -80,7 +107,9 @@ class IdentitiesRepo:
filters.append("{k}=?".format(k=k))
values.append(v)
request = "SELECT * FROM identities WHERE {filters}".format(filters=" AND ".join(filters))
request = "SELECT * FROM identities WHERE {filters}".format(
filters=" AND ".join(filters)
)
c = self._conn.execute(request, tuple(values))
datas = c.fetchall()
......@@ -96,7 +125,9 @@ class IdentitiesRepo:
"""
request = "SELECT * FROM identities WHERE currency=? AND (UID LIKE ? or PUBKEY LIKE ?)"
c = self._conn.execute(request, (currency, "%{0}%".format(text), "%{0}%".format(text)))
c = self._conn.execute(
request, (currency, "%{0}%".format(text), "%{0}%".format(text))
)
datas = c.fetchall()
if datas:
return [Identity(*data) for data in datas]
......@@ -107,9 +138,14 @@ class IdentitiesRepo:
Drop an existing identity from the database
:param sakia.data.entities.Identity identity: the identity to update
"""
where_fields = attr.astuple(identity, filter=attr.filters.include(*IdentitiesRepo._primary_keys))
self._conn.execute("""DELETE FROM identities WHERE
where_fields = attr.astuple(
identity, filter=attr.filters.include(*IdentitiesRepo._primary_keys)
)
self._conn.execute(
"""DELETE FROM identities WHERE
currency=? AND
pubkey=? AND
uid=? AND
blockstamp=?""", where_fields)
blockstamp=?""",
where_fields,
)
......@@ -7,7 +7,7 @@ from .connections import ConnectionsRepo
from .identities import IdentitiesRepo
from .blockchains import BlockchainsRepo
from .certifications import CertificationsRepo
from .transactions import TransactionsRepo, Transaction
from .transactions import TransactionsRepo
from .dividends import DividendsRepo
from .nodes import NodesRepo, Node
from .sources import SourcesRepo
......@@ -19,6 +19,7 @@ class SakiaDatabase:
"""
This is Sakia unique SQLite database.
"""
conn = attr.ib() # :type sqlite3.Connection
connections_repo = attr.ib(default=None)
identities_repo = attr.ib(default=None)
......@@ -29,7 +30,7 @@ class SakiaDatabase:
sources_repo = attr.ib(default=None)
dividends_repo = attr.ib(default=None)
contacts_repo = attr.ib(default=None)
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
_logger = attr.ib(default=attr.Factory(lambda: logging.getLogger("sakia")))
@classmethod
def load_or_init(cls, options, profile_name):
......@@ -40,12 +41,23 @@ class SakiaDatabase:
def total_amount(amount, amount_base):
return amount * 10 ** amount_base
db_path = os.path.join(options.config_path, profile_name, options.currency + ".db")
db_path = os.path.join(
options.config_path, profile_name, options.currency + ".db"
)
con = sqlite3.connect(db_path, detect_types=sqlite3.PARSE_DECLTYPES)
con.create_function("total_amount", 2, total_amount)
meta = SakiaDatabase(con, ConnectionsRepo(con), IdentitiesRepo(con),
BlockchainsRepo(con), CertificationsRepo(con), TransactionsRepo(con),
NodesRepo(con), SourcesRepo(con), DividendsRepo(con), ContactsRepo(con))
meta = SakiaDatabase(
con,
ConnectionsRepo(con),
IdentitiesRepo(con),
BlockchainsRepo(con),
CertificationsRepo(con),
TransactionsRepo(con),
NodesRepo(con),
SourcesRepo(con),
DividendsRepo(con),
ContactsRepo(con),
)
meta.prepare()
meta.upgrade_database()
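
load_or_init registers total_amount as a custom SQLite function, so amounts stored as (amount, base) pairs can be summed directly in SQL. A tiny standalone demonstration of the same mechanism:

import sqlite3


def total_amount(amount, amount_base):
    # Same helper as in load_or_init: amounts are stored as (amount, base) pairs
    return amount * 10 ** amount_base


conn = sqlite3.connect(":memory:")
conn.create_function("total_amount", 2, total_amount)
conn.execute("CREATE TABLE sources(amount INTEGER, base INTEGER)")
conn.executemany("INSERT INTO sources VALUES (?, ?)", [(5, 2), (42, 0)])
print(conn.execute("SELECT SUM(total_amount(amount, base)) FROM sources").fetchone())
# (542,) -> 5 * 10**2 + 42 * 10**0
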
......@@ -58,7 +70,8 @@ class SakiaDatabase:
with self.conn:
self._logger.debug("Initializing meta database")
self.conn.execute("PRAGMA busy_timeout = 50000")
self.conn.execute("""CREATE TABLE IF NOT EXISTS meta(
self.conn.execute(
"""CREATE TABLE IF NOT EXISTS meta(
id INTEGER NOT NULL,
version INTEGER NOT NULL,
PRIMARY KEY (id)
......@@ -75,7 +88,9 @@ class SakiaDatabase:
self.add_last_state_change_property,
self.refactor_transactions,
self.drop_incorrect_nodes,
self.insert_last_mass_attribute
self.insert_last_mass_attribute,
self.add_sources_conditions_property,
self.add_sources_used_by_property,
]
def upgrade_database(self, to=0):
......@@ -89,7 +104,9 @@ class SakiaDatabase:
self._logger.debug("Upgrading to version {0}...".format(v))
self.upgrades[v]()
with self.conn:
self.conn.execute("UPDATE meta SET version=? WHERE id=1", (version + 1,))
self.conn.execute(
"UPDATE meta SET version=? WHERE id=1", (version + 1,)
)
version += 1
self._logger.debug("End upgrade of database...")
......@@ -99,7 +116,7 @@ class SakiaDatabase:
:return:
"""
self._logger.debug("Initialiazing all databases")
sql_file = open(os.path.join(os.path.dirname(__file__), 'meta.sql'), 'r')
sql_file = open(os.path.join(os.path.dirname(__file__), "meta.sql"), "r")
with self.conn:
self.conn.executescript(sql_file.read())
......@@ -109,7 +126,10 @@ class SakiaDatabase:
:return:
"""
self._logger.debug("Add ud rythm parameters to blockchains table")
sql_file = open(os.path.join(os.path.dirname(__file__), '000_add_ud_rythm_parameters.sql'), 'r')
sql_file = open(
os.path.join(os.path.dirname(__file__), "000_add_ud_rythm_parameters.sql"),
"r",
)
with self.conn:
self.conn.executescript(sql_file.read())
......@@ -119,7 +139,9 @@ class SakiaDatabase:
:return:
"""
self._logger.debug("Add contacts table")
sql_file = open(os.path.join(os.path.dirname(__file__), '001_add_contacts.sql'), 'r')
sql_file = open(
os.path.join(os.path.dirname(__file__), "001_add_contacts.sql"), "r"
)
with self.conn:
self.conn.executescript(sql_file.read())
......@@ -129,7 +151,9 @@ class SakiaDatabase:
:return:
"""
self._logger.debug("Add sentry property")
sql_file = open(os.path.join(os.path.dirname(__file__), '002_add_sentry_property.sql'), 'r')
sql_file = open(
os.path.join(os.path.dirname(__file__), "002_add_sentry_property.sql"), "r"
)
with self.conn:
self.conn.executescript(sql_file.read())
......@@ -139,7 +163,12 @@ class SakiaDatabase:
:return:
"""
self._logger.debug("Add last state change property")
sql_file = open(os.path.join(os.path.dirname(__file__), '003_add_last_state_change_property.sql'), 'r')
sql_file = open(
os.path.join(
os.path.dirname(__file__), "003_add_last_state_change_property.sql"
),
"r",
)
with self.conn:
self.conn.executescript(sql_file.read())
......@@ -149,7 +178,10 @@ class SakiaDatabase:
:return:
"""
self._logger.debug("Refactor transactions")
sql_file = open(os.path.join(os.path.dirname(__file__), '004_refactor_transactions.sql'), 'r')
sql_file = open(
os.path.join(os.path.dirname(__file__), "004_refactor_transactions.sql"),
"r",
)
with self.conn:
self.conn.executescript(sql_file.read())
......@@ -163,15 +195,43 @@ class SakiaDatabase:
Node(*data)
except MalformedDocumentError:
self._logger.debug("Dropping node {0}".format(data[1]))
self.conn.execute("""DELETE FROM nodes
self.conn.execute(
"""DELETE FROM nodes
WHERE
currency=? AND pubkey=?""", (data[0], data[1]))
currency=? AND pubkey=?""",
(data[0], data[1]),
)
finally:
data = c.fetchone()
def insert_last_mass_attribute(self):
self._logger.debug("Insert last_mass attribute")
sql_file = open(os.path.join(os.path.dirname(__file__), '005_add_lass_monetary_mass.sql'), 'r')
sql_file = open(
os.path.join(os.path.dirname(__file__), "005_add_lass_monetary_mass.sql"),
"r",
)
with self.conn:
self.conn.executescript(sql_file.read())
def add_sources_conditions_property(self):
self._logger.debug("Add sources conditions property")
sql_file = open(
os.path.join(
os.path.dirname(__file__), "006_add_sources_conditions_property.sql"
),
"r",
)
with self.conn:
self.conn.executescript(sql_file.read())
def add_sources_used_by_property(self):
self._logger.debug("Add sources used_by property")
sql_file = open(
os.path.join(
os.path.dirname(__file__), "007_add_sources_used_by_property.sql"
),
"r",
)
with self.conn:
self.conn.executescript(sql_file.read())
......
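
Each schema change ships as a numbered .sql file plus a method appended to self.upgrades, and upgrade_database replays every entry above the version recorded in the meta table. A compact sketch of that pattern, with inline SQL standing in for the external migration files:

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE meta(id INTEGER PRIMARY KEY, version INTEGER NOT NULL)")
conn.execute("CREATE TABLE sources(identifier VARCHAR(100))")
conn.execute("INSERT INTO meta VALUES (1, 0)")

# Inline stand-ins for files like 007_add_sources_used_by_property.sql
UPGRADES = [
    "ALTER TABLE sources ADD COLUMN conditions VARCHAR(255);",
    "ALTER TABLE sources ADD COLUMN used_by VARCHAR(255) DEFAULT NULL;",
]


def upgrade_database(conn):
    """Replay every migration the stored schema version has not seen yet."""
    version = conn.execute("SELECT version FROM meta WHERE id=1").fetchone()[0]
    for script in UPGRADES[version:]:
        with conn:
            conn.executescript(script)
            conn.execute("UPDATE meta SET version=? WHERE id=1", (version + 1,))
        version += 1


upgrade_database(conn)
print([row[1] for row in conn.execute("PRAGMA table_info(sources)")])
# ['identifier', 'conditions', 'used_by']
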
......@@ -106,7 +106,7 @@ CREATE TABLE IF NOT EXISTS nodes(
PRIMARY KEY (currency, pubkey)
);
-- Cnnections TABLE
-- CONNECTIONS TABLE
CREATE TABLE IF NOT EXISTS connections(
currency VARCHAR(30),
pubkey VARCHAR(50),
......@@ -118,7 +118,7 @@ CREATE TABLE IF NOT EXISTS connections(
PRIMARY KEY (currency, pubkey)
);
-- Cnnections TABLE
-- SOURCES TABLE
CREATE TABLE IF NOT EXISTS sources(
currency VARCHAR(30),
pubkey VARCHAR(50),
......@@ -130,6 +130,7 @@ CREATE TABLE IF NOT EXISTS sources(
PRIMARY KEY (currency, pubkey, identifier, noffset)
);
-- DIVIDENDS TABLE
CREATE TABLE IF NOT EXISTS dividends(
currency VARCHAR(30),
pubkey VARCHAR(50),
......