Skip to content
Snippets Groups Projects
Commit 85452628 authored by inso's avatar inso
Browse files

Fix bug #686: error in parsing tx and ud history

parent 2eedf1ee
No related branches found
No related tags found
No related merge requests found
......@@ -10,7 +10,7 @@ pyenv rehash
pip install coveralls
pip install pytest-cov
pip install pyinstaller
pip install PyQt5
pip install PyQt5==5.7.1
pip install -r requirements.txt
if [ $TRAVIS_OS_NAME == "linux" ]
then
......
......@@ -43,8 +43,8 @@ class Application(QObject):
:param sakia.services.DocumentsService documents_service: A service to broadcast documents
"""
new_dividend = pyqtSignal(Dividend)
new_transfer = pyqtSignal(Transaction)
new_dividend = pyqtSignal(Connection, Dividend)
new_transfer = pyqtSignal(Connection, Transaction)
transaction_state_changed = pyqtSignal(Transaction)
identity_changed = pyqtSignal(Identity)
new_connection = pyqtSignal(Connection)
......
......@@ -13,11 +13,11 @@ class Connection:
currency = attr.ib(convert=str)
pubkey = attr.ib(convert=str)
uid = attr.ib(convert=str, default="", cmp=False, hash=False)
scrypt_N = attr.ib(convert=int, default=4096)
scrypt_r = attr.ib(convert=int, default=16)
scrypt_p = attr.ib(convert=int, default=1)
scrypt_N = attr.ib(convert=int, default=4096, cmp=False, hash=False)
scrypt_r = attr.ib(convert=int, default=16, cmp=False, hash=False)
scrypt_p = attr.ib(convert=int, default=1, cmp=False, hash=False)
blockstamp = attr.ib(convert=block_uid, default=BlockUID.empty(), cmp=False, hash=False)
salt = attr.ib(convert=str, init=False)
salt = attr.ib(convert=str, init=False, cmp=False, hash=False)
password = attr.ib(init=False, convert=str, default="", cmp=False, hash=False)
def is_identity(self):
......
......@@ -160,12 +160,12 @@ class TransferController(QObject):
# If we sent back a transaction we cancel the first one
self.model.cancel_previous()
for tx in transactions:
self.model.app.new_transfer.emit(tx)
self.model.app.new_transfer.emit(self.model.connection, tx)
self.view.accept()
else:
await self.view.show_error(self.model.notifications(), result[1])
for tx in transactions:
self.model.app.new_transfer.emit(tx)
self.model.app.new_transfer.emit(self.model.connection, tx)
QApplication.restoreOverrideCursor()
self.view.button_box.setEnabled(True)
......
from PyQt5.QtCore import pyqtSignal, QObject, Qt
from PyQt5.QtGui import QCursor
from PyQt5.QtWidgets import QMenu, QAction, QMessageBox, QFileDialog
from PyQt5.QtWidgets import QMenu, QAction, QMessageBox
from duniterpy.key import SigningKey
from sakia.data.entities import Connection
......
......@@ -215,8 +215,8 @@ class HistoryTableModel(QAbstractTableModel):
"""
return self.transactions_service.dividends(self.connection.pubkey)
def add_transfer(self, transfer):
if self.connection.pubkey in (transfer.issuer, *transfer.receivers):
def add_transfer(self, connection, transfer):
if self.connection == connection:
self.beginInsertRows(QModelIndex(), len(self.transfers_data), len(self.transfers_data))
if transfer.issuer == self.connection.pubkey:
self.transfers_data.append(self.data_sent(transfer))
......@@ -224,8 +224,8 @@ class HistoryTableModel(QAbstractTableModel):
self.transfers_data.append(self.data_received(transfer))
self.endInsertRows()
def add_dividend(self, dividend):
if dividend.pubkey == self.connection.pubkey:
def add_dividend(self, connection, dividend):
if self.connection == connection:
self.beginInsertRows(QModelIndex(), len(self.transfers_data), len(self.transfers_data))
self.transfers_data.append(self.data_dividend(dividend))
self.endInsertRows()
......
......@@ -13,6 +13,7 @@ class QAsyncFileDialog:
@staticmethod
async def get_save_filename(parent, title, url, filtr):
dialog = QFileDialog(parent, title, url, filtr)
dialog.setOption(QFileDialog.DontUseNativeDialog, True)
dialog.setAcceptMode(QFileDialog.AcceptSave)
result = await dialog_async_exec(dialog)
if result == QFileDialog.AcceptSave:
......
......@@ -66,15 +66,20 @@ class BlockchainService(QObject):
if len(blocks) > 0:
identities = await self._identities_service.handle_new_blocks(blocks)
changed_tx, new_tx, new_dividends = await self._transactions_service.handle_new_blocks(blocks)
new_tx += await self._sources_service.refresh_sources(new_tx, new_dividends)
destructions = await self._sources_service.refresh_sources(new_tx, new_dividends)
self.handle_new_blocks(blocks)
self.app.db.commit()
for tx in changed_tx:
self.app.transaction_state_changed.emit(tx)
for tx in new_tx:
self.app.new_transfer.emit(tx)
for ud in new_dividends:
self.app.new_dividend.emit(ud)
for conn in new_tx:
for tx in new_tx[conn]:
self.app.new_transfer.emit(conn, tx)
for conn in destructions:
for tx in destructions[conn]:
self.app.new_transfer.emit(conn, tx)
for conn in new_dividends:
for ud in new_dividends[conn]:
self.app.new_dividend.emit(conn, ud)
for idty in identities:
self.app.identity_changed.emit(idty)
self.app.new_blocks_handled.emit()
......
......@@ -165,14 +165,17 @@ class SourcesServices(QObject):
:param list[sakia.data.entities.Dividend] dividends:
:return: the destruction of sources
"""
connections_pubkeys = [c.pubkey for c in self._connections_processor.connections_to(self.currency)]
destructions = []
for pubkey in connections_pubkeys:
connections = self._connections_processor.connections_to(self.currency)
destructions = {}
for conn in connections:
destructions[conn] = []
_, current_base = self._blockchain_processor.last_ud(self.currency)
# there can be bugs if the current base switch during the parsing of blocks
# but since it only happens every 23 years and that its only on accounts having less than 100
# this is acceptable I guess
destructions += await self.refresh_sources_of_pubkey(pubkey, transactions, dividends, current_base)
destructions[conn] += await self.refresh_sources_of_pubkey(conn.pubkey, transactions[conn],
dividends[conn], current_base)
return destructions
def restore_sources(self, pubkey, tx):
......
......@@ -42,7 +42,7 @@ class TransactionsService(QObject):
:return: The list of transfers sent
"""
transfers_changed = []
new_transfers = []
new_transfers = {}
for tx in [t for t in self._transactions_processor.awaiting(self.currency)]:
if self._transactions_processor.run_state_transitions(tx, block_doc):
transfers_changed.append(tx)
......@@ -51,12 +51,13 @@ class TransactionsService(QObject):
new_transactions = [t for t in block_doc.transactions
if not self._transactions_processor.find_by_hash(t.sha_hash)
and SimpleTransaction.is_simple(t)]
connections_pubkeys = [c.pubkey for c in self._connections_processor.connections_to(self.currency)]
for pubkey in connections_pubkeys:
connections = self._connections_processor.connections_to(self.currency)
for conn in connections:
new_transfers[conn] = []
for (i, tx_doc) in enumerate(new_transactions):
tx = parse_transaction_doc(tx_doc, pubkey, block_doc.blockUID.number, block_doc.mediantime, txid+i)
tx = parse_transaction_doc(tx_doc, conn.pubkey, block_doc.blockUID.number, block_doc.mediantime, txid+i)
if tx:
new_transfers.append(tx)
new_transfers[conn].append(tx)
self._transactions_processor.commit(tx)
else:
logging.debug("Error during transfer parsing")
......@@ -71,13 +72,17 @@ class TransactionsService(QObject):
"""
self._logger.debug("Refresh transactions")
transfers_changed = []
new_transfers = []
new_transfers = {}
txid = 0
for block in blocks:
changes, new_tx = self._parse_block(block, txid)
txid += len(new_tx)
transfers_changed += changes
new_transfers += new_tx
for conn in new_tx:
try:
new_transfers[conn] += new_tx[conn]
except KeyError:
new_transfers[conn] = new_tx[conn]
new_dividends = await self.parse_dividends_history(blocks, new_transfers)
return transfers_changed, new_transfers, new_dividends
......@@ -87,17 +92,18 @@ class TransactionsService(QObject):
:param List[duniterpy.documents.Block] blocks: the list of transactions found by tx parsing
:param List[sakia.data.entities.Transaction] transactions: the list of transactions found by tx parsing
"""
connections_pubkeys = [c.pubkey for c in self._connections_processor.connections_to(self.currency)]
connections = self._connections_processor.connections_to(self.currency)
min_block_number = blocks[0].number
max_block_number = blocks[-1].number
dividends = []
for pubkey in connections_pubkeys:
dividends = {}
for connection in connections:
dividends[connection] = []
history_data = await self._bma_connector.get(self.currency, bma.ud.history,
req_args={'pubkey': pubkey})
req_args={'pubkey': connection.pubkey})
block_numbers = []
for ud_data in history_data["history"]["history"]:
dividend = Dividend(currency=self.currency,
pubkey=pubkey,
pubkey=connection.pubkey,
block_number=ud_data["block_number"],
timestamp=ud_data["time"],
amount=ud_data["amount"],
......@@ -106,13 +112,13 @@ class TransactionsService(QObject):
self._logger.debug("Dividend of block {0}".format(dividend.block_number))
block_numbers.append(dividend.block_number)
if self._dividends_processor.commit(dividend):
dividends.append(dividend)
dividends[connection].append(dividend)
for tx in transactions:
for tx in transactions[connection]:
txdoc = TransactionDoc.from_signed_raw(tx.raw)
for input in txdoc.inputs:
# For each dividends inputs, if it is consumed (not present in ud history)
if input.source == "D" and input.origin_id == pubkey and input.index not in block_numbers:
if input.source == "D" and input.origin_id == connection.pubkey and input.index not in block_numbers:
try:
# we try to get the block of the dividend
block = next((b for b in blocks if b.number == input.index))
......@@ -121,14 +127,14 @@ class TransactionsService(QObject):
req_args={'number': input.index})
block = Block.from_signed_raw(block_data["raw"] + block_data["signature"] + "\n")
dividend = Dividend(currency=self.currency,
pubkey=pubkey,
pubkey=connection.pubkey,
block_number=input.index,
timestamp=block.mediantime,
amount=block.ud,
base=block.unit_base)
self._logger.debug("Dividend of block {0}".format(dividend.block_number))
if self._dividends_processor.commit(dividend):
dividends.append(dividend)
dividends[connection].append(dividend)
return dividends
def transfers(self, pubkey):
......
0% Loading. If the diff fails to load, try reloading the page.
You are about to add 0 people to the discussion. Proceed with caution.
Please register or sign in to comment.