Commit 31d97c68 authored by Pascal Engélibert

Up/down status & fixes

parent f4854766
Pipeline #5898 passed
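
This commit bumps the peer document to version 2: each node's signed peer document now embeds its local view of other peers' reachability (up_in / up_out), and the /peers endpoint exposes the same status. For orientation, a rough sketch of the version-2 document assembled by Peer.generate() below, with placeholder values only (the real document is ubjson-encoded, signed, and wrapped in an envelope not fully shown in this diff):

    # Illustrative sketch only: field names come from the asserts and generate()
    # in this diff; every value below is a placeholder.
    peer_doc = {
        "doctype": "gmixer/peer",
        "docver": "2",                       # Peer.VERSION, bumped from "1"
        "currency": "g1",                    # placeholder
        "pubkey": "SOMEBASE58PUBKEY",        # placeholder signer pubkey
        "sigtime": 1600000000.0,             # must be earlier than reception time
        "host": ["node.example", 10951],     # [public_host, public_port], placeholder
        "idty": "SOMEIDENTITYPUBKEY",        # placeholder identity pubkey
        "idtysig": b"",                      # identity signature bytes, placeholder
        "peers": [                           # new in version 2
            # one entry per known peer: its hash plus this node's observations,
            # where None means "unknown", True "up", False "down"
            {"hash": b"sha512-digest-bytes", "up_in": True, "up_out": None},
        ],
    }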
@@ -499,7 +499,11 @@ class ServerThread(Thread):
                 utils.logprint("Confirmed "+tx.sender_pubkey[:8]+" -> "+tx.receiver_pubkey[:8]+" = "+str(tx.in_amount)+":"+str(tx.in_base)+" -> "+str(tx.out_amount)+":"+str(tx.out_base), utils.LOG_TRACE)
             if "peers" in url:
-                resp["peers"] = [self.peers[peer].raw for peer in self.peers]
+                resp["peers"] = [{
+                    "raw": self.peers[peer].raw,
+                    "up_in": self.peers[peer].up_in,
+                    "up_out": self.peers[peer].up_out
+                } for peer in self.peers]
             if "new" in url:
                 try:
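
The /peers response thus changes from a flat list of raw peer documents to one dict per peer, carrying the raw document together with the serving node's reachability observations. A small consumer-side sketch (hypothetical helper, assuming the response has already been decoded into a Python list of such dicts):

    # Hypothetical helper, not part of the codebase: summarize the new entries.
    # up_in / up_out are tri-state: None = unknown, True = up, False = down.
    def summarize_peer_entries(entries):
        summary = []
        for entry in entries:
            summary.append({
                "raw_size": len(entry["raw"]),  # the signed peer document itself
                "up_in": entry["up_in"],        # that peer could reach the serving node
                "up_out": entry["up_out"],      # the serving node could reach that peer
            })
        return summary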
@@ -597,9 +601,10 @@ class ClientThread(Thread):
                 new_peers.setdefault(new_peer.hash, [])
                 new_peers[new_peer.hash].append(new_peer)
-            for raw in data["peers"]:
+            for i_peer in data["peers"]:
                 try:
-                    new_peer = utils.Peer(self.conf, raw)
+                    assert type(i_peer) == dict and "raw" in i_peer and "up_in" in i_peer and "up_out" in i_peer, "bad peer list encoding"
+                    new_peer = utils.Peer(self.conf, i_peer["raw"])
                 except Exception as e:
                     utils.logprint("Peer detection: "+str(e), utils.LOG_WARN)
                 if new_peer.hash == self.local_peer.hash:
@@ -620,7 +625,7 @@ class ClientThread(Thread):
                 if new_peer.sigtime + self.conf["server"]["peer_sig_age_max"] <= new_peer.rectime or \
                     (peer in self.peers and new_peer.sigtime <= self.peers[peer].sigtime):
-                    utils.logprint("Peer detection: too old sig", utils.LOG_TRACE)
+                    utils.logprint("Peer detection: too old sig: "+new_peer.to_human_str(), utils.LOG_TRACE)
                     continue
                 self.peers[peer] = new_peer
                 added_peers = True
@@ -645,9 +650,9 @@ class ClientThread(Thread):
         for peer in self.peers:
             try:
                 utils.sdata(self.peers[peer].host, "POST", "/new", self.local_peer.raw, proxy=self.conf["client"]["proxy"], proxy_onion_only=self.conf["client"]["proxy_onion_only"])
-                peer.up_out = True
+                self.peers[peer].up_out = True
             except (ConnectionRefusedError, socks.GeneralProxyError, socket.gaierror, socket.timeout):
-                peer.up_out = False
+                self.peers[peer].up_out = False
                 utils.logprint("Network error: "+self.peers[peer].to_human_str(), utils.LOG_WARN)
         utils.logprint("Finished spreading peer info", utils.LOG_TRACE)
@@ -737,6 +742,7 @@ class ClientThread(Thread):
         next_peer_info = t + self.conf["server"]["peer_info_interval"]
         self.detect_peers()
+        local_peer = utils.Peer.generate(self.conf, self.keys, self.peers)
         self.spread_peer_info()
         while self.work:
@@ -813,7 +819,7 @@ class ClientThread(Thread):
             # Generate peer info
             if t > next_peer_info:
-                self.local_peer = utils.Peer.generate(self.conf, self.keys)
+                self.local_peer = utils.Peer.generate(self.conf, self.keys, self.peers)
                 utils.logprint("Generated new peer info", utils.LOG_TRACE)
                 self.spread_peer_info()
                 next_peer_info = time.time() + self.conf["server"]["peer_info_interval"]
@@ -876,7 +882,7 @@ def main():
     utils.logprint("Pubkey: "+keys.pubkey, utils.LOG_INFO)
     # Generate peer info
-    local_peer = utils.Peer.generate(conf, keys)
+    local_peer = utils.Peer.generate(conf, keys, {})
     # Start threads
     clientThread = ClientThread(conf, peers, keys, local_peer, pool, tx_in_index, tx_out_index, db_txs)
@@ -974,7 +980,7 @@ if __name__ == "__main__":
             except Exception as e: # TODO more specific exceptions
                 print("Error: invalid peer data: "+str(e))
                 continue
-            if new_peer.hash in peers and peers[new_peer].sigtime > new_peer.sigtime:
+            if new_peer.hash in peers and peers[new_peer.hash].sigtime > new_peer.sigtime:
                 print("Too old: "+new_peer.to_human_str())
                 continue
             peers[new_peer.hash] = new_peer
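
Same family of fix in the interactive peer-entry loop: the peers table is keyed by Peer.hash (a sha512 digest, see __init__ below), so peers[new_peer] raised KeyError whenever the hash was already known; indexing by new_peer.hash retrieves the stored record. A standalone sketch with placeholder values:

    # Standalone sketch (placeholder values): a table keyed by a digest.
    import hashlib

    class StoredPeer:
        def __init__(self, pubkey, host, sigtime):
            self.sigtime = sigtime
            self.hash = hashlib.sha512((pubkey+"@"+host).encode()).digest()

    stored = StoredPeer("PUBKEY1", "node.example:10951", sigtime=200.0)
    peers = {stored.hash: stored}

    incoming = StoredPeer("PUBKEY1", "node.example:10951", sigtime=100.0)
    if incoming.hash in peers and peers[incoming.hash].sigtime > incoming.sigtime:
        print("Too old")               # peers[incoming] would raise KeyError here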
@@ -994,7 +1000,7 @@ if __name__ == "__main__":
         print("Pubkey: "+keys.pubkey)
         # Generate peer info
-        local_peer = utils.Peer.generate(conf, keys)
+        local_peer = utils.Peer.generate(conf, keys, {})
         if local_peer == None:
             exit()
......
@@ -147,8 +147,8 @@ def verify_idty_sig(conf, raw, idty_pubkey, peer_pubkey):
     return True

 class Peer:
-    VERSION = "1"
-    def __init__(self, conf:dict, raw:bytes):
+    VERSION = "2"
+    def __init__(self, conf:dict, raw:bytes, do_check_idty:bool=True):
         self.rectime = time.time()
         self.raw = raw
         try:
@@ -167,7 +167,7 @@ class Peer:
         except ubjson.decoder.DecoderException:
             raise AssertionError("Bad data encoding")
-        assert "doctype" in data and "docver" in data and "currency" in data and "pubkey" in data and "sigtime" in data and "host" in data and "idty" in data and "idtysig" in data ,\
+        assert "doctype" in data and "docver" in data and "currency" in data and "pubkey" in data and "sigtime" in data and "host" in data and "idty" in data and "idtysig" in data and "peers" in data,\
             "Missing values in data"
         assert data["doctype"] == "gmixer/peer" , "Bad doctype"
@@ -176,8 +176,12 @@
         assert data["pubkey"] == pubkey , "Different pubkey"
         assert isinstance(data["sigtime"], (int, float)) , "Bad sigtime"
         assert data["sigtime"] < self.rectime , "Futuristic sigtime"
+        assert isinstance(data["peers"], list) , "Bad peer list"
+        for p in data["peers"]:
+            assert isinstance(p, dict) and "up_in" in p and "up_out" in p and p["up_in"] in (None, False, True) and p["up_out"] in (None, False, True),\
+                "Bad peer list value"
-        if conf["idty"]["needed"]:
+        if do_check_idty and conf["idty"]["needed"]:
             assert run_async(check_idty(conf["client"]["bma_hosts"], data["idty"])) , "Idty is not member"
             assert verify_idty_sig(conf, data["idtysig"], data["idty"], pubkey) , "Bad idty sig"
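
The constructor now also validates the embedded peer list: it must be a list of dicts whose up_in and up_out fields are tri-state (None, False or True). The same check, written as a hypothetical standalone predicate for clarity (not part of the codebase):

    # Hypothetical standalone equivalent of the new "peers" checks.
    def is_valid_peer_list(value):
        if not isinstance(value, list):
            return False
        for p in value:
            if not isinstance(p, dict) or "up_in" not in p or "up_out" not in p:
                return False
            if p["up_in"] not in (None, False, True) or p["up_out"] not in (None, False, True):
                return False
        return True

    assert is_valid_peer_list([{"hash": b"digest", "up_in": None, "up_out": True}])
    assert not is_valid_peer_list([{"up_in": "yes", "up_out": True}])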
@@ -187,17 +191,19 @@
         self.sigtime = data["sigtime"]
         self.host = tuple(data["host"]) # socket cannot manage lists
         self.idty = data["idty"]
-        self.idtysig = data["idtysig"]# TODO check
+        self.idtysig = data["idtysig"]
+        self.peers = data["peers"]# TODO check size & useless values
         self.hash = hashlib.sha512((self.pubkey+"@"+self.host[0]+":"+str(self.host[1])).encode()).digest()
         self.keys = PublicKey(self.pubkey)
+        self.up_in = None # can reach local node
+        self.up_out = None # is reachable by local node

     def to_human_str(self, short=True):
         return (self.pubkey[:8] if short else self.pubkey)+"@"+self.host[0]+":"+str(self.host[1])

-    def generate(conf:dict, keys:SigningKey) -> bytes:
+    def generate(conf:dict, keys:SigningKey, peers:list) -> bytes:
         data = {
             "doctype": "gmixer/peer",
             "docver": Peer.VERSION,
@@ -206,7 +212,8 @@
             "sigtime": time.time(),
             "host": [conf["server"]["public_host"], conf["server"]["public_port"]],
             "idty": conf["idty"]["pubkey"],
-            "idtysig": bytes.fromhex(conf["idty"]["sig"])
+            "idtysig": bytes.fromhex(conf["idty"]["sig"]),
+            "peers": [{"hash": peers[p].hash, "up_in": peers[p].up_in, "up_out": peers[p].up_out} for p in peers]
         }
         raw = keys.sign(ubjson.dumpb(data))
         data = {
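
Each embedded entry refers to another peer by its hash (the sha512 digest of pubkey@host:port computed in __init__) together with this node's up_in/up_out observations. The diff does not show how receivers use these entries; presumably they can be matched against the local peer table, roughly like this hypothetical sketch (assumes local_peers maps hashes to Peer objects):

    # Hypothetical consumer-side sketch, not part of the codebase.
    def reported_statuses(embedded_peers, local_peers):
        statuses = {}
        for entry in embedded_peers:
            local = local_peers.get(entry["hash"])
            if local is not None:
                # (up_in, up_out) as observed by the document's author
                statuses[local.to_human_str()] = (entry["up_in"], entry["up_out"])
        return statuses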
@@ -215,31 +222,47 @@
         }
         raw = ubjson.dumpb(data)
         try:
-            return Peer(conf, raw)
+            return Peer(conf, raw, do_check_idty=False)
         except AssertionError as e:
             logprint("Gen peer: "+str(e), LOG_ERROR)
             return None

 def load_peers(conf:dict, db_peers:plyvel.DB, peers:dict):
-    for _, data in db_peers:
+    to_remove = []
+    for peer_hash, data in db_peers:
         try:
             peer = Peer(conf, data)
         except Exception as e:
             logprint("Importing peer: "+str(e), LOG_WARN)
+            to_remove.append(peer_hash)
             continue
         peers[peer.hash] = peer
+    for peer_hash in to_remove:
+        db_peers.delete(peer_hash)

 def save_peers(db_peers:plyvel.DB, peers:dict):
     for peer in peers:
         db_peers.put(peers[peer].hash, peers[peer].raw)

 async def bma_client(bma_endpoints:list):
     client = None
+    downs = 0
     for bma_endpoint in bma_endpoints:
         client = Client("BMAS "+bma_endpoint)
         try:
             await client(bma.node.summary)
+            logprint("BMA up: "+bma_endpoint, LOG_TRACE)
             break
         except:
-            logprint("BMA down: "+bma_endpoint, utils.LOG_WARN)
+            logprint("BMA down: "+bma_endpoint, LOG_WARN)
             await client.close()
             client = None
+            downs += 1
+    # Put down endpoints at the end
+    for i in range(downs):
+        bma_endpoints.append(bma_endpoints.pop(0))
     return client

 async def check_idty(bma_endpoints:list, pubkey:str):
......
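
bma_client() now also counts the endpoints that failed before a working one was found and rotates them to the back of the shared endpoint list, so the next call starts with an endpoint that recently answered. Since endpoints are tried from the front, the failed ones are exactly the first downs entries. A minimal standalone demonstration of that rotation (placeholder hostnames):

    # Standalone demonstration of the rotation at the end of bma_client().
    bma_endpoints = ["down1.example", "down2.example", "up.example"]
    downs = 2  # incremented once per endpoint that failed before the break

    for _ in range(downs):
        bma_endpoints.append(bma_endpoints.pop(0))

    assert bma_endpoints == ["up.example", "down1.example", "down2.example"]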