'''
Created on 24 Feb. 2015

@author: inso
'''
from .node import Node

import logging
import time

from PyQt5.QtCore import QObject, pyqtSignal


class Network(QObject):
    '''
    A network manages the polling and crawling of the nodes
    of a given community.
    '''
    nodes_changed = pyqtSignal()

    def __init__(self, currency, nodes):
        '''
        Constructor of a network

        :param str currency: The currency name of the community
        :param list nodes: The nodes of the network
        '''
        super().__init__()
        self.currency = currency
        self._nodes = nodes
        for n in self._nodes:
            n.changed.connect(self.nodes_changed)
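        # Control flag for the perpetual crawling loop; see stop_crawling()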
        self.must_crawl = False

    @classmethod
    def create(cls, node):
        '''
        Create a new network with one known node.
        Crawls the nodes from the first node to build the
        community network.

        :param node: The first known node of the network
        '''
        nodes = [node]
        network = cls(node.currency, nodes)
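        # Crawl outward from the first node to discover the rest of the network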
        nodes = network.crawling()
        block_max = max([n.block for n in nodes])
        for node in nodes:
            node.check_sync(block_max)
        network._nodes = nodes
        return network

    def merge_with_json(self, json_data):
        '''
        Merge with the nodes known when
        cutecoin was last stopped.

        :param dict json_data: Nodes in json format
        '''
        for data in json_data:
            node = Node.from_json(self.currency, data)
            self._nodes.append(node)
            logging.debug("Loading : {:}".format(data['pubkey']))
        self._nodes = self.crawling()

    @classmethod
    def from_json(cls, currency, json_data):
        '''
        Load a network from a configured community

        :param str currency: The currency name of a community
        :param dict json_data: A json_data view of a network
        '''
        nodes = []
        for data in json_data:
            node = Node.from_json(currency, data)
            nodes.append(node)
            logging.debug("Loading : {:}".format(data['pubkey']))
        block_max = max([n.block for n in nodes])
        for node in nodes:
            node.check_sync(block_max)
        return cls(currency, nodes)

    def jsonify(self):
        '''
        Get the network in json format.

        :return: The network as a dict in json format.
        '''
        data = []
        for node in self._nodes:
            data.append(node.jsonify())
        return data

    def stop_crawling(self):
        '''
        Stop network nodes crawling.
        '''
        self.must_crawl = False

    @property
    def online_nodes(self):
        '''
        Get nodes which are in the ONLINE state.
        '''
        return [n for n in self._nodes if n.state == Node.ONLINE]

    @property
    def all_nodes(self):
        '''
        Get all known nodes.
        '''
        return self._nodes.copy()

    def add_nodes(self, node):
        '''
        Add a node to the network.
        '''
        self._nodes.append(node)
        node.changed.connect(self.nodes_changed)

    def start_perpetual_crawling(self):
        '''
        Start crawling which never stops.
        To stop this crawling, call "stop_crawling" method.
        '''
        self.must_crawl = True
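        # Keep crawling until stop_crawling() clears the flag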
        while self.must_crawl:
            nodes = self.crawling(interval=10)

            new_inlines = [n.endpoint.inline() for n in nodes]
            last_inlines = [n.endpoint.inline() for n in self._nodes]

            hash_new_nodes = hash(tuple(frozenset(sorted(new_inlines))))
            hash_last_nodes = hash(tuple(frozenset(sorted(last_inlines))))

            if hash_new_nodes != hash_last_nodes:
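                # The set of known endpoints changed: publish the new node
                # list and re-wire the change notifications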
                self._nodes = nodes
                self.nodes_changed.emit()
                for n in self._nodes:
                    n.changed.connect(self.nodes_changed)

    def crawling(self, interval=0):
        '''
        One network crawling.

        :param int interval: The interval between two node requests.
        '''
        nodes = []
        traversed_pubkeys = []
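        # Crawl each pubkey only once per pass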
        for n in self._nodes.copy():
            logging.debug(traversed_pubkeys)
            logging.debug("Peering : next to read : {0} : {1}".format(n.pubkey,
                          (n.pubkey not in traversed_pubkeys)))
            if n.pubkey not in traversed_pubkeys:
                n.peering_traversal(nodes,
                                    traversed_pubkeys, interval)
                time.sleep(interval)

        block_max = max([n.block for n in nodes])
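        # Check each online node's sync state against the highest block found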
        for node in [n for n in nodes if n.state == Node.ONLINE]:
            node.check_sync(block_max)

        # TODO: Nodes offline for too long have to be removed
        # TODO: Corrupted nodes should maybe be removed faster?

        logging.debug("Nodes found : {0}".format(nodes))
        return nodes
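
# Typical usage (a sketch; a first Node must be obtained elsewhere,
# e.g. through Node.from_json):
#
#   network = Network.create(first_node)  # bootstrap from one known node
#   network.start_perpetual_crawling()    # blocks, so run it in its own thread
#   ...
#   network.stop_crawling()               # ends the crawling loop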