diff --git a/src/sakia/app.py b/src/sakia/app.py
index af47f2141df7626c4e27fc77ae8c9df965360af7..fd0b02733d1a3c5afeb813ae6667ff98b38190ac 100644
--- a/src/sakia/app.py
+++ b/src/sakia/app.py
@@ -4,26 +4,26 @@ Created on 1 févr. 2014
 @author: inso
 """
 
-import asyncio
 import datetime
 import json
 import logging
 import os
-import shutil
 import tarfile
 
 import aiohttp
 from PyQt5.QtCore import QObject, pyqtSignal, QTranslator, QCoreApplication, QLocale
 from aiohttp.connector import ProxyConnector
-from pkg_resources import parse_version
 
 from duniterpy.api.bma import API
+from . import __version__
+from .options import SakiaOptions
+from sakia.data.connectors import BmaConnector
+from sakia.services import NetworkService, BlockchainService, IdentitiesService
+from sakia.data.repositories import MetaDatabase, BlockchainsRepo, CertificationsRepo, \
+    IdentitiesRepo, NodesRepo, TransactionsRepo, ConnectionsRepo
+from sakia.data.processors import BlockchainProcessor, NodesProcessor, IdentitiesProcessor, CertificationsProcessor
+from sakia.data.files import AppDataFile
 from sakia.decorators import asyncify
-from sakia.core import config
-from sakia.core.account import Account
-from sakia.core.registry import IdentitiesRegistry, Identity
-from sakia import __version__
-from ..tools.exceptions import NameAlreadyExists, BadAccountFile
 
 
 class Application(QObject):
@@ -33,76 +33,80 @@ class Application(QObject):
     Accounts list and general configuration
     Saving and loading the application state
     """
-
-    version_requested = pyqtSignal()
-    view_identity_in_wot = pyqtSignal(Identity)
     refresh_transfers = pyqtSignal()
     account_imported = pyqtSignal(str)
     account_changed = pyqtSignal()
 
-    def __init__(self, qapp, loop, identities_registry):
+    def __init__(self, qapp, loop, app_data, current_profile, connections_repo,
+                 network_services, blockchain_services, identities_services):
         """
         Init a new "sakia" application
         :param QCoreApplication qapp: Qt Application
         :param quamash.QEventLoop loop: quamash.QEventLoop instance
-        :param sakia.core.registry.IdentitiesRegistry identities_registry: IdentitiesRegistry instance
+        :param sakia.data.entities.AppData app_data: the application data
+        :param str current_profile: the application current profile
+        :param sakia.data.repositories.ConnectionsRepo connections_repo: The connections repository
+        :param dict network_services: All network services for current currency
+        :param dict blockchain_services: All blockchain services for current currency
+        :param dict identities_services: All identities services for current currency
         :return:
         """
-
         super().__init__()
         self.qapp = qapp
-        self.accounts = {}
-        self._current_account = None
         self.loop = loop
         self.available_version = (True,
                                   __version__,
                                   "")
         self._translator = QTranslator(self.qapp)
-        self._identities_registry = identities_registry
-        self.preferences = {'account': "",
-                            'lang': 'en_GB',
-                            'ref': 0,
-                            'expert_mode': False,
-                            'digits_after_comma': 6,
-                            'maximized': False,
-                            'notifications': True,
-                            'enable_proxy': False,
-                            'proxy_type': "HTTP",
-                            'proxy_address': "",
-                            'proxy_port': 8080,
-                            'international_system_of_units': True,
-                            'auto_refresh': False,
-                            'forgetfulness':False
-                            }
+        self._app_data = app_data
+        self._current_profile = current_profile
+        self._connections_repo = connections_repo
+        self._network_services = network_services
+        self._blockchain_services = blockchain_services
+        self._identities_services = identities_services
 
     @classmethod
     def startup(cls, argv, qapp, loop):
-        config.parse_arguments(argv)
-        identities_registry = IdentitiesRegistry()
-        app = cls(qapp, loop, identities_registry)
-        app.load()
+        options = SakiaOptions.from_arguments(argv)
+        app_data = AppDataFile.in_config_path(options.config_path).load_or_init()
+        app = cls(qapp, loop, app_data, app_data.default, {}, {}, {})
         app.switch_language()
         app.set_proxy()
         app.get_last_version()
-        if app.preferences["account"] != "":
-            account = app.get_account(app.preferences["account"])
-            app._current_account = account
-        # no default account...
-        else:
-            # if at least one account exists, set it as default...
-            if len(app.accounts) > 0:
-                # capture names sorted alphabetically
-                names = list(app.accounts.keys())
-                names.sort()
-                # set first name in list as default in preferences
-                app.preferences['account'] = names[0]
-                app.save_preferences(app.preferences)
-                # open it
-                logging.debug("No default account in preferences. Set %s as default account." % names[0])
-        if app._current_account:
-            app._current_account.start_coroutines()
+        app.load_profile(app_data.default)
         return app
 
+    def load_profile(self, profile_name):
+        """
+        Initialize databases depending on profile loaded
+        :param profile_name:
+        :return:
+        """
+        self._current_profile = profile_name
+        meta = MetaDatabase.load_or_init(self.options.config_path, profile_name)
+        self._connections_repo = ConnectionsRepo(meta.conn)
+        certifications_repo = CertificationsRepo(meta.conn)
+        nodes_repo = NodesRepo(meta.conn)
+        blockchain_repo = BlockchainsRepo(meta.conn)
+        identities_repo = IdentitiesRepo(meta.conn)
+
+        nodes_processor = NodesProcessor(nodes_repo)
+        bma_connector = BmaConnector(nodes_processor)
+        identities_processor = IdentitiesProcessor(identities_repo, bma_connector)
+        certs_processor = CertificationsProcessor(certifications_repo, bma_connector)
+        blockchain_processor = BlockchainProcessor(blockchain_repo, bma_connector)
+
+        self._blockchain_services = {}
+        self._network_services = {}
+        self._identities_services = {}
+        for currency in self._connections_repo.get_currencies():
+            self._identities_services[currency] = IdentitiesService(currency, identities_processor, certs_processor,
+                                                             blockchain_processor)
+            self._blockchain_services[currency] = BlockchainService(currency, blockchain_processor, bma_connector,
+                                                              self._identities_services[currency])
+            self._network_services[currency] = NetworkService.load(currency, nodes_processor,
+                                                             self._blockchain_services[currency])
+
     def set_proxy(self):
         if self.preferences['enable_proxy'] is True:
             API.aiohttp_connector = ProxyConnector("http://{0}:{1}".format(
@@ -126,396 +130,20 @@ class Application(QObject):
                 logging.debug("Couldn't load translation")
 
     @property
-    def current_account(self):
-        return self._current_account
-
-    def get_account(self, name):
-        """
-        Load an account then return it
-
-        :param str name: The account name
-        :return: The loaded account if it's a success, else return None
-        """
-        if name in self.accounts.keys():
-            self.load_account(name)
-            return self.accounts[name]
-        else:
-            return None
-
-    def create_account(self, name):
-        """
-        Create a new account from its name
-
-        :param str name: The account name
-        :return: The new account
-        :raise: NameAlreadyExists if the account name is already used locally
-        """
-        for a in self.accounts:
-            if a == name:
-                raise NameAlreadyExists(a)
-
-        account = Account.create(name, self._identities_registry)
-
-        return account
-
-    @property
-    def identities_registry(self):
-        return self._identities_registry
-
-    def add_account(self, account):
-        self.accounts[account.name] = account
-
-    @asyncify
-    async def delete_account(self, account):
-        """
-        Delete an account.
-        Current account changes to None if it is deleted.
-        """
-        await account.stop_coroutines()
-        self.accounts.pop(account.name)
-        if self._current_account == account:
-            self._current_account = None
-        with open(config.parameters['data'], 'w') as outfile:
-            json.dump(self.jsonify(), outfile, indent=4, sort_keys=True)
-        if self.preferences['account'] == account.name:
-            self.preferences['account'] = ""
-            self.save_preferences(self.preferences)
-
-    @asyncify
-    async def change_current_account(self, account):
-        """
-        Change current account displayed and refresh its cache.
+    def current_profile(self):
+        return self._current_profile
 
-        :param sakia.core.Account account: The account object to display
-        .. note:: Emits the application pyqtSignal loading_progressed
-        during cache refresh
-        """
-        if self._current_account is not None:
-            await self.stop_current_account()
-
-        self._current_account = account
-        if self._current_account is not None:
-            self._current_account.start_coroutines()
-        self.account_changed.emit()
+    def start_coroutines(self):
+        for currency in self._connections_repo.get_currencies():
+            self._network_services[currency].start_coroutines()
 
-    async def stop_current_account(self, closing=False):
+    async def stop_current_profile(self, closing=False):
         """
         Save the account to the cache
         and stop the coroutines
         """
-        self.save_cache(self._current_account)
-        self.save_notifications(self._current_account)
-        await self._current_account.stop_coroutines(closing)
-
-    def load(self):
-        """
-        Load a saved application state from the data file.
-        Loads only jsonified objects but not their cache.
-
-        If the standard application state file can't be found,
-        no error is raised.
-        """
-        self.load_registries()
-        self.load_preferences()
-        try:
-            logging.debug("Loading data...")
-            with open(config.parameters['data'], 'r') as json_data:
-                data = json.load(json_data)
-                for account_name in data['local_accounts']:
-                    self.accounts[account_name] = None
-        except FileNotFoundError:
-            pass
-
-    def load_registries(self):
-        """
-        Load the Person instances of the person module.
-        Each instance is unique, and can be find by its public key.
-        """
-        try:
-            identities_path = os.path.join(config.parameters['home'],
-                                        '__identities__')
-            with open(identities_path, 'r') as identities_data:
-                data = json.load(identities_data)
-                self._identities_registry.load_json(data)
-        except FileNotFoundError:
-            pass
-
-    def load_account(self, account_name):
-        """
-        Load an account from its name
-
-        :param str account_name: The account name
-        """
-        account_path = os.path.join(config.parameters['home'],
-                                    account_name, 'properties')
-        with open(account_path, 'r') as json_data:
-            data = json.load(json_data)
-            account = Account.load(data, self._identities_registry)
-            self.load_cache(account)
-            self.accounts[account_name] = account
-
-            for community in account.communities:
-                community.network.blockchain_rollback.connect(community.rollback_cache)
-                community.network.new_block_mined.connect(lambda b, co=community:
-                                                          account.refresh_transactions(self, co))
-                community.network.blockchain_rollback.connect(lambda b, co=community:
-                                                              account.rollback_transaction(self, co))
-                community.network.root_nodes_changed.connect(lambda acc=account: self.save(acc))
-
-        account_notifications_path = os.path.join(config.parameters['home'],
-                                    account_name, '__notifications__')
-
-        try:
-            with open(account_notifications_path, 'r') as json_data:
-                data = json.load(json_data)
-                for notification in data:
-                    if notification in account.notifications:
-                        account.notifications[notification] = data[notification]
-        except FileNotFoundError:
-            logging.debug("Could not find notifications file")
-            pass
-
-    def load_cache(self, account):
-        """
-        Load an account cache
-
-        :param account: The account object to load the cache
-        """
-        for community in account.communities:
-            bma_path = os.path.join(config.parameters['home'],
-                                        account.name, '__cache__',
-                                        community.currency + '_bma')
-
-            network_path = os.path.join(config.parameters['home'],
-                                        account.name, '__cache__',
-                                        community.currency + '_network')
-
-            if os.path.exists(network_path):
-                with open(network_path, 'r') as json_data:
-                    data = json.load(json_data)
-                    community.network.merge_with_json(data['network'], parse_version(data['version']))
-
-            if os.path.exists(bma_path):
-                with open(bma_path, 'r') as json_data:
-                    data = json.load(json_data)
-                    community.bma_access.load_from_json(data['cache'])
-
-        for wallet in account.wallets:
-            for c in account.communities:
-                wallet.init_cache(self, c)
-            wallet_path = os.path.join(config.parameters['home'],
-                                        account.name, '__cache__', wallet.pubkey + "_wal")
-            if os.path.exists(wallet_path):
-                with open(wallet_path, 'r') as json_data:
-                    data = json.load(json_data)
-                    wallet.load_caches(self, data)
-
-    def load_preferences(self):
-        """
-        Load the preferences.
-        """
-        try:
-            preferences_path = os.path.join(config.parameters['home'],
-                                            'preferences')
-            with open(preferences_path, 'r') as json_data:
-                data = json.load(json_data)
-                for key in data:
-                    self.preferences[key] = data[key]
-        except FileNotFoundError:
-            pass
-
-    def save_preferences(self, preferences):
-        """
-        Save the preferences.
-
-        :param preferences: A dict containing the keys/values of the preferences
-        """
-        assert('lang' in preferences)
-        assert('account' in preferences)
-        assert('ref' in preferences)
-
-        self.preferences = preferences
-        preferences_path = os.path.join(config.parameters['home'],
-                                        'preferences')
-        with open(preferences_path, 'w') as outfile:
-            json.dump(preferences, outfile, indent=4)
-
-        self.set_proxy()
-
-    def save(self, account):
-        """
-        Save an account
-
-        :param account: The account object to save
-        """
-        with open(config.parameters['data'], 'w') as outfile:
-            json.dump(self.jsonify(), outfile, indent=4, sort_keys=True)
-        account_path = os.path.join(config.parameters['home'],
-                                account.name)
-        if account.name in self.accounts:
-            properties_path = os.path.join(account_path, 'properties')
-            if not os.path.exists(account_path):
-                logging.info("Creating account directory")
-                os.makedirs(account_path)
-            with open(properties_path, 'w') as outfile:
-                json.dump(account.jsonify(), outfile, indent=4, sort_keys=True)
-        else:
-            account_path = os.path.join(config.parameters['home'], account.name)
-            shutil.rmtree(account_path)
-
-    def save_notifications(self, account):
-        """
-        Save an account notifications
-
-        :param account: The account object to save
-        """
-        account_path = os.path.join(config.parameters['home'],
-                                account.name)
-        notifications_path = os.path.join(account_path, '__notifications__')
-        with open(notifications_path, 'w') as outfile:
-            json.dump(account.notifications, outfile, indent=4, sort_keys=True)
-
-    def save_registries(self):
-        """
-        Save the registries
-        """
-        identities_path = os.path.join(config.parameters['home'],
-                                    '__identities__')
-        buffer_path = identities_path + ".buf"
-        with open(buffer_path, 'w') as outfile:
-            data = self.identities_registry.jsonify()
-            data['version'] = __version__
-            for chunk in json.JSONEncoder().iterencode(data):
-                outfile.write(chunk)
-        shutil.move(buffer_path, identities_path)
-
-    def save_wallet(self, account, wallet):
-        """
-        Save wallet of account in cache
-
-        :param sakia.core.account.Account account: Account instance
-        :param sakia.core.wallet.Wallet wallet: Wallet instance
-        """
-        if not os.path.exists(os.path.join(config.parameters['home'],
-                                           account.name, '__cache__')):
-            os.makedirs(os.path.join(config.parameters['home'],
-                                     account.name, '__cache__'))
-        wallet_path = os.path.join(config.parameters['home'],
-                                   account.name, '__cache__', wallet.pubkey + "_wal")
-        buffer_path = wallet_path + ".buf"
-        with open(buffer_path, 'w') as outfile:
-            data = wallet.jsonify_caches()
-            data['version'] = __version__
-            for chunk in json.JSONEncoder().iterencode(data):
-                outfile.write(chunk)
-        shutil.move(buffer_path, wallet_path)
-
-    def save_cache(self, account):
-        """
-        Save the cache of an account
-
-        :param account: The account object to save the cache
-        """
-        if not os.path.exists(os.path.join(config.parameters['home'],
-                                        account.name, '__cache__')):
-            os.makedirs(os.path.join(config.parameters['home'],
-                                        account.name, '__cache__'))
-        for wallet in account.wallets:
-            self.save_wallet(account, wallet)
-
-        for community in account.communities:
-            bma_path = os.path.join(config.parameters['home'],
-                                        account.name, '__cache__',
-                                        community.currency + '_bma')
-
-            network_path = os.path.join(config.parameters['home'],
-                                        account.name, '__cache__',
-                                        community.currency + '_network')
-            buffer_path = network_path + ".buf"
-
-            with open(buffer_path, 'w') as outfile:
-                data = dict()
-                data['network'] = community.network.jsonify()
-                data['version'] = __version__
-                for chunk in json.JSONEncoder().iterencode(data):
-                    outfile.write(chunk)
-            shutil.move(buffer_path, network_path)
-
-            buffer_path = bma_path + ".buf"
-
-            with open(buffer_path, 'w') as outfile:
-                data['cache'] = community.bma_access.jsonify()
-                data['version'] = __version__
-                for chunk in json.JSONEncoder().iterencode(data):
-                    outfile.write(chunk)
-            shutil.move(buffer_path, bma_path)
-
-    def import_account(self, file, name):
-        """
-        Import an account from a tar file and open it
-
-        :param str file: The file path of the tar file
-        :param str name: The account name
-        """
-        with tarfile.open(file, "r") as tar:
-            path = os.path.join(config.parameters['home'],
-                                name)
-            for obj in ["properties"]:
-                try:
-                    tar.getmember(obj)
-                except KeyError:
-                    raise BadAccountFile(file)
-            tar.extractall(path)
-
-        account_path = os.path.join(config.parameters['home'],
-                                    name, 'properties')
-        json_data = open(account_path, 'r')
-        data = json.load(json_data)
-        account = Account.load(data, self._identities_registry)
-        account.name = name
-        self.add_account(account)
-        self.save(account)
-        self.account_imported.emit(account.name)
-
-    def export_account(self, file, account):
-        """
-        Export an account to a tar file
-
-        :param str file: The filepath of the tar file
-        :param account: The account object to export
-        """
-        with tarfile.open(file, "w") as tar:
-            for file in ["properties"]:
-                path = os.path.join(config.parameters['home'],
-                                    account.name, file)
-                tar.add(path, file)
-
-    def jsonify_accounts(self):
-        """
-        Jsonify an account
-
-        :return: The account as a dict to format as json
-        """
-        data = []
-        logging.debug("{0}".format(self.accounts))
-        for account in self.accounts:
-            data.append(account)
-        return data
-
-    def jsonify(self):
-        """
-        Jsonify the app datas
-
-        :return: The accounts of the app to format as json
-        """
-        data = {'local_accounts': self.jsonify_accounts()}
-        return data
-
-    async def stop(self):
-        if self._current_account:
-            await self.stop_current_account(closing=True)
-        await asyncio.sleep(0)
-        self.save_registries()
+        for currency in self._connections_repo.get_currencies():
+            await self._network_services[currency].stop_coroutines(closing)
 
     @asyncify
     async def get_last_version(self):
diff --git a/src/sakia/core/config.py b/src/sakia/core/config.py
deleted file mode 100644
index 50a9ecc011676a8064061c7280f08bbb2d18c538..0000000000000000000000000000000000000000
--- a/src/sakia/core/config.py
+++ /dev/null
@@ -1,54 +0,0 @@
-"""
-Created on 7 févr. 2014
-
-@author: inso
-"""
-
-import logging
-from logging import FileHandler
-from optparse import OptionParser
-from os import environ, path, makedirs
-
-
-if "XDG_CONFIG_HOME" in environ:
-    config_path = environ["XDG_CONFIG_HOME"]
-elif "HOME" in environ:
-    config_path = path.join(environ["HOME"], ".config")
-elif "APPDATA" in environ:
-    config_path = environ["APPDATA"]
-else:
-    config_path = path.dirname(__file__)
-
-parameters = {'home': path.join(config_path, 'sakia'),
-              'data': path.join(config_path, 'sakia', 'data')}
-
-
-if not path.exists(parameters['home']):
-    logging.info("Creating home directory")
-    makedirs((parameters['home']))
-
-
-def parse_arguments(argv):
-    parser = OptionParser()
-
-    parser.add_option("-v", "--verbose",
-                      action="store_true", dest="verbose", default=False,
-                      help="Print INFO messages to stdout")
-
-    parser.add_option("-d", "--debug",
-                      action="store_true", dest="debug", default=False,
-                      help="Print DEBUG messages to stdout")
-
-    (options, args) = parser.parse_args(argv)
-
-    if options.debug:
-        logging.basicConfig(format='%(levelname)s:%(module)s:%(funcName)s:%(message)s',
-            level=logging.DEBUG)
-    elif options.verbose:
-        logging.basicConfig(format='%(levelname)s:%(message)s',
-            level=logging.INFO)
-    else:
-        logging.getLogger().propagate = False
-    logging.getLogger('quamash').setLevel(logging.INFO)
-    logfile = FileHandler(path.join(parameters['home'], 'sakia.log'))
-    logging.getLogger().addHandler(logfile)
diff --git a/src/sakia/data/connectors/bma.py b/src/sakia/data/connectors/bma.py
index c7e4415b2802c4e29cebab1ec82e17b6d6efc659..dabb95a5f6f3fc596d58be6ccb1310884a746310 100644
--- a/src/sakia/data/connectors/bma.py
+++ b/src/sakia/data/connectors/bma.py
@@ -10,14 +10,11 @@ import attr
 from sakia.errors import NoPeerAvailable
 
 
-@attr.s
+@attr.s()
 class BmaConnector:
     """
     This class is used to access BMA API.
     """
-
-    __saved_requests = [str(bma.blockchain.Block), str(bma.blockchain.Parameters)]
-
     _nodes_processor = attr.ib()
 
     def filter_nodes(self, request, nodes):
@@ -39,7 +36,7 @@ class BmaConnector:
         else:
             return nodes
 
-    async def get(self, request, req_args={}, get_args={}):
+    async def get(self, currency, request, req_args={}, get_args={}):
         """
         Start a request to the network but don't cache its result.
 
@@ -48,7 +45,7 @@ class BmaConnector:
         :param dict get_args: Arguments to pass to the request __get__ method
         :return: The returned data
         """
-        nodes = self.filter_nodes(request, self._nodes_processor.synced_nodes)
+        nodes = self.filter_nodes(request, self._nodes_processor.synced_nodes(currency))
         if len(nodes) > 0:
             tries = 0
             json_data = None
@@ -67,7 +64,7 @@ class BmaConnector:
             raise NoPeerAvailable("", len(nodes))
         return json_data
 
-    async def broadcast(self, request, req_args={}, post_args={}):
+    async def broadcast(self, currency, request, req_args={}, post_args={}):
         """
         Broadcast data to a network.
         Sends the data to all knew nodes.
@@ -81,9 +78,9 @@ class BmaConnector:
         .. note:: If one node accept the requests (returns 200),
         the broadcast should be considered accepted by the network.
         """
-        nodes = random.sample(self._nodes_processor.synced_nodes, 6) \
-            if len(self._nodes_processor.synced_nodes) > 6 \
-            else self._nodes_processor.synced_nodes
+        nodes = random.sample(self._nodes_processor.synced_nodes(currency), 6) \
+            if len(self._nodes_processor.synced_nodes(currency)) > 6 \
+            else self._nodes_processor.synced_nodes(currency)
         replies = []
         if len(nodes) > 0:
             for node in nodes:
diff --git a/src/sakia/data/entities/app_data.py b/src/sakia/data/entities/app_data.py
index e5a9cb7fdb550591ad11cdea8150c924eb21eca5..0c138fabe7164fe9d6374f00c89b0019acefc6f5 100644
--- a/src/sakia/data/entities/app_data.py
+++ b/src/sakia/data/entities/app_data.py
@@ -4,3 +4,4 @@ import attr
 @attr.s()
 class AppData:
     profiles = attr.ib(default=attr.Factory(list))
+    default = attr.ib(convert=str, default="")
diff --git a/src/sakia/data/files/app_data.py b/src/sakia/data/files/app_data.py
index 79b3fdb6ac68fbdec23e8ed4e767f3b6462ff30a..921c8f24787488618833750a93e6972c5747a655 100644
--- a/src/sakia/data/files/app_data.py
+++ b/src/sakia/data/files/app_data.py
@@ -1,5 +1,7 @@
 import attr
 import json
+import os
+import logging
 from ..entities import AppData
 
 
@@ -9,6 +11,12 @@ class AppDataFile:
     The repository for AppData
     """
     _file = attr.ib()
+    _logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
+    filename = "appdata.json"
+
+    @classmethod
+    def in_config_path(cls, config_path):
+        return cls(os.path.join(config_path, AppDataFile.filename))
 
     def save(self, app_data):
         """
@@ -18,11 +26,14 @@ class AppDataFile:
         with open(self._file, 'w') as outfile:
             json.dump(attr.asdict(app_data), outfile, indent=4)
 
-    def load(self):
+    def load_or_init(self):
         """
         Update an existing app_data in the database
         :param sakia.data.entities.AppData app_data: the app_data to update
         """
-        with open(self._file, 'r') as json_data:
-            app_data = AppData(**json.load(json_data))
+        try:
+            with open(self._file, 'r') as json_data:
+                app_data = AppData(**json.load(json_data))
+        except OSError:
+            app_data = AppData()
         return app_data
diff --git a/src/sakia/data/files/user_parameters.py b/src/sakia/data/files/user_parameters.py
index ec161a1d0656181f7a0d3853d7bbedf29429de6b..8dbe16782c769301c07b8ef9bdac62a2163e8789 100644
--- a/src/sakia/data/files/user_parameters.py
+++ b/src/sakia/data/files/user_parameters.py
@@ -1,5 +1,7 @@
 import attr
 import json
+import os
+import logging
 from ..entities import UserParameters
 
 
@@ -9,6 +11,12 @@ class UserParametersFile:
     The repository for UserParameters
     """
     _file = attr.ib()
+    _logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
+    filename = "parameters.json"
+
+    @classmethod
+    def in_config_path(cls, config_path):
+        return cls(os.path.join(config_path, UserParametersFile.filename))
 
     def save(self, user_parameters):
         """
@@ -18,11 +26,14 @@ class UserParametersFile:
         with open(self._file, 'w') as outfile:
             json.dump(attr.asdict(user_parameters), outfile, indent=4)
 
-    def load(self):
+    def load_or_init(self):
         """
         Update an existing user_parameters in the database
         :param sakia.data.entities.UserParameters user_parameters: the user_parameters to update
         """
-        with open(self._file, 'r') as json_data:
-            user_parameters = UserParameters(**json.load(json_data))
+        try:
+            with open(self._file, 'r') as json_data:
+                user_parameters = UserParameters(**json.load(json_data))
+        except OSError:
+            user_parameters = UserParameters()
         return user_parameters
diff --git a/src/sakia/data/processors/blockchain.py b/src/sakia/data/processors/blockchain.py
index 358263e14539bd541fc4a58b373d86e49553862a..8b4a93c2b17edfbf64aa43f56521d340ee7d431a 100644
--- a/src/sakia/data/processors/blockchain.py
+++ b/src/sakia/data/processors/blockchain.py
@@ -8,82 +8,53 @@ import asyncio
 
 @attr.s
 class BlockchainProcessor:
-    _currency = attr.ib()  # :type str
     _repo = attr.ib()  # :type sakia.data.repositories.CertificationsRepo
     _bma_connector = attr.ib()  # :type sakia.data.connectors.bma.BmaConnector
 
-    def current_buid(self):
+    def current_buid(self, currency):
         """
         Get the local current blockuid
         :rtype: duniterpy.documents.BlockUID
         """
-        return self._repo.get_one({'currency': self._currency}).current_buid
+        return self._repo.get_one({'currency': currency}).current_buid
 
-    def time(self):
+    def time(self, currency):
         """
         Get the local current median time
         :rtype: int
         """
-        return self._repo.get_one({'currency': self._currency}).median_time
+        return self._repo.get_one({'currency': currency}).median_time
 
-    def parameters(self):
+    def parameters(self, currency):
         """
         Get the parameters of the blockchain
         :rtype: sakia.data.entities.BlockchainParameters
         """
-        return self._repo.get_one({'currency': self._currency}).parameters
+        return self._repo.get_one({'currency': currency}).parameters
 
-    def monetary_mass(self):
+    def monetary_mass(self, currency):
         """
         Get the local current monetary mass
         :rtype: int
         """
-        return self._repo.get_one({'currency': self._currency}).monetary_mass
+        return self._repo.get_one({'currency': currency}).monetary_mass
 
-    def nb_members(self):
+    def nb_members(self, currency):
         """
         Get the number of members in the blockchain
         :rtype: int
         """
-        return self._repo.get_one({'currency': self._currency}).nb_members
+        return self._repo.get_one({'currency': currency}).nb_members
 
-    def last_ud(self):
+    def last_ud(self, currency):
         """
         Get the last ud value and base
         :rtype: int, int
         """
-        blockchain = self._repo.get_one({'currency': self._currency})
+        blockchain = self._repo.get_one({'currency': currency})
         return blockchain.last_ud, blockchain.last_ud_base
 
-    @property
-    def short_currency(self):
-        """
-        Format the currency name to a short one
-
-        :return: The currency name in a shot format.
-        """
-        words = re.split('[_\W]+', self.currency)
-        shortened = ""
-        if len(words) > 1:
-            shortened = ''.join([w[0] for w in words])
-        else:
-            vowels = ('a', 'e', 'i', 'o', 'u', 'y')
-            shortened = self.currency
-            shortened = ''.join([c for c in shortened if c not in vowels])
-        return shortened.upper()
-
-    @property
-    def currency_symbol(self):
-        """
-        Format the currency name to a symbol one.
-
-        :return: The currency name as a utf-8 circled symbol.
-        """
-        letter = self.currency[0]
-        u = ord('\u24B6') + ord(letter) - ord('A')
-        return chr(u)
-
-    async def new_blocks_with_identities(self):
+    async def new_blocks_with_identities(self, currency):
         """
         Get blocks more recent than local blockuid
         with identities
@@ -104,7 +75,7 @@ class BlockchainProcessor:
         local_current_buid = self.current_buid()
         return sorted([b for b in with_identities if b > local_current_buid.number])
 
-    async def new_blocks_with_money(self):
+    async def new_blocks_with_money(self, currency):
         """
         Get blocks more recent than local block uid
         with money data (tx and uds)
@@ -121,7 +92,7 @@ class BlockchainProcessor:
         local_current_buid = self.current_buid()
         return sorted([b for b in with_money if b > local_current_buid.number])
 
-    async def blocks(self, numbers):
+    async def blocks(self, numbers, currency):
         """
         Get blocks from the network
         :param List[int] numbers: list of blocks numbers to get
diff --git a/src/sakia/data/processors/certifications.py b/src/sakia/data/processors/certifications.py
index a01ea4e0debb3ffe454104119601c895ffabe62e..579e191081d33de49cd2d92efa7cfb5725271610 100644
--- a/src/sakia/data/processors/certifications.py
+++ b/src/sakia/data/processors/certifications.py
@@ -4,11 +4,10 @@ from ..entities import Certification
 
 @attr.s
 class CertificationsProcessor:
-    _currency = attr.ib()  # :type str
     _repo = attr.ib()  # :type sakia.data.repositories.CertificationsRepo
     _bma_connector = attr.ib()  # :type sakia.data.connectors.bma.BmaConnector
 
-    def create_certification(self, cert, blockstamp):
+    def create_certification(self, currency, cert, blockstamp):
         """
         Creates a certification and insert it in the db
         :param duniterpy.documents.Certification cert:
@@ -16,7 +15,7 @@ class CertificationsProcessor:
         :return: the instanciated certification
         :rtype: sakia.data.entities.Certification
         """
-        cert = Certification(self._currency, cert.pubkey_from, cert.pubkey_to, cert.timestamp,
+        cert = Certification(currency, cert.pubkey_from, cert.pubkey_to, cert.timestamp,
                              0, cert.signatures[0], blockstamp)
         self._repo.insert(cert)
         return cert
diff --git a/src/sakia/data/processors/identities.py b/src/sakia/data/processors/identities.py
index defb3bf97964a45ee7a77186d70b160e9328f3e4..2a70ff5891d19e6502a1b9f6706dd3c690882d38 100644
--- a/src/sakia/data/processors/identities.py
+++ b/src/sakia/data/processors/identities.py
@@ -8,11 +8,10 @@ from sakia.errors import NoPeerAvailable
 
 @attr.s
 class IdentitiesProcessor:
-    _currency = attr.ib()  # :type str
     _identities_repo = attr.ib()  # :type sakia.data.repositories.IdentitiesRepo
     _bma_connector = attr.ib()  # :type sakia.data.connectors.bma.BmaConnector
 
-    async def find_from_pubkey(self, pubkey):
+    async def find_from_pubkey(self, currency, pubkey):
         """
         Get the list of identities corresponding to a pubkey
         from the network and the local db
@@ -20,7 +19,7 @@ class IdentitiesProcessor:
         :param pubkey:
         :rtype: list[sakia.data.entities.Identity]
         """
-        identities = self._identities_repo.get_all(currency=self._currency, pubkey=pubkey)
+        identities = self._identities_repo.get_all(currency=currency, pubkey=pubkey)
         tries = 0
         while tries < 3:
             try:
@@ -30,7 +29,7 @@ class IdentitiesProcessor:
                     if result["pubkey"] == pubkey:
                         uids = result['uids']
                         for uid_data in uids:
-                            identity = Identity(self._currency, pubkey)
+                            identity = Identity(currency, pubkey)
                             identity.uid = uid_data['uid']
                             identity.blockstamp = data['sigDate']
                             identity.signature = data['self']
@@ -43,14 +42,15 @@ class IdentitiesProcessor:
                 return identities
         return identities
 
-    def get_written(self, pubkey):
+    def get_written(self, currency, pubkey):
         """
         Get identities from a given certification document
+        :param str currency: the currency in which to look for written identities
         :param str pubkey: the pubkey of the identity
 
         :rtype: sakia.data.entities.Identity
         """
-        return self._identities_repo.get_written(**{'currency': self._currency, 'pubkey': pubkey})
+        return self._identities_repo.get_written(**{'currency': currency, 'pubkey': pubkey})
 
     def update_identity(self, identity):
         """
diff --git a/src/sakia/data/processors/nodes.py b/src/sakia/data/processors/nodes.py
index a85cd42a7d862701c622fb4e8a7b5e641aa3c793..de72c74c2fbc72498dd6d5107259431467a047ef 100644
--- a/src/sakia/data/processors/nodes.py
+++ b/src/sakia/data/processors/nodes.py
@@ -6,30 +6,29 @@ import logging
 
 @attr.s
 class NodesProcessor:
-    _currency = attr.ib(convert=str)
     _repo = attr.ib()  # :type sakia.data.repositories.NodesRepo
 
-    def synced_nodes(self):
+    def synced_nodes(self, currency):
         """
         Get nodes which are in the ONLINE state.
         """
-        return self._repo.get_all(**{'currency': self._currency, 'state': Node.ONLINE})
+        return self._repo.get_all(**{'currency': currency, 'state': Node.ONLINE})
 
-    def online_nodes(self):
+    def online_nodes(self, currency):
         """
         Get nodes which are in the ONLINE state.
         """
-        return self._repo.get_all(**{'currency': self._currency, 'state': Node.ONLINE}) + \
-               self._repo.get_all(**{'currency': self._currency, 'state': Node.DESYNCED})
+        return self._repo.get_all(**{'currency': currency, 'state': Node.ONLINE}) + \
+               self._repo.get_all(**{'currency': currency, 'state': Node.DESYNCED})
 
-    def update_node(self, node):
+    def update_node(self, currency, node):
         """
         Update node in the repository.
         First involves basic checks about pubkey and primary key constraints.
 
         :param sakia.data.entities.Node node: the node to update
         """
-        other_node = self._repo.get_one(**{'currency': self._currency, 'pubkey': node.pubkey})
+        other_node = self._repo.get_one(**{'currency': currency, 'pubkey': node.pubkey})
         if other_node:
             self._repo.update(node)
         else:
@@ -44,56 +43,57 @@ class NodesProcessor:
         """
         self._repo.insert(node)
 
-    def unknown_node(self, pubkey):
+    def unknown_node(self, currency, pubkey):
         """
         Search for pubkey in the repository.
         :param str pubkey: the pubkey to lookup
         """
-        other_node = self._repo.get_one(**{'currency': self._currency, 'pubkey': pubkey})
+        other_node = self._repo.get_one(**{'currency': currency, 'pubkey': pubkey})
         return other_node is None
 
-    def nodes(self):
+    def nodes(self, currency):
         """
         Get all knew nodes.
         """
-        return self._repo.get_all(**{'currency': self._currency})
+        return self._repo.get_all(**{'currency': currency})
 
-    def root_nodes(self):
+    def root_nodes(self, currency):
         """
         Get root nodes.
         """
-        return self._repo.get_all(**{'currency': self._currency, 'root': True})
+        return self._repo.get_all(**{'currency': currency, 'root': True})
 
-    def current_buid(self):
+    def current_buid(self, currency):
         """
         Get the latest block considered valid
         It is the most frequent last block of every known nodes
         """
-        blocks_uids = [n.current_buid for n in self.synced_nodes()]
+        blocks_uids = [n.current_buid for n in self.synced_nodes(currency)]
         if len(blocks_uids) > 0:
             return blocks_uids[0]
         else:
             return BlockUID.empty()
 
-    def quality(self):
+    def quality(self, currency):
         """
         Get a ratio of the synced nodes vs the rest
         """
-        synced = len(self.synced_nodes())
-        total = len(self.nodes())
+        synced = len(self.synced_nodes(currency))
+        total = len(self.nodes(currency))
         if total == 0:
             ratio_synced = 0
         else:
             ratio_synced = synced / total
         return ratio_synced
 
-    def update_peer(self, peer):
+    def update_peer(self, currency, peer):
         """
         Update the peer of a node
+        :param str currency: the currency of the peer
         :param peer:
         :return:
         """
-        node = self._repo.get_one(**{'pubkey': peer.pubkey, 'currency': self._currency})
+        node = self._repo.get_one(**{'pubkey': peer.pubkey, 'currency': currency})
         if node.peer_blockstamp < peer.blockUID:
             logging.debug("Update node : {0}".format(peer.pubkey[:5]))
             node.endpoints = tuple(peer.endpoints)
diff --git a/src/sakia/data/repositories/connections.py b/src/sakia/data/repositories/connections.py
index 11a53cded404a65ef7aad4d1f6d1ad5eb1089ad6..1166ade34d9771f6cfcf1740eb599015ce819c99 100644
--- a/src/sakia/data/repositories/connections.py
+++ b/src/sakia/data/repositories/connections.py
@@ -55,7 +55,9 @@ class ConnectionsRepo:
                 filters.append("{connection} = ?".format(connection=k))
                 values.append(value)
 
-            request = "SELECT * FROM connections WHERE {filters}".format(filters=" AND ".join(filters))
+            request = "SELECT * FROM connections"
+            if filters:
+                request += " WHERE {filters}".format(filters=" AND ".join(filters))
 
             c = self._conn.execute(request, tuple(values))
             datas = c.fetchall()
@@ -63,6 +65,21 @@ class ConnectionsRepo:
                 return [Connection(*data) for data in datas]
         return []
 
+    def get_currencies(self):
+        """
+        Get the list of distinct currencies used by the stored connections
+        (one entry per currency, however many connections use it)
+        :rtype: List[str]
+        """
+        with self._conn:
+            request = "SELECT DISTINCT currency FROM connections"
+            c = self._conn.execute(request)
+            datas = c.fetchall()
+            if datas:
+                return [data[0] for data in datas]
+        return []
+
+
     def drop(self, connection):
         """
         Drop an existing connection from the database
diff --git a/src/sakia/data/repositories/meta.py b/src/sakia/data/repositories/meta.py
index fff55f9dee70a45f1b369f74348459328b61d937..1680cafa0bbd188fa6412489ae27f687765a5d85 100644
--- a/src/sakia/data/repositories/meta.py
+++ b/src/sakia/data/repositories/meta.py
@@ -1,5 +1,6 @@
 import attr
 import os
+import logging
 import sqlite3
 from duniterpy.documents import BlockUID
 
@@ -9,24 +10,31 @@ class MetaDatabase:
     """The repository for Identities entities.
     """
     _conn = attr.ib()  # :type sqlite3.Connection
+    _logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
+    db_file = 'sakia.db'
 
     @property
     def conn(self):
         return self._conn
 
     @classmethod
-    def create(cls, dbpath):
+    def load_or_init(cls, config_path, profile_name):
         sqlite3.register_adapter(BlockUID, str)
         sqlite3.register_adapter(bool, int)
         sqlite3.register_converter("BOOLEAN", lambda v: bool(int(v)))
-        con = sqlite3.connect(dbpath, detect_types=sqlite3.PARSE_DECLTYPES)
-        return MetaDatabase(con)
+        con = sqlite3.connect(os.path.join(config_path, profile_name, MetaDatabase.db_file),
+                              detect_types=sqlite3.PARSE_DECLTYPES)
+        meta = MetaDatabase(con)
+        meta.prepare()
+        meta.upgrade_database()
+        return meta
 
     def prepare(self):
         """
         Prepares the database if the table is missing
         """
         with self._conn:
+            self._logger.debug("Initializing meta database")
             self._conn.execute("""CREATE TABLE IF NOT EXISTS meta(
                                id INTEGER NOT NULL,
                                version INTEGER NOT NULL,
@@ -44,18 +52,22 @@ class MetaDatabase:
         """
         Execute the migrations
         """
+        self._logger.debug("Begin upgrade of database...")
         version = self.version()
         nb_versions = len(self.upgrades)
         for v in range(version, nb_versions):
+            self._logger.debug("Upgrading to version {0}...".format(v))
             self.upgrades[v]()
             with self._conn:
                 self._conn.execute("UPDATE meta SET version=? WHERE id=1", (version + 1,))
+        self._logger.debug("End upgrade of database...")
 
     def create_all_tables(self):
         """
         Init all the tables
         :return:
         """
+        self._logger.debug("Initialiazing all databases")
         sql_file = open(os.path.join(os.path.dirname(__file__), 'meta.sql'), 'r')
         with self._conn:
             self._conn.executescript(sql_file.read())
diff --git a/src/sakia/money/currency.py b/src/sakia/money/currency.py
new file mode 100644
index 0000000000000000000000000000000000000000..8d1e335666583941cfb26539ed4bfd982f1cc599
--- /dev/null
+++ b/src/sakia/money/currency.py
@@ -0,0 +1,28 @@
+import re
+
+
+def shortened(currency):
+    """
+    Format the currency name to a short one
+
+    :return: The currency name in a short format.
+    """
+    words = re.split('[_\W]+', currency)
+    if len(words) > 1:
+        short = ''.join([w[0] for w in words])
+    else:
+        vowels = ('a', 'e', 'i', 'o', 'u', 'y')
+        short = currency
+        short = ''.join([c for c in short if c not in vowels])
+    return short.upper()
+
+
+def symbol(currency):
+    """
+    Format the currency name to a symbol one.
+
+    :return: The currency name as a utf-8 circled symbol.
+    """
+    letter = currency[0]
+    u = ord('\u24B6') + ord(letter) - ord('A')
+    return chr(u)
\ No newline at end of file
diff --git a/src/sakia/options.py b/src/sakia/options.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc8843e5118c9a25f9711c5064f636f4c952b19a
--- /dev/null
+++ b/src/sakia/options.py
@@ -0,0 +1,68 @@
+"""
+Created on 7 févr. 2014
+
+@author: inso
+"""
+
+import attr
+import logging
+from logging import FileHandler, StreamHandler
+from logging.handlers import RotatingFileHandler
+from optparse import OptionParser
+from os import environ, path, makedirs
+
+
+def config_path():
+    if "XDG_CONFIG_HOME" in environ:
+        env_path = environ["XDG_CONFIG_HOME"]
+    elif "HOME" in environ:
+        env_path = path.join(environ["HOME"], ".config")
+    elif "APPDATA" in environ:
+        env_path = environ["APPDATA"]
+    else:
+        env_path = path.dirname(__file__)
+    return path.join(env_path, 'sakia')
+
+
+@attr.s()
+class SakiaOptions:
+    config_path = attr.ib(default=attr.Factory(config_path))
+    _logger = attr.ib(default=attr.Factory(lambda: logging.getLogger('sakia')))
+
+    @classmethod
+    def from_arguments(cls, argv):
+        options = cls()
+        options._parse_arguments(argv)
+
+        if not path.exists(options.config_path):
+            options._logger.info("Creating home directory")
+            makedirs(options.config_path)
+
+        return options
+
+    def _parse_arguments(self, argv):
+        parser = OptionParser()
+        parser.add_option("-v", "--verbose",
+                          action="store_true", dest="verbose", default=False,
+                          help="Print INFO messages to stdout")
+
+        parser.add_option("-d", "--debug",
+                          action="store_true", dest="debug", default=False,
+                          help="Print DEBUG messages to stdout")
+
+        (options, args) = parser.parse_args(argv)
+
+        formatter = logging.Formatter('%(levelname)s:%(message)s')
+        if options.debug:
+            self._logger.setLevel(logging.DEBUG)
+            formatter = logging.Formatter('%(levelname)s:%(module)s:%(funcName)s:%(message)s')
+        elif options.verbose:
+            self._logger.setLevel(logging.INFO)
+
+        logging.getLogger('quamash').setLevel(logging.INFO)
+        file_handler = RotatingFileHandler(path.join(self.config_path, 'sakia.log'), 'a', 1000000, 10)
+        file_handler.setFormatter(formatter)
+        self._logger.addHandler(file_handler)
+        stream_handler = StreamHandler()
+        stream_handler.setFormatter(formatter)
+        self._logger.addHandler(stream_handler)
diff --git a/src/sakia/services/__init__.py b/src/sakia/services/__init__.py
index 95bd45c68fc1ca3dbb91e9fffe96c0d065bf6ff7..fd456d1a052b30a21646849b6ffc0ac653ac798e 100644
--- a/src/sakia/services/__init__.py
+++ b/src/sakia/services/__init__.py
@@ -1,2 +1,3 @@
 from .network import NetworkService
-from .identities import IdentitiesService
\ No newline at end of file
+from .identities import IdentitiesService
+from .blockchain import BlockchainService
diff --git a/src/sakia/services/blockchain.py b/src/sakia/services/blockchain.py
index 7bbc2d4b704cc44da3b1dbd0d8e94db282d87da9..397da1d5bb27a81eeb3c504834a95ab9b4ac4e96 100644
--- a/src/sakia/services/blockchain.py
+++ b/src/sakia/services/blockchain.py
@@ -1,6 +1,7 @@
 from PyQt5.QtCore import QObject
 from duniterpy.api import bma
 import asyncio
+import logging
 
 
 class BlockchainService(QObject):
@@ -22,8 +23,9 @@ class BlockchainService(QObject):
         self._bma_connector = bma_connector
         self.currency = currency
         self._identities_service = identities_service
+        self._logger = logging.getLogger('sakia')
 
-    async def handle_blockchain_progress(self, new_block_uid):
+    async def handle_blockchain_progress(self):
         """
         Handle a new current block uid
         :param duniterpy.documents.BlockUID new_block_uid: the new current blockuid
diff --git a/src/sakia/services/identities.py b/src/sakia/services/identities.py
index 13c878add583aafd53453457bd12bf767f0abb28..bff1ad058ef957366414db7907f4d0aef9df0b9f 100644
--- a/src/sakia/services/identities.py
+++ b/src/sakia/services/identities.py
@@ -1,6 +1,7 @@
 from PyQt5.QtCore import QObject
 import asyncio
 from duniterpy.api import bma
+import logging
 
 
 class IdentitiesService(QObject):
@@ -15,7 +16,7 @@ class IdentitiesService(QObject):
         :param str currency: The currency name of the community
         :param sakia.data.processors.IdentitiesProcessor identities_processor: the identities processor for given currency
         :param sakia.data.processors.CertificationsProcessor certs_processor: the certifications processor for given currency
-        :param sakia.data.processors.BlockchainProcessor certs_processor: the blockchain processor for given currency
+        :param sakia.data.processors.BlockchainProcessor blockchain_processor: the blockchain processor for given currency
         :param sakia.data.connectors.BmaConnector bma_connector: The connector to BMA API
         """
         super().__init__()
@@ -24,6 +25,7 @@ class IdentitiesService(QObject):
         self._blockchain_processor = blockchain_processor
         self._bma_connector = bma_connector
         self.currency = currency
+        self._logger = logging.getLogger('sakia')
 
     def certification_expired(self, cert_time):
         """
diff --git a/src/sakia/services/network.py b/src/sakia/services/network.py
index 7cfac5f2676c3e01126427c5472361063bc71657..ab13d3b09da7a3f36569ed1f940243d01b54c639 100644
--- a/src/sakia/services/network.py
+++ b/src/sakia/services/network.py
@@ -1,6 +1,7 @@
 import asyncio
 import logging
 import time
+import aiohttp
 from collections import Counter
 
 from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject
@@ -20,17 +21,18 @@ class NetworkService(QObject):
     nodes_changed = pyqtSignal()
     root_nodes_changed = pyqtSignal()
 
-    def __init__(self, currency, processor, connectors, session):
+    def __init__(self, currency, node_processor, connectors, session, blockchain_service):
         """
         Constructor of a network
 
         :param str currency: The currency name of the community
-        :param sakia.data.processors.NodesProcessor processor: the nodes processor for given currency
+        :param sakia.data.processors.NodesProcessor node_processor: the nodes processor for given currency
         :param list connectors: The connectors to nodes of the network
         :param aiohttp.ClientSession session: The main aiohttp client session
+        :param sakia.services.BlockchainService blockchain_service: the blockchain service
         """
         super().__init__()
-        self._processor = processor
+        self._processor = node_processor
         self._connectors = []
         for c in connectors:
             self.add_connector(c)
@@ -39,21 +41,39 @@ class NetworkService(QObject):
         self._block_found = self._processor.current_buid()
         self._client_session = session
         self._discovery_stack = []
+        self._blockchain_service = blockchain_service
+        self._logger = logging.getLogger('sakia')
 
     @classmethod
-    def create(cls, processor, node_connector):
+    def create(cls, node_processor, node_connector):
         """
         Create a new network with one knew node
         Crawls the nodes from the first node to build the
         community network
 
-        :param sakia.data.processors.NodeProcessor processor: The nodes processor
+        :param sakia.data.processors.NodeProcessor node_processor: The nodes processor
         :param sakia.data.connectors.NodeConnector node_connector: The first connector of the network service
         :return:
         """
         connectors = [node_connector]
-        processor.insert_node(node_connector.node)
-        network = cls(node_connector.node.currency, processor, connectors, node_connector.session)
+        node_processor.insert_node(node_connector.node)
+        network = cls(node_connector.node.currency, node_processor, connectors, node_connector.session, None)
+        return network
+
+    @classmethod
+    def load(cls, currency, node_processor, blockchain_service):
+        """
+        Create a new network with all known nodes
+
+        :param str currency: The currency of this service
+        :param sakia.data.processors.NodeProcessor node_processor: The nodes processor
+        :return:
+        """
+        connectors = []
+        session = aiohttp.ClientSession()
+        for node in node_processor.nodes():
+            connectors.append(NodeConnector(node, session))
+        network = cls(currency, node_processor, connectors, session, blockchain_service)
         return network
 
     def start_coroutines(self):
@@ -69,16 +89,16 @@ class NetworkService(QObject):
         """
         self._must_crawl = False
         close_tasks = []
-        logging.debug("Start closing")
+        self._logger.debug("Start closing")
         for connector in self._connectors:
             close_tasks.append(asyncio.ensure_future(connector.close_ws()))
-        logging.debug("Closing {0} websockets".format(len(close_tasks)))
+        self._logger.debug("Closing {0} websockets".format(len(close_tasks)))
         if len(close_tasks) > 0:
             await asyncio.wait(close_tasks, timeout=15)
         if closing:
-            logging.debug("Closing client session")
+            self._logger.debug("Closing client session")
             await self._client_session.close()
-        logging.debug("Closed")
+        self._logger.debug("Closed")
 
     @property
     def session(self):
@@ -134,7 +154,7 @@ class NetworkService(QObject):
         node_connector.error.connect(self.handle_error)
         node_connector.identity_changed.connect(self.handle_identity_change)
         node_connector.neighbour_found.connect(self.handle_new_node)
-        logging.debug("{:} connected".format(node_connector.node.pubkey[:5]))
+        self._logger.debug("{:} connected".format(node_connector.node.pubkey[:5]))
 
     @asyncify
     async def refresh_once(self):
@@ -159,7 +179,7 @@ class NetworkService(QObject):
             first_loop = False
             await asyncio.sleep(15)
 
-        logging.debug("End of network discovery")
+        self._logger.debug("End of network discovery")
 
     async def discovery_loop(self):
         """
@@ -171,7 +191,7 @@ class NetworkService(QObject):
                 await asyncio.sleep(1)
                 peer = self._discovery_stack.pop()
                 if self._processor.unknown_node(peer.pubkey):
-                    logging.debug("New node found : {0}".format(peer.pubkey[:5]))
+                    self._logger.debug("New node found : {0}".format(peer.pubkey[:5]))
                     try:
                         connector = NodeConnector.from_peer(self.currency, peer, self.session)
                         self._processor.insert_node(connector.node)
@@ -179,7 +199,7 @@ class NetworkService(QObject):
                         self.add_connector(connector)
                         self.nodes_changed.emit()
                     except InvalidNodeCurrency as e:
-                        logging.debug(str(e))
+                        self._logger.debug(str(e))
                 else:
                     self._processor.update_peer(peer)
             except IndexError:
@@ -190,10 +210,10 @@ class NetworkService(QObject):
         if key.verify_document(peer):
             if len(self._discovery_stack) < 1000 \
             and peer.signatures[0] not in [p.signatures[0] for p in self._discovery_stack]:
-                logging.debug("Stacking new peer document : {0}".format(peer.pubkey))
+                self._logger.debug("Stacking new peer document : {0}".format(peer.pubkey))
                 self._discovery_stack.append(peer)
         else:
-            logging.debug("Wrong document received : {0}".format(peer.signed_raw()))
+            self._logger.debug("Wrong document received : {0}".format(peer.signed_raw()))
 
     @pyqtSlot()
     def handle_identity_change(self):
@@ -221,16 +241,16 @@ class NetworkService(QObject):
 
         if node_connector.node.state == Node.ONLINE:
             current_buid = self._processor.current_buid()
-            logging.debug("{0} -> {1}".format(self._block_found.sha_hash[:10], current_buid.sha_hash[:10]))
+            self._logger.debug("{0} -> {1}".format(self._block_found.sha_hash[:10], current_buid.sha_hash[:10]))
             if self._block_found.sha_hash != current_buid.sha_hash:
-                logging.debug("Latest block changed : {0}".format(current_buid.number))
+                self._logger.debug("Latest block changed : {0}".format(current_buid.number))
                 # If new latest block is lower than the previously found one
                 # or if the previously found block is different locally
                 # than in the main chain, we declare a rollback
                 if current_buid <= self._block_found \
                    or node_connector.node.previous_buid != self._block_found:
                     self._block_found = current_buid
-                    self.blockchain_rollback.emit(current_buid.number)
+                    #TODO: self._blockchain_service.rollback()
                 else:
                     self._block_found = current_buid
-                    self.blockchain_progress.emit(current_buid.number)
+                    asyncio.ensure_future(self._blockchain_service.handle_blockchain_progress())
diff --git a/src/sakia/tests/unit/data/test_appdata_file.py b/src/sakia/tests/unit/data/test_appdata_file.py
index db1d0cbdde6ac760b60e003754a3c8981bb428e6..99912fc5f0c3befb719c954adcb1d36aa6b66f7d 100644
--- a/src/sakia/tests/unit/data/test_appdata_file.py
+++ b/src/sakia/tests/unit/data/test_appdata_file.py
@@ -12,5 +12,5 @@ class TestAppDataFile(unittest.TestCase):
         app_data_file = AppDataFile(file)
         app_data.profiles.append("default")
         app_data_file.save(app_data)
-        app_data_2 = app_data_file.load()
+        app_data_2 = app_data_file.load_or_init()
         self.assertEqual(app_data, app_data_2)
diff --git a/src/sakia/tests/unit/data/test_user_parameters_file.py b/src/sakia/tests/unit/data/test_user_parameters_file.py
index c4fe4980b9229335148fafd1208e86d8c97a8aa8..1013a6d74f652cd64b77f3601b42230bda8cddd7 100644
--- a/src/sakia/tests/unit/data/test_user_parameters_file.py
+++ b/src/sakia/tests/unit/data/test_user_parameters_file.py
@@ -12,5 +12,5 @@ class TestUserParametersFile(unittest.TestCase):
         user_parameters_file = UserParametersFile(file)
         user_parameters.proxy_address = "test.fr"
         user_parameters_file.save(user_parameters)
-        user_parameters_2 = user_parameters_file.load()
+        user_parameters_2 = user_parameters_file.load_or_init()
         self.assertEqual(user_parameters, user_parameters_2)