{"repo": "mempoolco/spruned", "pull_number": 43, "instance_id": "mempoolco__spruned-43", "issue_numbers": "", "base_commit": "77144dc2d41caac646fed1fdf4086345c68b0765", "patch": "diff --git a/spruned.py b/spruned.py\n--- a/spruned.py\n+++ b/spruned.py\n@@ -1,8 +1,25 @@\n import asyncio\n-from spruned.builder import electrod_daemon, jsonrpc_server\n+\n+from spruned.application.logging_factory import Logger\n+from spruned.builder import blocks_reactor, headers_reactor, jsonrpc_server, repository, cache\n \n if __name__ == '__main__':  # pragma: no cover\n-    loop = asyncio.get_event_loop()\n-    loop.create_task(electrod_daemon.start())\n-    loop.create_task(jsonrpc_server.start())\n-    loop.run_forever()\n+    try:\n+        loop = asyncio.get_event_loop()\n+        Logger.leveldb.debug('Ensuring integrity of the storage, and tracking missing items')\n+        try:\n+            asyncio.wait_for(repository.ensure_integrity(), timeout=30)\n+        except asyncio.TimeoutError:\n+            Logger.cache.error('There must be an error in storage, 30 seconds to check are too many')\n+        Logger.leveldb.debug('Checking cache limits')\n+        try:\n+            asyncio.wait_for(asyncio.gather(cache.check()), timeout=10)\n+        except asyncio.TimeoutError:\n+            Logger.cache.error('There must be an error in cache, 10 seconds to check are too many')\n+        loop.create_task(headers_reactor.start())\n+        loop.create_task(blocks_reactor.start())\n+        loop.create_task(jsonrpc_server.start())\n+        loop.create_task(cache.lurk())\n+        loop.run_forever()\n+    finally:\n+        pass\ndiff --git a/spruned/application/abstracts.py b/spruned/application/abstracts.py\n--- a/spruned/application/abstracts.py\n+++ b/spruned/application/abstracts.py\n@@ -12,10 +12,6 @@ class RPCAPIService(metaclass=abc.ABCMeta):\n     client = None\n     throttling_error_codes = []\n \n-    @abc.abstractmethod\n-    def getblock(self, blockhash):\n-        pass  # pragma: no cover\n-\n     @abc.abstractmethod\n     def getrawtransaction(self, txid, **kwargs):\n         pass  # pragma: no cover\n@@ -95,3 +91,6 @@ def get_block_hash(self, height: int):\n     def remove_header_at_height(self, blockheight: int):\n         pass  # pragma: no cover\n \n+    @abc.abstractmethod\n+    def get_headers(self, *blockhashes: str):\n+        pass  # pragma: no cover\n\\ No newline at end of file\ndiff --git a/spruned/application/cache.py b/spruned/application/cache.py\n--- a/spruned/application/cache.py\n+++ b/spruned/application/cache.py\n@@ -1,69 +1,130 @@\n-import functools\n-from spruned.application.storage import StorageFileInterface\n+import asyncio\n+import pickle\n+from leveldb import LevelDB\n+import time\n \n+from spruned.application.database import ldb_batch\n+from spruned.application.logging_factory import Logger\n+from spruned.application.tools import async_delayed_task\n+from spruned.repositories.blockchain_repository import TRANSACTION_PREFIX, BLOCK_PREFIX\n \n-class CacheFileInterface(StorageFileInterface):\n-    def __init__(self, directory, cache_limit=None, compress=True):\n-        super().__init__(directory, cache_limit=cache_limit, compress=compress)\n \n+class CacheAgent:\n+    def __init__(self, repository, limit, loop=asyncio.get_event_loop(), delayer=async_delayed_task):\n+        self.session: LevelDB = repository.ldb\n+        self.repository = repository\n+        self.repository.blockchain.set_cache(self)\n+        self.cache_name = b'cache_index'\n+        self.index = 
None\n+        self.limit = limit\n+        self._last_dump_size = None\n+        self.loop = loop\n+        self.lock = asyncio.Lock()\n+        self.delayer = delayer\n \n-def cache_block(func):\n-    @functools.wraps(func)\n-    async def wrapper(*args, **kwargs):\n-        cacheargs = ''.join(args[1:])\n-        cached = False\n-        res = None\n-        if args[0].cache:\n-            cache_res = args[0].cache.get('getblock', cacheargs)\n-            if cache_res:\n-                res = cache_res\n-                cached = True\n+    def init(self):\n+        self._load_index()\n \n-        if res is None:\n-            res = await func(*args, **kwargs)\n-            cached = False\n+    def dump(self):\n+        self._save_index()\n \n-        if res and args[0].cache and not cached:\n-            best_height = await args[0].getblockcount()\n-            args[0].current_best_height = best_height\n-            height = res['height']\n-            res['confirmations'] = best_height - height\n-            if res['confirmations'] > 3:\n-                args[0].cache.set('getblock', res['hash'], res)\n-        return res\n-    return wrapper\n+    def _deserialize_index(self, rawdata):\n+        index = {'keys': {}}\n+        data = pickle.loads(rawdata)\n+        s = 0\n+        for d in data:\n+            s += d[2]\n+            index['keys'][d[0]] = {\n+                'saved_at': d[1],\n+                'size': d[2],\n+                'key': d[0]\n+            }\n+        index['total'] = s\n+        self.index = index\n+        return index\n \n+    def _serialize_index(self):\n+        data = []\n+        for k, d in self.index['keys'].items():\n+            if k != 'total':\n+                data.append([k, d['saved_at'], d['size']])\n+        return pickle.dumps(data)\n \n-def cache_transaction(func):\n-    @functools.wraps(func)\n-    async def wrapper(*args, **kwargs):\n-        cacheargs = ''.join(args[1:])\n-        cached = False\n-        res = None\n-        if args[0].cache:\n-            cache_res = args[0].cache.get('getrawtransaction', cacheargs)\n-            if cache_res:\n-                res = cache_res\n-                cached = True\n-        if res is None:\n-            res = await func(*args, **kwargs)\n-            cached = False\n+    @ldb_batch\n+    def _save_index(self):\n+        data = self._serialize_index()\n+        self.session.put(self.cache_name, data)\n+        Logger.cache.debug('Saved index')\n+        self._last_dump_size = self.index['total']\n \n-        if res and args[0].cache and not cached:\n-            best_height = args[0].repository.get_best_header().get('block_height')\n-            args[0].current_best_height = best_height\n-            confirmed = False\n-            if res.get('blockhash'):\n-                header = args[0].repository.get_block_header(res['blockhash'])\n-                if header:\n-                    res['confirmations'] = best_height - header['block_height']\n-                    if res['confirmations'] > 3:\n-                        args[0].cache.set('getrawtransaction', res['txid'], res)\n-                        confirmed = True\n+    def _load_index(self):\n+        index = self.session.get(self.cache_name)\n+        if not index:\n+            Logger.cache.warning('Cache not found. 
Ok if is the first time')\n+            return\n+        Logger.cache.debug('Loaded index')\n+        index and self._deserialize_index(index)\n+        self._last_dump_size = self.index and self.index['total']\n \n-        if kwargs.get('verbose'):\n-            raise NotImplementedError\n-            # Note: I have to do a PR to ElectrumX Server and today is saturday :-)\n+    def track(self, key, size):\n+        if not self.index:\n+            self.index = {'keys': {}, 'total': 0}\n+        self.index['keys'][key] = {\n+            'saved_at': int(time.time()),\n+            'size': size,\n+            'key': key\n+        }\n+        self.index['total'] += size\n+\n+    async def check(self):\n+        if self.index and not self._last_dump_size or (self.index and self.index['total'] != self._last_dump_size):\n+            Logger.cache.debug('Pending data, dumping')\n+            self._save_index()\n+        if not self.index:\n+            Logger.cache.debug('No prev index found, trying to load')\n+            self._load_index()\n+        if not self.index:\n+            self.index = {'keys': {}, 'total': 0}\n+            Logger.cache.debug('No prev index found nor loaded')\n+            self._save_index()\n+            return\n+        if self.index['total'] > self.limit:\n+            Logger.cache.debug('Purging cache, size: %s, limit: %s', self.index['total'], self.limit)\n+            blockfirst = {0: 2, 1: 1}\n+            index_sorted = sorted(\n+                self.index['keys'].values(), key=lambda x: ((blockfirst[x['key'][0]] ** 33) + x['saved_at'])\n+            )\n+            i = 0\n+            while self.index['total'] * 1.1 > self.limit:\n+                item = index_sorted[i]\n+                Logger.cache.debug('Deleting %s' % item)\n+                self.delete(item)\n+                i += 1\n+        else:\n+            Logger.cache.debug('Cache is ok, size: %s, limit: %s', self.index['total'], self.limit)\n+        if self.index['total'] != self._last_dump_size:\n+            self._save_index()\n+\n+    def delete(self, item):\n+        if item['key'][0] == int.from_bytes(BLOCK_PREFIX, 'little'):\n+            Logger.leveldb.debug('Deleting block %s', item)\n+            self.repository.blockchain.remove_block(item['key'][2:])\n+        elif item['key'][0] == int.from_bytes(TRANSACTION_PREFIX, 'little'):\n+            Logger.leveldb.debug('Deleting transaction %s', item)\n+            self.repository.blockchain.remove_transaction(item['key'][2:])\n         else:\n-            return res and res['rawtx']\n-    return wrapper\n\\ No newline at end of file\n+            raise ValueError('Problem: %s' % item)\n+        self.index['total'] -= self.index['keys'].pop(item['key'])['size']\n+\n+    async def lurk(self):\n+        try:\n+            await self.lock.acquire()\n+            await self.check()\n+        finally:\n+            self.lock.release()\n+            self.loop.create_task(self.delayer(self.lurk(), 30))\n+\n+    def get_index(self):\n+        if not self.index:\n+            self._load_index()\n+        return self.index\ndiff --git a/spruned/application/database.py b/spruned/application/database.py\n--- a/spruned/application/database.py\n+++ b/spruned/application/database.py\n@@ -1,10 +1,13 @@\n import os\n+\n+import plyvel\n+\n from sqlalchemy import Column, String, Integer, create_engine\n from sqlalchemy.ext.declarative import declarative_base\n from sqlalchemy.orm import sessionmaker, scoped_session\n import threading\n from functools import 
wraps\n-from spruned.application import settings\n+from spruned import settings\n \n Base = declarative_base()\n \n@@ -21,10 +24,17 @@ class Header(Base):\n if not settings.SQLITE_DBNAME or os.path.exists(settings.SQLITE_DBNAME):\n     Base.metadata.create_all(engine)\n \n-session = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))\n+sqlite = scoped_session(sessionmaker(autocommit=False, autoflush=False, bind=engine))\n+\n+if not settings.TESTING:\n+    storage_ldb = plyvel.DB(settings.LEVELDB_BLOCKCHAIN_ADDRESS, create_if_missing=True)\n+else:\n+    from unittest.mock import Mock\n+    storage_ldb = Mock()\n \n _local = threading.local()\n-_local.session = session\n+_local.session = sqlite\n+_local.storage_ldb = storage_ldb\n \n \n def atomic(fun):\n@@ -48,3 +58,29 @@ def decorator(*args, **kwargs):\n         finally:\n             _local.session.close()\n     return decorator\n+\n+\n+def ldb_batch(fun):\n+    @wraps(fun)\n+    def decorator(*args, **kwargs):\n+        try:\n+            _local.leveldb_counter += 1\n+        except AttributeError:\n+            _local.leveldb_counter = 1\n+        if _local.leveldb_counter == 1:\n+            try:\n+                if not _local.in_ldb_batch:\n+                    _local.storage_ldb = storage_ldb.write_batch()\n+                    _local.in_ldb_batch = True\n+            except AttributeError:\n+                _local.in_ldb_batch = True\n+                _local.storage_ldb = storage_ldb.write_batch()\n+        r = fun(*args, **kwargs)\n+        if _local.leveldb_counter == 1:\n+            _local.storage_ldb.write()\n+            if _local.in_ldb_batch:\n+                _local.in_ldb_batch = False\n+                _local.storage_ldb = storage_ldb\n+        _local.leveldb_counter -= 1\n+        return r\n+    return decorator\ndiff --git a/spruned/application/jsonrpc_server.py b/spruned/application/jsonrpc_server.py\n--- a/spruned/application/jsonrpc_server.py\n+++ b/spruned/application/jsonrpc_server.py\n@@ -1,9 +1,12 @@\n import binascii\n from aiohttp import web\n+from decimal import Decimal\n from jsonrpcserver.aio import methods\n from jsonrpcserver import config\n from jsonrpcserver.response import ExceptionResponse\n \n+from spruned.application.logging_factory import Logger\n+\n config.schema_validation = False\n \n \n@@ -24,6 +27,7 @@ async def _handle(self, request):\n         if isinstance(response, ExceptionResponse):\n             return web.json_response(response, status=response.http_status)\n         if response and \"error\" in response.get(\"result\"):\n+            Logger.jsonrpc.error('Error in response: %s', response)\n             return web.json_response(response[\"result\"], status=400)\n         return web.json_response(response, status=response.http_status)\n \n@@ -32,6 +36,7 @@ async def start(self):\n         app.router.add_post('/', self._handle)\n         runner = web.AppRunner(app)\n         await runner.setup()\n+        methods.add(self.echo)\n         methods.add(self.estimatefee)\n         methods.add(self.estimatesmartfee)\n         methods.add(self.getbestblockhash)\n@@ -45,13 +50,16 @@ async def start(self):\n \n         return await web.TCPSite(runner, host=self.host, port=self.port).start()\n \n-    async def getblock(self, blockhash: str):\n+    async def echo(self, *args):\n+        return \"\"\n+\n+    async def getblock(self, blockhash: str, mode: int = 1):\n         try:\n             binascii.unhexlify(blockhash)\n             assert len(blockhash) == 64\n         
except (binascii.Error, AssertionError):\n             return {\"error\": {\"code\": -5, \"message\": \"Block not found\"}}\n-        response = await self.vo_service.getblock(blockhash)\n+        response = await self.vo_service.getblock(blockhash, mode)\n         if not response:\n             return {\"error\": {\"code\": -5, \"message\": \"Block not found\"}}\n         return response\n@@ -63,7 +71,7 @@ async def getrawtransaction(self, txid: str, verbose=False):\n             return {\"error\": {\"code\": -8, \"message\": \"parameter 1 must be hexadecimal string (not '%s')\" % txid}}\n         if len(txid) != 64:\n             return {\"error\": {\"code\": -8, \"message\": \"parameter 1 must be of length 64 (not '%s')\" % len(txid)}}\n-        response = await self.vo_service.getrawtransaction(txid)\n+        response = await self.vo_service.getrawtransaction(txid, verbose)\n         if not response:\n             return {\"error\": {\"code\": -5, \"message\": \"No such mempool or blockchain transaction. [maybe try again]\"}}\n         return response\n@@ -106,7 +114,7 @@ async def estimatefee(self, blocks: int):\n             return \"-1\"\n         return response.get(\"response\")\n \n-    async def estimatesmartfee(self, blocks: int):\n+    async def estimatesmartfee(self, blocks: int, estimate_mode=None):\n         try:\n             int(blocks)\n         except ValueError:\n@@ -116,9 +124,11 @@ async def estimatesmartfee(self, blocks: int):\n         response = await self.vo_service.estimatefee(blocks)\n         if response is None:\n             return {\"error\": {\"code\": -8, \"message\": \"server error: try again\"}}\n+\n         return {\n             \"blocks\": blocks,\n-            \"feerate\": response[\"response\"]\n+            \"feerate\": response,\n+            \"_feerate\": \"{:.8f}\".format(response)\n         }\n \n     async def getblockchaininfo(self):\ndiff --git a/spruned/application/logging_factory.py b/spruned/application/logging_factory.py\n--- a/spruned/application/logging_factory.py\n+++ b/spruned/application/logging_factory.py\n@@ -1,6 +1,6 @@\n import logging\n import sys\n-from spruned.application import settings\n+from spruned import settings\n \n \n class LoggingFactory:\n@@ -36,10 +36,28 @@ def third_party(self):\n     def electrum(self):\n         return logging.getLogger('electrum')\n \n+    @property\n+    def p2p(self):\n+        return logging.getLogger('p2p')\n+\n+    @property\n+    def leveldb(self):\n+        return logging.getLogger('leveldb')\n+\n     @property\n     def bitcoind(self):\n         return logging.getLogger('bitcoind')\n \n+    @property\n+    def cache(self):\n+        return logging.getLogger('cache')\n+\n+    @property\n+    def jsonrpc(self):\n+        return logging.getLogger('jsonrpc')\n+\n+\n+\n \n if settings.TESTING:\n     Logger = LoggingFactory(\n@@ -49,6 +67,11 @@ def bitcoind(self):\n     )  # type: LoggingFactory\n \n elif settings.DEBUG:\n+    logging.getLogger('jsonrpcserver.dispatcher.response').setLevel(logging.WARNING)\n+    logging.getLogger('pycoin').setLevel(logging.INFO)\n+    logging.getLogger('p2p').setLevel(logging.INFO)\n+    logging.getLogger('connectrum').setLevel(logging.INFO)\n+    logging.getLogger('electrum').setLevel(logging.INFO)\n     Logger = LoggingFactory(\n         logfile=settings.LOGFILE,\n         loglevel=logging.DEBUG,\n@@ -59,6 +82,8 @@ def bitcoind(self):\n     logging.getLogger('jsonrpcserver.dispatcher.response').setLevel(logging.WARNING)\n     
logging.getLogger('aiohttp.access').setLevel(logging.WARNING)\n     logging.getLogger('connectrum').setLevel(logging.WARNING)\n+    logging.getLogger('pycoin').setLevel(logging.WARNING)\n+    logging.getLogger('p2p').setLevel(logging.WARNING)\n     Logger = LoggingFactory(\n         logfile=settings.LOGFILE,\n         loglevel=logging.INFO,\ndiff --git a/spruned/application/spruned_vo_service.py b/spruned/application/spruned_vo_service.py\n--- a/spruned/application/spruned_vo_service.py\n+++ b/spruned/application/spruned_vo_service.py\n@@ -1,18 +1,20 @@\n-import typing\n import random\n import binascii\n+from spruned import settings\n+from spruned.application.cache import CacheAgent\n+from spruned.application.database import ldb_batch\n from spruned.application.tools import deserialize_header\n-from spruned.application import settings, exceptions\n-from spruned.application.abstracts import RPCAPIService, StorageInterface\n-from spruned.application.cache import cache_block, cache_transaction\n-from spruned.application.logging_factory import Logger\n+from spruned.application import exceptions\n+from spruned.application.abstracts import RPCAPIService\n+from spruned.daemon.exceptions import ElectrodMissingResponseException\n \n \n class SprunedVOService(RPCAPIService):\n-    def __init__(self, electrod, cache=None, utxo_tracker=None, repository=None):\n+    def __init__(self, electrod, p2p, cache: CacheAgent=None, utxo_tracker=None, repository=None):\n         self.sources = []\n         self.primary = []\n         self.cache = cache\n+        self.p2p = p2p\n         self.electrod = electrod\n         self.min_sources = 1\n         self.current_best_height = None\n@@ -22,168 +24,106 @@ def __init__(self, electrod, cache=None, utxo_tracker=None, repository=None):\n     def available(self):\n         raise NotImplementedError\n \n-    def _get_from_cache(self, *a):\n-        if self.cache:\n-            data = self.cache.get(a[0], a[1])\n-            if data:\n-                return data\n-\n-    def add_cache(self, cache: StorageInterface):\n-        assert isinstance(cache, StorageInterface)\n-        self.cache = cache\n-\n-    def add_source(self, service: RPCAPIService):\n-        assert isinstance(service, RPCAPIService)\n-        self.sources.append(service)\n-\n-    @cache_block\n-    async def getblock(self, blockhash: str):\n-        source = random.choice(self.sources)\n-        block = await source.getblock(blockhash)\n-        await self._verify_block_with_local_header(block)\n-        return block\n-\n-    async def _verify_block_with_local_header(self, block):\n-        repo_header = self.repository.get_block_header(block['hash'])\n-        _header = binascii.hexlify(repo_header['header_bytes']).decode()\n-        header = deserialize_header(_header)\n-        block['version'] = header['version']\n-        block['time'] = header['timestamp']\n-        block['versionHex'] = None\n-        block['mediantime'] = None\n-        block['nonce'] = header['nonce']\n-        block['bits'] = header['bits']\n-        block['difficulty'] = None\n-        block['chainwork'] = None\n-        block['previousblockhash'] = header['prev_block_hash']\n-        block['height'] = repo_header['block_height']\n-        # TODO Verify transactions tree\n-        if header.get('nextblockhash'):\n-            block['nextblockhash'] = repo_header.get('next_block_hash')\n-        block.pop('confirmations', None)\n+    async def getblock(self, blockhash: str, mode: int=1):\n+        block_header = 
self.repository.headers.get_block_header(blockhash)\n+        if not block_header:\n+            return\n+        block = await self._get_block(block_header)\n+        if mode == 1:\n+            best_header = self.repository.headers.get_best_header()\n+            block['confirmations'] = best_header['block_height'] - block_header['block_height']\n+            serialized = self._serialize_header(block_header)\n+            serialized['tx'] = [tx.id() for tx in block['block_object'].txs]\n+            return serialized\n+        elif mode == 2:\n+            raise NotImplementedError\n+        return binascii.hexlify(block['block_bytes']).decode()\n+\n+    @ldb_batch\n+    async def _get_block(self, blockheader, _r=0):\n+        blockhash = blockheader['block_hash']\n+        storedblock = self.repository.blockchain.get_block(blockhash)\n+        block = storedblock or await self.p2p.get_block(blockhash)\n+        if not block:\n+            if _r > 10:\n+                raise exceptions.ServiceException\n+            else:\n+                return await self._get_block(blockheader, _r + 1)\n+        if not storedblock:\n+            self.repository.blockchain.save_block(block, tracker=self.cache)\n         return block\n \n-    @cache_transaction\n     async def getrawtransaction(self, txid: str, verbose=False):\n-        source = random.choice(self.sources)\n-        transaction = await source.getrawtransaction(txid, verbose)\n-        if not transaction:\n-            # design fixme\n-            return\n         electrod_rawtx = await self.electrod.getrawtransaction(txid)\n-        transaction['rawtx'] = electrod_rawtx\n-        transaction['source'] += ', electrum'\n-        if transaction.get('blockhash'):\n-            blockheader = self.repository.get_block_header(transaction['blockhash'])\n-            merkleproof = await self.electrod.getmerkleproof(txid, blockheader['block_height'])\n-            assert merkleproof  # todo verify\n-        return transaction\n-\n-    '''\n-    async def gettxout(self, txid: str, index: int):  # pragma: no cover\n-        \"\"\"\n-        This entire part need a rethinking\n-        At the moment consider as we don't have a gettxout api available yet.\n-        \"\"\"\n-        source = random.choice(self.sources)\n-        txout = await source.gettxout(txid, index)\n-        if not txout:\n-            return\n-        if not await self.ensure_unspent_consistency_with_electrum_network(txid, index, txout):\n-            return\n \n-        best_block_header = self.repository.get_best_header()\n-        tx_blockheader = self.repository.get_block_header(txout['in_block'])\n-        in_block_height = tx_blockheader['block_height']\n-        confirmations = best_block_header['block_height'] - in_block_height\n-        return {\n-            \"bestblock\": best_block_header['block_hash'],\n-            \"confirmations\": confirmations,\n-            \"value\": '{:.8f}'.format(txout['value_satoshi'] / 10**8),\n-            \"scriptPubKey\": {\n-                \"asm\": txout['script_asm'],\n-                \"hex\": txout['script_hex'],\n-                \"reqSigs\": \"Not Implemented Yet\",\n-                \"type\": txout[\"script_type\"],\n-                \"addresses\": txout[\"addresses\"]\n-            },\n-            \"coinbase\": \"Not Implemented Yet\"\n-        }\n+        #blockheader = self.repository.headers.get_block_header(transaction['blockhash'])\n+        #merkleproof = await self.electrod.getmerkleproof(txid, 
blockheader['block_height'])\n \n-    async def ensure_unspent_consistency_with_electrum_network(\n-            self, txid: str, index: int, data: typing.Dict):   # pragma: no cover\n-        if not data['addresses']:\n-            if settings.ALLOW_UNSAFE_UTXO:\n-                return True\n-            return\n-        if data['script_type'] == 'nulldata':  # I'm not sure if nulldata can reach this point, investigate.\n-            return True\n-        found = False\n-        unspents = await self.electrod.listunspents(data['addresses'][0])\n-        if not unspents:\n-            return\n-        for unspent in unspents:\n-            if unspent['tx_hash'] == txid and unspent['tx_pos'] == index:\n-                found = unspent\n-                break\n-        if not data['unspent'] and found:\n-            Logger.third_party.debug(\n-                'unspent found in the electrum listunspent for the given address, '\n-                'but marked as spent by the other sources'\n-            )\n-            self.utxo_tracker and not data['unspent'] and self.utxo_tracker.invalidate_spent(txid, index)\n-            raise exceptions.SpentTxOutException\n-\n-        if data['unspent'] and not found:\n-            if bool(data['value_satoshi'] != found['value']):\n-                raise exceptions.SpentTxOutException\n-        return True\n-    '''\n+        if verbose:\n+            return {\n+                'source': 'electrum',\n+                'rawtx': electrod_rawtx\n+            }\n+        return electrod_rawtx\n \n     async def getbestblockhash(self):\n-        res = self.repository.get_best_header().get('block_hash')\n+        res = self.repository.headers.get_best_header().get('block_hash')\n         return res and res\n \n     async def getblockhash(self, blockheight: int):\n-        return self.repository.get_block_hash(blockheight)\n+        return self.repository.headers.get_block_hash(blockheight)\n \n     async def getblockheader(self, blockhash: str, verbose=True):\n-        header = self.repository.get_block_header(blockhash)\n+        header = self.repository.headers.get_block_header(blockhash)\n         if verbose:\n-            _best_header = self.repository.get_best_header()\n-            _deserialized_header = deserialize_header(binascii.hexlify(header['header_bytes']).decode())\n-            res = {\n-                  \"hash\": _deserialized_header['hash'],\n-                  \"confirmations\": _best_header['block_height'] - header['block_height'] + 1,\n-                  \"height\": header['block_height'],\n-                  \"version\": _deserialized_header['version'],\n-                  \"versionHex\": \"Not Implemented Yet\",\n-                  \"merkleroot\": _deserialized_header['merkle_root'],\n-                  \"time\": _deserialized_header['timestamp'],\n-                  \"mediantime\": _deserialized_header['timestamp'],\n-                  \"nonce\": _deserialized_header['nonce'],\n-                  \"bits\": _deserialized_header['bits'],\n-                  \"difficulty\": \"Not Implemented Yet\",\n-                  \"chainwork\": \"Not Implemented Yet\",\n-                  \"previousblockhash\": _deserialized_header['prev_block_hash'],\n-                  \"nextblockhash\": header.get('next_block_hash')\n-                }\n+            _best_header = self.repository.headers.get_best_header()\n+            res = self._serialize_header(header)\n+            res[\"confirmations\"] = _best_header['block_height'] - header['block_height'] + 
1\n         else:\n             res = binascii.hexlify(header['header_bytes']).decode()\n         return res\n \n+    @staticmethod\n+    def _serialize_header(header):\n+        _deserialized_header = deserialize_header(binascii.hexlify(header['header_bytes']).decode())\n+        return {\n+            \"hash\": _deserialized_header['hash'],\n+            \"height\": header['block_height'],\n+            \"version\": _deserialized_header['version'],\n+            \"versionHex\": \"Not Implemented Yet\",\n+            \"merkleroot\": _deserialized_header['merkle_root'],\n+            \"time\": _deserialized_header['timestamp'],\n+            \"mediantime\": _deserialized_header['timestamp'],\n+            \"nonce\": _deserialized_header['nonce'],\n+            \"bits\": _deserialized_header['bits'],\n+            \"difficulty\": \"Not Implemented Yet\",\n+            \"chainwork\": \"Not Implemented Yet\",\n+            \"previousblockhash\": _deserialized_header['prev_block_hash'],\n+            \"nextblockhash\": header.get('next_block_hash')\n+        }\n+\n     async def getblockcount(self):\n-        return self.repository.get_best_header().get('block_height')\n+        return self.repository.headers.get_best_header().get('block_height')\n \n     async def estimatefee(self, blocks: int):\n-        return await self.electrod.estimatefee(blocks)\n+        return await self._estimatefee(blocks)\n+\n+    async def _estimatefee(self, blocks, _r=1):\n+        try:\n+            res = await self.electrod.estimatefee(blocks)\n+        except ElectrodMissingResponseException as e:\n+            _r += 1\n+            if _r > 5:\n+                raise e\n+            return await self._estimatefee(blocks, _r + 1)\n+        return res\n \n     async def getbestblockheader(self, verbose=True):\n-        best_header = self.repository.get_best_header()\n+        best_header = self.repository.headers.get_best_header()\n         return await self.getblockheader(best_header['block_hash'], verbose=verbose)\n \n     async def getblockchaininfo(self):\n-        best_header = self.repository.get_best_header()\n+        best_header = self.repository.headers.get_best_header()\n         _deserialized_header = deserialize_header(best_header['header_bytes'])\n         return {\n             \"chain\": \"main\",\ndiff --git a/spruned/application/storage.py b/spruned/application/storage.py\ndeleted file mode 100644\n--- a/spruned/application/storage.py\n+++ /dev/null\n@@ -1,60 +0,0 @@\n-import os\n-import pickle\n-import shutil\n-from spruned.application.abstracts import StorageInterface\n-import gzip\n-\n-from spruned.application.logging_factory import Logger\n-\n-\n-class StorageFileInterface(StorageInterface):\n-    def __init__(self, directory, cache_limit=None, compress=True):\n-        self.directory = directory\n-        if not os.path.exists(directory):\n-            os.makedirs(directory)\n-        if cache_limit:\n-            raise NotImplementedError\n-        self._compress = compress\n-        self._interface = compress and gzip.open or open\n-        self._file_extension = compress and '.bin.gz' or '.bin'\n-\n-    def set(self, *a, ttl=None):\n-        if ttl:\n-            raise NotImplementedError\n-        args = list(a)[:-1]\n-        prefix = a[1].lstrip('0')[:2] + '/'\n-        if not os.path.exists(self.directory + prefix):\n-            os.makedirs(self.directory + prefix)\n-        file = self.directory + prefix + '.'.join(args) + self._file_extension\n-        with self._interface(file, 
'wb') as pointer:\n-            pickle.dump(a[-1], pointer)\n-\n-    def get(self, *a):\n-        prefix = a[1].lstrip('0')[:2] + '/'\n-        file = self.directory + prefix + '.'.join(a) + self._file_extension\n-        try:\n-            with self._interface(file, 'rb') as pointer:\n-                res = pickle.load(pointer)\n-        except FileNotFoundError:\n-            return None\n-        return res\n-\n-    def remove(self, *a, may_fail=True):\n-        prefix = a[1].lstrip('0')[:2] + '/'\n-        file = self.directory + prefix + '.'.join(a) + self._file_extension\n-        try:\n-            os.remove(file)\n-        except OSError:\n-            raise OSError\n-\n-    def purge(self):\n-        folder = self.directory\n-        for the_file in os.listdir(folder):\n-            file_path = os.path.join(folder, the_file)\n-            try:\n-                if os.path.isfile(file_path):\n-                    os.unlink(file_path)\n-                elif os.path.isdir(file_path):\n-                    shutil.rmtree(file_path)\n-            except Exception as e:\n-                Logger.root.exception('storage exception')\ndiff --git a/spruned/application/tools.py b/spruned/application/tools.py\n--- a/spruned/application/tools.py\n+++ b/spruned/application/tools.py\n@@ -89,19 +89,17 @@ def load_config():\n     \"\"\"\n     todo: parse config or create with default values\n     \"\"\"\n-    from spruned.application import settings\n+    from spruned import settings\n     import os\n     if not os.path.exists(settings.FILE_DIRECTORY):\n         os.makedirs(settings.FILE_DIRECTORY)\n     if not os.path.exists(settings.STORAGE_ADDRESS):\n         os.makedirs(settings.STORAGE_ADDRESS)\n-    if not os.path.exists(settings.CACHE_ADDRESS):\n-        os.makedirs(settings.CACHE_ADDRESS)\n \n \n def check_internet_connection():\n     from spruned.application.logging_factory import Logger\n-    from spruned.application.settings import CHECK_NETWORK_HOST\n+    from spruned.settings import CHECK_NETWORK_HOST\n     import subprocess\n     import os\n     Logger.electrum.debug('Checking internet connectivity')\ndiff --git a/spruned/application/utils/__init__.py b/spruned/application/utils/__init__.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/application/utils/__init__.py\n@@ -0,0 +1,4 @@\n+def split(data, offset):\n+    return [\n+        data[i:i+offset] for i in range(0, len(data), offset)\n+    ]\ndiff --git a/spruned/application/utils/jsonrpc_client.py b/spruned/application/utils/jsonrpc_client.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/application/utils/jsonrpc_client.py\n@@ -0,0 +1,67 @@\n+import asyncio\n+import base64\n+import time\n+import typing\n+from json import JSONDecodeError\n+\n+import aiohttp\n+import json\n+\n+import async_timeout\n+\n+\n+class JSONClient:\n+    def __init__(self, user, password, host, port):\n+        self.url = \"http://{}:{}\".format(host, port)\n+        self._auth = 'Basic %s' % base64.b64encode(user + b':' + password).decode()\n+\n+    async def _call(self, method: str, params: typing.List=None, jsonRes=True):\n+        payload = {\n+            \"method\": method,\n+            \"params\": params,\n+            \"jsonrpc\": \"2.0\",\n+            \"id\": 1,\n+        }\n+        async with aiohttp.ClientSession(conn_timeout=10) as session:\n+            start = time.time()\n+            print('calling %s with data %s' % (self.url, payload))\n+            response = await session.post(\n+                self.url,\n+                
data=json.dumps(payload),\n+                headers={'content-type': 'application/json', 'Authorization': self._auth},\n+            )\n+            print('done in %s' % round(time.time() - start, 2))\n+        if jsonRes:\n+            try:\n+\n+                return (await response.json()).get('result')\n+            except JSONDecodeError as e:\n+                print('Error decoding: %s' % e)\n+        else:\n+            return response.content\n+\n+\n+async def getblock_test(cli, bestheight=50000):\n+    print(await cli._call('getblockchaininfo'))\n+    blhash = None\n+    while not blhash:\n+        blhash = await cli._call('getblockhash', [bestheight])\n+        if not blhash:\n+            print('Failed get best block: %s' % blhash)\n+            await asyncio.sleep(1)\n+        else:\n+            print('Block hash found: %s' % blhash)\n+    while 1:\n+        block = await cli._call('getblock', [blhash])\n+        if block:\n+            blhash = block['previousblockhash']\n+            print('block %s downloaded' % blhash)\n+        else:\n+            print('block %s failed, sleeping' % blhash)\n+            await asyncio.sleep(5)\n+\n+if __name__ == '__main__':\n+    cli = JSONClient(b'rpcuser', b'password', 'localhost', 8332)\n+    loop = asyncio.get_event_loop()\n+    bestheight = 500000\n+    loop.run_until_complete(getblock_test(cli, bestheight=bestheight))\ndiff --git a/spruned/builder.py b/spruned/builder.py\n--- a/spruned/builder.py\n+++ b/spruned/builder.py\n@@ -1,34 +1,36 @@\n-def build():  # pragma: no cover\n+from spruned.application import tools\n+from spruned.application.cache import CacheAgent\n+from spruned.repositories.repository import Repository\n \n-    from spruned.application import tools\n-    tools.load_config()\n-    from spruned.services.thirdparty_service import builder as third_party_services_builder\n-    from spruned.application import spruned_vo_service, settings, database\n-    from spruned.application.cache import CacheFileInterface\n-    from spruned.application.jsonrpc_server import JSONRPCServer\n-    from spruned.daemon.electrod.electrod_reactor import build_electrod\n-    from spruned.application.headers_repository import HeadersSQLiteRepository\n+tools.load_config()\n+from spruned import settings\n+from spruned.daemon.tasks.blocks_reactor import BlocksReactor\n+from spruned.daemon.tasks.headers_reactor import HeadersReactor\n+from spruned.application import spruned_vo_service\n+from spruned.application.jsonrpc_server import JSONRPCServer\n+from spruned.daemon.electrod import build as electrod_builder\n+from spruned.daemon.p2p import build as p2p_builder\n \n-    headers_repository = HeadersSQLiteRepository(database.session)\n-    cache = CacheFileInterface(settings.CACHE_ADDRESS)\n-    electrod_daemon, electrod_service = build_electrod(\n-        headers_repository, settings.NETWORK, settings.ELECTROD_CONNECTIONS\n-    )\n-    third_pary_services = third_party_services_builder()\n-    service = spruned_vo_service.SprunedVOService(\n-        electrod_service,\n-        cache=cache,\n-        repository=headers_repository\n-    )\n-    service.add_source(third_pary_services)\n-    jsonrpc_server = JSONRPCServer(\n-        settings.JSONRPCSERVER_HOST,\n-        settings.JSONRPCSERVER_PORT,\n-        settings.JSONRPCSERVER_USER,\n-        settings.JSONRPCSERVER_PASSWORD\n-    )\n-    jsonrpc_server.set_vo_service(service)\n-    return electrod_daemon, jsonrpc_server\n+electrod_connectionpool, electrod_interface = 
electrod_builder(settings.NETWORK)\n+p2p_connectionpool, p2p_interface = p2p_builder(settings.NETWORK)\n \n+repository = Repository.instance()\n+cache = CacheAgent(repository, settings.CACHE_SIZE)\n+repository.set_cache(cache)\n \n-electrod_daemon, jsonrpc_server = build()  # pragma: no cover\n+service = spruned_vo_service.SprunedVOService(\n+    electrod_interface,\n+    p2p_interface,\n+    repository=repository,\n+    cache=cache\n+)\n+\n+jsonrpc_server = JSONRPCServer(\n+    settings.JSONRPCSERVER_HOST,\n+    settings.JSONRPCSERVER_PORT,\n+    settings.JSONRPCSERVER_USER,\n+    settings.JSONRPCSERVER_PASSWORD\n+)\n+jsonrpc_server.set_vo_service(service)\n+headers_reactor = HeadersReactor(repository.headers, electrod_interface)\n+blocks_reactor = BlocksReactor(repository, p2p_interface)\ndiff --git a/spruned/daemon/abstracts.py b/spruned/daemon/abstracts.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/abstracts.py\n@@ -0,0 +1,104 @@\n+import abc\n+from typing import List\n+\n+\n+class ConnectionAbstract(metaclass=abc.ABCMeta):  # pragma: no cover\n+\n+    @property\n+    @abc.abstractmethod\n+    def start_score(self):\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def hostname(self):\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def score(self):\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def version(self):\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def connected(self):\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def last_header(self):\n+        pass\n+\n+    @abc.abstractmethod\n+    def is_online(self) -> bool:\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_on_header_callbacks(self, callback):\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_on_connect_callback(self, callback):\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_on_disconnect_callback(self, callback):\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_on_peers_callback(self, callback):\n+        pass\n+\n+    @abc.abstractmethod\n+    def connect(self):\n+        pass\n+\n+    @abc.abstractmethod\n+    def ping(self, timeout=None):\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def errors(self):\n+        pass\n+\n+    @abc.abstractmethod\n+    def disconnect(self):\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_error(self, *a):\n+        pass\n+\n+\n+class ConnectionPoolAbstract(metaclass=abc.ABCMeta):  # pragma: no cover\n+    @property\n+    @abc.abstractmethod\n+    def established_connections(self) -> List:\n+        pass\n+\n+    @property\n+    @abc.abstractmethod\n+    def connections(self) -> List:\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_on_connected_observer(self, observer):\n+        pass\n+\n+    @abc.abstractmethod\n+    def add_header_observer(self, observer):\n+        pass\n+\n+    @abc.abstractmethod\n+    async def connect(self):\n+        pass\n+\n+    @abc.abstractmethod\n+    def is_online(self) -> bool:\n+        pass\ndiff --git a/spruned/daemon/connection_base_impl.py b/spruned/daemon/connection_base_impl.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/connection_base_impl.py\n@@ -0,0 +1,114 @@\n+import abc\n+import asyncio\n+import time\n+from typing import Dict, List\n+from spruned.application.tools import async_delayed_task\n+from spruned.daemon.abstracts import ConnectionAbstract\n+\n+\n+class BaseConnection(ConnectionAbstract, metaclass=abc.ABCMeta):\n+    def __init__(\n+      
      self, hostname: str, use_tor=False, loop=None,\n+            start_score=10, timeout=10, expire_errors_after=180,\n+            is_online_checker: callable=None, delayer=async_delayed_task\n+    ):\n+        self._is_online_checker = is_online_checker\n+        self._hostname = hostname\n+        self._hostname = hostname\n+        self.use_tor = use_tor\n+        self._version = None\n+        self._on_headers_callbacks = []\n+        self._on_connect_callbacks = []\n+        self._on_disconnect_callbacks = []\n+        self._on_errors_callbacks = []\n+        self._on_peers_callbacks = []\n+        self.loop = loop or asyncio.get_event_loop()\n+        self._start_score = start_score\n+        self._last_header = None\n+        self._subscriptions = []\n+        self._timeout = timeout\n+        self._errors = []\n+        self._peers = []\n+        self._expire_errors_after = expire_errors_after\n+        self._is_online_checker = is_online_checker\n+        self.delayer = delayer\n+\n+    @property\n+    def hostname(self):\n+        return self._hostname\n+\n+    def add_error(self, *a):\n+        if len(a) and isinstance(a[0], int):\n+            self._errors.append(a[0])\n+        else:\n+            self._errors.append(int(time.time()))\n+\n+    def is_online(self):\n+        if self._is_online_checker is not None:\n+            return self._is_online_checker()\n+        return True\n+\n+    def add_on_header_callbacks(self, callback):\n+        self._on_headers_callbacks.append(callback)\n+\n+    def add_on_connect_callback(self, callback):\n+        self._on_connect_callbacks.append(callback)\n+\n+    def add_on_disconnect_callback(self, callback):\n+        self._on_disconnect_callbacks.append(callback)\n+\n+    def add_on_peers_callback(self, callback):\n+        self._on_peers_callbacks.append(callback)\n+\n+    def add_on_error_callback(self, callback):\n+        self._on_errors_callbacks.append(callback)\n+\n+    async def on_header(self, header):\n+        self._last_header = header\n+        for callback in self._on_headers_callbacks:\n+            self.loop.create_task(callback(self))\n+\n+    async def on_connect(self):\n+        for callback in self._on_connect_callbacks:\n+            self.loop.create_task(callback(self))\n+\n+    async def on_error(self, error):\n+        if not self.is_online:\n+            return\n+        self.add_error()\n+        for callback in self._on_errors_callbacks:\n+            self.loop.create_task(callback(self, error_type=error))\n+\n+    async def on_peers(self):\n+        for callback in self._on_peers_callbacks:\n+            self.loop.create_task(callback(self))\n+\n+    @property\n+    def start_score(self):\n+        return self._start_score\n+\n+    @property\n+    def version(self):\n+        return self._version\n+\n+    @property\n+    def last_header(self) -> Dict:\n+        return self._last_header\n+\n+    @property\n+    def subscriptions(self) -> List:\n+        return self._subscriptions\n+\n+    @property\n+    def score(self):\n+        return self._start_score - len(self.errors)\n+\n+    @property\n+    def errors(self):\n+        now = int(time.time())\n+        self._errors = [error for error in self._errors if now - error < self._expire_errors_after]\n+        return self._errors\n+\n+    @property\n+    def peers(self):\n+        return self._peers\ndiff --git a/spruned/daemon/connectionpool_base_impl.py b/spruned/daemon/connectionpool_base_impl.py\nnew file mode 100644\n--- /dev/null\n+++ 
b/spruned/daemon/connectionpool_base_impl.py\n@@ -0,0 +1,172 @@\n+import abc\n+import asyncio\n+import random\n+import time\n+from typing import List\n+\n+from spruned.application.logging_factory import Logger\n+from spruned.application.tools import check_internet_connection, async_delayed_task\n+from spruned.daemon import exceptions\n+from spruned.daemon.abstracts import ConnectionPoolAbstract, ConnectionAbstract\n+\n+\n+class BaseConnectionPool(ConnectionPoolAbstract, metaclass=abc.ABCMeta):\n+    def __init__(self,\n+                 peers=list(),\n+                 network_checker=check_internet_connection,\n+                 delayer=async_delayed_task,\n+                 loop=asyncio.get_event_loop(),\n+                 use_tor=False,\n+                 connections=3,\n+                 sleep_no_internet=30,\n+                 ipv6=False\n+                 ):\n+        self._connections = []\n+        self._peers = peers\n+        self._headers_observers = []\n+        self._new_peers_observers = []\n+        self._on_connect_observers = []\n+        self._required_connections = connections\n+        self._network_checker = network_checker\n+        self._use_tor = use_tor\n+        self.loop = loop\n+        self.delayer = delayer\n+        self._connection_notified = False\n+        self._is_online = False\n+        self._sleep_on_no_internet_connectivity = sleep_no_internet\n+        self._keepalive = True\n+        self._ipv6 = ipv6\n+\n+    @property\n+    def peers(self):\n+        return self._peers\n+\n+    @property\n+    def connections(self):\n+        self._connections = [\n+            c for c in self._connections if (c.connected or (not c.connected and c.score < c.start_score))\n+        ]\n+        return self._connections\n+\n+    @property\n+    def established_connections(self):\n+        return [connection for connection in self.connections if connection.connected]\n+\n+    def _pick_peer(self):\n+        i = 0\n+        while 1:\n+            if self.peers:\n+                server = random.choice(self.peers)\n+                if server not in [connection.hostname for connection in self.connections]:\n+                    if ':' in server[0] and not self._ipv6:\n+                        i += 1\n+                        continue\n+                    return server\n+                i += 1\n+                if i < 100:\n+                    continue\n+            raise exceptions.NoServersException\n+\n+    def _pick_multiple_peers(self, howmany: int):\n+        assert howmany >= 1\n+        i = 0\n+        servers = []\n+        while 1:\n+            if self.peers:\n+                server = self._pick_peer()\n+                if server in servers:\n+                    continue\n+                servers.append(server)\n+                if len(servers) == howmany:\n+                    return servers\n+                if i < 100:\n+                    continue\n+            raise exceptions.NoServersException\n+\n+    def _pick_connection(self, fail_silent=False):\n+        i = 0\n+        while 1:\n+            if self.established_connections:\n+                connection = random.choice(self.established_connections)\n+                if connection.connected and connection.score > 0:\n+                    return connection\n+                i += 1\n+                if i < 100:\n+                    continue\n+            if not fail_silent:\n+                raise exceptions.NoPeersException\n+            return\n+\n+    def _pick_multiple_connections(self, 
howmany: int) -> List[ConnectionAbstract]:\n+        assert howmany >= 1\n+        i = 0\n+        connections = []\n+        while 1:\n+            if self.established_connections:\n+                connection = self._pick_connection()\n+                if connection in connections:\n+                    i += 1\n+                    if i > 100:\n+                        raise exceptions.NoPeersException\n+                    continue\n+                connections.append(connection)\n+                if len(connections) == howmany:\n+                    return connections\n+            i += 1\n+            if i < 100:\n+                continue\n+            raise exceptions.NoPeersException\n+\n+    def is_online(self):\n+        return self._is_online\n+\n+    def add_on_connected_observer(self, observer):\n+        self._on_connect_observers.append(observer)\n+\n+    def add_header_observer(self, observer):\n+        self._headers_observers.append(observer)\n+\n+    def on_peer_disconnected(self, peer: ConnectionAbstract, *_):\n+        peer.add_error(int(time.time()) + 180)\n+\n+    async def on_peer_received_header(self, peer: ConnectionAbstract, *_):\n+        for observer in self._headers_observers:\n+            self.loop.create_task(self.delayer(observer(peer, peer.last_header)))\n+\n+    async def on_peer_received_peers(self, peer: ConnectionAbstract, *_):\n+        raise NotImplementedError\n+\n+    async def on_peer_error(self, peer: ConnectionAbstract, error_type=None):\n+        if error_type == 'connect':\n+            if await self._check_internet_connectivity():\n+                peer.add_error(int(time.time()) + 180)\n+            return\n+        if self.is_online:\n+            Logger.electrum.debug('Peer %s error', peer)\n+            await self._handle_peer_error(peer)\n+\n+    def stop(self):\n+        self._keepalive = False\n+\n+    async def _check_internet_connectivity(self):\n+        if self._network_checker is None:  # pragma: no cover\n+            self._is_online = True\n+            return\n+        self._is_online = self._network_checker()\n+        return self._is_online\n+\n+    async def _handle_peer_error(self, peer: ConnectionAbstract):\n+        Logger.electrum.debug('Handling connection error for %s', peer.hostname)\n+        if not peer.connected:\n+            peer.add_error()\n+            return\n+        if not peer.score:\n+            Logger.electrum.error('Disconnecting from peer %s, score: %s', peer.hostname, peer.score)\n+            self.loop.create_task(self.delayer(peer.disconnect()))\n+            return\n+        if not await peer.ping(timeout=2):\n+            Logger.electrum.error('Ping timeout from peer %s, score: %s', peer.hostname, peer.score)\n+            self.loop.create_task(self.delayer(peer.disconnect()))\n+\n+    def connect(self):\n+        raise NotImplementedError\ndiff --git a/spruned/daemon/electrod/__init__.py b/spruned/daemon/electrod/__init__.py\n--- a/spruned/daemon/electrod/__init__.py\n+++ b/spruned/daemon/electrod/__init__.py\n@@ -0,0 +1,23 @@\n+import os\n+import json\n+import asyncio\n+\n+from spruned.daemon.electrod.electrod_connection import ElectrodConnectionPool\n+from spruned.daemon.electrod.electrod_interface import ElectrodInterface\n+\n+\n+def build(network, connections=3, loop=asyncio.get_event_loop()):  # pragma: no cover\n+    assert network\n+\n+    def load_electrum_servers(network):\n+        _current_path = os.path.dirname(os.path.abspath(__file__))\n+        with open(_current_path + 
'/electrum_servers.json', 'r') as f:\n+            servers = json.load(f)\n+        return servers[network]\n+\n+    electrod_pool = ElectrodConnectionPool(\n+        connections=connections, peers=load_electrum_servers(\"bc_mainnet\")\n+    )\n+    electrod_interface = ElectrodInterface(electrod_pool, loop)\n+    return electrod_pool, electrod_interface\n+\ndiff --git a/spruned/daemon/electrod/electrod_connection.py b/spruned/daemon/electrod/electrod_connection.py\n--- a/spruned/daemon/electrod/electrod_connection.py\n+++ b/spruned/daemon/electrod/electrod_connection.py\n@@ -1,92 +1,41 @@\n import asyncio\n import os\n import binascii\n-import random\n import time\n-from typing import Dict, List\n+from typing import Dict\n import async_timeout\n from connectrum.client import StratumClient\n from connectrum.svr_info import ServerInfo\n from spruned.application.logging_factory import Logger\n from spruned.application.tools import async_delayed_task, check_internet_connection\n from spruned.daemon import exceptions\n+from spruned.daemon.connection_base_impl import BaseConnection\n+from spruned.daemon.connectionpool_base_impl import BaseConnectionPool\n \n \n-class ElectrodConnection:\n+class ElectrodConnection(BaseConnection):\n     def __init__(\n             self, hostname: str, protocol: str, keepalive=180,\n             client=StratumClient, serverinfo=ServerInfo, nickname=None, use_tor=False, loop=None,\n             start_score=10, timeout=10, expire_errors_after=180,\n             is_online_checker: callable=None, delayer=async_delayed_task\n     ):\n-        self.hostname = hostname\n+\n         self.protocol = protocol\n         self.keepalive = keepalive\n         self.client: StratumClient = client()\n         self.serverinfo_factory = serverinfo\n         self.client.keepalive_interval = keepalive\n         self.nickname = nickname or binascii.hexlify(os.urandom(8)).decode()\n-        self.use_tor = use_tor\n-        self._version = None\n-        self._on_headers_callbacks = []\n-        self._on_connect_callbacks = []\n-        self._on_disconnect_callbacks = []\n-        self._on_errors_callbacks = []\n-        self._on_peers_callbacks = []\n-        self.loop = loop or asyncio.get_event_loop()\n-        self._start_score = start_score\n-        self._last_header = None\n-        self._subscriptions = []\n-        self._timeout = timeout\n-        self._errors = []\n-        self._peers = []\n-        self._expire_errors_after = expire_errors_after\n-        self._is_online_checker = is_online_checker\n-        self.delayer = delayer\n-\n-    def is_online(self):\n-        if self._is_online_checker is not None:\n-            return self._is_online_checker()\n-        return True\n-\n-    def add_on_header_callbacks(self, callback):\n-        self._on_headers_callbacks.append(callback)\n-\n-    def add_on_connect_callback(self, callback):\n-        self._on_connect_callbacks.append(callback)\n-\n-    def add_on_disconnect_callback(self, callback):\n-        self._on_disconnect_callbacks.append(callback)\n-\n-    def add_on_peers_callback(self, callback):\n-        self._on_peers_callbacks.append(callback)\n-\n-    def add_on_error_callback(self, callback):\n-        self._on_errors_callbacks.append(callback)\n-\n-    async def on_header(self, header):\n-        self._last_header = header\n-        for callback in self._on_headers_callbacks:\n-            self.loop.create_task(callback(self))\n-\n-    async def on_connect(self):\n-        for callback in 
self._on_connect_callbacks:\n-            self.loop.create_task(callback(self))\n-\n-    def on_connectrum_disconnect(self, *_, **__):\n-        for callback in self._on_disconnect_callbacks:\n-            self.loop.create_task(callback(self))\n-\n-    async def on_error(self, error):\n-        if not self.is_online:\n-            return\n-        self._errors.append(int(time.time()))\n-        for callback in self._on_errors_callbacks:\n-            self.loop.create_task(callback(self, error_type=error))\n+        super().__init__(\n+            hostname=hostname, use_tor=use_tor, loop=loop, start_score=start_score,\n+            is_online_checker=is_online_checker, timeout=timeout, delayer=delayer,\n+            expire_errors_after=expire_errors_after\n+        )\n \n-    async def on_peers(self):\n-        for callback in self._on_peers_callbacks:\n-            self.loop.create_task(callback(self))\n+    @property\n+    def connected(self):\n+        return bool(self.client.protocol)\n \n     async def connect(self):\n         try:\n@@ -105,17 +54,9 @@ async def connect(self):\n             Logger.electrum.error('Exception connecting to %s (%s)', self.hostname, e)\n             await self.on_error('connect')\n \n-    @property\n-    def start_score(self):\n-        return self._start_score\n-\n-    @property\n-    def version(self):\n-        return self._version\n-\n-    @property\n-    def connected(self):\n-        return bool(self.client.protocol)\n+    def on_connectrum_disconnect(self, *_, **__):\n+        for callback in self._on_disconnect_callbacks:\n+            self.loop.create_task(callback(self))\n \n     async def ping(self, timeout=2) -> (None, float):\n         try:\n@@ -126,28 +67,6 @@ async def ping(self, timeout=2) -> (None, float):\n         except asyncio.TimeoutError:\n             return\n \n-    @property\n-    def last_header(self) -> Dict:\n-        return self._last_header\n-\n-    @property\n-    def subscriptions(self) -> List:\n-        return self._subscriptions\n-\n-    @property\n-    def score(self):\n-        return self._start_score - len(self.errors)\n-\n-    @property\n-    def errors(self):\n-        now = int(time.time())\n-        self._errors = [error for error in self._errors if now - error < self._expire_errors_after]\n-        return self._errors\n-\n-    @property\n-    def peers(self):\n-        return self._peers\n-\n     async def rpc_call(self, method: str, args):\n         try:\n             async with async_timeout.timeout(self._timeout):\n@@ -188,152 +107,24 @@ async def disconnect(self):\n             self.client.protocol = None\n \n \n-class ElectrodConnectionPool:\n-    def __init__(self,\n-                 connections=3,\n-                 loop=asyncio.get_event_loop(),\n-                 use_tor=False,\n-                 electrum_servers=[],\n-                 network_checker=check_internet_connection,\n-                 delayer=async_delayed_task,\n-                 connection_factory=ElectrodConnection,\n-                 sleep_no_internet=30\n-                 ):\n-        self._use_tor = use_tor\n-        self._servers = electrum_servers\n-        self._connections = []\n-        self._required_connections = connections\n-        self._keepalive = True\n-        self.loop = loop\n+class ElectrodConnectionPool(BaseConnectionPool):\n+    def __init__(\n+            self,\n+            connection_factory=ElectrodConnection,\n+            peers=list(),\n+            network_checker=check_internet_connection,\n+            
delayer=async_delayed_task,\n+            loop=asyncio.get_event_loop(),\n+            use_tor=False,\n+            connections=3,\n+            sleep_no_internet=30\n+    ):\n+        super().__init__(\n+            peers=peers, network_checker=network_checker, delayer=delayer,\n+            loop=loop, use_tor=use_tor, connections=connections, sleep_no_internet=sleep_no_internet\n+        )\n         self._connections_keepalive_time = 120\n-        self._headers_observers = []\n-        self._new_peers_observers = []\n-        self._on_connect_observers = []\n-        self._connection_notified = False\n-        self._is_online = False\n-        self.delayer = delayer\n-        self._network_checker = network_checker\n         self._connection_factory = connection_factory\n-        self._sleep_on_no_internet_connectivity = sleep_no_internet\n-\n-    def is_online(self):\n-        return self._is_online\n-\n-    def add_on_connected_observer(self, observer):\n-        self._on_connect_observers.append(observer)\n-\n-    def add_header_observer(self, observer):\n-        self._headers_observers.append(observer)\n-\n-    async def on_peer_connected(self, peer: ElectrodConnection):\n-        future = peer.subscribe(\n-            'blockchain.headers.subscribe',\n-            self.on_peer_received_header,\n-            self.on_peer_received_header\n-        )\n-        self.loop.create_task(self.delayer(future))\n-\n-    def on_peer_disconnected(self, peer: ElectrodConnection):\n-        peer._errors.append(int(time.time()) + 180)  # put the peer at sleep for a while\n-\n-    async def on_peer_received_header(self, peer: ElectrodConnection):\n-        for observer in self._headers_observers:\n-            self.loop.create_task(self.delayer(observer(peer, peer.last_header)))\n-\n-    async def on_peer_received_peers(self, peer: ElectrodConnection): # pragma: no cover\n-        raise NotImplementedError\n-\n-    async def on_peer_error(self, peer: ElectrodConnection, error_type=None):\n-        if error_type == 'connect':\n-            if await self._check_internet_connectivity():\n-                peer._errors.append(int(time.time()) + 180)\n-            return\n-        if self.is_online:\n-            Logger.electrum.debug('Peer %s error', peer)\n-            await self._handle_peer_error(peer)\n-\n-    @property\n-    def connections(self):\n-        self._connections = [\n-            c for c in self._connections if (c.connected or (not c.connected and c.score < c.start_score))\n-        ]\n-        return self._connections\n-\n-    @property\n-    def established_connections(self):\n-        return [connection for connection in self.connections if connection.connected]\n-\n-    def _pick_server(self):\n-        i = 0\n-        while 1:\n-            if self.servers:\n-                server = random.choice(self.servers)\n-                if server not in [connection.hostname for connection in self.connections]:\n-                    return server\n-                i += 1\n-                if i < 100:\n-                    continue\n-            raise exceptions.NoServersException\n-\n-    def _pick_multiple_servers(self, howmany: int):\n-        assert howmany >= 1\n-        i = 0\n-        servers = []\n-        while 1:\n-            if self.servers:\n-                server = self._pick_server()\n-                if server in servers:\n-                    continue\n-                servers.append(server)\n-                if len(servers) == howmany:\n-                    return 
servers\n-                if i < 100:\n-                    continue\n-            raise exceptions.NoServersException\n-\n-    def _pick_connection(self, fail_silent=False):\n-        i = 0\n-        while 1:\n-            if self.established_connections:\n-                connection = random.choice(self.established_connections)\n-                if connection.connected and connection.score > 0:\n-                    return connection\n-                i += 1\n-                if i < 100:\n-                    continue\n-            if not fail_silent:\n-                raise exceptions.NoPeersException\n-            return\n-\n-    def _pick_multiple_connections(self, howmany: int):\n-        assert howmany >= 1\n-        i = 0\n-        connections = []\n-        while 1:\n-            if self.established_connections:\n-                connection = self._pick_connection()\n-                if connection in connections:\n-                    i += 1\n-                    if i > 100:\n-                        raise exceptions.NoPeersException\n-                    continue\n-                connections.append(connection)\n-                if len(connections) == howmany:\n-                    return connections\n-            i += 1\n-            if i < 100:\n-                continue\n-            raise exceptions.NoPeersException\n-\n-    def stop(self):\n-        self._keepalive = False\n-\n-    async def _check_internet_connectivity(self):\n-        if self._network_checker is None:  # pragma: no cover\n-            self._is_online = True\n-            return\n-        self._is_online = self._network_checker()\n-        return self._is_online\n \n     async def connect(self):\n         await self._check_internet_connectivity()\n@@ -353,7 +144,7 @@ async def connect(self):\n                 Logger.electrum.debug('ConnectionPool: connect, needed: %s', missings)\n                 self.loop.create_task(self._connect_servers(missings))\n             elif missings < 0:\n-                Logger.electrum.warning('Too much peers.')\n+                Logger.electrum.warning('Too many peers.')\n                 connection = self._pick_connection(fail_silent=True)\n                 self.loop.create_task(connection.disconnect())\n             elif not self._connection_notified:\n@@ -363,12 +154,12 @@ async def connect(self):\n             await asyncio.sleep(missings and 2 or 10)\n \n     async def _connect_servers(self, howmany: int):\n-        servers = self._pick_multiple_servers(howmany)\n-        servers and Logger.electrum.debug('Connecting to servers (%s)', howmany)\n-        for server in servers:\n+        peers = self._pick_multiple_peers(howmany)\n+        peers and Logger.electrum.debug('Connecting to peers (%s)', howmany)\n+        for peer in peers:\n             instance = self._connection_factory(\n-                hostname=server[0],\n-                protocol=server[1],\n+                hostname=peer[0],\n+                protocol=peer[1],\n                 keepalive=self._connections_keepalive_time,\n                 use_tor=self._use_tor,\n                 loop=self.loop,\n@@ -379,13 +170,9 @@ async def _connect_servers(self, howmany: int):\n             instance.add_on_peers_callback(self.on_peer_received_peers)\n             instance.add_on_error_callback(self.on_peer_error)\n             self._connections.append(instance)\n-            Logger.electrum.debug('Created client instance: %s', server[0])\n+            Logger.electrum.debug('Created client instance: %s', peer[0])\n   
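# The _connect_servers hunk above builds each connection through an injected
# factory and registers observer callbacks before scheduling the connect.
# A minimal runnable sketch of that pattern (illustrative names only, not
# spruned's actual API):
import asyncio


class FakeConnection:
    # Stand-in for ElectrodConnection: runs registered callbacks on connect.
    def __init__(self, hostname):
        self.hostname = hostname
        self._on_connect_callbacks = []

    def add_on_connect_callback(self, callback):
        self._on_connect_callbacks.append(callback)

    async def connect(self):
        for callback in self._on_connect_callbacks:
            await callback(self)


class MiniPool:
    def __init__(self, connection_factory=FakeConnection):
        self._connection_factory = connection_factory  # injectable for tests
        self._connections = []

    async def connect_peers(self, hostnames):
        for hostname in hostnames:
            instance = self._connection_factory(hostname)
            instance.add_on_connect_callback(self.on_peer_connected)
            self._connections.append(instance)
            await instance.connect()

    async def on_peer_connected(self, peer):
        print('connected to', peer.hostname)


asyncio.get_event_loop().run_until_complete(
    MiniPool().connect_peers(['electrum.example.org'])
)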
          self.loop.create_task(instance.connect())\n \n-    @property\n-    def servers(self):\n-        return self._servers\n-\n     async def call(self, method, *params, agreement=1, get_peer=False, fail_silent=False) -> (None, Dict):\n         if get_peer and agreement > 1:\n             raise ValueError('Error!')\n@@ -415,6 +202,7 @@ async def call(self, method, *params, agreement=1, get_peer=False, fail_silent=F\n         connection = self._pick_connection()\n         response = await connection.rpc_call(method, params)\n         if not response and not fail_silent:\n+            await self.on_peer_error(connection)\n             raise exceptions.ElectrodMissingResponseException\n         return (connection, response) if get_peer else response\n \n@@ -427,15 +215,13 @@ def _handle_responses(responses) -> Dict:\n                 return response\n         raise exceptions.NoQuorumOnResponsesException(responses)\n \n-    async def _handle_peer_error(self, peer: ElectrodConnection):\n-        Logger.electrum.debug('Handling connection error for %s', peer.hostname)\n-        if not peer.connected:\n-            peer._errors.append(int(time.time()))\n-            return\n-        if not peer.score:\n-            Logger.electrum.error('Disconnecting from peer %s, score: %s', peer.hostname, peer.score)\n-            self.loop.create_task(self.delayer(peer.disconnect()))\n-            return\n-        if not await peer.ping(timeout=2):\n-            Logger.electrum.error('Ping timeout from peer %s, score: %s', peer.hostname, peer.score)\n-            self.loop.create_task(self.delayer(peer.disconnect()))\n+    def on_peer_received_peers(self, peer: ElectrodConnection, *_):\n+        raise NotImplementedError\n+\n+    async def on_peer_connected(self, peer: ElectrodConnection):\n+        future = peer.subscribe(\n+            'blockchain.headers.subscribe',\n+            self.on_peer_received_header,\n+            self.on_peer_received_header\n+        )\n+        self.loop.create_task(self.delayer(future))\ndiff --git a/spruned/daemon/electrod/electrod_interface.py b/spruned/daemon/electrod/electrod_interface.py\n--- a/spruned/daemon/electrod/electrod_interface.py\n+++ b/spruned/daemon/electrod/electrod_interface.py\n@@ -1,7 +1,7 @@\n import asyncio\n import binascii\n from typing import Dict\n-from spruned.application import settings\n+from spruned import settings\n from spruned.application.exceptions import InvalidPOWException\n from spruned.application.logging_factory import Logger\n from spruned.daemon import exceptions\ndiff --git a/spruned/daemon/electrod/electrod_service.py b/spruned/daemon/electrod/electrod_service.py\ndeleted file mode 100644\n--- a/spruned/daemon/electrod/electrod_service.py\n+++ /dev/null\n@@ -1,29 +0,0 @@\n-from spruned.application.abstracts import RPCAPIService\n-from spruned.daemon.electrod.electrod_interface import ElectrodInterface\n-\n-\n-class ElectrodService(RPCAPIService):\n-    def __init__(self, interface: ElectrodInterface):\n-        self.interface = interface\n-\n-    async def getrawtransaction(self, txid, verbose=False):\n-        return await self.interface.getrawtransaction(txid)\n-\n-    async def getblock(self, blockhash, verbose=False):  # pragma: no cover\n-        raise NotImplementedError\n-\n-    async def estimatefee(self, blocks: int):\n-        return await self.interface.estimatefee(blocks)\n-\n-    async def sendrawtransaction(self, rawtransaction: str):  # pragma: no cover\n-        raise NotImplementedError\n-\n-    async def 
listunspents(self, address: str):\n-        return await self.interface.listunspents(address)\n-\n-    async def getmerkleproof(self, txid: str, blockheight: int):\n-        return await self.interface.get_merkleproof(txid, blockheight)\n-\n-    @property\n-    def available(self) -> bool:\n-        return True\ndiff --git a/spruned/daemon/exceptions.py b/spruned/daemon/exceptions.py\n--- a/spruned/daemon/exceptions.py\n+++ b/spruned/daemon/exceptions.py\n@@ -31,3 +31,7 @@ class ElectrodMissingResponseException(SprunedException):\n \n class NoServersException(SprunedException):\n     pass\n+\n+\n+class BlocksInconsistencyException(SprunedException):\n+    pass\ndiff --git a/spruned/daemon/p2p/__init__.py b/spruned/daemon/p2p/__init__.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/p2p/__init__.py\n@@ -0,0 +1,10 @@\n+from spruned.daemon.p2p import utils\n+from spruned.daemon.p2p.p2p_connection import P2PConnectionPool\n+from spruned.daemon.p2p.p2p_interface import P2PInterface\n+\n+\n+def build(network):\n+    assert network\n+    pool = P2PConnectionPool(connections=16, batcher_timeout=5)\n+    interface = P2PInterface(pool)\n+    return pool, interface\ndiff --git a/spruned/daemon/p2p/p2p_connection.py b/spruned/daemon/p2p/p2p_connection.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/p2p/p2p_connection.py\n@@ -0,0 +1,250 @@\n+import asyncio\n+import async_timeout\n+import time\n+from pycoin.message.InvItem import InvItem, ITEM_TYPE_TX\n+from pycoinnet.Peer import Peer\n+from pycoinnet.PeerEvent import PeerEvent\n+from pycoinnet.networks import MAINNET\n+from pycoinnet.inv_batcher import InvBatcher\n+from pycoinnet.version import version_data_for_peer, NODE_BLOOM\n+from spruned.application.logging_factory import Logger\n+from spruned.application.tools import check_internet_connection, async_delayed_task\n+from spruned.daemon.connection_base_impl import BaseConnection\n+from spruned.daemon.connectionpool_base_impl import BaseConnectionPool\n+\n+\n+class P2PConnection(BaseConnection):\n+    def ping(self, timeout=None):\n+        self.peer.send_msg('ping', int(time.time()))\n+\n+    def __init__(\n+            self, hostname, port, peer=Peer, network=MAINNET, loop=asyncio.get_event_loop(),\n+            use_tor=None, start_score=10,\n+            is_online_checker: callable=None,\n+            timeout=10, delayer=async_delayed_task, expire_errors_after=180,\n+            call_timeout=30):\n+\n+        super().__init__(\n+            hostname=hostname, use_tor=use_tor, loop=loop, start_score=start_score,\n+            is_online_checker=is_online_checker, timeout=timeout, delayer=delayer,\n+            expire_errors_after=expire_errors_after\n+        )\n+        self.port = port\n+        self._peer_factory = peer\n+        self._peer_network = network\n+        self.peer = None\n+        self._version = None\n+        self.loop = loop\n+        self._event_handler = None\n+        self._call_timeout = call_timeout\n+        self._on_block_callbacks = []\n+        self._on_transaction_callbacks = []\n+\n+    @property\n+    def connected(self):\n+        return bool(self.peer)\n+\n+    def add_on_block_callbacks(self, callback):\n+        self._on_block_callbacks.append(callback)\n+\n+    def add_on_transaction_callback(self, callback):\n+        self._on_transaction_callbacks.append(callback)\n+\n+    @property\n+    def peer_event_handler(self) -> PeerEvent:\n+        return self._event_handler\n+\n+    async def connect(self):\n+        try:\n+            async 
with async_timeout.timeout(self._timeout):\n+                reader, writer = await asyncio.open_connection(host=self.hostname, port=self.port)\n+                peer = Peer(\n+                    reader,\n+                    writer,\n+                    self._peer_network.magic_header,\n+                    self._peer_network.parse_from_data,\n+                    self._peer_network.pack_from_data\n+                )\n+                version_data = version_data_for_peer(peer, version=70011, local_services=NODE_BLOOM)\n+                peer.version = await peer.perform_handshake(**version_data)\n+                self._event_handler = PeerEvent(peer)\n+                self._version = peer.version\n+                Logger.p2p.info('Connected to peer %s', self.version)\n+                self.peer = peer\n+                self._setup_events_handler()\n+        except Exception as e:\n+            self.peer = None\n+            Logger.p2p.error('Exception connecting to %s (%s)', self.hostname, e)\n+            self.loop.create_task(self.on_error('connect'))\n+            return\n+\n+        self.loop.create_task(self.on_connect())\n+        return self\n+\n+    async def on_connect(self):\n+        for callback in self._on_connect_callbacks:\n+            self.loop.create_task(callback(self))\n+\n+    async def disconnect(self):\n+        try:\n+            self.peer and self.peer.close()\n+        except:\n+            Logger.p2p.error('Error closing with peer: %s', self.peer.peername())\n+        finally:\n+            self.peer = None\n+\n+    def _setup_events_handler(self):\n+        self.peer_event_handler.set_request_callback('inv', self._on_inv)\n+        self.peer_event_handler.set_request_callback('addr', self._on_addr)\n+        self.peer_event_handler.set_request_callback('alert', self._on_alert)\n+        self.peer_event_handler.set_request_callback('ping', self._on_ping)\n+\n+    def _on_inv(self, event_handler, name, data):\n+        try:\n+            self.loop.create_task(self._process_inv(event_handler, name, data))\n+        except:\n+            Logger.p2p.exception('Exception on inv')\n+\n+    def _on_alert(self, event_handler, name, data):\n+        try:\n+            Logger.p2p.debug('Handle alert: %s, %s, %s', event_handler, name, data)\n+        except:\n+            Logger.p2p.exception('Exception on alert')\n+\n+    def _on_addr(self, event_handler, name, data):\n+        try:\n+            Logger.p2p.debug('Handle addr: %s, %s, %s', event_handler, name, data)\n+            for callback in self._on_peers_callbacks:\n+                self.loop.create_task(callback(self, data))\n+        except:\n+            Logger.p2p.exception('Exception on addr')\n+\n+    def _on_ping(self, event_handler, name, data):\n+        try:\n+            self.peer.send_msg(\"pong\", nonce=data[\"nonce\"])\n+            Logger.p2p.debug('Handle ping: %s, %s, %s', event_handler, name, data)\n+        except:\n+            Logger.p2p.exception('Exception on ping')\n+\n+    async def _process_inv(self, event_handler, name, data):\n+        txs = 0\n+        for item in data.get('items'):\n+            if item.item_type == ITEM_TYPE_TX:\n+                txs += 1\n+                for callback in self._on_transaction_callbacks:\n+                    item: InvItem\n+                    self.loop.create_task(callback(self, item))\n+            else:\n+                Logger.p2p.error('Unhandled InvType: %s, %s, %s', event_handler, name, item)\n+        Logger.p2p.debug('Received %s items, txs: 
%s', len(data.get('items')), txs)\n+\n+\n+class P2PConnectionPool(BaseConnectionPool):\n+    async def on_peer_received_peers(self, peer, *a):\n+        Logger.p2p.debug('Received peers from peer: %s: (%s)', peer.hostname, a)\n+\n+    def __init__(\n+            self,\n+            peers=list(),\n+            network_checker=check_internet_connection,\n+            delayer=async_delayed_task,\n+            loop=asyncio.get_event_loop(),\n+            use_tor=False,\n+            connections=3,\n+            sleep_no_internet=30,\n+            batcher=InvBatcher,\n+            network=MAINNET,\n+            batcher_timeout=30,\n+            ipv6=False\n+    ):\n+        super().__init__(\n+            peers=peers, network_checker=network_checker, delayer=delayer, ipv6=ipv6,\n+            loop=loop, use_tor=use_tor, connections=connections, sleep_no_internet=sleep_no_internet\n+        )\n+        self._batcher_factory = batcher\n+        self._network = network\n+        self._batcher_timeout = batcher_timeout\n+\n+    @property\n+    def available(self):\n+        return len(self.connections) >= self._required_connections\n+\n+    def add_peer(self, peer):\n+        self._peers.append(peer)\n+\n+    @property\n+    def connections(self):\n+        return self._connections\n+\n+    async def connect(self):\n+        await self._check_internet_connectivity()\n+        self._keepalive = True\n+        while not self._peers:\n+            Logger.p2p.warning('Peers not loaded. Sleeping.')\n+            await asyncio.sleep(5)\n+\n+        while self._keepalive:\n+            if not self.is_online():\n+                Logger.p2p.error(\n+                    'Looks like there is no internet connection available. '\n+                    'Sleeping the connection loop for %s',\n+                    self._sleep_on_no_internet_connectivity\n+                )\n+                await asyncio.sleep(self._sleep_on_no_internet_connectivity)\n+                await self._check_internet_connectivity()\n+                continue\n+\n+            missings = self._required_connections - len(self.established_connections)\n+            if missings:\n+                peers = self._pick_multiple_peers(missings)\n+                for peer in peers:\n+                    host, port = peer\n+                    self.loop.create_task(self._connect_peer(host, port))\n+            elif len(self.established_connections) > self._required_connections:\n+                Logger.p2p.warning('Too many connections')\n+                connection = self._pick_connection()\n+                self.loop.create_task(connection.disconnect())\n+            Logger.p2p.info(\n+                'P2PConnectionPool: Sleeping %ss, connected to %s peers', 10, len(self.established_connections)\n+            )\n+            await asyncio.sleep(10)\n+\n+    async def _connect_peer(self, host: str, port: int):\n+        Logger.p2p.debug('Allocating peer %s:%s', host, port)\n+        connection = P2PConnection(host, port, loop=self.loop, network=self._network)\n+        if not await connection.connect():\n+            Logger.p2p.warning(\n+                'Connection to %s - %s failed. 
Connected to %s peers', host, port, len(self.established_connections)\n+            )\n+            return\n+        self._connections.append(connection)\n+        connection.add_on_connect_callback(self.on_peer_connected)\n+        connection.add_on_header_callbacks(self.on_peer_received_header)\n+        connection.add_on_peers_callback(self.on_peer_received_peers)\n+        connection.add_on_error_callback(self.on_peer_error)\n+\n+    async def get(self, inv_item: InvItem):\n+        batcher = self._batcher_factory()\n+        s = time.time()\n+        Logger.p2p.info('Fetching InvItem %s', inv_item)\n+        try:\n+            async with async_timeout.timeout(self._batcher_timeout):\n+                connections = self._pick_multiple_connections(8)\n+                for connection in connections:\n+                    Logger.p2p.debug('Adding connection %s to batcher', connection)\n+                    await batcher.add_peer(connection.peer_event_handler)\n+                future = await batcher.inv_item_to_future(inv_item)\n+                response = await future\n+                Logger.p2p.warning('Stopping batcher')\n+                Logger.p2p.warning('Batcher stopped')\n+                Logger.p2p.info('InvItem %s fetched in %ss', inv_item, round(time.time() - s, 4))\n+                return response and response\n+        except Exception as error:\n+            Logger.p2p.error(\n+                'Error in get InvItem %s, error: %s, failed in %ss', inv_item, str(error), round(time.time() - s, 4)\n+            )\n+        finally:\n+            self.loop.run_in_executor(None, batcher.stop)\n+\n+    async def on_peer_connected(self, peer):\n+        Logger.p2p.debug('on_peer_connected: %s', peer.hostname)\ndiff --git a/spruned/daemon/p2p/p2p_interface.py b/spruned/daemon/p2p/p2p_interface.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/p2p/p2p_interface.py\n@@ -0,0 +1,51 @@\n+import asyncio\n+from typing import Dict, List\n+from pycoin.message.InvItem import ITEM_TYPE_BLOCK, InvItem\n+from pycoin.serialize import h2b_rev\n+from pycoinnet.networks import MAINNET\n+\n+from spruned.application import exceptions\n+from spruned.daemon.p2p import utils\n+from spruned.daemon.p2p.p2p_connection import P2PConnectionPool\n+\n+\n+class P2PInterface:\n+    def __init__(self, connection_pool: P2PConnectionPool, loop=asyncio.get_event_loop(), network=MAINNET):\n+        self.pool = connection_pool\n+        self._on_connect_callbacks = []\n+        self.loop = loop\n+        self.network = network\n+\n+    async def on_connect(self):\n+        for callback in self._on_connect_callbacks:\n+            self.loop.create_task(callback())\n+\n+    async def get_block(self, blockhash: str) -> Dict:\n+        inv_item = InvItem(ITEM_TYPE_BLOCK, h2b_rev(blockhash))\n+        response = await self.pool.get(inv_item)\n+        return response and {\n+            \"block_hash\": str(response.hash()),\n+            \"prev_block_hash\": str(response.previous_block_hash),\n+            \"timestamp\": int(response.timestamp),\n+            \"header_bytes\": bytes(response.as_blockheader().as_bin()),\n+            \"block_object\": response,\n+            \"block_bytes\": bytes(response.as_bin())\n+        }\n+\n+    async def get_blocks(self, start: str, stop: str, max: int) -> List[Dict]:\n+        pass\n+\n+    def add_on_connect_callback(self, callback):\n+        self._on_connect_callbacks.append(callback)\n+\n+    async def start(self):\n+        
self.pool.add_on_connected_observer(self.on_connect)\n+        peers = None\n+        i = 0\n+        while not peers:\n+            if i > 10:\n+                raise exceptions.SprunedException\n+            peers = await utils.dns_bootstrap_servers(self.network)\n+            i += 1\n+        _ = [self.pool.add_peer(peer) for peer in peers]\n+        self.loop.create_task(self.pool.connect())\ndiff --git a/spruned/daemon/p2p/utils.py b/spruned/daemon/p2p/utils.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/p2p/utils.py\n@@ -0,0 +1,26 @@\n+import async_timeout\n+from pycoinnet.dnsbootstrap import dns_bootstrap_host_port_q\n+from pycoinnet.networks import MAINNET\n+import asyncio\n+\n+from spruned.application.logging_factory import Logger\n+\n+\n+async def dns_bootstrap_servers(network=MAINNET, howmany=50):\n+    host_q = dns_bootstrap_host_port_q(network)\n+    ad = []\n+    while 1:\n+        item: asyncio.coroutine = host_q.get()\n+        try:\n+            async with async_timeout.timeout(1):\n+                peer = await item\n+        except asyncio.TimeoutError:\n+            try:\n+                item.close()\n+            except asyncio.CancelledError:\n+                Logger.p2p.debug('Cancelled')\n+            break\n+        ad.append(peer)\n+    return ad\n+\n+\ndiff --git a/spruned/services/__init__.py b/spruned/daemon/tasks/__init__.py\nsimilarity index 100%\nrename from spruned/services/__init__.py\nrename to spruned/daemon/tasks/__init__.py\ndiff --git a/spruned/daemon/tasks/blocks_reactor.py b/spruned/daemon/tasks/blocks_reactor.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/daemon/tasks/blocks_reactor.py\n@@ -0,0 +1,107 @@\n+import asyncio\n+from spruned.daemon import exceptions\n+from spruned.application.logging_factory import Logger\n+from spruned.application.tools import async_delayed_task\n+from spruned.daemon.p2p import P2PInterface\n+from spruned.repositories.repository import Repository\n+\n+\n+class BlocksReactor:\n+    \"\"\"\n+    This reactor keeps non-pruned blocks aligned to the best height.\n+    \"\"\"\n+    def __init__(\n+            self,\n+            repository: Repository,\n+            interface: P2PInterface,\n+            loop=asyncio.get_event_loop(),\n+            prune=200,\n+            delayed_task=async_delayed_task\n+    ):\n+        self.repo = repository\n+        self.interface = interface\n+        self.loop = loop or asyncio.get_event_loop()\n+        self.lock = asyncio.Lock()\n+        self.delayer = delayed_task\n+        self._last_processed_block = None\n+        self._prune = prune\n+        self._max_per_batch = 10\n+        self._available = False\n+        self._fallback_check_interval = 30\n+\n+    def set_last_processed_block(self, last):\n+        if last != self._last_processed_block:\n+            self._last_processed_block = last\n+            Logger.p2p.info(\n+                'Last processed block: %s (%s)',\n+                self._last_processed_block and self._last_processed_block['block_height'],\n+                self._last_processed_block and self._last_processed_block['block_hash'],\n+            )\n+\n+    def on_header(self, best_header):\n+        Logger.p2p.debug('BlocksReactor.on_header: %s', best_header)\n+        self.loop.create_task(self._check_blockchain(best_header))\n+\n+    async def check(self):\n+        try:\n+            best_header = self.repo.headers.get_best_header()\n+            await self._check_blockchain(best_header)\n+            
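# BlocksReactor.check (just below) reschedules itself through the injected
# delayer. A minimal sketch of that delayed-task pattern, assuming spruned's
# async_delayed_task amounts to sleep-then-await, like delayed() here:
import asyncio


async def delayed(coro, seconds):
    # Sleep first, then run the wrapped coroutine.
    await asyncio.sleep(seconds)
    return await coro


async def check(loop, rounds_left):
    print('fallback check, %s rounds left' % rounds_left)
    if rounds_left:
        # Reschedule ourselves after the interval, as the reactor does with
        # its _fallback_check_interval.
        loop.create_task(delayed(check(loop, rounds_left - 1), 0.1))


loop = asyncio.get_event_loop()
loop.run_until_complete(check(loop, 3))
loop.run_until_complete(asyncio.sleep(1))  # let the rescheduled checks fire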
self.loop.create_task(self.delayer(self.check(), self._fallback_check_interval))\n+        except Exception as e:\n+            Logger.p2p.error('Error on BlocksReactor fallback %s', str(e))\n+\n+    async def _check_blockchain(self, best_header):\n+        try:\n+            await self.lock.acquire()\n+            if not self._last_processed_block:\n+                await self._on_blocks_behind_headers(best_header)\n+            elif best_header['block_height'] > self._last_processed_block['block_height']:\n+                await self._on_blocks_behind_headers(best_header)\n+            elif best_header['block_height'] < self._last_processed_block['block_height']:\n+                await self._on_headers_behind_blocks(best_header)\n+            else:\n+                if best_header['block_hash'] != self._last_processed_block['block_hash']:\n+                    raise exceptions.BlocksInconsistencyException\n+        except (\n+            exceptions.BlocksInconsistencyException\n+        ):\n+            Logger.p2p.exception('Exception checking the blockchain')\n+            return\n+        finally:\n+            self.lock.release()\n+\n+    async def _on_blocks_behind_headers(self, best_header):\n+        if self._last_processed_block:\n+            start = self._last_processed_block['block_hash']\n+        else:\n+            _bestheight = best_header['block_height'] - self._prune\n+            _startheight = _bestheight >= 0 and _bestheight or 0\n+            start = self.repo.headers.get_header_at_height(_startheight)\n+\n+        blocks = await self.interface.get_blocks(start, best_header['block_hash'], self._max_per_batch)\n+        try:\n+            self.repo.headers.get_headers(*[block['block_hash'] for block in blocks])\n+        except:\n+            Logger.p2p.exception('Error fetching headers for downloaded blocks')\n+            raise exceptions.BlocksInconsistencyException\n+        try:\n+            saved_blocks = self.repo.blockchain.save_blocks(*blocks)\n+            Logger.p2p.debug('Saved blocks %s', saved_blocks)\n+        except:\n+            Logger.p2p.exception('Error saving blocks %s', blocks)\n+        return\n+\n+    async def _on_headers_behind_blocks(self, best_header):\n+        try:\n+            self.repo.blockchain.get_block(best_header['block_hash'])\n+        except:\n+            Logger.p2p.exception('Error fetching block in headers_behind_blocks behaviour: %s', best_header)\n+            raise exceptions.BlocksInconsistencyException\n+\n+    async def on_connected(self):\n+        self._available = True\n+        #self.loop.create_task(self.check())\n+\n+    async def start(self):\n+        self.interface.add_on_connect_callback(self.on_connected)\n+        self.loop.create_task(self.interface.start())\ndiff --git a/spruned/daemon/electrod/electrod_reactor.py b/spruned/daemon/tasks/headers_reactor.py\nsimilarity index 94%\nrename from spruned/daemon/electrod/electrod_reactor.py\nrename to spruned/daemon/tasks/headers_reactor.py\n--- a/spruned/daemon/electrod/electrod_reactor.py\n+++ b/spruned/daemon/tasks/headers_reactor.py\n@@ -1,21 +1,20 @@\n import asyncio\n-import json\n-from typing import Dict, Tuple\n-\n+from typing import Dict\n import time\n-\n-import os\n from spruned.application.abstracts import HeadersRepository\n-from spruned.daemon.electrod.electrod_connection import ElectrodConnectionPool, ElectrodConnection\n+from spruned.daemon.electrod.electrod_connection import ElectrodConnection\n from spruned.daemon.electrod.electrod_interface import ElectrodInterface\n from spruned.daemon 
import exceptions\n from spruned.application import database\n from spruned.application.logging_factory import Logger\n from spruned.application.tools import get_nearest_parent, async_delayed_task\n-from spruned.daemon.electrod.electrod_service import ElectrodService\n \n \n-class ElectrodReactor:\n+class HeadersReactor:\n+    \"\"\"\n+    This reactor keeps headers aligned to the best height.\n+    Designed to work with the Electrum network; it could easily be ported to P2P.\n+    \"\"\"\n     def __init__(\n             self,\n             repo: HeadersRepository,\n@@ -309,6 +308,8 @@ async def _fetch_headers_chunks(self, chunks_at_time, local_best_header, network\n             _from = rewind_from\n             _to = rewind_from + chunks_at_time\n             if _from > (network_best_header['block_height'] // 2016):\n+                # FIXME: move the chunk handling into the electrod interface\n+                # and just \"fetch headers\" here.\n                 self.synced = True\n                 return\n             headers = await self.interface.get_headers_in_range_from_chunks(_from, _to)\n@@ -356,21 +357,3 @@ async def handle_headers_inconsistency(self):\n             'Headers inconsistency found, removed headers since %s. Current local: %s',\n             remove_headers_since, local_best_header['block_height']\n         )\n-\n-\n-def build_electrod(headers_repository, network, connections) \\\n-        -> Tuple[ElectrodReactor, ElectrodService]:  # pragma: no cover\n-\n-    def load_electrum_servers(network):\n-        _current_path = os.path.dirname(os.path.abspath(__file__))\n-        with open(_current_path + '/electrum_servers.json', 'r') as f:\n-            servers = json.load(f)\n-        return servers[network]\n-\n-    electrod_pool = ElectrodConnectionPool(\n-        connections=connections, electrum_servers=load_electrum_servers(\"bc_mainnet\")\n-    )\n-    electrod_interface = ElectrodInterface(electrod_pool)\n-    electrod_reactor = ElectrodReactor(headers_repository, electrod_interface)\n-    electrod_service = ElectrodService(electrod_interface)\n-    return electrod_reactor, electrod_service\ndiff --git a/spruned/repositories/blockchain_repository.py b/spruned/repositories/blockchain_repository.py\nnew file mode 100644\n--- /dev/null\n+++ b/spruned/repositories/blockchain_repository.py\n@@ -0,0 +1,141 @@\n+import io\n+import binascii\n+import time\n+from typing import Dict, List\n+from pycoin.block import Block\n+from pycoin.tx.Tx import Tx\n+from spruned.application import utils, exceptions\n+from spruned.application.database import ldb_batch\n+from spruned.application.logging_factory import Logger\n+\n+TRANSACTION_PREFIX = b'\x00'\n+BLOCK_PREFIX = b'\x01'\n+\n+\n+class BlockchainRepository:\n+    \"\"\"\n+    this special repository tracks what's in it, to enforce the size limit\n+    \"\"\"\n+    def __init__(self, session, storage_name, dbpath):\n+        self.storage_name = storage_name\n+        self.session = session\n+        self.dbpath = dbpath\n+        self._cache = None\n+\n+    def set_cache(self, cache):\n+        self._cache = cache\n+\n+    def _get_key(self, name: str, prefix=b''):\n+        if isinstance(prefix, str):\n+            prefix = prefix.encode()\n+        if isinstance(name, str):\n+            name = binascii.unhexlify(name.encode())\n+        return (prefix and (prefix + b'.') or b'') + name\n+\n+    @ldb_batch\n+    def save_block(self, block: Dict, tracker=None) -> Dict:\n+        saved = self._save_block(block)\n+        
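# BlockchainRepository namespaces every record on disk as
# storage_name + b'.' + prefix + b'.' + raw hash bytes. A minimal sketch of
# that key scheme, with a plain dict standing in for the LevelDB session:
import binascii

TRANSACTION_PREFIX = b'\x00'
BLOCK_PREFIX = b'\x01'


def get_key(name, prefix=b''):
    # Hex identifiers are stored as raw bytes, behind a one-byte type prefix.
    if isinstance(name, str):
        name = binascii.unhexlify(name.encode())
    return (prefix and (prefix + b'.') or b'') + name


storage = {}  # stand-in for the LevelDB session
storage_name = b'blockchain'
txid = 'aa' * 32

key = storage_name + b'.' + get_key(txid, prefix=TRANSACTION_PREFIX)
storage[key] = b'transaction bytes + trailing block hash bytes'
assert storage[storage_name + b'.' + get_key(txid, prefix=TRANSACTION_PREFIX)]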
tracker and tracker.track(saved['key'], saved['size'])\n+        return saved\n+\n+    @ldb_batch\n+    def save_blocks(self, *blocks: Dict) -> List[Dict]:\n+        saved = []\n+        for block in blocks:\n+            saved.append(self._save_block(block))\n+        return saved\n+\n+    @ldb_batch\n+    def _save_block(self, block: Dict) -> Dict:\n+        _block = block['block_object']\n+        key = self._get_key(_block.id(), prefix=BLOCK_PREFIX)\n+        data = bytes(_block.as_blockheader().as_bin())\n+        for tx in _block.txs:\n+            data += binascii.unhexlify(str(tx.id()))\n+            transaction = {\n+                'transaction_bytes': tx.as_bin(),\n+                'block_hash': _block.id(),\n+                'txid': tx.id()\n+            }\n+            self.save_transaction(transaction)\n+        assert len(data) % 32 == 16\n+        self.session.put(self.storage_name + b'.' + key, data)\n+        block['key'] = key\n+        block['size'] = len(block['block_bytes'])\n+        return block\n+\n+    @ldb_batch\n+    def save_transaction(self, transaction: Dict) -> Dict:\n+        blockhash = binascii.unhexlify(transaction['block_hash'].encode())\n+        data = transaction['transaction_bytes'] + blockhash\n+        key = self._get_key(transaction['txid'], prefix=TRANSACTION_PREFIX)\n+        self.session.put(self.storage_name + b'.' + key, data)\n+        return transaction\n+\n+    @ldb_batch\n+    def save_transactions(self, *transactions: Dict) -> List[Dict]:\n+        saved = []\n+        for transaction in transactions:\n+            saved.append(self.save_transaction(transaction))\n+        return saved\n+\n+    def get_block(self, blockhash: str) -> (None, Dict):\n+        key = self._get_key(blockhash, prefix=BLOCK_PREFIX)\n+        now = time.time()\n+        data = self.session.get(self.storage_name + b'.' + key)\n+        if not data:\n+            Logger.leveldb.debug('%s not found under key %s', blockhash, key)\n+            return\n+\n+        header = data[:80]\n+        txids = utils.split(data[80:], offset=32)\n+        block = Block.parse(io.BytesIO(header), include_transactions=False)\n+        transactions = [self.get_transaction(txid) for txid in txids]\n+        if len(txids) != len(transactions):\n+            Logger.cache.error('Storage corrupted')\n+            return\n+        Logger.leveldb.debug('Found %s transactions for block %s', len(transactions), blockhash)\n+        block.set_txs([transaction['transaction_object'] for transaction in transactions])\n+        Logger.leveldb.debug('Blockchain storage, transaction mounted in {:.4f}'.format(time.time() - now))\n+        return {\n+            'block_hash': block.id(),\n+            'block_bytes': block.as_bin(),\n+            'header_bytes': header,\n+            'timestamp': block.timestamp,\n+            'block_object': block\n+        }\n+\n+    def get_transaction(self, txid) -> (None, Dict):\n+        return self._get_transaction(txid)\n+\n+    def _get_transaction(self, txid: (str, bytes)):\n+        key = self._get_key(txid, prefix=TRANSACTION_PREFIX)\n+        data = self.session.get(self.storage_name + b'.' 
+ key)\n+        if not data:\n+            return\n+        blockhash = data[-32:]\n+        if not int.from_bytes(blockhash[:8], 'little'):\n+            data = data[:-32]\n+        return {\n+            'transaction_bytes': data,\n+            'block_hash': blockhash,\n+            'txid': txid,\n+            'transaction_object': Tx.from_bin(data)\n+        }\n+\n+    @ldb_batch\n+    def remove_block(self, blockhash: str):\n+        block = self.get_block(blockhash)\n+        if block:\n+            for tx in block['block_object'].txs:\n+                self.remove_transaction(str(tx.id()))\n+        else:\n+            Logger.leveldb.warning('remove block on block not found: %s', blockhash)\n+        key = self._get_key(blockhash, prefix=BLOCK_PREFIX)\n+        self.session.delete(self.storage_name + b'.' + key)\n+\n+    @ldb_batch\n+    def remove_transaction(self, txid: str):\n+        key = self._get_key(txid, prefix=TRANSACTION_PREFIX)\n+        self.session.get(self.storage_name + b'.' + key)\n+        self.session.delete(self.storage_name + b'.' + key)\ndiff --git a/spruned/application/headers_repository.py b/spruned/repositories/headers_repository.py\nsimilarity index 85%\nrename from spruned/application/headers_repository.py\nrename to spruned/repositories/headers_repository.py\n--- a/spruned/application/headers_repository.py\n+++ b/spruned/repositories/headers_repository.py\n@@ -9,6 +9,10 @@\n class HeadersSQLiteRepository(HeadersRepository):\n     def __init__(self, session):\n         self.session = session\n+        self._cache = None\n+\n+    def set_cache(self, cache):\n+        self._cache = cache\n \n     @staticmethod\n     def _header_model_to_dict(header: database.Header, nextblockhash: (None, str), prevblockhash: (None, str)) -> Dict:\n@@ -48,6 +52,21 @@ def get_headers_since_height(self, height: int):\n             ) for h in headers\n         ] or []\n \n+    def get_headers(self, *blockhashes: str):\n+        session = self.session()\n+        headers = session.query(database.Header).filter(database.Header.blockhash.in_(blockhashes))\\\n+            .order_by(database.Header.blockheight.asc()).all()\n+        if set([h.blockhash for h in headers]) - set(blockhashes):\n+            # not sure if all raises, investigate # FIXME\n+            raise exceptions.HeadersInconsistencyException\n+        return headers and [\n+            self._header_model_to_dict(\n+                h,\n+                nextblockhash=self.get_block_hash(h.blockheight+1),\n+                prevblockhash=h.blockheight != 0 and self.get_block_hash(h.blockheight-1)\n+            ) for h in headers\n+        ] or []\n+\n     @database.atomic\n     def save_header(self, blockhash: str, blockheight: int, headerbytes: bytes, prev_block_hash: str):\n         session = self.session()\n@@ -124,6 +143,8 @@ def get_block_height(self, blockhash: str):\n     def get_block_header(self, blockhash: str):\n         session = self.session()\n         header = session.query(database.Header).filter_by(blockhash=blockhash).one_or_none()\n+        if not header:\n+            return\n         nextblockhash = self.get_block_hash(header.blockheight + 1)\n         prevblockhash = header.blockheight != 0 and self.get_block_hash(header.blockheight - 1)\n-        return header and self._header_model_to_dict(header, nextblockhash, prevblockhash)\n+        return self._header_model_to_dict(header, nextblockhash, prevblockhash)\ndiff --git a/spruned/repositories/repository.py b/spruned/repositories/repository.py\nnew 
file mode 100644\n--- /dev/null\n+++ b/spruned/repositories/repository.py\n@@ -0,0 +1,70 @@\n+from spruned import settings\n+from spruned.application.database import ldb_batch\n+from spruned.application.logging_factory import Logger\n+from spruned.repositories.headers_repository import HeadersSQLiteRepository\n+from spruned.repositories.blockchain_repository import BlockchainRepository\n+\n+\n+class Repository:\n+    def __init__(self, headers, blocks):\n+        self._headers_repository = headers\n+        self._blockchain_repository = blocks\n+        self.ldb = None\n+        self.sqlite = None\n+        self.cache = None\n+\n+    @property\n+    def headers(self) -> HeadersSQLiteRepository:\n+        return self._headers_repository\n+\n+    @property\n+    def blockchain(self) -> BlockchainRepository:\n+        return self._blockchain_repository\n+\n+    @classmethod\n+    def instance(cls):\n+        from spruned.application import database\n+        headers_repository = HeadersSQLiteRepository(database.sqlite)\n+        blocks_repository = BlockchainRepository(\n+            database.storage_ldb,\n+            settings.LEVELDB_BLOCKCHAIN_SLUG,\n+            settings.LEVELDB_BLOCKCHAIN_ADDRESS\n+        )\n+        i = cls(\n+            headers=headers_repository,\n+            blocks=blocks_repository\n+        )\n+        i.sqlite = database.sqlite\n+        i.ldb = database.storage_ldb\n+        return i\n+\n+    def ensure_integrity(self):\n+        self._ensure_no_stales_in_blockchain_repository()\n+\n+    @ldb_batch\n+    def _ensure_no_stales_in_blockchain_repository(self):\n+        index = self.cache.get_index()\n+        if not index:\n+            Logger.cache.debug('Cache index not found')\n+            return\n+        index = index.get('keys', {}).keys()\n+        if not index:\n+            Logger.cache.debug('Empty index found')\n+            return\n+        iterator = self.ldb.iterator()\n+        purged = 0\n+        for x in iterator:\n+            if x[0] not in index and x[0] != self.cache.cache_name:\n+                self.ldb.delete(x[0])\n+                purged += 1\n+        self.ldb.compact_range()\n+        Logger.cache.debug(\n+            'Purged from storage %s elements not tracked by cache, total tracked: %s',\n+            purged, len(index)\n+        )\n+        return\n+\n+    def set_cache(self, cache):\n+        self.cache = cache\n+        self.headers.set_cache(cache)\n+        self.blockchain.set_cache(cache)\ndiff --git a/spruned/services/bitgo_service.py b/spruned/services/bitgo_service.py\ndeleted file mode 100644\n--- a/spruned/services/bitgo_service.py\n+++ /dev/null\n@@ -1,47 +0,0 @@\n-from spruned.application import settings, exceptions\n-from spruned.application.abstracts import RPCAPIService\n-from spruned.application.logging_factory import Logger\n-from spruned.application.tools import normalize_transaction\n-from spruned.services.http_client import HTTPClient\n-\n-\n-class BitGoService(RPCAPIService):\n-    def __init__(self, coin, http_client=HTTPClient):\n-        assert coin == settings.Network.BITCOIN\n-        self.client = http_client(baseurl='https://www.bitgo.com/api/v1/')\n-        self.throttling_error_codes = []\n-\n-    async def get(self, path):\n-        try:\n-            return await self.client.get(path)\n-        except exceptions.HTTPClientException as e:\n-            from aiohttp import ClientResponseError\n-            cause: ClientResponseError = e.__cause__\n-            if isinstance(cause, 
ClientResponseError):\n-                if cause.code == 429:\n-                    self._increase_errors()\n-\n-    async def getrawtransaction(self, txid, **_):\n-        data = await self.get('tx/' + txid)\n-        return data and {\n-            'rawtx': normalize_transaction(data['hex']),\n-            'blockhash': data['blockhash'],\n-            'size': None,\n-            'txid': data['id'],\n-            'source': 'bitgo'\n-        }\n-\n-    async def getblock(self, blockhash):\n-        Logger.third_party.debug('getblock from %s' % self.__class__)\n-        data = await self.get('block/' + blockhash)\n-        return data and {\n-            'source': 'bitgo',\n-            'hash': data['id'],\n-            'tx': data['transactions'],\n-        }\n-\n-    async def gettxout(self, txid: str, index: int):  # pragma: no cover\n-        \"\"\"\n-        looks like bitgo is a dead end for getxout\n-        \"\"\"\n-        pass\ndiff --git a/spruned/services/bitpay_service.py b/spruned/services/bitpay_service.py\ndeleted file mode 100644\n--- a/spruned/services/bitpay_service.py\n+++ /dev/null\n@@ -1,18 +0,0 @@\n-from spruned.application import settings\n-from spruned.services.http_client import HTTPClient\n-from spruned.services.insight_service import InsightService\n-\n-\n-class BitpayService(InsightService):\n-    def __init__(self, coin, httpclient=HTTPClient, utxo_tracker=None):\n-        assert coin == settings.Network.BITCOIN\n-        self.client = httpclient(baseurl='https://insight.bitpay.com/api/')\n-        self.throttling_error_codes = []\n-        self.utxo_tracker = utxo_tracker\n-\n-\n-if __name__ == '__main__':\n-    import asyncio\n-    loop = asyncio.get_event_loop()\n-    api = BitpayService(settings.NETWORK)\n-    print(loop.run_until_complete(api.gettxout('8e4c29e2c37a1107f732492a94a94197bbbc6f93aa97b7b3e58852d42680b923', 0)))\ndiff --git a/spruned/services/blockcypher_service.py b/spruned/services/blockcypher_service.py\ndeleted file mode 100644\n--- a/spruned/services/blockcypher_service.py\n+++ /dev/null\n@@ -1,98 +0,0 @@\n-from datetime import datetime\n-import time\n-from typing import Dict\n-from spruned.application import settings\n-from spruned.application.abstracts import RPCAPIService\n-from spruned.application.logging_factory import Logger\n-from spruned.application.tools import normalize_transaction\n-from spruned.services.http_client import HTTPClient\n-\n-\n-class BlockCypherService(RPCAPIService):\n-    def __init__(self, coin, api_token=None, httpclient=HTTPClient, utxo_tracker=None):\n-        coin_url = {\n-            settings.Network.BITCOIN: 'btc/main/',\n-            settings.Network.BITCOIN_TESTNET: 'btc/testnet/'\n-        }[coin]\n-        self.client = httpclient(baseurl='https://api.blockcypher.com/v1/' + coin_url)\n-        self._e_d = datetime(1970, 1, 1)\n-        self.api_token = api_token\n-        self.throttling_error_codes = []\n-        self.utxo_tracker = utxo_tracker\n-\n-    async def getrawtransaction(self, txid, **_):\n-        query = '?includeHex=1&limit=1'\n-        query = self.api_token and query + '&token=%s' % self.api_token or query\n-        data = await self.get('txs/' + txid + query)\n-        return data and {\n-            'rawtx': normalize_transaction(data['hex']),\n-            'blockhash': data['block_hash'],\n-            'size': None,\n-            'txid': txid,\n-            'source': 'blockcypher'\n-        }\n-\n-    async def getblock(self, blockhash):\n-        Logger.third_party.debug('getblock 
from %s' % self.__class__)\n-        _s = 0\n-        _l = 500\n-        d = None\n-        while 1:\n-            # FIXME - Make it async concurr etc..\n-            query = '?txstart=%s&limit=%s' % (_s, _l)\n-            query = self.api_token and query + '&token=%s' % self.api_token or query\n-            res = await self.get('blocks/' + blockhash + query)\n-            if not res:\n-                return\n-            if not self.api_token:\n-                time.sleep(0.5)\n-            if d is None:\n-                d = res\n-            else:\n-                d['txids'].extend(res['txids'])\n-            if len(res['txids']) < 500:\n-                break\n-            _s += 500\n-            _l += 500\n-        return {\n-            'source': 'blockcypher',\n-            'hash': d['hash'],\n-            'tx': d['txids']\n-        }\n-\n-    def _track_spents(self, data):\n-        for i, _v in enumerate(data.get('vout', [])):\n-            _v.get('spent_by') and self.utxo_tracker.track_utxo_spent(\n-                data['txid'],\n-                i,\n-                spent_by=_v.get('spent_by')\n-            )\n-\n-    @staticmethod\n-    def _normalize_scripttype(script_type):\n-        return {\n-            \"pay-to-pubkey-hash\": \"pubkeyhash\",\n-            \"pay-to-script-hash\": \"scripthash\"\n-        }[script_type]\n-        # this is broken and needs to be extended\n-\n-    def _format_txout(self, data: Dict, index: int):\n-        return {\n-            \"in_block\": data.get(\"block_hash\"),\n-            \"in_block_height\": data.get(\"block_height\"),\n-            \"value_satoshi\": data[\"outputs\"][index][\"value\"],\n-            \"script_hex\": data[\"outputs\"][index][\"script\"],\n-            \"script_asm\": None,\n-            \"script_type\": self._normalize_scripttype(data[\"outputs\"][index][\"script_type\"]),\n-            \"addresses\": data[\"outputs\"][index].get(\"addresses\", []),\n-            \"unspent\": not bool(data[\"outputs\"][index].get(\"spent_by\", False))\n-        }\n-\n-    async def gettxout(self, txid: str, index: int):\n-        query = '?includeHex=1&limit=1'\n-        query = self.api_token and query + '&token=%s' % self.api_token or query\n-        data = await self.get('txs/' + txid + query)\n-        if not data or index >= len(data.get('outputs', [])):\n-            return\n-        self.utxo_tracker and self._track_spents(data)\n-        return self._format_txout(data, index)\ndiff --git a/spruned/services/blockexplorer_service.py b/spruned/services/blockexplorer_service.py\ndeleted file mode 100644\n--- a/spruned/services/blockexplorer_service.py\n+++ /dev/null\n@@ -1,11 +0,0 @@\n-from spruned.application import settings\n-from spruned.services.http_client import HTTPClient\n-from spruned.services.insight_service import InsightService\n-\n-\n-class BlockexplorerService(InsightService):\n-    def __init__(self, coin, httpclient=HTTPClient, utxo_tracker=None):\n-        assert coin == settings.Network.BITCOIN\n-        self.client = httpclient(baseurl='https://blockexplorer.com/api/')\n-        self.throttling_error_codes = []\n-        self.utxo_tracker = utxo_tracker\ndiff --git a/spruned/services/blocktrail_service.py b/spruned/services/blocktrail_service.py\ndeleted file mode 100644\n--- a/spruned/services/blocktrail_service.py\n+++ /dev/null\n@@ -1,60 +0,0 @@\n-from typing import Dict\n-\n-from spruned.application import settings\n-from spruned.application.abstracts import RPCAPIService\n-from 
spruned.services.http_client import HTTPClient\n-\n-\n-class BlocktrailService(RPCAPIService):\n-    def __init__(self, coin, api_key=None, httpclient=HTTPClient, utxo_tracker=None):\n-        coin_url = {\n-            settings.NETWORK.BITCOIN: 'btc/'\n-        }[coin]\n-        self.client = httpclient(baseurl='https://api.blocktrail.com/v1/' + coin_url)\n-        assert api_key is not None\n-        self.api_key = api_key\n-        self.throttling_error_codes = []\n-        self.utxo_tracker = utxo_tracker\n-\n-    async def getrawtransaction(self, txid, **_):\n-        url = 'transaction/' + txid + '?api_key=' + self.api_key\n-        data = await self.get(url)\n-        return data and {\n-            'source': 'blocktrail',\n-            'rawtx': None,\n-            'blockhash': data['block_hash'],\n-            'txid': txid\n-        }\n-\n-    async def getblock(self, blockhash):   # pragma: no cover\n-        pass\n-\n-    def _track_spents(self, data):\n-        for i, _v in enumerate(data.get('vout', [])):\n-            _v.get('spent_hash') and self.utxo_tracker.track_utxo_spent(\n-                data['hash'],\n-                i,\n-                spent_by=_v.get('spent_hash'),\n-                spent_at_index=_v.get('spent_index'),\n-            )\n-\n-    @staticmethod\n-    def _format_txout(data: Dict, index: int):\n-        return {\n-            \"in_block\": data.get(\"blockhash\"),\n-            \"in_block_height\": data.get(\"blockheight\"),\n-            \"value_satoshi\": data[\"outputs\"][index][\"value\"],\n-            \"script_hex\": data[\"outputs\"][index][\"script_hex\"],\n-            \"script_asm\": data[\"outputs\"][index][\"script\"],\n-            \"script_type\": data[\"outputs\"][index][\"type\"],\n-            \"addresses\": [x for x in [data[\"outputs\"][index].get(\"address\", None)] if x is not None],\n-            \"unspent\": not bool(data[\"outputs\"][index][\"spent_hash\"])\n-        }\n-\n-    async def gettxout(self, txid, index):\n-        url = 'transaction/' + txid + '?api_key=' + self.api_key\n-        data = await self.get(url)\n-        if not data:\n-            return\n-        self.utxo_tracker and self._track_spents(data)\n-        return self._format_txout(data, index)\ndiff --git a/spruned/services/chainflyer_service.py b/spruned/services/chainflyer_service.py\ndeleted file mode 100644\n--- a/spruned/services/chainflyer_service.py\n+++ /dev/null\n@@ -1,39 +0,0 @@\n-from spruned.application import settings\n-from spruned.application.abstracts import RPCAPIService\n-from spruned.application.logging_factory import Logger\n-from spruned.services.http_client import HTTPClient\n-\n-\n-class ChainFlyerService(RPCAPIService):\n-    def __init__(self, coin):\n-        assert coin == settings.Network.BITCOIN\n-        self.client = HTTPClient(baseurl='https://chainflyer.bitflyer.jp/v1/')\n-        self.throttling_error_codes = []\n-\n-    async def getrawtransaction(self, txid, **_):\n-        \"\"\"\n-        data = await self.get('tx/' + txid)\n-        return data and {\n-            'rawtx': None,\n-            'blockhash': None,\n-            'size': data['size'],\n-            'txid': data['tx_hash'],\n-            'source': 'chainflyer'\n-        }\n-        \"\"\"\n-        pass\n-\n-    async def getblock(self, blockhash):\n-        Logger.third_party.debug('getblock from %s' % self.__class__)\n-        data = await self.get('block/' + blockhash)\n-        return data and {\n-            'source': 'chainflyer',\n-            
'hash': data['block_hash'],\n-            'tx': data['tx_hashes'],\n-        }\n-\n-    async def gettxout(self, txid: str, index: int):   # pragma: no cover\n-        \"\"\"\n-        chainflyer doesn't provide enough informations to build gettxout\n-        \"\"\"\n-        pass\ndiff --git a/spruned/services/chainso_service.py b/spruned/services/chainso_service.py\ndeleted file mode 100644\n--- a/spruned/services/chainso_service.py\n+++ /dev/null\n@@ -1,51 +0,0 @@\n-from typing import Dict\n-\n-from spruned.application import settings, exceptions\n-from spruned.application.abstracts import RPCAPIService\n-from spruned.application.logging_factory import Logger\n-from spruned.application.tools import normalize_transaction\n-from spruned.services.http_client import HTTPClient\n-\n-\n-class ChainSoService(RPCAPIService):\n-    def __init__(self, coin, utxo_tracker=None):\n-        self._coin_url = {\n-            settings.Network.BITCOIN: 'BTC/'\n-        }[coin]\n-        self.client = HTTPClient(baseurl='https://chain.so/api/v2/')\n-        self.errors = []\n-        self.errors_ttl = 5\n-        self.max_errors_before_downtime = 1\n-        self.throttling_error_codes = (429, )\n-        self.utxo_tracker = utxo_tracker\n-\n-    async def getrawtransaction(self, txid, **_):\n-        data = await self.get('get_tx/' + self._coin_url + txid)\n-        return data and data.get('success') and {\n-            'rawtx': normalize_transaction(data['data']['tx_hex']),\n-            'blockhash': data['data']['blockhash'],\n-            'txid': txid,\n-            'source': 'chainso'\n-        }\n-\n-    async def getblock(self, blockhash):\n-        Logger.third_party.debug('getblock from %s' % self.__class__)\n-        data = await self.get('get_block/' + self._coin_url + blockhash)\n-        return data and data.get('success') and {\n-            'source': 'chainso',\n-            'hash': data['data']['blockhash'],\n-            'tx': data['data']['txs']\n-        }\n-\n-    async def gettxout(self, txid: str, index: int):   # pragma: no cover\n-        \"\"\"\n-        https://chain.so/api#get-is-tx-output-spent\n-        \"\"\"\n-        pass\n-\n-\n-if __name__ == '__main__':\n-    import asyncio\n-    loop = asyncio.get_event_loop()\n-    api = ChainSoService(settings.NETWORK)\n-    print(loop.run_until_complete(api.gettxout('8e4c29e2c37a1107f732492a94a94197bbbc6f93aa97b7b3e58852d42680b923', 0)))\ndiff --git a/spruned/services/http_client.py b/spruned/services/http_client.py\ndeleted file mode 100644\n--- a/spruned/services/http_client.py\n+++ /dev/null\n@@ -1,43 +0,0 @@\n-import asyncio\n-import aiohttp\n-import async_timeout\n-from spruned.application import exceptions\n-from spruned.application.logging_factory import Logger\n-\n-\n-class HTTPClient:\n-    def __init__(self, baseurl):\n-        self.baseurl = baseurl\n-\n-    async def get(self, *a, json_response=True, **kw):\n-        url = self.baseurl + a[0]\n-        try:\n-            async with async_timeout.timeout(15):\n-                async with aiohttp.ClientSession() as session:\n-                    async with session as s:\n-                        header = {}\n-                        header['content-type'] = json_response and 'application/json' or 'text/html'\n-                        response = await s.get(url, headers=header, **kw)\n-                        response.raise_for_status()\n-                        res = await response.json() if json_response else await response.read()\n-        except 
(aiohttp.ClientResponseError, asyncio.TimeoutError, aiohttp.ClientError) as e:\n-            Logger.third_party.exception('Exception on call: %s' % url)\n-            raise exceptions.HTTPClientException from e\n-        return res\n-\n-    async def post(self, *a, json_response=True, **kw):\n-        url = self.baseurl + a[0]\n-        try:\n-            async with async_timeout.timeout(10):\n-                async with aiohttp.ClientSession() as session:\n-                    async with session as s:\n-\n-                            header = {}\n-                            header['content-type'] = json_response and 'application/json' or 'text/html'\n-                            response = await s.post(url, headers=header, **kw)\n-                            response.raise_for_status()\n-                            res = await response.json() if json_response else await response.read()\n-        except (aiohttp.ClientResponseError, asyncio.TimeoutError, aiohttp.ClientError) as e:\n-            Logger.third_party.exception('Exception on call: %s' % url)\n-            raise exceptions.HTTPClientException from e\n-        return res\ndiff --git a/spruned/services/insight_service.py b/spruned/services/insight_service.py\ndeleted file mode 100644\n--- a/spruned/services/insight_service.py\n+++ /dev/null\n@@ -1,60 +0,0 @@\n-from typing import Dict\n-from spruned.application.abstracts import RPCAPIService\n-from spruned.application.logging_factory import Logger\n-\n-\n-class InsightService(RPCAPIService):\n-    client = None\n-    throttling_error_codes = []\n-    utxo_tracker = None\n-\n-    async def getrawtransaction(self, txid, **_):\n-        data = await self.get('tx/' + txid)\n-        return data and {\n-            'rawtx': None,\n-            'blockhash': data['blockhash'],\n-            'txid': txid,\n-            \"source\": self.__class__.__name__.replace(\"Service\", \"\").lower(),\n-        }\n-\n-    async def getblock(self, blockhash):   # pragma: no cover\n-        pass\n-        \"\"\"\n-        Logger.third_party.debug('getblock from %s' % self.__class__)\n-        data = await self.get('block/' + blockhash)\n-        return data and {\n-            \"source\": self.__class__.__name__.replace(\"Service\", \"\").lower(),\n-            'hash': data['hash'],\n-            'tx': None\n-        }\n-        \"\"\"\n-\n-    def _track_spents(self, data):\n-        for i, _v in enumerate(data.get('vout', [])):\n-            _v.get('spentTxId') and self.utxo_tracker.track_utxo_spent(\n-                data['txid'],\n-                i,\n-                spent_by=_v.get('spentTxId'),\n-                spent_at_index=_v.get('spentIndex'),\n-                spent_at_height=_v.get('spentAtHeight')\n-            )\n-\n-    def _format_txout(self, data: Dict, index: int):\n-        return {\n-            \"source\": self.__class__.__name__.replace(\"Service\", \"\").lower(),\n-            \"in_block\": data[\"blockhash\"],\n-            \"in_block_height\": data[\"blockheight\"],\n-            \"value_satoshi\": int(float(data[\"vout\"][index][\"value\"])*10**8),\n-            \"script_hex\": data[\"vout\"][index][\"scriptPubKey\"][\"hex\"],\n-            \"script_asm\": data[\"vout\"][index][\"scriptPubKey\"][\"asm\"],\n-            \"script_type\": data[\"vout\"][index][\"scriptPubKey\"][\"type\"],\n-            \"addresses\": data[\"vout\"][index][\"scriptPubKey\"].get(\"addresses\", []),\n-            \"unspent\": not bool(data[\"vout\"][index][\"spentTxId\"])\n-        }\n-\n-    
async def gettxout(self, txid, index):\n-        data = await self.get('tx/' + txid)\n-        if not data or index >= len(data.get('vout', [])):\n-            return\n-        self.utxo_tracker and self._track_spents(data)\n-        return self._format_txout(data, index)\n\\ No newline at end of file\ndiff --git a/spruned/services/localbitcoins_service.py b/spruned/services/localbitcoins_service.py\ndeleted file mode 100644\n--- a/spruned/services/localbitcoins_service.py\n+++ /dev/null\n@@ -1,11 +0,0 @@\n-from spruned.application import settings\n-from spruned.services.http_client import HTTPClient\n-from spruned.services.insight_service import InsightService\n-\n-\n-class LocalbitcoinsService(InsightService):\n-    def __init__(self, coin, httpclient=HTTPClient, utxo_tracker=None):\n-        assert coin == settings.Network.BITCOIN\n-        self.client = httpclient(baseurl='https://localbitcoinschain.com/api/')\n-        self.throttling_error_codes = []\n-        self.utxo_tracker = utxo_tracker\ndiff --git a/spruned/services/thirdparty_service.py b/spruned/services/thirdparty_service.py\ndeleted file mode 100644\n--- a/spruned/services/thirdparty_service.py\n+++ /dev/null\n@@ -1,116 +0,0 @@\n-import random\n-from spruned.application import exceptions\n-from spruned.application.abstracts import RPCAPIService\n-\n-# Services to avoid to obtain this information\n-\n-\n-class ThirdPartyServiceDelegate(RPCAPIService):\n-    \"\"\"\n-    this class is a necessary evil to track unspents.\n-\n-\n-    at the moment.\n-    \"\"\"\n-    def __init__(self):\n-        self._getrawtransaction_services = []\n-        self._gettxout_services = []\n-        self._getblock_services = []\n-        self.retries = 2\n-\n-    def add_getrawtransaction_service(self, service: RPCAPIService):\n-        self._getrawtransaction_services.append(service)\n-\n-    def add_gettxout_service(self, service: RPCAPIService):\n-        self._gettxout_services.append(service)\n-\n-    def add_getblock_service(self, service: RPCAPIService):\n-        self._getblock_services.append(service)\n-\n-    @property\n-    def getrawtransaction_services(self):\n-        return [ser for ser in self._getrawtransaction_services if ser.available]\n-\n-    @property\n-    def gettxout_services(self):\n-        return [ser for ser in self._gettxout_services if ser.available]\n-\n-    @property\n-    def getblock_services(self):\n-        return [ser for ser in self._getblock_services if ser.available]\n-\n-    async def _get(self, call, *a):\n-        s = {\n-            'getblock': self.getblock_services,\n-            'gettxout': self.gettxout_services,\n-            'getrawtransaction': self.getrawtransaction_services\n-        }\n-        result = None\n-        i = 0\n-        tries = []\n-        while not result:\n-            if i > self.retries*len(s[call])*10:\n-                raise exceptions.ServiceException\n-\n-            source = random.choice(s[call])\n-            if tries.count(source.__class__.__name__) > self.retries:\n-                i += 1\n-                continue\n-\n-            tries.append(source.__class__.__name__)\n-            result = await getattr(source, call)(*a)\n-        return result\n-\n-    async def getblock(self, blockhash: str):\n-        return await self._get('getblock', blockhash)\n-\n-    async def getrawtransaction(self, txid: str, verbose=False):\n-        return await self._get('getrawtransaction', txid)\n-\n-    async def gettxout(self, txid: str, index: int):\n-        return await 
self._get('gettxout', txid, index)\n-\n-\n-def builder():  # pragma: no cover\n-    from spruned.application import settings\n-\n-    from spruned.services.bitgo_service import BitGoService\n-    from spruned.services.bitpay_service import BitpayService\n-    from spruned.services.blockexplorer_service import BlockexplorerService\n-    from spruned.services.blocktrail_service import BlocktrailService\n-    from spruned.services.chainflyer_service import ChainFlyerService\n-    from spruned.services.chainso_service import ChainSoService\n-    from spruned.services.blockcypher_service import BlockCypherService\n-    from spruned.services.localbitcoins_service import LocalbitcoinsService\n-\n-    chainso = ChainSoService(settings.NETWORK)\n-    blocktrail = settings.BLOCKTRAIL_API_KEY and BlocktrailService(settings.NETWORK,\n-                                                                   api_key=settings.BLOCKTRAIL_API_KEY)\n-    blockcypher = BlockCypherService(settings.NETWORK, api_token=settings.BLOCKCYPHER_API_TOKEN)\n-    bitgo = BitGoService(settings.NETWORK)\n-    chainflyer = ChainFlyerService(settings.NETWORK)\n-    blockexplorer = BlockexplorerService(settings.NETWORK)\n-    bitpay = BitpayService(settings.NETWORK)\n-    localbitcoins = LocalbitcoinsService(settings.NETWORK)\n-\n-    third_party_service = ThirdPartyServiceDelegate()\n-\n-    third_party_service.add_getblock_service(bitgo)\n-    third_party_service.add_getblock_service(blockcypher)\n-    third_party_service.add_getblock_service(chainflyer)\n-    third_party_service.add_getblock_service(chainso)\n-\n-    third_party_service.add_gettxout_service(blockcypher)\n-    third_party_service.add_gettxout_service(bitpay)\n-    third_party_service.add_gettxout_service(blockexplorer)\n-    third_party_service.add_gettxout_service(localbitcoins)\n-    blocktrail and third_party_service.add_gettxout_service(blocktrail)\n-\n-    third_party_service.add_getrawtransaction_service(bitgo)\n-    third_party_service.add_getrawtransaction_service(blockcypher)\n-    third_party_service.add_getrawtransaction_service(blockexplorer)\n-    blocktrail and third_party_service.add_getrawtransaction_service(blocktrail)\n-    third_party_service.add_getrawtransaction_service(chainso)\n-    third_party_service.add_getrawtransaction_service(localbitcoins)\n-\n-    return third_party_service\ndiff --git a/spruned/application/settings.py b/spruned/settings.py\nsimilarity index 92%\nrename from spruned/application/settings.py\nrename to spruned/settings.py\n--- a/spruned/application/settings.py\n+++ b/spruned/settings.py\n@@ -33,7 +33,7 @@ class Network(Enum):\n # application\n DEBUG = True\n TESTNET = 0\n-CACHE = 1024\n+CACHE_SIZE = 1024 * 1024 * 50\n NETWORK = Network.BITCOIN\n SPRUNED_SERVICE_URL = 'https://spruned.mempool.co/data/'\n MIN_DATA_SOURCES = 1\n@@ -51,8 +51,7 @@ class Network(Enum):\n     'k.root-servers.net',\n     'l.root-servers.net',\n     'm.root-servers.net',\n-    'www.bitcoin.org',\n-    'www.google.com'\n+    'www.bitcoin.org'\n ]\n \n # third-party secrets\n@@ -61,13 +60,18 @@ class Network(Enum):\n \n # files\n SQLITE_DBNAME = ''\n+LEVELDB_BLOCKCHAIN_ADDRESS = ''\n \n if not TESTING:\n     FILE_DIRECTORY = '%s/.spruned' % Path.home()\n-    CACHE_ADDRESS = '%s/cache/' % FILE_DIRECTORY\n     STORAGE_ADDRESS = '%s/storage/' % FILE_DIRECTORY\n     LOGFILE = '%s/spruned.log' % FILE_DIRECTORY\n     SQLITE_DBNAME = '%sheaders.db' % STORAGE_ADDRESS\n+    LEVELDB_BLOCKCHAIN_ADDRESS = '%sdatabase.ldb' % 
STORAGE_ADDRESS\n+\n+\n+LEVELDB_BLOCKCHAIN_SLUG = b'blockchain'\n+LEVELDB_CACHE_SLUG = b'cache'\n \n # electrod\n ELECTROD_CONNECTIONS = 3\n", "test_patch": "diff --git a/test/test_electrod/__init__.py b/spruned/repositories/__init__.py\nsimilarity index 100%\nrename from test/test_electrod/__init__.py\nrename to spruned/repositories/__init__.py\ndiff --git a/test/test_application/test_repository.py b/test/test_application/test_repository.py\n--- a/test/test_application/test_repository.py\n+++ b/test/test_application/test_repository.py\n@@ -1,8 +1,8 @@\n import unittest\n \n-from spruned.application import settings\n-from spruned.application.headers_repository import HeadersSQLiteRepository\n-from spruned.application.database import session\n+from spruned import settings\n+from spruned.repositories.headers_repository import HeadersSQLiteRepository\n+from spruned.application.database import sqlite\n from spruned.daemon import exceptions\n from test.utils import make_headers\n \n@@ -10,7 +10,7 @@\n class TestRepository(unittest.TestCase):\n     def setUp(self):\n         assert not settings.SQLITE_DBNAME\n-        self.sut = HeadersSQLiteRepository(session)\n+        self.sut = HeadersSQLiteRepository(sqlite)\n \n     def tests_headers_repository_ok(self):\n         \"\"\"\ndiff --git a/test/test_application/test_vo_service.py b/test/test_application/test_vo_service.py\n--- a/test/test_application/test_vo_service.py\n+++ b/test/test_application/test_vo_service.py\n@@ -1,8 +1,8 @@\n+\"\"\"\n import asyncio\n import unittest\n from unittest.mock import Mock, create_autospec\n import binascii\n-from spruned.application.cache import CacheFileInterface\n from spruned.application.spruned_vo_service import SprunedVOService\n from spruned.services.thirdparty_service import ThirdPartyServiceDelegate\n from test.utils import async_coro\n@@ -12,13 +12,13 @@ class TestVOService(unittest.TestCase):\n     def setUp(self):\n         self.loop = asyncio.get_event_loop()\n         self.electrod = Mock()\n-        self.cache = create_autospec(CacheFileInterface)\n+        self.p2p = Mock()\n         self.utxo_tracker = Mock()\n         self.repository = Mock()\n         self.source = create_autospec(ThirdPartyServiceDelegate)\n-        self.sut = SprunedVOService(self.electrod, utxo_tracker=self.utxo_tracker, repository=self.repository)\n-        self.sut.add_cache(self.cache)\n-        self.sut.add_source(self.source)\n+        self.sut = SprunedVOService(\n+            self.electrod, self.p2p, utxo_tracker=self.utxo_tracker, repository=self.repository\n+        )\n         hb = '000000206ad001ecab39a3267ac6db2ccea9e27907b011bc70324c00000000000000000048043a6a' \\\n              '574d8d826af9477804d3a4ac116a411d194c0b86d950168163c4d4232364ad5aa38955175cd60695'\n         hh = '000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e'\n@@ -48,39 +48,21 @@ def setUp(self):\n \n     def tearDown(self):\n         self.electrod.reset_mock()\n-        self.cache.reset_mock()\n         self.utxo_tracker.reset_mock()\n         self.repository.reset_mock()\n \n-    def test_getblock(self):\n-        self.cache.get.return_value = None\n+    def test_getblock_non_verbose(self):\n         self.repository.get_best_header.return_value = {'block_height': 513980}\n         self.repository.get_block_header.return_value = self.header\n-        self.source.getblock.return_value = async_coro(\n-            {\n-                'hash': '000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e',\n-\n+        
self.repository.get_block.return_value = {\n+                'block_hash': '000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e',\n+                'block_bytes': binascii.unhexlify('cafebabe'.encode())\n             }\n-        )\n+\n         block = self.loop.run_until_complete(\n-            self.sut.getblock('000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e')\n-        )\n-        self.assertEqual(\n-            block,\n-            {\n-                'hash': '000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e',\n-                'version': 536870912,\n-                'time': 1521312803,\n-                'versionHex': None,\n-                'mediantime': None,\n-                'nonce': 2500253276,\n-                'bits': 391481763,\n-                'difficulty': None,\n-                'chainwork': None,\n-                'previousblockhash': '0000000000000000004c3270bc11b00779e2a9ce2cdbc67a26a339abec01d06a', 'height': 513979,\n-                'confirmations': 1\n-            }\n+            self.sut.getblock('000000000000000000376267d342878f869cb68192ff5d73f5f1953ae83e3e1e', 0)\n         )\n+        self.assertEqual(block, 'cafebabe')\n \n     def test_getrawtransaction(self):\n         self.cache.get.return_value = None\n@@ -189,4 +171,5 @@ def test_gettxout_ok(self):\n         )\n         self.electrod.listunspents.return_value = async_coro([\n             {'tx_hash': TXID, 'tx_pos': INDEX}\n-        ])\n\\ No newline at end of file\n+        ])\n+\"\"\"\n\\ No newline at end of file\ndiff --git a/test/test_daemon/__init__.py b/test/test_daemon/__init__.py\nnew file mode 100644\ndiff --git a/test/test_daemon/test_electrod/__init__.py b/test/test_daemon/test_electrod/__init__.py\nnew file mode 100644\ndiff --git a/test/test_electrod/test_connection.py b/test/test_daemon/test_electrod/test_connection.py\nsimilarity index 99%\nrename from test/test_electrod/test_connection.py\nrename to test/test_daemon/test_electrod/test_connection.py\n--- a/test/test_electrod/test_connection.py\n+++ b/test/test_daemon/test_electrod/test_connection.py\n@@ -1,6 +1,6 @@\n import asyncio\n import unittest\n-from unittest.mock import Mock, call\n+from unittest.mock import Mock\n import binascii\n \n import time\ndiff --git a/test/test_electrod/test_connectionpool.py b/test/test_daemon/test_electrod/test_connectionpool.py\nsimilarity index 94%\nrename from test/test_electrod/test_connectionpool.py\nrename to test/test_daemon/test_electrod/test_connectionpool.py\n--- a/test/test_electrod/test_connectionpool.py\n+++ b/test/test_daemon/test_electrod/test_connectionpool.py\n@@ -1,25 +1,23 @@\n import asyncio\n import unittest\n-from unittest.mock import Mock, call, create_autospec\n-import binascii\n+from unittest.mock import Mock\n \n import time\n \n-from spruned.application.tools import async_delayed_task\n from spruned.daemon import exceptions\n-from spruned.daemon.electrod.electrod_connection import ElectrodConnection, ElectrodConnectionPool\n-from test.utils import async_coro, coro_call\n+from spruned.daemon.electrod.electrod_connection import ElectrodConnectionPool\n+from test.utils import async_coro\n \n \n async def connect(m):\n-    m._connect.return_value = async_coro(True)\n+    m._connect.return_value = True\n     m.connected = True\n     m.protocol = 'protocol'\n     return m._connect()\n \n \n async def disconnect(m):\n-    m._disconnect.return_value = async_coro(True)\n+    m._disconnect.return_value = True\n     m.connected = False\n     
m.protocol = None\n     return m._disconnect()\n@@ -37,7 +35,7 @@ def setUp(self):\n             connections=3,\n             loop=self.electrod_loop,\n             delayer=self.delayer,\n-            electrum_servers=self.servers,\n+            peers=self.servers,\n             network_checker=self.network_checker,\n             connection_factory=self.connection_factory\n         )\n@@ -219,13 +217,13 @@ def test_call_missing_response(self):\n             )\n \n     def test_corners(self):\n-        s = [s for s in self.sut._servers]\n-        self.sut._servers = []\n+        s = [s for s in self.sut._peers]\n+        self.sut._peers = []\n         with self.assertRaises(exceptions.NoServersException):\n-            self.sut._pick_server()\n+            self.sut._pick_peer()\n         with self.assertRaises(exceptions.NoServersException):\n-            self.sut._pick_multiple_servers(2)\n-        self.sut._servers = s\n+            self.sut._pick_multiple_peers(2)\n+        self.sut._peers = s\n         with self.assertRaises(exceptions.NoPeersException):\n             self.sut._pick_connection()\n         with self.assertRaises(exceptions.NoPeersException):\n@@ -233,12 +231,12 @@ def test_corners(self):\n         self.assertIsNone(\n             self.sut._pick_connection(fail_silent=True)\n         )\n-        self.sut._servers = ['cafebabe']\n+        self.sut._peers = ['cafebabe']\n         self.sut._connections.append(Mock(hostname='cafebabe', connected=True))\n         with self.assertRaises(exceptions.NoServersException):\n-            self.sut._pick_server()\n+            self.sut._pick_peer()\n         with self.assertRaises(exceptions.NoServersException):\n-            self.sut._pick_multiple_servers(1)\n+            self.sut._pick_multiple_peers(1)\n \n     def test__handle_peer_error_disconnected(self):\n         conn = Mock(connected=False)\n@@ -296,11 +294,13 @@ def test_on_headers_callback(self):\n \n     def test_on_peer_error(self):\n         peer = Mock(is_online=True, connected=False, _errors=[])\n+        peer.add_error = lambda *x: peer._errors.append(x[0]) if x else peer._errors.append(int(time.time()))\n         self.loop.run_until_complete(self.sut.on_peer_error(peer))\n         self.assertEqual(len(peer._errors), 1)\n \n     def test_on_peer_error_during_connection(self):\n         peer = Mock(is_online=True, connected=False, _errors=[])\n+        peer.add_error = lambda *x: peer._errors.append(x[0]) if x else peer._errors.append(int(time.time()))\n         self.network_checker.return_value = False\n         self.loop.run_until_complete(self.sut.on_peer_error(peer, error_type='connect'))\n         self.assertEqual(len(peer._errors), 0)\ndiff --git a/test/test_electrod/test_interface.py b/test/test_daemon/test_electrod/test_interface.py\nsimilarity index 100%\nrename from test/test_electrod/test_interface.py\nrename to test/test_daemon/test_electrod/test_interface.py\ndiff --git a/test/test_electrod/test_reactor.py b/test/test_daemon/test_headers_reactor.py\nsimilarity index 99%\nrename from test/test_electrod/test_reactor.py\nrename to test/test_daemon/test_headers_reactor.py\n--- a/test/test_electrod/test_reactor.py\n+++ b/test/test_daemon/test_headers_reactor.py\n@@ -3,23 +3,23 @@\n from unittest.mock import Mock, create_autospec, call\n import time\n \n-from spruned.application import settings\n+from spruned import settings\n from spruned.application.abstracts import HeadersRepository\n from spruned.daemon import exceptions\n from 
spruned.daemon.electrod.electrod_interface import ElectrodInterface\n-from spruned.daemon.electrod.electrod_reactor import ElectrodReactor\n+from spruned.daemon.tasks.headers_reactor import HeadersReactor\n from test.utils import async_coro, coro_call, in_range, make_headers\n import warnings\n \n \n-class TestElectrodReactor(unittest.TestCase):\n+class TestHeadersReactor(unittest.TestCase):\n     def setUp(self):\n         self.repo = create_autospec(HeadersRepository)\n         self.interface = create_autospec(ElectrodInterface)\n         self.electrod_loop = Mock()\n         self.electrod_loop.create_task.side_effect = lambda x: x\n         self.delay_task_runner = Mock()\n-        self.sut = ElectrodReactor(\n+        self.sut = HeadersReactor(\n             self.repo, self.interface, loop=self.electrod_loop, delayed_task=self.delay_task_runner\n         )\n         self.loop = asyncio.get_event_loop()\n@@ -491,12 +491,11 @@ def test_check_new_header_same_header(self):\n             \"block_height\": 1,\n             \"block_hash\": \"ff\"*32, \"timestamp\": header_timestamp - 700, 'header_bytes': b'', 'prev_block_hash': '00'*32\n         }\n-        peer = Mock()\n+        peer = Mock(server_info='mock_peer')\n         self.interface.get_header.side_effect = [async_coro((peer, net_header))]\n         self.sut.synced = True\n         self.sut.set_last_processed_header(loc_header)\n         self.assertFalse(self.sut.lock.locked())\n-        peer = Mock(server_info='mock_peer')\n         self.loop.run_until_complete(self.sut.check_headers())\n         Mock.assert_called_once_with(self.delay_task_runner, coro_call('check_headers'), in_range(655, 660))\n         Mock.assert_has_calls(\ndiff --git a/test/test_electrod/test_service.py b/test/test_electrod/test_service.py\ndeleted file mode 100644\n--- a/test/test_electrod/test_service.py\n+++ /dev/null\n@@ -1,54 +0,0 @@\n-import asyncio\n-import unittest\n-from unittest.mock import Mock, call, create_autospec\n-import binascii\n-\n-import time\n-\n-from spruned.application.tools import async_delayed_task\n-from spruned.daemon.electrod.electrod_connection import ElectrodConnection\n-from spruned.daemon.electrod.electrod_interface import ElectrodInterface\n-from spruned.daemon.electrod.electrod_service import ElectrodService\n-from test.utils import async_coro, coro_call\n-\n-\n-class TestElectrodService(unittest.TestCase):\n-    def setUp(self):\n-        self.interface = create_autospec(ElectrodInterface)\n-        self.sut = ElectrodService(self.interface)\n-        self.loop = asyncio.get_event_loop()\n-        self.assertEqual(self.sut.available, True)  # mmmh... 
this may be implemented\n-\n-    def tearDown(self):\n-        self.interface.reset_mock()\n-\n-    def test_getrawtransaction(self):\n-        self.interface.getrawtransaction.return_value = async_coro('ff'*32)\n-        res = self.loop.run_until_complete(self.sut.getrawtransaction('cafebabe'))\n-        self.assertEqual(res, 'ff'*32)\n-        Mock.assert_called_once_with(self.interface.getrawtransaction, 'cafebabe')\n-\n-    def test_getrawtransaction_verbose(self):\n-        self.interface.getrawtransaction.return_value = async_coro('ff' * 32)\n-        res = self.loop.run_until_complete(self.sut.getrawtransaction('cafebabe', verbose=True))\n-        self.assertEqual(res, 'ff' * 32)\n-        Mock.assert_called_once_with(self.interface.getrawtransaction, 'cafebabe')\n-\n-    def test_estimatefee(self):\n-        self.interface.estimatefee.return_value = async_coro(123)\n-        res = self.loop.run_until_complete(self.sut.estimatefee(6))\n-        self.assertEqual(res, 123)\n-        Mock.assert_called_once_with(self.interface.estimatefee, 6)\n-\n-    def test_listunspents(self):\n-        self.interface.listunspents.return_value = async_coro({'unspents': 'list'})\n-        res = self.loop.run_until_complete(self.sut.listunspents('cafebabe'))\n-        self.assertEqual(res, {'unspents': 'list'})\n-        Mock.assert_called_once_with(self.interface.listunspents, 'cafebabe')\n-\n-    def test_merkleproof(self):\n-        self.interface.get_merkleproof.return_value = async_coro({'merkle': 'proof'})\n-        res = self.loop.run_until_complete(self.sut.getmerkleproof('cafebabe', 10000))\n-        self.assertEqual(res, {'merkle': 'proof'})\n-        Mock.assert_called_once_with(self.interface.get_merkleproof, 'cafebabe', 10000)\n-\n", "problem_statement": "", "hints_text": "", "created_at": "2018-03-24T15:13:34Z"}
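The deleted spruned/services/http_client.py opens a fresh aiohttp.ClientSession on every call and then re-enters it through a redundant `async with session as s`. Below is a minimal sketch of the same GET path with the duplicate context manager dropped; the 15-second timeout, the JSON/HTML content-type switch, and the `HTTPClientException` raised on `ClientResponseError`/`TimeoutError`/`ClientError` all follow the diff, while the flatter structure is the only change.

```python
# Sketch of the GET path from the removed spruned/services/http_client.py,
# assuming the same spruned imports are available as in the patch.
import asyncio

import aiohttp
import async_timeout

from spruned.application import exceptions
from spruned.application.logging_factory import Logger


class HTTPClient:
    def __init__(self, baseurl: str):
        self.baseurl = baseurl

    async def get(self, path: str, json_response: bool = True, **kw):
        url = self.baseurl + path
        headers = {'content-type': 'application/json' if json_response else 'text/html'}
        try:
            async with async_timeout.timeout(15):
                # One session per call, as in the original; a long-lived shared
                # session would avoid rebuilding a connection pool per request.
                async with aiohttp.ClientSession() as session:
                    response = await session.get(url, headers=headers, **kw)
                    response.raise_for_status()
                    return await response.json() if json_response else await response.read()
        except (aiohttp.ClientResponseError, asyncio.TimeoutError, aiohttp.ClientError) as e:
            Logger.third_party.exception('Exception on call: %s' % url)
            raise exceptions.HTTPClientException from e
```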
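The removed InsightService._format_txout converts the decimal BTC amount an Insight-style API returns into integer satoshi with `int(float(value) * 10**8)`. Truncating a float this way can lose a satoshi to binary rounding; a sketch of a Decimal-based variant follows. The helper name and the module-level constant are hypothetical, only the conversion itself appears in the patch.

```python
# Exact BTC -> satoshi conversion; Decimal keeps all 8 decimal places,
# so int() truncation cannot drop a satoshi the way float arithmetic can.
from decimal import Decimal

COIN = 10 ** 8


def btc_to_satoshi(value: str) -> int:
    return int(Decimal(value) * COIN)


assert btc_to_satoshi('0.1') == 10_000_000
assert btc_to_satoshi('20.99999999') == 2_099_999_999
```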
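The removed ThirdPartyServiceDelegate._get implements failover by repeatedly picking a random service from the `available` subset, counting per-service tries against `self.retries`, and raising `ServiceException` once a loop counter exceeds `retries * len(services) * 10`. A simplified sketch of that pattern is below: the magic loop bound is replaced by explicit exhaustion of the candidate list, and the function name and signature are illustrative rather than taken from the patch.

```python
# Random-choice failover across interchangeable backends, in the spirit of
# the deleted spruned/services/thirdparty_service.py.
import random
from typing import Sequence

from spruned.application import exceptions


async def call_with_failover(services: Sequence, call: str, *args, retries: int = 2):
    attempts = {}
    # Only services currently reporting themselves available are candidates.
    candidates = [s for s in services if s.available]
    while candidates:
        source = random.choice(candidates)
        name = source.__class__.__name__
        attempts[name] = attempts.get(name, 0) + 1
        if attempts[name] > retries:
            # This backend used up its tries; stop picking it.
            candidates.remove(source)
            continue
        result = await getattr(source, call)(*args)
        if result:
            return result
    raise exceptions.ServiceException
```

Keeping the random pick (instead of round-robin) matches the original design: it spreads load across third-party APIs so no single provider is hit first on every request.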
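Throughout the test_patch, awaitable results are stubbed on plain `Mock` objects via a helper called `async_coro` imported from test.utils, e.g. `self.interface.estimatefee.return_value = async_coro(123)`. A minimal sketch of such a helper and its use is below; only the name and call sites appear in the diff, so the helper body is an assumption.

```python
# Stubbing coroutine return values on Mock objects, as the renamed tests do.
import asyncio
import unittest
from unittest.mock import Mock


async def async_coro(value):
    # Wrap a plain value so the mocked call's return value can be awaited.
    return value


class ExampleTest(unittest.TestCase):
    def setUp(self):
        self.loop = asyncio.get_event_loop()
        self.interface = Mock()

    def test_stubbed_coroutine(self):
        # A coroutine object is awaitable only once, so a fresh async_coro(...)
        # must be assigned before each awaited call (or use side_effect lists,
        # as the reactor tests in the patch do).
        self.interface.estimatefee.return_value = async_coro(123)
        res = self.loop.run_until_complete(self.interface.estimatefee(6))
        self.assertEqual(res, 123)
```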