Precomputed hashx status #17

Merged
jackrobison merged 7 commits from precomputed-hashx-status into master 2022-04-07 18:06:18 +02:00
3 changed files with 5 additions and 9 deletions
Showing only changes of commit a57b69391b


@@ -41,7 +41,7 @@ mempool_process_time_metric = Histogram(
)
-class MemPool:
+class HubMemPool:
    def __init__(self, coin, db: 'HubDB', refresh_secs=1.0):
        self.coin = coin
        self._db = db


@@ -3,7 +3,7 @@ import time
import asyncio
from scribe.blockchain.daemon import LBCDaemon
from scribe.hub.session import SessionManager
-from scribe.hub.mempool import MemPool
+from scribe.hub.mempool import HubMemPool
from scribe.hub.udp import StatusServer
from scribe.service import BlockchainReaderService
from scribe.elasticsearch import ElasticNotifierClientProtocol
@@ -16,7 +16,7 @@ class HubServerService(BlockchainReaderService):
        self.mempool_notifications = set()
        self.status_server = StatusServer()
        self.daemon = LBCDaemon(env.coin, env.daemon_url)  # only needed for broadcasting txs
-        self.mempool = MemPool(self.env.coin, self.db)
+        self.mempool = HubMemPool(self.env.coin, self.db)
        self.session_manager = SessionManager(
            env, self.db, self.mempool, self.daemon,
            self.shutdown_event,


@@ -30,7 +30,7 @@ if typing.TYPE_CHECKING:
    from scribe.db import HubDB
    from scribe.env import Env
    from scribe.blockchain.daemon import LBCDaemon
-    from scribe.hub.mempool import MemPool
+    from scribe.hub.mempool import HubMemPool
BAD_REQUEST = 1
DAEMON_ERROR = 2
@@ -38,13 +38,10 @@ DAEMON_ERROR = 2
log = logging.getLogger(__name__)
SignatureInfo = namedtuple('SignatureInfo', 'min_args max_args '
                                            'required_names other_names')
def scripthash_to_hashX(scripthash: str) -> bytes:
    try:
        bin_hash = hex_str_to_hash(scripthash)
@@ -136,7 +133,6 @@ class SessionManager:
    tx_replied_count_metric = Counter("replied_transaction", "Number of transactions responded", namespace=NAMESPACE)
    urls_to_resolve_count_metric = Counter("urls_to_resolve", "Number of urls to resolve", namespace=NAMESPACE)
    resolved_url_count_metric = Counter("resolved_url", "Number of resolved urls", namespace=NAMESPACE)
    interrupt_count_metric = Counter("interrupt", "Number of interrupted queries", namespace=NAMESPACE)
    db_operational_error_metric = Counter(
        "operational_error", "Number of queries that raised operational errors", namespace=NAMESPACE
@@ -168,7 +164,7 @@ class SessionManager:
        namespace=NAMESPACE, buckets=HISTOGRAM_BUCKETS
    )
-    def __init__(self, env: 'Env', db: 'HubDB', mempool: 'MemPool',
+    def __init__(self, env: 'Env', db: 'HubDB', mempool: 'HubMemPool',
                 daemon: 'LBCDaemon', shutdown_event: asyncio.Event,
                 on_available_callback: typing.Callable[[], None], on_unavailable_callback: typing.Callable[[], None]):
        env.max_send = max(350000, env.max_send)
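
Taken together, the three files show a pure rename of MemPool to HubMemPool: the class definition, the two imports, the construction in HubServerService, and the type hint in SessionManager.__init__ change in lockstep, while the constructor arguments stay the same. A minimal sketch of what a caller looks like after this commit, assuming an Env and HubDB instance are already in hand (the build_mempool helper below is illustrative, not part of the diff):

# Illustrative sketch only; env and db stand in for the Env and HubDB objects a service already holds.
from scribe.hub.mempool import HubMemPool  # previously: from scribe.hub.mempool import MemPool


def build_mempool(env, db):
    # Same constructor shape as before the rename: HubMemPool(coin, db, refresh_secs=1.0)
    return HubMemPool(env.coin, db)  # previously: MemPool(env.coin, db)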