From f32861923e0565d11e10989eeebd6122d1ab130b Mon Sep 17 00:00:00 2001 From: Alex Grintsvayg Date: Wed, 2 May 2018 13:23:57 -0400 Subject: [PATCH 01/55] update blob download script --- scripts/download_blob_from_peer.py | 13 +++++++------ 1 file changed, 7 insertions(+), 6 deletions(-) diff --git a/scripts/download_blob_from_peer.py b/scripts/download_blob_from_peer.py index dc688956d..80b4c40c3 100644 --- a/scripts/download_blob_from_peer.py +++ b/scripts/download_blob_from_peer.py @@ -14,7 +14,7 @@ from lbrynet.core import log_support, Wallet, Peer from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader from lbrynet.core.StreamDescriptor import BlobStreamDescriptorReader from lbrynet.core.BlobManager import DiskBlobManager -from lbrynet.dht.hashannouncer import DummyHashAnnouncer +from lbrynet.database.storage import SQLiteStorage log = logging.getLogger() @@ -45,13 +45,13 @@ def main(args=None): @defer.inlineCallbacks def download_it(peer, timeout, blob_hash): tmp_dir = yield threads.deferToThread(tempfile.mkdtemp) - announcer = DummyHashAnnouncer() - tmp_blob_manager = DiskBlobManager(announcer, tmp_dir, tmp_dir) + storage = SQLiteStorage(tmp_dir, reactor) + yield storage.setup() + tmp_blob_manager = DiskBlobManager(tmp_dir, storage) config = {'auto_connect': True} if conf.settings['lbryum_wallet_dir']: config['lbryum_path'] = conf.settings['lbryum_wallet_dir'] - storage = Wallet.InMemoryStorage() wallet = Wallet.LBRYumWallet(storage, config) downloader = SinglePeerDownloader() @@ -76,8 +76,9 @@ def download_it(peer, timeout, blob_hash): pass if info: break - time.sleep( - 0.1) # there's some kind of race condition where it sometimes doesnt write the blob to disk in time + + # there's some kind of race condition where it sometimes doesn't write the blob to disk in time + time.sleep(0.1) if info is not None: pprint(info) From 129d2687b95f5c7cba0383210d9aa3855809dc1d Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Mon, 30 Apr 2018 13:31:09 -0400 Subject: [PATCH 02/55] download headers from s3 when more than 10 chunks behind --- CHANGELOG.md | 3 +- lbrynet/conf.py | 3 +- lbrynet/core/Wallet.py | 105 ++++++++++++++++++++++++++++++++---- lbrynet/txlbryum/factory.py | 3 ++ 4 files changed, 102 insertions(+), 12 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index a261709e0..49125aa34 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -44,11 +44,12 @@ at anytime. 
* regenerate api keys on startup if using authentication * support both positional and keyword args for api calls * `peer_list` to return a list of dictionaries instead of a list of lists, added peer node ids to the results + * download blockchain headers from s3 before starting the wallet when the local height is more than `s3_headers_depth` (a config setting) blocks behind ### Added * virtual kademlia network and mock udp transport for dht integration tests * integration tests for bootstrapping the dht - * configurable `concurrent_announcers` setting + * configurable `concurrent_announcers` and `s3_headers_depth` settings * `peer_ping` command ### Removed diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 3edee1437..0be7a423e 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -294,7 +294,8 @@ ADJUSTABLE_SETTINGS = { 'use_keyring': (bool, False), 'wallet': (str, LBRYUM_WALLET), 'blockchain_name': (str, 'lbrycrd_main'), - 'lbryum_servers': (list, [('lbryum8.lbry.io', 50001), ('lbryum9.lbry.io', 50001)], server_list) + 'lbryum_servers': (list, [('lbryum8.lbry.io', 50001), ('lbryum9.lbry.io', 50001)], server_list), + 's3_headers_depth': (int, 96 * 10) # download headers from s3 when the local height is more than 10 chunks behind } diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index a4fa5f4d8..889e7514f 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -1,3 +1,4 @@ +import os from collections import defaultdict, deque import datetime import logging @@ -5,7 +6,11 @@ from decimal import Decimal from zope.interface import implements from twisted.internet import threads, reactor, defer, task from twisted.python.failure import Failure +from twisted.python.threadpool import ThreadPool +from twisted._threads._ithreads import AlreadyQuit from twisted.internet.error import ConnectionAborted +from txrequests import Session as _TxRequestsSession +from requests import Session as requestsSession from lbryum import wallet as lbryum_wallet from lbryum.network import Network @@ -13,12 +18,14 @@ from lbryum.simple_config import SimpleConfig from lbryum.constants import COIN from lbryum.commands import Commands from lbryum.errors import InvalidPassword +from lbryum.constants import HEADERS_URL, HEADER_SIZE from lbryschema.uri import parse_lbry_uri from lbryschema.claim import ClaimDict from lbryschema.error import DecodeError from lbryschema.decode import smart_decode +from lbrynet.txlbryum.factory import StratumClient from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet from lbrynet.core.utils import DeferredDict from lbrynet.core.client.ClientRequest import ClientRequest @@ -29,6 +36,29 @@ from lbrynet.core.Error import DownloadCanceledError, RequestCanceledError log = logging.getLogger(__name__) +class TxRequestsSession(_TxRequestsSession): + # Session from txrequests would throw AlreadyQuit errors; this catches them + def __init__(self, pool=None, minthreads=1, maxthreads=4, **kwargs): + requestsSession.__init__(self, **kwargs) # pylint: disable=non-parent-init-called + self.ownPool = False + if pool is None: + self.ownPool = True + pool = ThreadPool(minthreads=minthreads, maxthreads=maxthreads) + # unclosed ThreadPool leads to reactor hangs at shutdown + # this is a problem in many situations, so better to enforce pool stop here + + def stop_pool(): + try: + pool.stop() + except AlreadyQuit: + pass + + reactor.addSystemEventTrigger("after", "shutdown", stop_pool) + self.pool = pool + if self.ownPool: + pool.start() + + class 
ReservedPoints(object): def __init__(self, identifier, amount): self.identifier = identifier @@ -86,18 +116,73 @@ class Wallet(object): self._batch_count = 20 self._pending_claim_checker = task.LoopingCall(self.fetch_and_save_heights_for_pending_claims) + @defer.inlineCallbacks + def fetch_headers_from_s3(self): + with TxRequestsSession() as s: + r = yield s.get(HEADERS_URL) + raw_headers = r.content + if not len(raw_headers) % HEADER_SIZE: # should be divisible by the header size + s3_height = (len(raw_headers) / HEADER_SIZE) - 1 + local_height = self.local_header_file_height() + if s3_height > local_height: + with open(os.path.join(self.config.path, "blockchain_headers"), "w") as headers_file: + headers_file.write(raw_headers) + log.info("updated headers from s3") + else: + log.warning("s3 is more out of date than we are") + else: + log.error("invalid size for headers from s3") + + def local_header_file_height(self): + headers_path = os.path.join(self.config.path, "blockchain_headers") + if os.path.isfile(headers_path): + return max((os.stat(headers_path).st_size / 112) - 1, 0) + return 0 + + @defer.inlineCallbacks + def get_remote_height(self, server, port): + connected = defer.Deferred() + client = StratumClient(connected) + reactor.connectTCP(server, port, client) + yield connected + remote_height = yield client.blockchain_block_get_server_height() + client.client.transport.loseConnection() + defer.returnValue(remote_height) + + @defer.inlineCallbacks + def should_download_headers_from_s3(self): + from lbrynet import conf + if conf.settings['blockchain_name'] != "lbrycrd_main": + defer.returnValue(False) + s3_headers_depth = conf.settings['s3_headers_depth'] + if not s3_headers_depth: + defer.returnValue(False) + local_height = self.local_header_file_height() + for server_url in self.config.get('default_servers'): + port = int(self.config.get('default_servers')[server_url]['t']) + try: + remote_height = yield self.get_remote_height(server_url, port) + log.debug("%s:%i remote height: %i, local height: %s", server_url, port, remote_height, local_height) + if remote_height > local_height + s3_headers_depth: + defer.returnValue(True) + except Exception as err: + log.warning("error requesting remote height from %s:%i - %s", server_url, port, err) + defer.returnValue(False) + + @defer.inlineCallbacks def start(self): + should_download_headers = yield self.should_download_headers_from_s3() + if should_download_headers: + try: + yield self.fetch_headers_from_s3() + except Exception as err: + log.error("failed to fetch headers from s3: %s", err) log.info("Starting wallet.") - - def start_manage(): - self.stopped = False - self.manage() - self._pending_claim_checker.start(30) - return True - - d = self._start() - d.addCallback(lambda _: start_manage()) - return d + yield self._start() + self.stopped = False + self.manage() + self._pending_claim_checker.start(30) + defer.returnValue(True) @staticmethod def log_stop_error(err): diff --git a/lbrynet/txlbryum/factory.py b/lbrynet/txlbryum/factory.py index 72af607d1..6c59d83a3 100644 --- a/lbrynet/txlbryum/factory.py +++ b/lbrynet/txlbryum/factory.py @@ -105,3 +105,6 @@ class StratumClient(ClientFactory): def blockchain_address_get_history(self, address): return self._rpc('blockchain.address.get_history', [address]) + + def blockchain_block_get_server_height(self): + return self._rpc('blockchain.block.get_server_height', []) From f42733ecba38cb5d0b9d23a0f4869b125ed0e8b3 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Wed, 2 May 2018 14:45:01 -0400 
Subject: [PATCH 03/55] fix tests --- lbrynet/tests/functional/test_misc.py | 15 +++++++-------- lbrynet/tests/mocks.py | 14 +++++--------- .../unit/core/client/test_ConnectionManager.py | 2 +- .../unit/core/server/test_DHTHashAnnouncer.py | 2 +- lbrynet/tests/unit/core/test_BlobManager.py | 2 +- lbrynet/tests/unit/database/test_SQLiteStorage.py | 2 +- .../tests/unit/lbrynet_daemon/auth/test_server.py | 3 ++- 7 files changed, 18 insertions(+), 22 deletions(-) diff --git a/lbrynet/tests/functional/test_misc.py b/lbrynet/tests/functional/test_misc.py index 355fa1d14..e806da5c2 100644 --- a/lbrynet/tests/functional/test_misc.py +++ b/lbrynet/tests/functional/test_misc.py @@ -22,7 +22,6 @@ from twisted.internet import defer, threads, task from twisted.trial.unittest import TestCase from twisted.python.failure import Failure -from lbrynet.dht.node import Node from lbrynet.core.PeerManager import PeerManager from lbrynet.core.RateLimiter import DummyRateLimiter, RateLimiter from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory @@ -115,7 +114,7 @@ class LbryUploader(object): node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=Node, is_generous=self.is_generous, external_ip="127.0.0.1") + dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) if self.ul_rate_limit is not None: self.session.rate_limiter.set_ul_limit(self.ul_rate_limit) @@ -207,7 +206,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, db_dir, blob_dir = mk_db_and_blob_dir() session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, - node_id="abcd" + str(n), dht_node_port=4446, + node_id="abcd" + str(n), dht_node_port=4446, dht_node_class=FakeNode, peer_finder=peer_finder, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=peer_port, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, @@ -315,7 +314,7 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero db_dir, blob_dir = mk_db_and_blob_dir() session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh", - peer_finder=peer_finder, hash_announcer=hash_announcer, + peer_finder=peer_finder, hash_announcer=hash_announcer, dht_node_class=FakeNode, blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, @@ -497,7 +496,7 @@ class TestTransfer(TestCase): blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, - dht_node_class=Node, is_generous=self.is_generous, external_ip="127.0.0.1") + dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1") self.lbry_file_manager = EncryptedFileManager( self.session, sd_identifier) @@ -582,7 +581,7 @@ class TestTransfer(TestCase): self.session = Session( conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer, - blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, + blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, dht_node_class=FakeNode, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, 
blob_tracker_class=DummyBlobAvailabilityTracker, is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1") @@ -662,7 +661,7 @@ class TestTransfer(TestCase): db_dir, blob_dir = mk_db_and_blob_dir() self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, - node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, + node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, dht_node_class=FakeNode, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, blob_tracker_class=DummyBlobAvailabilityTracker, @@ -769,7 +768,7 @@ class TestTransfer(TestCase): sd_identifier = StreamDescriptorIdentifier() db_dir, blob_dir = mk_db_and_blob_dir() - self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, + self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, dht_node_class=FakeNode, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, diff --git a/lbrynet/tests/mocks.py b/lbrynet/tests/mocks.py index d4c52a2cd..d2bce3730 100644 --- a/lbrynet/tests/mocks.py +++ b/lbrynet/tests/mocks.py @@ -9,6 +9,7 @@ from lbrynet.core.client.ClientRequest import ClientRequest from lbrynet.core.Error import RequestCanceledError from lbrynet.core import BlobAvailability from lbrynet.core.utils import generate_id +from lbrynet.dht.node import Node as RealNode from lbrynet.daemon import ExchangeRateManager as ERM from lbrynet import conf from util import debug_kademlia_packet @@ -24,15 +25,9 @@ class FakeLBRYFile(object): self.file_name = 'fake_lbry_file' -class Node(object): - def __init__(self, peer_finder=None, peer_manager=None, dht_node_port=None, peer_port=3333, **kwargs): - self.peer_finder = peer_finder - self.peer_manager = peer_manager - self.peerPort = peer_port - self.udpPort = dht_node_port - - def joinNetwork(self, *args): - return defer.succeed(True) +class Node(RealNode): + def joinNetwork(self, known_node_addresses=None): + return defer.succeed(None) def stop(self): return defer.succeed(None) @@ -392,6 +387,7 @@ create_stream_sd_file = { def mock_conf_settings(obj, settings={}): + conf.initialize_settings(False) original_settings = conf.settings conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS) conf.settings.installation_id = conf.settings.get_installation_id() diff --git a/lbrynet/tests/unit/core/client/test_ConnectionManager.py b/lbrynet/tests/unit/core/client/test_ConnectionManager.py index 107afa997..61f177127 100644 --- a/lbrynet/tests/unit/core/client/test_ConnectionManager.py +++ b/lbrynet/tests/unit/core/client/test_ConnectionManager.py @@ -116,7 +116,7 @@ class MocServerProtocolFactory(ServerFactory): class TestIntegrationConnectionManager(unittest.TestCase): def setUp(self): - conf.initialize_settings() + conf.initialize_settings(False) self.TEST_PEER = Peer(LOCAL_HOST, PEER_PORT) self.downloader = MocDownloader() diff --git a/lbrynet/tests/unit/core/server/test_DHTHashAnnouncer.py b/lbrynet/tests/unit/core/server/test_DHTHashAnnouncer.py index 0d3999c0b..2f67d5567 100644 --- a/lbrynet/tests/unit/core/server/test_DHTHashAnnouncer.py +++ b/lbrynet/tests/unit/core/server/test_DHTHashAnnouncer.py @@ -34,7 +34,7 @@ class DHTHashAnnouncerTest(unittest.TestCase): @defer.inlineCallbacks def setUp(self): from lbrynet.conf import initialize_settings - initialize_settings() + 
initialize_settings(False) self.num_blobs = 10 self.blobs_to_announce = [] for i in range(0, self.num_blobs): diff --git a/lbrynet/tests/unit/core/test_BlobManager.py b/lbrynet/tests/unit/core/test_BlobManager.py index 48b6df982..7526ee2fc 100644 --- a/lbrynet/tests/unit/core/test_BlobManager.py +++ b/lbrynet/tests/unit/core/test_BlobManager.py @@ -17,7 +17,7 @@ from lbrynet.core.cryptoutils import get_lbry_hash_obj class BlobManagerTest(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - conf.initialize_settings() + conf.initialize_settings(False) self.blob_dir = tempfile.mkdtemp() self.db_dir = tempfile.mkdtemp() self.bm = DiskBlobManager(self.blob_dir, SQLiteStorage(self.db_dir)) diff --git a/lbrynet/tests/unit/database/test_SQLiteStorage.py b/lbrynet/tests/unit/database/test_SQLiteStorage.py index 7cd69c3ff..5bfe72988 100644 --- a/lbrynet/tests/unit/database/test_SQLiteStorage.py +++ b/lbrynet/tests/unit/database/test_SQLiteStorage.py @@ -85,7 +85,7 @@ class StorageTest(unittest.TestCase): @defer.inlineCallbacks def setUp(self): - conf.initialize_settings() + conf.initialize_settings(False) self.db_dir = tempfile.mkdtemp() self.storage = SQLiteStorage(self.db_dir) yield self.storage.setup() diff --git a/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py b/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py index ea1cefb55..80fa4aa7c 100644 --- a/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py +++ b/lbrynet/tests/unit/lbrynet_daemon/auth/test_server.py @@ -1,6 +1,6 @@ import mock from twisted.trial import unittest - +from lbrynet import conf from lbrynet.tests.mocks import mock_conf_settings from lbrynet.daemon.auth import server @@ -10,6 +10,7 @@ class AuthJSONRPCServerTest(unittest.TestCase): # and add useful general utilities like this # onto it. 
def setUp(self): + conf.initialize_settings(False) self.server = server.AuthJSONRPCServer(use_authentication=False) def test_get_server_port(self): From f1e36823025199461836733e2a1b2e22db35596e Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Wed, 2 May 2018 17:16:00 -0400 Subject: [PATCH 04/55] socket might not have TCP_KEEPx attributes --- lbrynet/txlbryum/client.py | 24 +++++++++++++++++------- 1 file changed, 17 insertions(+), 7 deletions(-) diff --git a/lbrynet/txlbryum/client.py b/lbrynet/txlbryum/client.py index 96a6a08e1..d01b5eeb6 100644 --- a/lbrynet/txlbryum/client.py +++ b/lbrynet/txlbryum/client.py @@ -6,7 +6,7 @@ from twisted.internet import defer, error from twisted.protocols.basic import LineOnlyReceiver from errors import RemoteServiceException, ProtocolException, ServiceException -log = logging.getLogger() +log = logging.getLogger(__name__) class StratumClientProtocol(LineOnlyReceiver): @@ -29,12 +29,22 @@ class StratumClientProtocol(LineOnlyReceiver): try: self.transport.setTcpNoDelay(True) self.transport.setTcpKeepAlive(True) - self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, - 120) # Seconds before sending keepalive probes - self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, - 1) # Interval in seconds between keepalive probes - self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, - 5) # Failed keepalive probles before declaring other end dead + if hasattr(socket, "TCP_KEEPIDLE"): + self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, + 120) # Seconds before sending keepalive probes + else: + log.debug("TCP_KEEPIDLE not available") + if hasattr(socket, "TCP_KEEPINTVL"): + self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, + 1) # Interval in seconds between keepalive probes + else: + log.debug("TCP_KEEPINTVL not available") + if hasattr(socket, "TCP_KEEPCNT"): + self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, + 5) # Failed keepalive probes before declaring other end dead + else: + log.debug("TCP_KEEPCNT not available") + except Exception as err: # Supported only by the socket transport, # but there's really no better place in code to trigger this. 
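The hasattr() guards in the patch above are needed because the TCP_KEEP* constants are platform-dependent: CPython only defines TCP_KEEPIDLE, TCP_KEEPINTVL, and TCP_KEEPCNT where the underlying OS supports them (Linux does; macOS and older Windows builds do not). A minimal standalone sketch of the same guard pattern — the helper name and defaults are illustrative, not part of the patch; socket.IPPROTO_TCP is the portable equivalent of the SOL_TCP constant used above:

import socket

def apply_tcp_keepalive(sock, idle=120, interval=1, probes=5):
    # Turn keepalive on, then apply whichever tuning options this platform exposes.
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, 1)
    for name, value in (("TCP_KEEPIDLE", idle),       # idle seconds before the first probe
                        ("TCP_KEEPINTVL", interval),  # seconds between probes
                        ("TCP_KEEPCNT", probes)):     # failed probes before the peer is declared dead
        if hasattr(socket, name):                     # constant may be missing on this platform
            sock.setsockopt(socket.IPPROTO_TCP, getattr(socket, name), value)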
From 6977d6944e54aca0c45c0e6cbaea75f66e49b884 Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Wed, 2 May 2018 17:25:00 -0400 Subject: [PATCH 05/55] show local, remote, and s3 header heights in the log --- lbrynet/core/Wallet.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 889e7514f..4e54fd58c 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -127,7 +127,7 @@ class Wallet(object): if s3_height > local_height: with open(os.path.join(self.config.path, "blockchain_headers"), "w") as headers_file: headers_file.write(raw_headers) - log.info("updated headers from s3") + log.info("fetched headers from s3 (s3 height: %i)", s3_height) else: log.warning("s3 is more out of date than we are") else: @@ -162,7 +162,7 @@ class Wallet(object): port = int(self.config.get('default_servers')[server_url]['t']) try: remote_height = yield self.get_remote_height(server_url, port) - log.debug("%s:%i remote height: %i, local height: %s", server_url, port, remote_height, local_height) + log.info("%s:%i height: %i, local height: %s", server_url, port, remote_height, local_height) if remote_height > local_height + s3_headers_depth: defer.returnValue(True) except Exception as err: From df2339231aa50a8ae9a3b8b2e431fbc0766474d8 Mon Sep 17 00:00:00 2001 From: Thomas Zarebczan Date: Fri, 4 May 2018 00:47:08 -0400 Subject: [PATCH 06/55] magic --- lbrynet/core/Wallet.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 889e7514f..8dc665e93 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -125,7 +125,7 @@ class Wallet(object): s3_height = (len(raw_headers) / HEADER_SIZE) - 1 local_height = self.local_header_file_height() if s3_height > local_height: - with open(os.path.join(self.config.path, "blockchain_headers"), "w") as headers_file: + with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file: headers_file.write(raw_headers) log.info("updated headers from s3") else: From b389e59452cce04baf557bcefbe1b41605e825f1 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Fri, 4 May 2018 22:12:43 -0300 Subject: [PATCH 07/55] replace pycrypto->cryptography on tests --- lbrynet/tests/functional/test_misc.py | 17 ++++----------- lbrynet/tests/functional/test_streamify.py | 4 ++-- lbrynet/tests/mocks.py | 21 ++++++++++++++----- .../tests/unit/cryptstream/test_cryptblob.py | 12 ++++++----- .../test_EncryptedFileCreator.py | 6 +++--- 5 files changed, 32 insertions(+), 28 deletions(-) diff --git a/lbrynet/tests/functional/test_misc.py b/lbrynet/tests/functional/test_misc.py index e806da5c2..34e0d80c9 100644 --- a/lbrynet/tests/functional/test_misc.py +++ b/lbrynet/tests/functional/test_misc.py @@ -7,8 +7,7 @@ import sys import random import unittest -from Crypto import Random -from Crypto.Hash import MD5 +from hashlib import md5 from lbrynet import conf from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager from lbrynet.core.Session import Session @@ -98,9 +97,6 @@ class LbryUploader(object): from twisted.internet import reactor self.reactor = reactor logging.debug("Starting the uploader") - Random.atfork() - r = random.Random() - r.seed("start_lbry_uploader") wallet = FakeWallet() peer_manager = PeerManager() peer_finder = FakePeerFinder(5553, peer_manager, 1) @@ -191,10 +187,6 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event, logging.debug("Starting the uploader") - Random.atfork() - - r = 
random.Random() - r.seed("start_lbry_reuploader") wallet = FakeWallet() peer_port = 5553 + n @@ -297,7 +289,6 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero logging.debug("Starting the uploader") - Random.atfork() wallet = FakeWallet() peer_manager = PeerManager() @@ -515,7 +506,7 @@ class TestTransfer(TestCase): def check_md5_sum(): f = open(os.path.join(db_dir, 'test_file')) - hashsum = MD5.new() + hashsum = md5() hashsum.update(f.read()) self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be") @@ -688,7 +679,7 @@ class TestTransfer(TestCase): def check_md5_sum(): f = open(os.path.join(db_dir, 'test_file')) - hashsum = MD5.new() + hashsum = md5() hashsum.update(f.read()) self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be") @@ -811,7 +802,7 @@ class TestTransfer(TestCase): def check_md5_sum(): f = open('test_file') - hashsum = MD5.new() + hashsum = md5() hashsum.update(f.read()) self.assertEqual(hashsum.hexdigest(), "e5941d615f53312fd66638239c1f90d5") diff --git a/lbrynet/tests/functional/test_streamify.py b/lbrynet/tests/functional/test_streamify.py index c84630272..cda06758b 100644 --- a/lbrynet/tests/functional/test_streamify.py +++ b/lbrynet/tests/functional/test_streamify.py @@ -2,7 +2,7 @@ import os import shutil import tempfile -from Crypto.Hash import MD5 +from hashlib import md5 from twisted.trial.unittest import TestCase from twisted.internet import defer, threads @@ -127,7 +127,7 @@ class TestStreamify(TestCase): self.assertTrue(lbry_file.sd_hash, sd_hash) yield lbry_file.start() f = open('test_file') - hashsum = MD5.new() + hashsum = md5() hashsum.update(f.read()) self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") diff --git a/lbrynet/tests/mocks.py b/lbrynet/tests/mocks.py index d2bce3730..7de4927c7 100644 --- a/lbrynet/tests/mocks.py +++ b/lbrynet/tests/mocks.py @@ -1,7 +1,10 @@ +import base64 import struct import io -from Crypto.PublicKey import RSA +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives.asymmetric import rsa +from cryptography.hazmat.primitives import serialization from twisted.internet import defer, error from twisted.python.failure import Failure @@ -15,6 +18,12 @@ from lbrynet import conf from util import debug_kademlia_packet KB = 2**10 +PUBLIC_EXPOENT = 65537 # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html + + +def decode_rsa_key(pem_key): + decoded = base64.b64decode(''.join(pem_key.splitlines()[1:-1])) + return serialization.load_der_public_key(decoded, default_backend()) class FakeLBRYFile(object): @@ -137,9 +146,9 @@ class PointTraderKeyQueryHandler(object): if self.query_identifiers[0] in queries: new_encoded_pub_key = queries[self.query_identifiers[0]] try: - RSA.importKey(new_encoded_pub_key) + decode_rsa_key(new_encoded_pub_key) except (ValueError, TypeError, IndexError): - return defer.fail(Failure(ValueError("Client sent an invalid public key"))) + return defer.fail(Failure(ValueError("Client sent an invalid public key: {}".format(new_encoded_pub_key)))) self.public_key = new_encoded_pub_key self.wallet.set_public_key_for_peer(self.peer, self.public_key) fields = {'public_key': self.wallet.encoded_public_key} @@ -152,8 +161,10 @@ class PointTraderKeyQueryHandler(object): class Wallet(object): def __init__(self): - self.private_key = RSA.generate(1024) - self.encoded_public_key = self.private_key.publickey().exportKey() + self.private_key = 
rsa.generate_private_key(public_exponent=PUBLIC_EXPOENT, + key_size=1024, backend=default_backend()) + self.encoded_public_key = self.private_key.public_key().public_bytes(serialization.Encoding.PEM, + serialization.PublicFormat.PKCS1) self._config = None self.network = None self.wallet = None diff --git a/lbrynet/tests/unit/cryptstream/test_cryptblob.py b/lbrynet/tests/unit/cryptstream/test_cryptblob.py index 2378c5770..90719166e 100644 --- a/lbrynet/tests/unit/cryptstream/test_cryptblob.py +++ b/lbrynet/tests/unit/cryptstream/test_cryptblob.py @@ -5,11 +5,13 @@ from lbrynet.blob.blob_file import MAX_BLOB_SIZE from lbrynet.tests.mocks import mock_conf_settings -from Crypto import Random -from Crypto.Cipher import AES +from cryptography.hazmat.primitives.ciphers.algorithms import AES import random import string import StringIO +import os + +AES_BLOCK_SIZE_BYTES = AES.block_size / 8 class MocBlob(object): def __init__(self): @@ -44,8 +46,8 @@ class TestCryptBlob(unittest.TestCase): # max blob size is 2*2**20 -1 ( -1 due to required padding in the end ) blob = MocBlob() blob_num = 0 - key = Random.new().read(AES.block_size) - iv = Random.new().read(AES.block_size) + key = os.urandom(AES_BLOCK_SIZE_BYTES) + iv = os.urandom(AES_BLOCK_SIZE_BYTES) maker = CryptBlob.CryptStreamBlobMaker(key, iv, blob_num, blob) write_size = size_of_data string_to_encrypt = random_string(size_of_data) @@ -54,7 +56,7 @@ class TestCryptBlob(unittest.TestCase): done, num_bytes = maker.write(string_to_encrypt) yield maker.close() self.assertEqual(size_of_data, num_bytes) - expected_encrypted_blob_size = ((size_of_data / AES.block_size) + 1) * AES.block_size + expected_encrypted_blob_size = ((size_of_data / AES_BLOCK_SIZE_BYTES) + 1) * AES_BLOCK_SIZE_BYTES self.assertEqual(expected_encrypted_blob_size, len(blob.data)) if size_of_data < MAX_BLOB_SIZE-1: diff --git a/lbrynet/tests/unit/lbryfilemanager/test_EncryptedFileCreator.py b/lbrynet/tests/unit/lbryfilemanager/test_EncryptedFileCreator.py index 07ad7e87f..6a4dcc8fd 100644 --- a/lbrynet/tests/unit/lbryfilemanager/test_EncryptedFileCreator.py +++ b/lbrynet/tests/unit/lbryfilemanager/test_EncryptedFileCreator.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from Crypto.Cipher import AES +from cryptography.hazmat.primitives.ciphers.algorithms import AES import mock from twisted.trial import unittest from twisted.internet import defer @@ -18,7 +18,7 @@ MB = 2**20 def iv_generator(): while True: - yield '3' * AES.block_size + yield '3' * (AES.block_size / 8) class CreateEncryptedFileTest(unittest.TestCase): @@ -47,7 +47,7 @@ class CreateEncryptedFileTest(unittest.TestCase): @defer.inlineCallbacks def create_file(self, filename): handle = mocks.GenFile(3*MB, '1') - key = '2'*AES.block_size + key = '2' * (AES.block_size / 8) out = yield EncryptedFileCreator.create_lbry_file(self.session, self.file_manager, filename, handle, key, iv_generator()) defer.returnValue(out) From fab932abb66ae64b2ddace2eacd45b417be493b9 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Fri, 4 May 2018 23:07:05 -0300 Subject: [PATCH 08/55] replace pycrypto->cryptography --- lbrynet/cryptstream/CryptStreamCreator.py | 10 +-- .../pointtraderclient/pointtraderclient.py | 69 +++++++++++-------- 2 files changed, 44 insertions(+), 35 deletions(-) diff --git a/lbrynet/cryptstream/CryptStreamCreator.py b/lbrynet/cryptstream/CryptStreamCreator.py index e39a50c1d..a3042ac61 100644 --- a/lbrynet/cryptstream/CryptStreamCreator.py +++ b/lbrynet/cryptstream/CryptStreamCreator.py @@ -1,12 +1,12 @@ """ Utility for 
creating Crypt Streams, which are encrypted blobs and associated metadata. """ - +import os import logging + +from cryptography.hazmat.primitives.ciphers.algorithms import AES from twisted.internet import interfaces, defer from zope.interface import implements -from Crypto import Random -from Crypto.Cipher import AES from lbrynet.cryptstream.CryptBlob import CryptStreamBlobMaker @@ -101,13 +101,13 @@ class CryptStreamCreator(object): @staticmethod def random_iv_generator(): while 1: - yield Random.new().read(AES.block_size) + yield os.urandom(AES.block_size / 8) def setup(self): """Create the symmetric key if it wasn't provided""" if self.key is None: - self.key = Random.new().read(AES.block_size) + self.key = os.urandom(AES.block_size / 8) return defer.succeed(True) diff --git a/lbrynet/pointtraderclient/pointtraderclient.py b/lbrynet/pointtraderclient/pointtraderclient.py index 4084ddc8a..030337185 100644 --- a/lbrynet/pointtraderclient/pointtraderclient.py +++ b/lbrynet/pointtraderclient/pointtraderclient.py @@ -1,16 +1,40 @@ +from cryptography.hazmat.backends import default_backend +from cryptography.hazmat.primitives import serialization, hashes +from cryptography.hazmat.primitives.asymmetric import rsa, padding, utils + from lbrynet import conf from twisted.web.client import Agent, FileBodyProducer, Headers, ResponseDone from twisted.internet import threads, defer, protocol -from Crypto.Hash import SHA -from Crypto.PublicKey import RSA -from Crypto.Signature import PKCS1_PSS +from hashlib import sha1 from StringIO import StringIO import time import json import binascii +def gen_rsa_key(bits): + PUBLIC_EXPOENT = 65537 # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html + return rsa.generate_private_key(public_exponent=PUBLIC_EXPOENT, + key_size=4096, backend=default_backend()) + + +def sign(private_key, recipient_public_key=None, amount=None): + encoded_public_key = private_key.public_key().public_bytes(serialization.Encoding.PEM, + serialization.PublicFormat.PKCS1) + timestamp = time.time() + h = sha1() + h.update(encoded_public_key) + if amount and recipient_public_key: + h.update(recipient_public_key) + h.update(str(amount)) + h.update(str(timestamp)) + signature = private_key.sign(h.digest(), padding.PSS(mgf=padding.MGF1(hashes.SHA1()), + salt_length=padding.PSS.MAX_LENGTH), + utils.Prehashed(hashes.SHA1())) + return encoded_public_key, timestamp, binascii.hexlify(signature) + + class BeginningPrinter(protocol.Protocol): def __init__(self, finished): self.finished = finished @@ -64,7 +88,9 @@ def print_error(err): def register_new_account(private_key): data = {} - data['pub_key'] = private_key.publickey().exportKey() + encoded_public_key = private_key.public_key().public_bytes(serialization.Encoding.PEM, + serialization.PublicFormat.PKCS1) + data['pub_key'] = encoded_public_key def get_success_from_body(body): r = json.loads(body) @@ -79,15 +105,7 @@ def register_new_account(private_key): def send_points(private_key, recipient_public_key, amount): - encoded_public_key = private_key.publickey().exportKey() - timestamp = time.time() - h = SHA.new() - h.update(encoded_public_key) - h.update(recipient_public_key) - h.update(str(amount)) - h.update(str(timestamp)) - signer = PKCS1_PSS.new(private_key) - signature = binascii.hexlify(signer.sign(h)) + encoded_public_key, timestamp, signature = sign(private_key, recipient_public_key, amount) data = {} data['sender_pub_key'] = encoded_public_key @@ -110,13 +128,7 @@ def send_points(private_key, 
recipient_public_key, amount): def get_recent_transactions(private_key): - encoded_public_key = private_key.publickey().exportKey() - timestamp = time.time() - h = SHA.new() - h.update(encoded_public_key) - h.update(str(timestamp)) - signer = PKCS1_PSS.new(private_key) - signature = binascii.hexlify(signer.sign(h)) + encoded_public_key, timestamp, signature = sign(private_key) data = {} data['pub_key'] = encoded_public_key @@ -140,13 +152,7 @@ def get_recent_transactions(private_key): def get_balance(private_key): - encoded_public_key = private_key.publickey().exportKey() - timestamp = time.time() - h = SHA.new() - h.update(encoded_public_key) - h.update(str(timestamp)) - signer = PKCS1_PSS.new(private_key) - signature = binascii.hexlify(signer.sign(h)) + encoded_public_key, timestamp, signature = sign(private_key) data = {} data['pub_key'] = encoded_public_key @@ -203,13 +209,15 @@ def run_full_test(): return dl def do_transfer(unused, amount): - d = send_points(keys[0], keys[1].publickey().exportKey(), amount) + encoded_public_key = keys[1].public_key().public_bytes(serialization.Encoding.PEM, + serialization.PublicFormat.PKCS1) + d = send_points(keys[0], encoded_public_key, amount) return d - d1 = threads.deferToThread(RSA.generate, 4096) + d1 = threads.deferToThread(gen_rsa_key, 4096) d1.addCallback(save_key) d1.addCallback(register_new_account) - d2 = threads.deferToThread(RSA.generate, 4096) + d2 = threads.deferToThread(gen_rsa_key, 4096) d2.addCallback(save_key) d2.addCallback(register_new_account) dlist = defer.DeferredList([d1, d2]) @@ -222,6 +230,7 @@ def run_full_test(): if __name__ == "__main__": + conf.initialize_settings() from twisted.internet import reactor From 44b08ae6c9dc2c356df42213e2ff7f4db38651fb Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Fri, 4 May 2018 23:59:08 -0300 Subject: [PATCH 09/55] remove pycrypto --- requirements.txt | 1 - setup.py | 1 - 2 files changed, 2 deletions(-) diff --git a/requirements.txt b/requirements.txt index 37361b412..c7ca316f3 100644 --- a/requirements.txt +++ b/requirements.txt @@ -16,7 +16,6 @@ git+https://github.com/lbryio/lbryschema.git@v0.0.15#egg=lbryschema git+https://github.com/lbryio/lbryum.git@v3.2.1#egg=lbryum miniupnpc==1.9 pbkdf2==1.3 -pycrypto==2.6.1 pyyaml==3.12 PyGithub==1.34 qrcode==5.2.2 diff --git a/setup.py b/setup.py index 7027c6a32..de95e8fde 100644 --- a/setup.py +++ b/setup.py @@ -23,7 +23,6 @@ requires = [ 'lbryschema==0.0.15', 'lbryum==3.2.1', 'miniupnpc', - 'pycrypto', 'pyyaml', 'requests', 'txrequests', From 820789936f707c64d07745e7d9c4c7cb882dee4d Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 00:08:02 -0300 Subject: [PATCH 10/55] bump cryptography==2.2.2 --- requirements.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.txt b/requirements.txt index c7ca316f3..96b589b9b 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,5 +1,5 @@ Twisted==16.6.0 -cryptography==2.0.3 +cryptography==2.2.2 appdirs==1.4.3 argparse==1.2.1 docopt==0.6.2 From 1077d8f93719c2e314b59c2740662b46100e9635 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 00:09:24 -0300 Subject: [PATCH 11/55] update CHANGELOG.md --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index 49125aa34..a3f6bddde 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,8 @@ at anytime. 
* ### Changed + * changed cryptography version to 2.2.2 + * removed pycrypto dependency, replacing all of its calls with cryptography * several internal dht functions to use inlineCallbacks * `DHTHashAnnouncer` and `Node` manage functions to use `LoopingCall`s instead of scheduling with `callLater`. * `store` kademlia rpc method to block on the call finishing and to return storing peer information From d03fc80eac6d233ee8632e5b3cbf0ba13af7cc92 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 00:50:42 -0300 Subject: [PATCH 12/55] make analytics use treq --- lbrynet/analytics.py | 28 +++++++++++++++------------- 1 file changed, 15 insertions(+), 13 deletions(-) diff --git a/lbrynet/analytics.py b/lbrynet/analytics.py index debd99728..6cd1f1e69 100644 --- a/lbrynet/analytics.py +++ b/lbrynet/analytics.py @@ -1,8 +1,8 @@ import collections import logging + +import treq from twisted.internet import defer, task -from requests import auth -from txrequests import Session from lbrynet import conf from lbrynet.core import looping_call_manager, utils, system_info @@ -216,8 +216,8 @@ class Manager(object): class Api(object): - def __init__(self, session, url, write_key, enabled): - self.session = session + def __init__(self, cookies, url, write_key, enabled): + self.cookies = cookies self.url = url self._write_key = write_key self._enabled = enabled @@ -232,14 +232,17 @@ class Api(object): # timeout will have expired. # # by forcing the connection to close, we will disable the keep-alive. + + def update_cookies(response): + self.cookies.update(response.cookies()) + return response + assert endpoint[0] == '/' - headers = {"Connection": "close"} - return self.session.post( - self.url + endpoint, - json=data, - auth=auth.HTTPBasicAuth(self._write_key, ''), - headers=headers - ) + headers = {b"Connection": b"close"} + d = treq.post(self.url + endpoint, auth=(self._write_key, ''), json=data, + headers=headers, cookies=self.cookies) + d.addCallback(update_cookies) + return d def track(self, event): """Send a single tracking event""" @@ -257,11 +260,10 @@ class Api(object): @classmethod def new_instance(cls, enabled=None): """Initialize an instance using values from the configuration""" - session = Session() if enabled is None: enabled = conf.settings['share_usage_data'] return cls( - session, + {}, conf.settings['ANALYTICS_ENDPOINT'], utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']), enabled, From 3982e150915a5e18500d9a97394714d4ee33bc35 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 01:16:26 -0300 Subject: [PATCH 13/55] download headers from s3 using treq --- lbrynet/core/Wallet.py | 53 +++++++++++------------------------------- 1 file changed, 13 insertions(+), 40 deletions(-) diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 6770faca4..9e272a7d5 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -3,14 +3,12 @@ from collections import defaultdict, deque import datetime import logging from decimal import Decimal + +import treq from zope.interface import implements from twisted.internet import threads, reactor, defer, task from twisted.python.failure import Failure -from twisted.python.threadpool import ThreadPool -from twisted._threads._ithreads import AlreadyQuit from twisted.internet.error import ConnectionAborted -from txrequests import Session as _TxRequestsSession -from requests import Session as requestsSession from lbryum import wallet as lbryum_wallet from lbryum.network import Network @@ -36,29 +34,6 @@ from lbrynet.core.Error import 
DownloadCanceledError, RequestCanceledError log = logging.getLogger(__name__) -class TxRequestsSession(_TxRequestsSession): - # Session from txrequests would throw AlreadyQuit errors, this catches them - def __init__(self, pool=None, minthreads=1, maxthreads=4, **kwargs): - requestsSession.__init__(self, **kwargs) # pylint: disable=non-parent-init-called - self.ownPool = False - if pool is None: - self.ownPool = True - pool = ThreadPool(minthreads=minthreads, maxthreads=maxthreads) - # unclosed ThreadPool leads to reactor hangs at shutdown - # this is a problem in many situation, so better enforce pool stop here - - def stop_pool(): - try: - pool.stop() - except AlreadyQuit: - pass - - reactor.addSystemEventTrigger("after", "shutdown", stop_pool) - self.pool = pool - if self.ownPool: - pool.start() - - class ReservedPoints(object): def __init__(self, identifier, amount): self.identifier = identifier @@ -118,20 +93,18 @@ class Wallet(object): @defer.inlineCallbacks def fetch_headers_from_s3(self): - with TxRequestsSession() as s: - r = yield s.get(HEADERS_URL) - raw_headers = r.content - if not len(raw_headers) % HEADER_SIZE: # should be divisible by the header size - s3_height = (len(raw_headers) / HEADER_SIZE) - 1 - local_height = self.local_header_file_height() - if s3_height > local_height: - with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file: - headers_file.write(raw_headers) - log.info("fetched headers from s3 (s3 height: %i)", s3_height) - else: - log.warning("s3 is more out of date than we are") + response = yield treq.get(HEADERS_URL) + if not response.length % HEADER_SIZE: # should be divisible by the header size + s3_height = (response.length / HEADER_SIZE) - 1 + local_height = self.local_header_file_height() + if s3_height > local_height: + with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file: + yield treq.collect(response, headers_file.write) + log.info("fetched headers from s3 (s3 height: %i)", s3_height) else: - log.error("invalid size for headers from s3") + log.warning("s3 is more out of date than we are") + else: + log.error("invalid size for headers from s3") def local_header_file_height(self): headers_path = os.path.join(self.config.path, "blockchain_headers") From 0bf65836c7fe388d4782a335a4e5791ddb73c631 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 02:05:02 -0300 Subject: [PATCH 14/55] resume download from S3 instead of starting from scratch --- lbrynet/core/Wallet.py | 28 +++++++++++++++++++++------- 1 file changed, 21 insertions(+), 7 deletions(-) diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 9e272a7d5..64a3223c6 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -93,13 +93,23 @@ class Wallet(object): @defer.inlineCallbacks def fetch_headers_from_s3(self): - response = yield treq.get(HEADERS_URL) - if not response.length % HEADER_SIZE: # should be divisible by the header size - s3_height = (response.length / HEADER_SIZE) - 1 + local_header_size = self.local_header_file_size() + resume_header = {"Range": "bytes={}-".format(local_header_size)} + response = yield treq.get(HEADERS_URL, headers=resume_header) + got_406 = response.code == 406 # our file is bigger + final_size_after_download = response.length + local_header_size + # should have something to download and a final length divisible by the header size + if not got_406 and final_size_after_download and not final_size_after_download % HEADER_SIZE: + s3_height = (final_size_after_download / 
HEADER_SIZE) - 1 local_height = self.local_header_file_height() if s3_height > local_height: - with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file: - yield treq.collect(response, headers_file.write) + if local_header_size: + log.info("Resuming download of %i bytes from s3", response.length) + with open(os.path.join(self.config.path, "blockchain_headers"), "a+b") as headers_file: + yield treq.collect(response, headers_file.write) + else: + with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file: + yield treq.collect(response, headers_file.write) log.info("fetched headers from s3 (s3 height: %i)", s3_height) else: log.warning("s3 is more out of date than we are") @@ -107,11 +117,15 @@ class Wallet(object): log.error("invalid size for headers from s3") def local_header_file_height(self): + return max((self.local_header_file_size() / HEADER_SIZE) - 1, 0) + + def local_header_file_size(self): headers_path = os.path.join(self.config.path, "blockchain_headers") if os.path.isfile(headers_path): - return max((os.stat(headers_path).st_size / 112) - 1, 0) + return os.stat(headers_path).st_size return 0 + @defer.inlineCallbacks def get_remote_height(self, server, port): connected = defer.Deferred() @@ -136,7 +150,7 @@ class Wallet(object): try: remote_height = yield self.get_remote_height(server_url, port) log.info("%s:%i height: %i, local height: %s", server_url, port, remote_height, local_height) - if remote_height > local_height + s3_headers_depth: + if remote_height > (local_height + s3_headers_depth): defer.returnValue(True) except Exception as err: log.warning("error requesting remote height from %s:%i - %s", server_url, port, err) From 7f88dda0aebc39ca7c613cd1d955c36ccde147cb Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 02:18:03 -0300 Subject: [PATCH 15/55] use treq on loggly --- lbrynet/core/log_support.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/lbrynet/core/log_support.py b/lbrynet/core/log_support.py index a7a46dfeb..9e0a635d1 100644 --- a/lbrynet/core/log_support.py +++ b/lbrynet/core/log_support.py @@ -6,8 +6,7 @@ import os import sys import traceback -from txrequests import Session -from requests.exceptions import ConnectionError +import treq from twisted.internet import defer import twisted.python.log @@ -35,13 +34,13 @@ TRACE = 5 class HTTPSHandler(logging.Handler): - def __init__(self, url, fqdn=False, localname=None, facility=None, session=None): + def __init__(self, url, fqdn=False, localname=None, facility=None, cookies=None): logging.Handler.__init__(self) self.url = url self.fqdn = fqdn self.localname = localname self.facility = facility - self.session = session if session is not None else Session() + self.cookies = cookies or {} def get_full_message(self, record): if record.exc_info: @@ -52,10 +51,8 @@ class HTTPSHandler(logging.Handler): @defer.inlineCallbacks def _emit(self, record): payload = self.format(record) - try: - yield self.session.post(self.url, data=payload) - except ConnectionError: - pass + response = yield treq.post(self.url, data=payload, cookies=self.cookies) + self.cookies.update(response.cookies()) def emit(self, record): return self._emit(record) From e170f3db3ed51dd5a1b610df44156a4c958ad627 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 02:20:21 -0300 Subject: [PATCH 16/55] remove txrequests dependency --- lbrynet/core/system_info.py | 2 +- requirements.txt | 1 - setup.py | 1 - 3 files changed, 1 insertion(+), 3 
deletions(-) diff --git a/lbrynet/core/system_info.py b/lbrynet/core/system_info.py index 94a1f15f2..95cd74bc9 100644 --- a/lbrynet/core/system_info.py +++ b/lbrynet/core/system_info.py @@ -37,7 +37,7 @@ def get_platform(get_ip=True): "build": build_type.BUILD, # CI server sets this during build step } - # TODO: remove this from get_platform and add a get_external_ip function using txrequests + # TODO: remove this from get_platform and add a get_external_ip function using treq if get_ip: try: response = json.loads(urlopen("https://api.lbry.io/ip").read()) diff --git a/requirements.txt b/requirements.txt index 96b589b9b..bea7b8a65 100644 --- a/requirements.txt +++ b/requirements.txt @@ -20,7 +20,6 @@ pyyaml==3.12 PyGithub==1.34 qrcode==5.2.2 requests==2.9.1 -txrequests==0.9.5 service_identity==16.0.0 six>=1.9.0 slowaes==0.1a1 diff --git a/setup.py b/setup.py index de95e8fde..7cc30c751 100644 --- a/setup.py +++ b/setup.py @@ -25,7 +25,6 @@ requires = [ 'miniupnpc', 'pyyaml', 'requests', - 'txrequests', 'txJSON-RPC', 'zope.interface', 'docopt' From 8bfc35753af906e9be63fce325f61307d4dc8d50 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 02:20:55 -0300 Subject: [PATCH 17/55] update changelog --- CHANGELOG.md | 1 + 1 file changed, 1 insertion(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a3f6bddde..e448d2e46 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,7 @@ at anytime. * ### Changed + * changed txrequests for treq * changed cryptography version to 2.2.2 * removed pycrypto dependency, replacing all calls to cryptography * several internal dht functions to use inlineCallbacks From c90140f70124ba3fb4e528d359271aaf6f3d5048 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 02:33:37 -0300 Subject: [PATCH 18/55] make the exchange manager use treq instead of requests --- lbrynet/daemon/ExchangeRateManager.py | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/lbrynet/daemon/ExchangeRateManager.py b/lbrynet/daemon/ExchangeRateManager.py index 6fce06380..69310ea8c 100644 --- a/lbrynet/daemon/ExchangeRateManager.py +++ b/lbrynet/daemon/ExchangeRateManager.py @@ -1,8 +1,9 @@ import time -import requests import logging import json -from twisted.internet import defer, threads + +import treq +from twisted.internet import defer from twisted.internet.task import LoopingCall from lbrynet.core.Error import InvalidExchangeRateResponse @@ -52,9 +53,10 @@ class MarketFeed(object): def is_online(self): return self._online + @defer.inlineCallbacks def _make_request(self): - r = requests.get(self.url, self.params, timeout=self.REQUESTS_TIMEOUT) - return r.text + response = yield treq.get(self.url, params=self.params, timeout=self.REQUESTS_TIMEOUT) + defer.returnValue((yield response.content())) def _handle_response(self, response): return NotImplementedError @@ -75,7 +77,7 @@ class MarketFeed(object): self._online = False def _update_price(self): - d = threads.deferToThread(self._make_request) + d = self._make_request() d.addCallback(self._handle_response) d.addCallback(self._subtract_fee) d.addCallback(self._save_price) From 3ee0f9756d505f0d9696435336eb9fb39a7288bf Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Sat, 5 May 2018 20:51:49 -0300 Subject: [PATCH 19/55] add treq as a dependency + pep8 --- lbrynet/tests/functional/test_misc.py | 1 - lbrynet/tests/mocks.py | 3 ++- requirements.txt | 1 + setup.py | 1 + 4 files changed, 4 insertions(+), 2 deletions(-) diff --git a/lbrynet/tests/functional/test_misc.py 
b/lbrynet/tests/functional/test_misc.py index 34e0d80c9..b134b6da2 100644 --- a/lbrynet/tests/functional/test_misc.py +++ b/lbrynet/tests/functional/test_misc.py @@ -4,7 +4,6 @@ import os import platform import shutil import sys -import random import unittest from hashlib import md5 diff --git a/lbrynet/tests/mocks.py b/lbrynet/tests/mocks.py index 7de4927c7..25ac42e80 100644 --- a/lbrynet/tests/mocks.py +++ b/lbrynet/tests/mocks.py @@ -148,7 +148,8 @@ class PointTraderKeyQueryHandler(object): try: decode_rsa_key(new_encoded_pub_key) except (ValueError, TypeError, IndexError): - return defer.fail(Failure(ValueError("Client sent an invalid public key: {}".format(new_encoded_pub_key)))) + value_error = ValueError("Client sent an invalid public key: {}".format(new_encoded_pub_key)) + return defer.fail(Failure(value_error)) self.public_key = new_encoded_pub_key self.wallet.set_public_key_for_peer(self.peer, self.public_key) fields = {'public_key': self.wallet.encoded_public_key} diff --git a/requirements.txt b/requirements.txt index bea7b8a65..37725800c 100644 --- a/requirements.txt +++ b/requirements.txt @@ -26,3 +26,4 @@ slowaes==0.1a1 txJSON-RPC==0.5 wsgiref==0.1.2 zope.interface==4.3.3 +treq==17.8.0 diff --git a/setup.py b/setup.py index 7cc30c751..d4e4c0f43 100644 --- a/setup.py +++ b/setup.py @@ -27,6 +27,7 @@ requires = [ 'requests', 'txJSON-RPC', 'zope.interface', + 'treq', 'docopt' ] From 805bfbd18e8c6f84fc242fd2bdff6eeb048ddd34 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Mon, 7 May 2018 11:49:54 -0300 Subject: [PATCH 20/55] remove point trader testing client --- lbrynet/pointtraderclient/__init__.py | 10 - .../pointtraderclient/pointtraderclient.py | 239 ------------------ 2 files changed, 249 deletions(-) delete mode 100644 lbrynet/pointtraderclient/__init__.py delete mode 100644 lbrynet/pointtraderclient/pointtraderclient.py diff --git a/lbrynet/pointtraderclient/__init__.py b/lbrynet/pointtraderclient/__init__.py deleted file mode 100644 index 4c5b43dde..000000000 --- a/lbrynet/pointtraderclient/__init__.py +++ /dev/null @@ -1,10 +0,0 @@ -""" -A client library for sending and receiving payments on the point trader network. - -The point trader network is a simple payment system used solely for testing lbrynet-console. A user -creates a public key, registers it with the point trader server, and receives free points for -registering. The public key is used to spend points, and also used as an address to which points -are sent. To spend points, the public key signs a message containing the amount and the destination -public key and sends it to the point trader server. To check for payments, the recipient sends a -signed message asking the point trader server for its balance. 
-""" diff --git a/lbrynet/pointtraderclient/pointtraderclient.py b/lbrynet/pointtraderclient/pointtraderclient.py deleted file mode 100644 index 030337185..000000000 --- a/lbrynet/pointtraderclient/pointtraderclient.py +++ /dev/null @@ -1,239 +0,0 @@ -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import serialization, hashes -from cryptography.hazmat.primitives.asymmetric import rsa, padding, utils - -from lbrynet import conf - -from twisted.web.client import Agent, FileBodyProducer, Headers, ResponseDone -from twisted.internet import threads, defer, protocol -from hashlib import sha1 -from StringIO import StringIO -import time -import json -import binascii - - -def gen_rsa_key(bits): - PUBLIC_EXPOENT = 65537 # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html - return rsa.generate_private_key(public_exponent=PUBLIC_EXPOENT, - key_size=4096, backend=default_backend()) - - -def sign(private_key, recipient_public_key=None, amount=None): - encoded_public_key = private_key.public_key().public_bytes(serialization.Encoding.PEM, - serialization.PublicFormat.PKCS1) - timestamp = time.time() - h = sha1() - h.update(encoded_public_key) - if amount and recipient_public_key: - h.update(recipient_public_key) - h.update(str(amount)) - h.update(str(timestamp)) - signature = private_key.sign(h.digest(), padding.PSS(mgf=padding.MGF1(hashes.SHA1()), - salt_length=padding.PSS.MAX_LENGTH), - utils.Prehashed(hashes.SHA1())) - return encoded_public_key, timestamp, binascii.hexlify(signature) - - -class BeginningPrinter(protocol.Protocol): - def __init__(self, finished): - self.finished = finished - self.data = "" - - def dataReceived(self, bytes): - self.data = self.data + bytes - - def connectionLost(self, reason): - if reason.check(ResponseDone) is not None: - self.finished.callback(str(self.data)) - else: - self.finished.errback(reason) - - -def read_body(response): - d = defer.Deferred() - response.deliverBody(BeginningPrinter(d)) - return d - - -def get_body(response): - if response.code != 200: - print "\n\n\n\nbad error code\n\n\n\n" - raise ValueError(response.phrase) - else: - return read_body(response) - - -def get_body_from_request(path, data): - - from twisted.internet import reactor - - jsondata = FileBodyProducer(StringIO(json.dumps(data))) - agent = Agent(reactor) - d = agent.request( - 'POST', conf.settings['pointtrader_server'] + path, - Headers({'Content-Type': ['application/json']}), jsondata) - d.addCallback(get_body) - return d - - -def print_response(response): - pass - - -def print_error(err): - print err.getTraceback() - return err - - -def register_new_account(private_key): - data = {} - encoded_public_key = private_key.public_key().public_bytes(serialization.Encoding.PEM, - serialization.PublicFormat.PKCS1) - data['pub_key'] = encoded_public_key - - def get_success_from_body(body): - r = json.loads(body) - if not 'success' in r or r['success'] is False: - return False - return True - - d = get_body_from_request('/register/', data) - - d.addCallback(get_success_from_body) - return d - - -def send_points(private_key, recipient_public_key, amount): - encoded_public_key, timestamp, signature = sign(private_key, recipient_public_key, amount) - - data = {} - data['sender_pub_key'] = encoded_public_key - data['recipient_pub_key'] = recipient_public_key - data['amount'] = amount - data['timestamp'] = timestamp - data['signature'] = signature - - def get_success_from_body(body): - r = json.loads(body) - if not 'success' 
in r or r['success'] is False: - return False - return True - - d = get_body_from_request('/send-points/', data) - - d.addCallback(get_success_from_body) - - return d - - -def get_recent_transactions(private_key): - encoded_public_key, timestamp, signature = sign(private_key) - - data = {} - data['pub_key'] = encoded_public_key - data['timestamp'] = timestamp - data['signature'] = signature - data['end_time'] = 0 - data['start_time'] = 120 - - def get_transactions_from_body(body): - r = json.loads(body) - if "transactions" not in r: - raise ValueError("Invalid response: no 'transactions' field") - else: - return r['transactions'] - - d = get_body_from_request('/get-transactions/', data) - - d.addCallback(get_transactions_from_body) - - return d - - -def get_balance(private_key): - encoded_public_key, timestamp, signature = sign(private_key) - - data = {} - data['pub_key'] = encoded_public_key - data['timestamp'] = timestamp - data['signature'] = signature - - def get_balance_from_body(body): - r = json.loads(body) - if not 'balance' in r: - raise ValueError("Invalid response: no 'balance' field") - else: - return float(r['balance']) - - d = get_body_from_request('/get-balance/', data) - - d.addCallback(get_balance_from_body) - - return d - - -def run_full_test(): - - keys = [] - - def save_key(private_key): - keys.append(private_key) - return private_key - - def check_balances_and_transactions(unused, bal1, bal2, num_transactions): - - def assert_balance_is(actual, expected): - assert abs(actual - expected) < .05 - print "correct balance. actual:", str(actual), "expected:", str(expected) - return True - - def assert_transaction_length_is(transactions, expected_length): - assert len(transactions) == expected_length - print "correct transaction length" - return True - - d1 = get_balance(keys[0]) - d1.addCallback(assert_balance_is, bal1) - - d2 = get_balance(keys[1]) - d2.addCallback(assert_balance_is, bal2) - - d3 = get_recent_transactions(keys[0]) - d3.addCallback(assert_transaction_length_is, num_transactions) - - d4 = get_recent_transactions(keys[1]) - d4.addCallback(assert_transaction_length_is, num_transactions) - - dl = defer.DeferredList([d1, d2, d3, d4]) - return dl - - def do_transfer(unused, amount): - encoded_public_key = keys[1].public_key().public_bytes(serialization.Encoding.PEM, - serialization.PublicFormat.PKCS1) - d = send_points(keys[0], encoded_public_key, amount) - return d - - d1 = threads.deferToThread(gen_rsa_key, 4096) - d1.addCallback(save_key) - d1.addCallback(register_new_account) - d2 = threads.deferToThread(gen_rsa_key, 4096) - d2.addCallback(save_key) - d2.addCallback(register_new_account) - dlist = defer.DeferredList([d1, d2]) - dlist.addCallback(check_balances_and_transactions, 1000, 1000, 0) - dlist.addCallback(do_transfer, 50) - dlist.addCallback(check_balances_and_transactions, 950, 1050, 1) - dlist.addCallback(do_transfer, 75) - dlist.addCallback(check_balances_and_transactions, 875, 1125, 2) - dlist.addErrback(print_error) - - -if __name__ == "__main__": - conf.initialize_settings() - - from twisted.internet import reactor - - reactor.callLater(1, run_full_test) - reactor.callLater(25, reactor.stop) - reactor.run() From fd04c607b27ca68d74fac2a72e8452cfc9a2f7df Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Mon, 7 May 2018 13:11:31 -0300 Subject: [PATCH 21/55] typos and fixes from code review --- lbrynet/core/Wallet.py | 4 +++- lbrynet/tests/mocks.py | 4 ++-- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/lbrynet/core/Wallet.py 
b/lbrynet/core/Wallet.py index 64a3223c6..577e6e3d4 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -98,8 +98,10 @@ class Wallet(object): response = yield treq.get(HEADERS_URL, headers=resume_header) got_406 = response.code == 406 # our file is bigger final_size_after_download = response.length + local_header_size + if got_406: + log.warning("s3 is more out of date than we are") # should have something to download and a final length divisible by the header size - if not got_406 and final_size_after_download and not final_size_after_download % HEADER_SIZE: + elif final_size_after_download and not final_size_after_download % HEADER_SIZE: s3_height = (final_size_after_download / HEADER_SIZE) - 1 local_height = self.local_header_file_height() if s3_height > local_height: diff --git a/lbrynet/tests/mocks.py b/lbrynet/tests/mocks.py index 25ac42e80..12770c188 100644 --- a/lbrynet/tests/mocks.py +++ b/lbrynet/tests/mocks.py @@ -18,7 +18,7 @@ from lbrynet import conf from util import debug_kademlia_packet KB = 2**10 -PUBLIC_EXPOENT = 65537 # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html +PUBLIC_EXPONENT = 65537 # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html def decode_rsa_key(pem_key): @@ -162,7 +162,7 @@ class PointTraderKeyQueryHandler(object): class Wallet(object): def __init__(self): - self.private_key = rsa.generate_private_key(public_exponent=PUBLIC_EXPOENT, + self.private_key = rsa.generate_private_key(public_exponent=PUBLIC_EXPONENT, key_size=1024, backend=default_backend()) self.encoded_public_key = self.private_key.public_key().public_bytes(serialization.Encoding.PEM, serialization.PublicFormat.PKCS1) From 2299098884833ccf7017edb3b5d718305c93991c Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Mon, 7 May 2018 15:10:19 -0300 Subject: [PATCH 22/55] add integrity check for the headers file --- lbrynet/conf.py | 1 + lbrynet/core/Wallet.py | 24 +++++++++++++++++++++++- 2 files changed, 24 insertions(+), 1 deletion(-) diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 0be7a423e..9c596216e 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -236,6 +236,7 @@ FIXED_SETTINGS = { 'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5' 'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='), 'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET], + 'HEADERS_FILE_SHA256_CHECKSUM': (366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9') } ADJUSTABLE_SETTINGS = { diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 577e6e3d4..00f1511a9 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -10,6 +10,7 @@ from twisted.internet import threads, reactor, defer, task from twisted.python.failure import Failure from twisted.internet.error import ConnectionAborted +from hashlib import sha256 from lbryum import wallet as lbryum_wallet from lbryum.network import Network from lbryum.simple_config import SimpleConfig @@ -127,7 +128,6 @@ class Wallet(object): return os.stat(headers_path).st_size return 0 - @defer.inlineCallbacks def get_remote_height(self, server, port): connected = defer.Deferred() @@ -143,6 +143,7 @@ class Wallet(object): from lbrynet import conf if conf.settings['blockchain_name'] != "lbrycrd_main": defer.returnValue(False) + self._check_header_file_integrity(conf) s3_headers_depth = conf.settings['s3_headers_depth'] if not s3_headers_depth: defer.returnValue(False) @@ -158,6 +159,27 @@ class Wallet(object): log.warning("error requesting remote height 
from %s:%i - %s", server_url, port, err) defer.returnValue(False) + def _check_header_file_integrity(self, conf): + # TODO: temporary workaround for usability. move to txlbryum and check headers instead of file integrity + hashsum = sha256() + checksum_height, checksum = conf.settings['HEADERS_FILE_SHA256_CHECKSUM'] + checksum_length_in_bytes = checksum_height * HEADER_SIZE + if self.local_header_file_size() < checksum_length_in_bytes: + return + headers_path = os.path.join(self.config.path, "blockchain_headers") + with open(headers_path, "rb") as headers_file: + hashsum.update(headers_file.read(checksum_length_in_bytes)) + current_checksum = hashsum.hexdigest() + if current_checksum != checksum: + msg = "Expected checksum {}, got {}".format(checksum, current_checksum) + log.warning("Wallet file corrupted, checksum mismatch. " + msg) + log.warning("Deleting header file so it can be downloaded again.") + os.unlink(headers_path) + elif (self.local_header_file_size() % HEADER_SIZE) != 0: + log.warning("Header file is good up to checkpoint height, but incomplete. Truncating to checkpoint.") + with open(headers_path, "rb+") as headers_file: + headers_file.truncate(checksum_length_in_bytes) + @defer.inlineCallbacks def start(self): should_download_headers = yield self.should_download_headers_from_s3() From d2e7ac5b6e39a91626793361655fc208399449d0 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Mon, 7 May 2018 15:10:27 -0300 Subject: [PATCH 23/55] update changelog for the integrity check and partial s3 downloads --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index e448d2e46..28a256d4e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,8 @@ at anytime. * ### Changed + * check headers file integrity on startup, removing/truncating the file to force re-download when necessary + * support partial headers file download from S3 * changed txrequests for treq * changed cryptography version to 2.2.2 * removed pycrypto dependency, replacing all calls to cryptography From 3a9f9a464e6ff2c4b658dd45c3a7b15a4731f83c Mon Sep 17 00:00:00 2001 From: Thomas Zarebczan Date: Tue, 8 May 2018 11:32:05 -0400 Subject: [PATCH 24/55] update changelog with 19.3 --- CHANGELOG.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 49125aa34..b232d1b5f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -57,7 +57,11 @@ at anytime. * old `blob_announce_all` command * `AuthJSONRPCServer.auth_required` decorator * unused `--wallet` argument to `lbrynet-daemon`, which used to be to support `PTCWallet`. 
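The header-file integrity check added in PATCH 22 reduces to hashing the first N headers and comparing against a hard-coded checkpoint. A minimal standalone sketch of that idea, not part of the patch series: the 112-byte header size and the file path argument are assumptions for illustration, while the checkpoint tuple is the one added to conf.py above.

from hashlib import sha256
import os

HEADER_SIZE = 112  # assumed size of one serialized lbrycrd block header
# (height, sha256 of the first `height` headers), as added to conf.py
CHECKSUM = (366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9')

def check_headers_file(headers_path):
    height, expected = CHECKSUM
    checkpoint_size = height * HEADER_SIZE
    if not os.path.isfile(headers_path) or os.path.getsize(headers_path) < checkpoint_size:
        return  # file is shorter than the checkpoint, nothing to verify yet
    with open(headers_path, 'rb') as headers_file:
        digest = sha256(headers_file.read(checkpoint_size)).hexdigest()
    if digest != expected:
        os.unlink(headers_path)  # corrupted: delete so it gets downloaded again
    elif os.path.getsize(headers_path) % HEADER_SIZE:
        # valid up to the checkpoint but ends in a partial header: keep the good part
        with open(headers_path, 'rb+') as headers_file:
            headers_file.truncate(checkpoint_size)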
- + +## [0.19.3] - 2018-05-04 +### Changed + * download blockchain headers from s3 before starting the wallet when the local height is more than s3_headers_depth (a config setting) blocks behind (https://github.com/lbryio/lbry/pull/1177) + * un-deprecated report_bug command (https://github.com/lbryio/lbry/commit/f8e418fb4448a3ed1531657f8b3c608fb568af85) ## [0.19.2] - 2018-03-28 ### Fixed From aa4240de857d63ed21c7ca6e254d060b16fd622c Mon Sep 17 00:00:00 2001 From: Jack Robison Date: Tue, 8 May 2018 13:51:02 -0400 Subject: [PATCH 25/55] track successful reflector uploads in sqlite to minimize how many streams are attempted by auto re-reflect -increase the default `auto_re_reflect_interval` to a day --- CHANGELOG.md | 2 ++ lbrynet/conf.py | 6 ++--- lbrynet/daemon/Daemon.py | 2 +- lbrynet/database/migrator/dbmigrator.py | 2 ++ lbrynet/database/migrator/migrate7to8.py | 21 +++++++++++++++ lbrynet/database/storage.py | 28 ++++++++++++++++++++ lbrynet/file_manager/EncryptedFileManager.py | 8 +++--- lbrynet/reflector/client/client.py | 18 ++++++++++--- 8 files changed, 77 insertions(+), 10 deletions(-) create mode 100644 lbrynet/database/migrator/migrate7to8.py diff --git a/CHANGELOG.md b/CHANGELOG.md index 49125aa34..5b843d860 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -45,6 +45,8 @@ at anytime. * support both positional and keyword args for api calls * `peer_list` to return a list of dictionaries instead of a list of lists, added peer node ids to the results * download blockchain headers from s3 before starting the wallet when the local height is more than `s3_headers_depth` (a config setting) blocks behind + * track successful reflector uploads in sqlite to minimize how many streams are attempted by auto re-reflect + * increase the default `auto_re_reflect_interval` to a day ### Added * virtual kademlia network and mock udp transport for dht integration tests diff --git a/lbrynet/conf.py b/lbrynet/conf.py index 0be7a423e..1577794d9 100644 --- a/lbrynet/conf.py +++ b/lbrynet/conf.py @@ -280,10 +280,10 @@ ADJUSTABLE_SETTINGS = { 'peer_port': (int, 3333), 'pointtrader_server': (str, 'http://127.0.0.1:2424'), 'reflector_port': (int, 5566), - # if reflect_uploads is True, send files to reflector (after publishing as well as a - # periodic check in the event the initial upload failed or was disconnected part way through + # if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the + # event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0) 'reflect_uploads': (bool, True), - 'auto_re_reflect_interval': (int, 3600), + 'auto_re_reflect_interval': (int, 86400), # set to 0 to disable 'reflector_servers': (list, [('reflector2.lbry.io', 5566)], server_list), 'run_reflector_server': (bool, False), 'sd_download_timeout': (int, 3), diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index df39a6137..43c67e0d7 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -199,7 +199,7 @@ class Daemon(AuthJSONRPCServer): self.connected_to_internet = True self.connection_status_code = None self.platform = None - self.current_db_revision = 7 + self.current_db_revision = 8 self.db_revision_file = conf.settings.get_db_revision_filename() self.session = None self._session_id = conf.settings.get_session_id() diff --git a/lbrynet/database/migrator/dbmigrator.py b/lbrynet/database/migrator/dbmigrator.py index a4057db38..ab1519380 100644 --- a/lbrynet/database/migrator/dbmigrator.py +++ 
b/lbrynet/database/migrator/dbmigrator.py @@ -16,6 +16,8 @@ def migrate_db(db_dir, start, end): from lbrynet.database.migrator.migrate5to6 import do_migration elif current == 6: from lbrynet.database.migrator.migrate6to7 import do_migration + elif current == 7: + from lbrynet.database.migrator.migrate7to8 import do_migration else: raise Exception("DB migration of version {} to {} is not available".format(current, current+1)) diff --git a/lbrynet/database/migrator/migrate7to8.py b/lbrynet/database/migrator/migrate7to8.py new file mode 100644 index 000000000..d048224e9 --- /dev/null +++ b/lbrynet/database/migrator/migrate7to8.py @@ -0,0 +1,21 @@ +import sqlite3 +import os + + +def do_migration(db_dir): + db_path = os.path.join(db_dir, "lbrynet.sqlite") + connection = sqlite3.connect(db_path) + cursor = connection.cursor() + + cursor.executescript( + """ + create table reflected_stream ( + sd_hash text not null, + reflector_address text not null, + timestamp integer, + primary key (sd_hash, reflector_address) + ); + """ + ) + connection.commit() + connection.close() diff --git a/lbrynet/database/storage.py b/lbrynet/database/storage.py index e3bdd649c..122f7a866 100644 --- a/lbrynet/database/storage.py +++ b/lbrynet/database/storage.py @@ -157,6 +157,13 @@ class SQLiteStorage(object): amount integer not null, address text not null ); + + create table if not exists reflected_stream ( + sd_hash text not null, + reflector_address text not null, + timestamp integer, + primary key (sd_hash, reflector_address) + ); """ def __init__(self, db_dir, reactor=None): @@ -765,3 +772,24 @@ class SQLiteStorage(object): (height, outpoint) ) return self.db.runInteraction(_save_claim_heights) + + # # # # # # # # # reflector functions # # # # # # # # # + + def update_reflected_stream(self, sd_hash, reflector_address, success=True): + if success: + return self.db.runOperation( + "insert or replace into reflected_stream values (?, ?, ?)", + (sd_hash, reflector_address, self.clock.seconds()) + ) + return self.db.runOperation( + "delete from reflected_stream where sd_hash=? 
and reflector_address=?", + (sd_hash, reflector_address) + ) + + def get_streams_to_re_reflect(self): + return self.run_and_return_list( + "select s.sd_hash from stream s " + "left outer join reflected_stream r on s.sd_hash=r.sd_hash " + "where r.timestamp is null or r.timestamp < ?", + self.clock.seconds() - conf.settings['auto_re_reflect_interval'] + ) diff --git a/lbrynet/file_manager/EncryptedFileManager.py b/lbrynet/file_manager/EncryptedFileManager.py index 5f91eae01..0fffd6e00 100644 --- a/lbrynet/file_manager/EncryptedFileManager.py +++ b/lbrynet/file_manager/EncryptedFileManager.py @@ -31,7 +31,7 @@ class EncryptedFileManager(object): def __init__(self, session, sd_identifier): - self.auto_re_reflect = conf.settings['reflect_uploads'] + self.auto_re_reflect = conf.settings['reflect_uploads'] and conf.settings['auto_re_reflect_interval'] > 0 self.auto_re_reflect_interval = conf.settings['auto_re_reflect_interval'] self.session = session self.storage = session.storage @@ -140,7 +140,7 @@ class EncryptedFileManager(object): log.info("Started %i lbry files", len(self.lbry_files)) if self.auto_re_reflect is True: - safe_start_looping_call(self.lbry_file_reflector, self.auto_re_reflect_interval) + safe_start_looping_call(self.lbry_file_reflector, self.auto_re_reflect_interval / 10) @defer.inlineCallbacks def _stop_lbry_file(self, lbry_file): @@ -253,8 +253,10 @@ class EncryptedFileManager(object): def reflect_lbry_files(self): sem = defer.DeferredSemaphore(self.CONCURRENT_REFLECTS) ds = [] + sd_hashes_to_reflect = yield self.storage.get_streams_to_re_reflect() for lbry_file in self.lbry_files: - ds.append(sem.run(reflect_file, lbry_file)) + if lbry_file.sd_hash in sd_hashes_to_reflect: + ds.append(sem.run(reflect_file, lbry_file)) yield defer.DeferredList(ds) @defer.inlineCallbacks diff --git a/lbrynet/reflector/client/client.py b/lbrynet/reflector/client/client.py index 329eeb5e0..09c4694c4 100644 --- a/lbrynet/reflector/client/client.py +++ b/lbrynet/reflector/client/client.py @@ -55,6 +55,16 @@ class EncryptedFileReflectorClient(Protocol): d.addCallback(lambda _: self.send_next_request()) d.addErrback(self.response_failure_handler) + def store_result(self, result): + if not self.needed_blobs or len(self.reflected_blobs) == len(self.needed_blobs): + reflected = True + else: + reflected = False + + d = self.blob_manager.storage.update_reflected_stream(self.sd_hash, self.transport.getPeer().host, reflected) + d.addCallback(lambda _: result) + return d + def connectionLost(self, reason): # make sure blob file readers get closed self.set_not_uploading() @@ -68,15 +78,17 @@ class EncryptedFileReflectorClient(Protocol): else: log.info('Finished sending reflector %i blobs for %s', len(self.reflected_blobs), self.stream_descriptor) - self.factory.finished_deferred.callback(self.reflected_blobs) + result = self.reflected_blobs elif reason.check(error.ConnectionLost): log.warning("Stopped reflecting %s after sending %i blobs", self.stream_descriptor, len(self.reflected_blobs)) - self.factory.finished_deferred.callback(self.reflected_blobs) + result = self.reflected_blobs else: log.info('Reflector finished for %s: %s', self.stream_descriptor, reason) - self.factory.finished_deferred.callback(reason) + result = reason + self.factory.finished_deferred.addCallback(self.store_result) + self.factory.finished_deferred.callback(result) # IConsumer stuff From a0a7187f7d208a81f7d81efc8327e22cf37fda44 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Tue, 8 May 2018 16:09:06 -0300 Subject: [PATCH 26/55] 
add integrity checks after s3 download as well --- lbrynet/core/Wallet.py | 10 +++++++--- 1 file changed, 7 insertions(+), 3 deletions(-) diff --git a/lbrynet/core/Wallet.py b/lbrynet/core/Wallet.py index 00f1511a9..8e7b4f429 100644 --- a/lbrynet/core/Wallet.py +++ b/lbrynet/core/Wallet.py @@ -113,7 +113,8 @@ class Wallet(object): else: with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file: yield treq.collect(response, headers_file.write) - log.info("fetched headers from s3 (s3 height: %i)", s3_height) + log.info("fetched headers from s3 (s3 height: %i), now verifying integrity after download.", s3_height) + self._check_header_file_integrity() else: log.warning("s3 is more out of date than we are") else: @@ -143,7 +144,7 @@ class Wallet(object): from lbrynet import conf if conf.settings['blockchain_name'] != "lbrycrd_main": defer.returnValue(False) - self._check_header_file_integrity(conf) + self._check_header_file_integrity() s3_headers_depth = conf.settings['s3_headers_depth'] if not s3_headers_depth: defer.returnValue(False) @@ -159,8 +160,11 @@ class Wallet(object): log.warning("error requesting remote height from %s:%i - %s", server_url, port, err) defer.returnValue(False) - def _check_header_file_integrity(self, conf): + def _check_header_file_integrity(self): # TODO: temporary workaround for usability. move to txlbryum and check headers instead of file integrity + from lbrynet import conf + if conf.settings['blockchain_name'] != "lbrycrd_main": + return hashsum = sha256() checksum_height, checksum = conf.settings['HEADERS_FILE_SHA256_CHECKSUM'] checksum_length_in_bytes = checksum_height * HEADER_SIZE From df735252e54a3aeae229e4261d8cb9c45ce145d4 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Tue, 8 May 2018 16:49:17 -0300 Subject: [PATCH 27/55] verify streams only after migration --- lbrynet/daemon/Daemon.py | 6 +++--- lbrynet/file_manager/EncryptedFileManager.py | 20 +++++++++++--------- 2 files changed, 14 insertions(+), 12 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 43c67e0d7..0eaa24285 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -244,7 +244,7 @@ class Daemon(AuthJSONRPCServer): yield self._start_analytics() yield add_lbry_file_to_sd_identifier(self.sd_identifier) yield self._setup_stream_identifier() - yield self._setup_lbry_file_manager() + yield self._setup_lbry_file_manager(verify_streams=migrated) yield self._setup_query_handlers() yield self._setup_server() log.info("Starting balance: " + str(self.session.wallet.get_balance())) @@ -512,11 +512,11 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(migrated) @defer.inlineCallbacks - def _setup_lbry_file_manager(self): + def _setup_lbry_file_manager(self, verify_streams): log.info('Starting the file manager') self.startup_status = STARTUP_STAGES[3] self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) - yield self.lbry_file_manager.setup() + yield self.lbry_file_manager.setup(verify_streams) log.info('Done setting up file manager') def _start_analytics(self): diff --git a/lbrynet/file_manager/EncryptedFileManager.py b/lbrynet/file_manager/EncryptedFileManager.py index 0fffd6e00..73cc3fb12 100644 --- a/lbrynet/file_manager/EncryptedFileManager.py +++ b/lbrynet/file_manager/EncryptedFileManager.py @@ -42,9 +42,9 @@ class EncryptedFileManager(object): self.lbry_file_reflector = task.LoopingCall(self.reflect_lbry_files) @defer.inlineCallbacks - def setup(self): + def setup(self, verify_streams=False): 
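The re-reflect bookkeeping in PATCH 25 above comes down to one left join: any stream whose sd_hash has no reflected_stream row, or whose row is older than the configured interval, is due for another upload. A minimal sketch of that query against a pared-down schema (the in-memory database and sample row are illustrative; the join itself is the one used by get_streams_to_re_reflect):

import sqlite3
import time

db = sqlite3.connect(':memory:')
db.executescript("""
    create table stream (sd_hash text primary key);
    create table reflected_stream (
        sd_hash text not null,
        reflector_address text not null,
        timestamp integer,
        primary key (sd_hash, reflector_address)
    );
""")
db.execute("insert into stream values ('deadbeef')")

auto_re_reflect_interval = 86400  # the new default: one day
due = db.execute(
    "select s.sd_hash from stream s "
    "left outer join reflected_stream r on s.sd_hash=r.sd_hash "
    "where r.timestamp is null or r.timestamp < ?",
    (time.time() - auto_re_reflect_interval,)
).fetchall()
# [('deadbeef',)] -- never reflected, so its timestamp is null and it is returned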
yield self._add_to_sd_identifier() - yield self._start_lbry_files() + yield self._start_lbry_files(verify_streams) log.info("Started file manager") def get_lbry_file_status(self, lbry_file): @@ -97,7 +97,7 @@ class EncryptedFileManager(object): ) @defer.inlineCallbacks - def _start_lbry_file(self, file_info, payment_rate_manager): + def _start_lbry_file(self, file_info, payment_rate_manager, verify_stream): lbry_file = self._get_lbry_file( file_info['row_id'], file_info['stream_hash'], payment_rate_manager, file_info['sd_hash'], file_info['key'], file_info['stream_name'], file_info['file_name'], file_info['download_directory'], @@ -105,10 +105,12 @@ class EncryptedFileManager(object): ) yield lbry_file.get_claim_info() try: - # verify the stream is valid (we might have downloaded an invalid stream - # in the past when the validation check didn't work) - stream_info = yield get_sd_info(self.storage, file_info['stream_hash'], include_blobs=True) - validate_descriptor(stream_info) + # verify if the stream is valid (we might have downloaded an invalid stream + # in the past when the validation check didn't work. This runs after every + # migration to ensure blobs migrated from that past version gets verified) + if verify_stream: + stream_info = yield get_sd_info(self.storage, file_info['stream_hash'], include_blobs=True) + validate_descriptor(stream_info) except InvalidStreamDescriptorError as err: log.warning("Stream for descriptor %s is invalid (%s), cleaning it up", lbry_file.sd_hash, err.message) @@ -126,7 +128,7 @@ class EncryptedFileManager(object): log.warning("Failed to start %i", file_info.get('rowid')) @defer.inlineCallbacks - def _start_lbry_files(self): + def _start_lbry_files(self, verify_streams): files = yield self.session.storage.get_all_lbry_files() b_prm = self.session.base_payment_rate_manager payment_rate_manager = NegotiatedPaymentRateManager(b_prm, self.session.blob_tracker) @@ -134,7 +136,7 @@ class EncryptedFileManager(object): log.info("Starting %i files", len(files)) dl = [] for file_info in files: - dl.append(self._start_lbry_file(file_info, payment_rate_manager)) + dl.append(self._start_lbry_file(file_info, payment_rate_manager, verify_streams)) yield defer.DeferredList(dl) From acd330aa2a6cbc072ad41582b5b5bf4bacfa5e52 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Tue, 8 May 2018 17:49:49 -0300 Subject: [PATCH 28/55] get claim info and channel name using a single query --- lbrynet/database/storage.py | 19 +++++++++---------- 1 file changed, 9 insertions(+), 10 deletions(-) diff --git a/lbrynet/database/storage.py b/lbrynet/database/storage.py index 122f7a866..8c8d29717 100644 --- a/lbrynet/database/storage.py +++ b/lbrynet/database/storage.py @@ -718,16 +718,15 @@ class SQLiteStorage(object): return r def _get_claim(transaction): - claim_info = transaction.execute( - "select * from claim where claim_outpoint=?", (claim_outpoint, ) - ).fetchone() - result = _claim_response(*claim_info) - if result['channel_claim_id']: - channel_name_result = transaction.execute( - "select claim_name from claim where claim_id=?", (result['channel_claim_id'], ) - ).fetchone() - if channel_name_result: - result['channel_name'] = channel_name_result[0] + claim_info = transaction.execute("select c.*, " + "case when c.channel_claim_id is not null then " + "(select claim_name from claim where claim_id==c.channel_claim_id) " + "else null end as channel_name from claim c where claim_outpoint = ?", + (claim_outpoint,)).fetchone() + channel_name = claim_info[-1] + result = 
_claim_response(*claim_info[:-1]) + if channel_name: + result['channel_name'] = channel_name return result result = yield self.db.runInteraction(_get_claim) From d55ded78eed85ffe95eb9767ae542e62863e6951 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Tue, 8 May 2018 18:30:58 -0300 Subject: [PATCH 29/55] get claim with channel_name from a stream hash in a single query --- lbrynet/database/storage.py | 68 ++++++++++++++++++++++--------------- 1 file changed, 40 insertions(+), 28 deletions(-) diff --git a/lbrynet/database/storage.py b/lbrynet/database/storage.py index 8c8d29717..2e6bee68d 100644 --- a/lbrynet/database/storage.py +++ b/lbrynet/database/storage.py @@ -683,40 +683,34 @@ class SQLiteStorage(object): @defer.inlineCallbacks def get_content_claim(self, stream_hash, include_supports=True): - def _get_content_claim(transaction): - claim_id = transaction.execute( - "select claim.claim_outpoint from content_claim " - "inner join claim on claim.claim_outpoint=content_claim.claim_outpoint and content_claim.stream_hash=? " - "order by claim.rowid desc", (stream_hash, ) + def _get_claim_from_stream_hash(transaction): + claim_info = transaction.execute( + "select c.*, " + "case when c.channel_claim_id is not null then " + "(select claim_name from claim where claim_id==c.channel_claim_id) " + "else null end as channel_name from content_claim " + "inner join claim c on c.claim_outpoint=content_claim.claim_outpoint " + "and content_claim.stream_hash=? order by c.rowid desc", (stream_hash,) ).fetchone() - if not claim_id: + if not claim_info: return None - return claim_id[0] + channel_name = claim_info[-1] + result = _format_claim_response(*claim_info[:-1]) + if channel_name: + result['channel_name'] = channel_name + return result - content_claim_outpoint = yield self.db.runInteraction(_get_content_claim) - result = None - if content_claim_outpoint: - result = yield self.get_claim(content_claim_outpoint, include_supports) + result = yield self.db.runInteraction(_get_claim_from_stream_hash) + if result and include_supports: + supports = yield self.get_supports(result['claim_id']) + result['supports'] = supports + result['effective_amount'] = float( + sum([support['amount'] for support in supports]) + result['amount'] + ) defer.returnValue(result) @defer.inlineCallbacks def get_claim(self, claim_outpoint, include_supports=True): - def _claim_response(outpoint, claim_id, name, amount, height, serialized, channel_id, address, claim_sequence): - r = { - "name": name, - "claim_id": claim_id, - "address": address, - "claim_sequence": claim_sequence, - "value": ClaimDict.deserialize(serialized.decode('hex')).claim_dict, - "height": height, - "amount": float(Decimal(amount) / Decimal(COIN)), - "nout": int(outpoint.split(":")[1]), - "txid": outpoint.split(":")[0], - "channel_claim_id": channel_id, - "channel_name": None - } - return r - def _get_claim(transaction): claim_info = transaction.execute("select c.*, " "case when c.channel_claim_id is not null then " @@ -724,7 +718,7 @@ class SQLiteStorage(object): "else null end as channel_name from claim c where claim_outpoint = ?", (claim_outpoint,)).fetchone() channel_name = claim_info[-1] - result = _claim_response(*claim_info[:-1]) + result = _format_claim_response(*claim_info[:-1]) if channel_name: result['channel_name'] = channel_name return result @@ -792,3 +786,21 @@ class SQLiteStorage(object): "where r.timestamp is null or r.timestamp < ?", self.clock.seconds() - conf.settings['auto_re_reflect_interval'] ) + + +# Helper functions +def 
_format_claim_response(outpoint, claim_id, name, amount, height, serialized, channel_id, address, claim_sequence): + r = { + "name": name, + "claim_id": claim_id, + "address": address, + "claim_sequence": claim_sequence, + "value": ClaimDict.deserialize(serialized.decode('hex')).claim_dict, + "height": height, + "amount": float(Decimal(amount) / Decimal(COIN)), + "nout": int(outpoint.split(":")[1]), + "txid": outpoint.split(":")[0], + "channel_claim_id": channel_id, + "channel_name": None + } + return r From ca86af736eb794ff291c7cddd776db2aaa4c0dff Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Tue, 8 May 2018 19:46:29 -0300 Subject: [PATCH 30/55] add batching support to get_supports and tests --- lbrynet/database/storage.py | 9 +++++++-- .../tests/unit/database/test_SQLiteStorage.py | 19 +++++++++++++++++++ 2 files changed, 26 insertions(+), 2 deletions(-) diff --git a/lbrynet/database/storage.py b/lbrynet/database/storage.py index 2e6bee68d..8f7a4b2cf 100644 --- a/lbrynet/database/storage.py +++ b/lbrynet/database/storage.py @@ -552,7 +552,7 @@ class SQLiteStorage(object): ) return self.db.runInteraction(_save_support) - def get_supports(self, claim_id): + def get_supports(self, *claim_ids): def _format_support(outpoint, supported_id, amount, address): return { "txid": outpoint.split(":")[0], @@ -563,10 +563,15 @@ class SQLiteStorage(object): } def _get_supports(transaction): + if len(claim_ids) == 1: + bind = "=?" + else: + bind = "in ({})".format(','.join('?' for _ in range(len(claim_ids)))) return [ _format_support(*support_info) for support_info in transaction.execute( - "select * from support where claim_id=?", (claim_id, ) + "select * from support where claim_id {}".format(bind), + tuple(claim_ids) ).fetchall() ] diff --git a/lbrynet/tests/unit/database/test_SQLiteStorage.py b/lbrynet/tests/unit/database/test_SQLiteStorage.py index 5bfe72988..5df80ee2e 100644 --- a/lbrynet/tests/unit/database/test_SQLiteStorage.py +++ b/lbrynet/tests/unit/database/test_SQLiteStorage.py @@ -163,6 +163,25 @@ class BlobStorageTests(StorageTest): self.assertEqual(blob_hashes, []) +class SupportsStorageTests(StorageTest): + @defer.inlineCallbacks + def test_supports_storage(self): + claim_ids = [random_lbry_hash() for _ in range(10)] + random_supports = [{"txid": random_lbry_hash(), "nout":i, "address": "addr{}".format(i), "amount": i} + for i in range(20)] + expected_supports = {} + for idx, claim_id in enumerate(claim_ids): + yield self.storage.save_supports(claim_id, random_supports[idx*2:idx*2+2]) + for random_support in random_supports[idx*2:idx*2+2]: + random_support['claim_id'] = claim_id + expected_supports.setdefault(claim_id, []).append(random_support) + supports = yield self.storage.get_supports(claim_ids[0]) + self.assertEqual(supports, expected_supports[claim_ids[0]]) + all_supports = yield self.storage.get_supports(*claim_ids) + for support in all_supports: + self.assertIn(support, expected_supports[support['claim_id']]) + + class StreamStorageTests(StorageTest): @defer.inlineCallbacks def test_store_stream(self, stream_hash=None): From b6cedfec56b09bf7b5e5912a51eb22b4724982b7 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 9 May 2018 10:50:44 -0300 Subject: [PATCH 31/55] batch-start the file manager --- lbrynet/database/storage.py | 38 +++++++++++++++++++ .../file_manager/EncryptedFileDownloader.py | 19 ++++++---- lbrynet/file_manager/EncryptedFileManager.py | 9 +++-- 3 files changed, 55 insertions(+), 11 deletions(-) diff --git a/lbrynet/database/storage.py 
b/lbrynet/database/storage.py index 8f7a4b2cf..aae8f180a 100644 --- a/lbrynet/database/storage.py +++ b/lbrynet/database/storage.py @@ -714,6 +714,44 @@ class SQLiteStorage(object): ) defer.returnValue(result) + @defer.inlineCallbacks + def get_claims_from_stream_hashes(self, stream_hashes, include_supports=True): + def _batch_get_claim(transaction): + results = {} + bind = "({})".format(','.join('?' for _ in range(len(stream_hashes)))) + claim_infos = transaction.execute( + "select content_claim.stream_hash, c.*, " + "case when c.channel_claim_id is not null then " + "(select claim_name from claim where claim_id==c.channel_claim_id) " + "else null end as channel_name from content_claim " + "inner join claim c on c.claim_outpoint=content_claim.claim_outpoint " + "and content_claim.stream_hash in {} order by c.rowid desc".format(bind), + tuple(stream_hashes) + ).fetchall() + for claim_info in claim_infos: + channel_name = claim_info[-1] + stream_hash = claim_info[0] + result = _format_claim_response(*claim_info[1:-1]) + if channel_name: + result['channel_name'] = channel_name + results[stream_hash] = result + return results + + claims = yield self.db.runInteraction(_batch_get_claim) + if include_supports: + all_supports = {} + for support in (yield self.get_supports(*[claim['claim_id'] for claim in claims.values()])): + all_supports.setdefault(support['claim_id'], []).append(support) + for stream_hash in claims.keys(): + claim = claims[stream_hash] + supports = all_supports.get(claim['claim_id'], []) + claim['supports'] = supports + claim['effective_amount'] = float( + sum([support['amount'] for support in supports]) + claim['amount'] + ) + claims[stream_hash] = claim + defer.returnValue(claims) + @defer.inlineCallbacks def get_claim(self, claim_outpoint, include_supports=True): def _get_claim(transaction): diff --git a/lbrynet/file_manager/EncryptedFileDownloader.py b/lbrynet/file_manager/EncryptedFileDownloader.py index 2e2a054c1..25abd3e18 100644 --- a/lbrynet/file_manager/EncryptedFileDownloader.py +++ b/lbrynet/file_manager/EncryptedFileDownloader.py @@ -56,18 +56,21 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver): self.channel_name = None self.metadata = None + def set_claim_info(self, claim_info): + self.claim_id = claim_info['claim_id'] + self.txid = claim_info['txid'] + self.nout = claim_info['nout'] + self.channel_claim_id = claim_info['channel_claim_id'] + self.outpoint = "%s:%i" % (self.txid, self.nout) + self.claim_name = claim_info['name'] + self.channel_name = claim_info['channel_name'] + self.metadata = claim_info['value']['stream']['metadata'] + @defer.inlineCallbacks def get_claim_info(self, include_supports=True): claim_info = yield self.storage.get_content_claim(self.stream_hash, include_supports) if claim_info: - self.claim_id = claim_info['claim_id'] - self.txid = claim_info['txid'] - self.nout = claim_info['nout'] - self.channel_claim_id = claim_info['channel_claim_id'] - self.outpoint = "%s:%i" % (self.txid, self.nout) - self.claim_name = claim_info['name'] - self.channel_name = claim_info['channel_name'] - self.metadata = claim_info['value']['stream']['metadata'] + self.set_claim_info(claim_info) defer.returnValue(claim_info) diff --git a/lbrynet/file_manager/EncryptedFileManager.py b/lbrynet/file_manager/EncryptedFileManager.py index 73cc3fb12..d28006dbd 100644 --- a/lbrynet/file_manager/EncryptedFileManager.py +++ b/lbrynet/file_manager/EncryptedFileManager.py @@ -97,13 +97,14 @@ class EncryptedFileManager(object): ) @defer.inlineCallbacks - def 
_start_lbry_file(self, file_info, payment_rate_manager, verify_stream): + def _start_lbry_file(self, file_info, payment_rate_manager, verify_stream, claim_info): lbry_file = self._get_lbry_file( file_info['row_id'], file_info['stream_hash'], payment_rate_manager, file_info['sd_hash'], file_info['key'], file_info['stream_name'], file_info['file_name'], file_info['download_directory'], file_info['suggested_file_name'] ) - yield lbry_file.get_claim_info() + if claim_info: + lbry_file.set_claim_info(claim_info) try: # verify if the stream is valid (we might have downloaded an invalid stream # in the past when the validation check didn't work. This runs after every @@ -130,13 +131,15 @@ class EncryptedFileManager(object): @defer.inlineCallbacks def _start_lbry_files(self, verify_streams): files = yield self.session.storage.get_all_lbry_files() + claim_infos = yield self.session.storage.get_claims_from_stream_hashes([file['stream_hash'] for file in files]) b_prm = self.session.base_payment_rate_manager payment_rate_manager = NegotiatedPaymentRateManager(b_prm, self.session.blob_tracker) log.info("Starting %i files", len(files)) dl = [] for file_info in files: - dl.append(self._start_lbry_file(file_info, payment_rate_manager, verify_streams)) + claim_info = claim_infos.get(file_info['stream_hash']) + dl.append(self._start_lbry_file(file_info, payment_rate_manager, verify_streams, claim_info)) yield defer.DeferredList(dl) From e1f4623a652556d76ef9ef64289234354de16672 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Wed, 9 May 2018 12:40:03 -0300 Subject: [PATCH 32/55] add changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b94d5fd44..d024e5292 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,6 +30,8 @@ at anytime. * changed txrequests for treq * changed cryptography version to 2.2.2 * removed pycrypto dependency, replacing all calls to cryptography + * full verification of streams only during migration instead of every startup + * database batching functions for starting up the file manager * several internal dht functions to use inlineCallbacks * `DHTHashAnnouncer` and `Node` manage functions to use `LoopingCall`s instead of scheduling with `callLater`. * `store` kademlia rpc method to block on the call finishing and to return storing peer information From b2469801abb49c56782331d2202a4bc38cbf8633 Mon Sep 17 00:00:00 2001 From: Fornost461 Date: Sun, 13 May 2018 14:46:24 +0200 Subject: [PATCH 33/55] grammar (missing word) --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 2798ae99e..abdc28049 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ LBRY is a protocol that provides a fully decentralized network for the discovery It utilizes the [LBRY blockchain](https://github.com/lbryio/lbrycrd) as a global namespace and database of digital content. Blockchain entries contain searchable content metadata, identities, and rights and access rules. -LBRY also provides a data network consists of peers uploading and downloading data from other peers, possibly in exchange for payments, and a distributed hash table, used by peers to discover other peers. +LBRY also provides a data network that consists of peers uploading and downloading data from other peers, possibly in exchange for payments, and a distributed hash table, used by peers to discover other peers. 
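The batch queries introduced in PATCHes 30 and 31 above all lean on the same sqlite3 idiom: a Python list cannot be bound to a single "?", so the query string is built with one placeholder per value. A minimal sketch of the pattern (the function name and connection handling are illustrative; it assumes a non-empty id list, since "in ()" is a syntax error in sqlite):

import sqlite3

def get_supports_for_claims(db, claim_ids):
    binds = ','.join('?' for _ in claim_ids)  # e.g. "?,?,?" for three ids
    return db.execute(
        "select * from support where claim_id in ({})".format(binds),
        tuple(claim_ids)
    ).fetchall()

# usage: get_supports_for_claims(sqlite3.connect('lbrynet.sqlite'), ['aaa', 'bbb'])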
## Contributions From 208284a0f8f1f441d988bdb2c34798fe43c36150 Mon Sep 17 00:00:00 2001 From: Sergey Rozhnov Date: Wed, 16 May 2018 18:29:44 +0400 Subject: [PATCH 34/55] claim_list and claim_list_mine in Daemon return sorted results --- lbrynet/daemon/Daemon.py | 5 +- lbrynet/daemon/claims_comparator.py | 36 +++++++++++++ lbrynet/tests/unit/daemon/__init__.py | 0 .../unit/daemon/test_claims_comparator.py | 52 +++++++++++++++++++ 4 files changed, 92 insertions(+), 1 deletion(-) create mode 100644 lbrynet/daemon/claims_comparator.py create mode 100644 lbrynet/tests/unit/daemon/__init__.py create mode 100644 lbrynet/tests/unit/daemon/test_claims_comparator.py diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 43c67e0d7..9da91e320 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -36,6 +36,7 @@ from lbrynet.daemon.Downloader import GetStream from lbrynet.daemon.Publisher import Publisher from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager from lbrynet.daemon.auth.server import AuthJSONRPCServer +from lbrynet.daemon.claims_comparator import arrange_results from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager from lbrynet.core import utils, system_info from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob @@ -2293,6 +2294,7 @@ class Daemon(AuthJSONRPCServer): """ d = self.session.wallet.get_name_claims() + d.addCallback(arrange_results) d.addCallback(lambda claims: self._render_response(claims)) return d @@ -2331,7 +2333,8 @@ class Daemon(AuthJSONRPCServer): """ claims = yield self.session.wallet.get_claims_for_name(name) - defer.returnValue(claims) + result = arrange_results(claims) + defer.returnValue(result) @defer.inlineCallbacks def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]): diff --git a/lbrynet/daemon/claims_comparator.py b/lbrynet/daemon/claims_comparator.py new file mode 100644 index 000000000..4a1ea9870 --- /dev/null +++ b/lbrynet/daemon/claims_comparator.py @@ -0,0 +1,36 @@ +_comparison_order = ['height', 'name', 'claim_id'] # TODO outpoint + + +def arrange_results(claims): + for claim in claims: + results = claim['result'] + sorted_results = sorted(results, cmp=_compare_results) + claim['result'] = sorted_results + return claims + + +def _compare_results(left, right): + """ + :type left: dict + :type right: dict + """ + result = 0 + + for attribute in _comparison_order: + left_value = left[attribute] + right_value = right[attribute] + sub_result = _cmp(left_value, right_value) + if sub_result is not 0: + result = sub_result + break + + return result + + +def _cmp(left, right): + if left == right: + return 0 + elif left < right: + return -1 + else: + return 1 diff --git a/lbrynet/tests/unit/daemon/__init__.py b/lbrynet/tests/unit/daemon/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/lbrynet/tests/unit/daemon/test_claims_comparator.py b/lbrynet/tests/unit/daemon/test_claims_comparator.py new file mode 100644 index 000000000..9651c2137 --- /dev/null +++ b/lbrynet/tests/unit/daemon/test_claims_comparator.py @@ -0,0 +1,52 @@ +import unittest + +from lbrynet.daemon.claims_comparator import arrange_results + + +class ClaimsComparatorTest(unittest.TestCase): + def test_arrange_results(self): + results = [ + { + 'height': 1, + 'name': 'res', + 'claim_id': 'ccc' + }, + { + 'height': 1, + 'name': 'res', + 'claim_id': 'aaa' + }, + { + 'height': 1, + 'name': 'res', + 'claim_id': 'bbb' + } + ] + data = {'result': results} + + expected 
= [ + { + 'height': 1, + 'name': 'res', + 'claim_id': 'aaa' + }, + { + 'height': 1, + 'name': 'res', + 'claim_id': 'bbb' + }, + { + 'height': 1, + 'name': 'res', + 'claim_id': 'ccc' + } + ] + claims = arrange_results([data]) + claim = claims[0] + actual = claim['result'] + + self.assertEqual(expected, actual) + + +if __name__ == '__main__': + unittest.main() From a7a5bb8887c1e98e4d99dbc2c548f2a3be8bfa82 Mon Sep 17 00:00:00 2001 From: Sergey Rozhnov Date: Thu, 17 May 2018 13:36:32 +0400 Subject: [PATCH 35/55] simplified claims comparison logic; refactored unit tests --- lbrynet/daemon/claims_comparator.py | 32 ++--------- .../unit/daemon/claims_comparator_cases.json | 53 +++++++++++++++++++ .../unit/daemon/test_claims_comparator.py | 53 +++++-------------- 3 files changed, 70 insertions(+), 68 deletions(-) create mode 100644 lbrynet/tests/unit/daemon/claims_comparator_cases.json diff --git a/lbrynet/daemon/claims_comparator.py b/lbrynet/daemon/claims_comparator.py index 4a1ea9870..44d27a955 100644 --- a/lbrynet/daemon/claims_comparator.py +++ b/lbrynet/daemon/claims_comparator.py @@ -1,36 +1,10 @@ -_comparison_order = ['height', 'name', 'claim_id'] # TODO outpoint - - def arrange_results(claims): for claim in claims: results = claim['result'] - sorted_results = sorted(results, cmp=_compare_results) + sorted_results = sorted(results, key=lambda d: (d['height'], d['name'], d['claim_id'], _outpoint(d))) claim['result'] = sorted_results return claims -def _compare_results(left, right): - """ - :type left: dict - :type right: dict - """ - result = 0 - - for attribute in _comparison_order: - left_value = left[attribute] - right_value = right[attribute] - sub_result = _cmp(left_value, right_value) - if sub_result is not 0: - result = sub_result - break - - return result - - -def _cmp(left, right): - if left == right: - return 0 - elif left < right: - return -1 - else: - return 1 +def _outpoint(claim): + return '{}:{}'.format(claim['txid'], claim['nout']) diff --git a/lbrynet/tests/unit/daemon/claims_comparator_cases.json b/lbrynet/tests/unit/daemon/claims_comparator_cases.json new file mode 100644 index 000000000..dbcb732b7 --- /dev/null +++ b/lbrynet/tests/unit/daemon/claims_comparator_cases.json @@ -0,0 +1,53 @@ +{ + "cases": [ + { + "description": "sort by claim_id", + "results": [ + { + "height": 1, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "fdsafa" + }, + { + "height": 1, + "name": "res", + "claim_id": "aaa", + "nout": 0, + "txid": "w5tv8uorgt" + }, + { + "height": 1, + "name": "res", + "claim_id": "bbb", + "nout": 0, + "txid": "aecfaewcfa" + } + ], + "expected": [ + { + "height": 1, + "name": "res", + "claim_id": "aaa", + "nout": 0, + "txid": "w5tv8uorgt" + }, + { + "height": 1, + "name": "res", + "claim_id": "bbb", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "fdsafa" + } + ] + } + ] +} \ No newline at end of file diff --git a/lbrynet/tests/unit/daemon/test_claims_comparator.py b/lbrynet/tests/unit/daemon/test_claims_comparator.py index 9651c2137..07fcbc503 100644 --- a/lbrynet/tests/unit/daemon/test_claims_comparator.py +++ b/lbrynet/tests/unit/daemon/test_claims_comparator.py @@ -1,51 +1,26 @@ +import json import unittest from lbrynet.daemon.claims_comparator import arrange_results class ClaimsComparatorTest(unittest.TestCase): + def setUp(self): + with open('claims_comparator_cases.json') as f: + document = json.load(f) + self.cases = document['cases'] + def test_arrange_results(self): - 
results = [ - { - 'height': 1, - 'name': 'res', - 'claim_id': 'ccc' - }, - { - 'height': 1, - 'name': 'res', - 'claim_id': 'aaa' - }, - { - 'height': 1, - 'name': 'res', - 'claim_id': 'bbb' - } - ] - data = {'result': results} + for case in self.cases: + results = case['results'] + data = {'result': results} + expected = case['expected'] - expected = [ - { - 'height': 1, - 'name': 'res', - 'claim_id': 'aaa' - }, - { - 'height': 1, - 'name': 'res', - 'claim_id': 'bbb' - }, - { - 'height': 1, - 'name': 'res', - 'claim_id': 'ccc' - } - ] - claims = arrange_results([data]) - claim = claims[0] - actual = claim['result'] + claims = arrange_results([data]) + claim = claims[0] + actual = claim['result'] - self.assertEqual(expected, actual) + self.assertEqual(expected, actual, case['description']) if __name__ == '__main__': From 3d0f74c8ce1469ad527c83cff564c6f76805a62c Mon Sep 17 00:00:00 2001 From: Sergey Rozhnov Date: Thu, 17 May 2018 13:49:09 +0400 Subject: [PATCH 36/55] implemented additional test cases for claims sorting; updated changelog --- CHANGELOG.md | 1 + .../unit/daemon/claims_comparator_cases.json | 147 ++++++++++++++++++ 2 files changed, 148 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index b94d5fd44..b95a4f1eb 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -52,6 +52,7 @@ at anytime. * download blockchain headers from s3 before starting the wallet when the local height is more than `s3_headers_depth` (a config setting) blocks behind * track successful reflector uploads in sqlite to minimize how many streams are attempted by auto re-reflect * increase the default `auto_re_reflect_interval` to a day + * `claim_list` and `claim_list_mine` in Daemon `return` sorted results ### Added * virtual kademlia network and mock udp transport for dht integration tests diff --git a/lbrynet/tests/unit/daemon/claims_comparator_cases.json b/lbrynet/tests/unit/daemon/claims_comparator_cases.json index dbcb732b7..11592fbf1 100644 --- a/lbrynet/tests/unit/daemon/claims_comparator_cases.json +++ b/lbrynet/tests/unit/daemon/claims_comparator_cases.json @@ -48,6 +48,153 @@ "txid": "fdsafa" } ] + }, + { + "description": "sort by height", + "results": [ + { + "height": 1, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 3, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 2, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + } + ], + "expected": [ + { + "claim_id": "ccc", + "height": 1, + "name": "res", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "claim_id": "ccc", + "height": 2, + "name": "res", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "claim_id": "ccc", + "height": 3, + "name": "res", + "nout": 0, + "txid": "aecfaewcfa" + } + ] + }, + { + "description": "sort by name", + "results": [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res3", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res2", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + } + ], + "expected": [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res2", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res3", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + } + ] + }, + { + "description": "sort by outpoint", + "results": [ + { + "height": 1, + 
"name": "res1", + "claim_id": "ccc", + "nout": 2, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 1, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 3, + "txid": "aecfaewcfa" + } + ], + "expected": [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 1, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 2, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 3, + "txid": "aecfaewcfa" + } + ] } ] } \ No newline at end of file From b48492c1d67250e10dbf0fd598b57f71442a5560 Mon Sep 17 00:00:00 2001 From: Victor Shyba Date: Tue, 15 May 2018 21:44:30 -0300 Subject: [PATCH 37/55] verify streams on a new migration instead --- lbrynet/daemon/Daemon.py | 8 +-- lbrynet/database/migrator/dbmigrator.py | 2 + lbrynet/database/migrator/migrate8to9.py | 54 ++++++++++++++++++++ lbrynet/file_manager/EncryptedFileManager.py | 46 +++++------------ 4 files changed, 74 insertions(+), 36 deletions(-) create mode 100644 lbrynet/database/migrator/migrate8to9.py diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index 0eaa24285..3e5b407ee 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -199,7 +199,7 @@ class Daemon(AuthJSONRPCServer): self.connected_to_internet = True self.connection_status_code = None self.platform = None - self.current_db_revision = 8 + self.current_db_revision = 9 self.db_revision_file = conf.settings.get_db_revision_filename() self.session = None self._session_id = conf.settings.get_session_id() @@ -244,7 +244,7 @@ class Daemon(AuthJSONRPCServer): yield self._start_analytics() yield add_lbry_file_to_sd_identifier(self.sd_identifier) yield self._setup_stream_identifier() - yield self._setup_lbry_file_manager(verify_streams=migrated) + yield self._setup_lbry_file_manager() yield self._setup_query_handlers() yield self._setup_server() log.info("Starting balance: " + str(self.session.wallet.get_balance())) @@ -512,11 +512,11 @@ class Daemon(AuthJSONRPCServer): defer.returnValue(migrated) @defer.inlineCallbacks - def _setup_lbry_file_manager(self, verify_streams): + def _setup_lbry_file_manager(self): log.info('Starting the file manager') self.startup_status = STARTUP_STAGES[3] self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier) - yield self.lbry_file_manager.setup(verify_streams) + yield self.lbry_file_manager.setup() log.info('Done setting up file manager') def _start_analytics(self): diff --git a/lbrynet/database/migrator/dbmigrator.py b/lbrynet/database/migrator/dbmigrator.py index ab1519380..196263f0a 100644 --- a/lbrynet/database/migrator/dbmigrator.py +++ b/lbrynet/database/migrator/dbmigrator.py @@ -18,6 +18,8 @@ def migrate_db(db_dir, start, end): from lbrynet.database.migrator.migrate6to7 import do_migration elif current == 7: from lbrynet.database.migrator.migrate7to8 import do_migration + elif current == 8: + from lbrynet.database.migrator.migrate8to9 import do_migration else: raise Exception("DB migration of version {} to {} is not available".format(current, current+1)) diff --git a/lbrynet/database/migrator/migrate8to9.py b/lbrynet/database/migrator/migrate8to9.py new file mode 100644 index 000000000..a518e9899 --- /dev/null +++ b/lbrynet/database/migrator/migrate8to9.py @@ -0,0 +1,54 @@ +import sqlite3 +import logging +import os + +from lbrynet.core.Error import InvalidStreamDescriptorError +from 
lbrynet.core.StreamDescriptor import EncryptedFileStreamType, format_sd_info, format_blobs, validate_descriptor
+from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
+
+log = logging.getLogger(__name__)
+
+
+def do_migration(db_dir):
+    db_path = os.path.join(db_dir, "lbrynet.sqlite")
+    blob_dir = os.path.join(db_dir, "blobfiles")
+    connection = sqlite3.connect(db_path)
+    cursor = connection.cursor()
+
+    query = "select stream_name, stream_key, suggested_filename, sd_hash, stream_hash from stream"
+    streams = cursor.execute(query).fetchall()
+
+    blobs = cursor.execute("select s.stream_hash, s.position, s.iv, b.blob_hash, b.blob_length from stream_blob s "
+                           "left outer join blob b ON b.blob_hash=s.blob_hash order by s.position").fetchall()
+    blobs_by_stream = {}
+    for stream_hash, position, iv, blob_hash, blob_length in blobs:
+        blobs_by_stream.setdefault(stream_hash, []).append(CryptBlobInfo(blob_hash, position, blob_length or 0, iv))
+
+    for stream_name, stream_key, suggested_filename, sd_hash, stream_hash in streams:
+        sd_info = format_sd_info(
+            EncryptedFileStreamType, stream_name, stream_key,
+            suggested_filename, stream_hash, format_blobs(blobs_by_stream[stream_hash])
+        )
+        try:
+            validate_descriptor(sd_info)
+        except InvalidStreamDescriptorError as err:
+            log.warning("Stream for descriptor %s is invalid (%s), cleaning it up",
+                        sd_hash, err.message)
+            blob_hashes = [blob.blob_hash for blob in blobs_by_stream[stream_hash]]
+            delete_stream(cursor, stream_hash, sd_hash, blob_hashes, blob_dir)
+
+    connection.commit()
+    connection.close()
+
+
+def delete_stream(transaction, stream_hash, sd_hash, blob_hashes, blob_dir):
+    transaction.execute("delete from content_claim where stream_hash=? ", (stream_hash,))
+    transaction.execute("delete from file where stream_hash=? ", (stream_hash, ))
+    transaction.execute("delete from stream_blob where stream_hash=?", (stream_hash, ))
+    transaction.execute("delete from stream where stream_hash=? ", (stream_hash, ))
+    transaction.execute("delete from blob where blob_hash=?", (sd_hash, ))
+    for blob_hash in blob_hashes:
+        transaction.execute("delete from blob where blob_hash=?", (blob_hash, ))
+        file_path = os.path.join(blob_dir, blob_hash)
+        if os.path.isfile(file_path):
+            os.unlink(file_path)

diff --git a/lbrynet/file_manager/EncryptedFileManager.py b/lbrynet/file_manager/EncryptedFileManager.py
index d28006dbd..02245c39c 100644
--- a/lbrynet/file_manager/EncryptedFileManager.py
+++ b/lbrynet/file_manager/EncryptedFileManager.py
@@ -6,12 +6,11 @@ import logging
 from twisted.internet import defer, task, reactor
 from twisted.python.failure import Failure
 
-from lbrynet.core.Error import InvalidStreamDescriptorError
 from lbrynet.reflector.reupload import reflect_file
 from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager
 from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
 from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
-from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, get_sd_info, validate_descriptor
+from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, get_sd_info
 from lbrynet.cryptstream.client.CryptStreamDownloader import AlreadyStoppedError
 from lbrynet.cryptstream.client.CryptStreamDownloader import CurrentlyStoppingError
 from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
@@ -42,9 +41,9 @@ class EncryptedFileManager(object):
         self.lbry_file_reflector = task.LoopingCall(self.reflect_lbry_files)
 
     @defer.inlineCallbacks
-    def setup(self, verify_streams=False):
+    def setup(self):
         yield self._add_to_sd_identifier()
-        yield self._start_lbry_files(verify_streams)
+        yield self._start_lbry_files()
         log.info("Started file manager")
 
     def get_lbry_file_status(self, lbry_file):
@@ -96,8 +95,7 @@ class EncryptedFileManager(object):
             suggested_file_name=suggested_file_name
         )
 
-    @defer.inlineCallbacks
-    def _start_lbry_file(self, file_info, payment_rate_manager, verify_stream, claim_info):
+    def _start_lbry_file(self, file_info, payment_rate_manager, claim_info):
         lbry_file = self._get_lbry_file(
             file_info['row_id'], file_info['stream_hash'], payment_rate_manager, file_info['sd_hash'],
             file_info['key'], file_info['stream_name'], file_info['file_name'], file_info['download_directory'],
@@ -106,42 +104,26 @@ class EncryptedFileManager(object):
         if claim_info:
             lbry_file.set_claim_info(claim_info)
         try:
-            # verify if the stream is valid (we might have downloaded an invalid stream
-            # in the past when the validation check didn't work. This runs after every
-            # migration to ensure blobs migrated from that past version gets verified)
-            if verify_stream:
-                stream_info = yield get_sd_info(self.storage, file_info['stream_hash'], include_blobs=True)
-                validate_descriptor(stream_info)
-        except InvalidStreamDescriptorError as err:
-            log.warning("Stream for descriptor %s is invalid (%s), cleaning it up",
-                        lbry_file.sd_hash, err.message)
-            yield lbry_file.delete_data()
-            yield self.session.storage.delete_stream(lbry_file.stream_hash)
-        else:
-            try:
-                # restore will raise an Exception if status is unknown
-                lbry_file.restore(file_info['status'])
-                self.storage.content_claim_callbacks[lbry_file.stream_hash] = lbry_file.get_claim_info
-                self.lbry_files.append(lbry_file)
-                if len(self.lbry_files) % 500 == 0:
-                    log.info("Started %i files", len(self.lbry_files))
-            except Exception:
-                log.warning("Failed to start %i", file_info.get('rowid'))
+            # restore will raise an Exception if status is unknown
+            lbry_file.restore(file_info['status'])
+            self.storage.content_claim_callbacks[lbry_file.stream_hash] = lbry_file.get_claim_info
+            self.lbry_files.append(lbry_file)
+            if len(self.lbry_files) % 500 == 0:
+                log.info("Started %i files", len(self.lbry_files))
+        except Exception:
+            log.warning("Failed to start %i", file_info.get('rowid'))
 
     @defer.inlineCallbacks
-    def _start_lbry_files(self, verify_streams):
+    def _start_lbry_files(self):
         files = yield self.session.storage.get_all_lbry_files()
         claim_infos = yield self.session.storage.get_claims_from_stream_hashes([file['stream_hash'] for file in files])
         b_prm = self.session.base_payment_rate_manager
         payment_rate_manager = NegotiatedPaymentRateManager(b_prm, self.session.blob_tracker)
 
         log.info("Starting %i files", len(files))
-        dl = []
         for file_info in files:
             claim_info = claim_infos.get(file_info['stream_hash'])
-            dl.append(self._start_lbry_file(file_info, payment_rate_manager, verify_streams, claim_info))
-
-        yield defer.DeferredList(dl)
+            self._start_lbry_file(file_info, payment_rate_manager, claim_info)
 
         log.info("Started %i lbry files", len(self.lbry_files))
         if self.auto_re_reflect is True:

From 40bcf96c3dad46359f4c01c4989721a0cd6da903 Mon Sep 17 00:00:00 2001
From: Sergey Rozhnov
Date: Fri, 18 May 2018 12:53:32 +0400
Subject: [PATCH 38/55] refactored sorting of claims and unit tests

---
 lbrynet/daemon/Daemon.py                      |   7 +-
 lbrynet/daemon/claims_comparator.py           |  10 -
 .../unit/daemon/claims_comparator_cases.json  | 200 ----------------
 .../unit/daemon/test_claims_comparator.py     | 217 ++++++++++++++++--
 4 files changed, 208 insertions(+), 226 deletions(-)
 delete mode 100644 lbrynet/daemon/claims_comparator.py
 delete mode 100644 lbrynet/tests/unit/daemon/claims_comparator_cases.json

diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py
index 9da91e320..c8b7e376f 100644
--- a/lbrynet/daemon/Daemon.py
+++ b/lbrynet/daemon/Daemon.py
@@ -36,7 +36,6 @@ from lbrynet.daemon.Downloader import GetStream
 from lbrynet.daemon.Publisher import Publisher
 from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager
 from lbrynet.daemon.auth.server import AuthJSONRPCServer
-from lbrynet.daemon.claims_comparator import arrange_results
 from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager
 from lbrynet.core import utils, system_info
 from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob
@@ -164,6 +163,12 @@ class AlwaysSend(object):
         return d
 
 
+def arrange_results(claims):
+    for claim in claims:
+        claim['result'].sort(key=lambda d: (d['height'], d['name'],
d['claim_id'], d['txid'], d['nout'])) + return claims + + class Daemon(AuthJSONRPCServer): """ LBRYnet daemon, a jsonrpc interface to lbry functions diff --git a/lbrynet/daemon/claims_comparator.py b/lbrynet/daemon/claims_comparator.py deleted file mode 100644 index 44d27a955..000000000 --- a/lbrynet/daemon/claims_comparator.py +++ /dev/null @@ -1,10 +0,0 @@ -def arrange_results(claims): - for claim in claims: - results = claim['result'] - sorted_results = sorted(results, key=lambda d: (d['height'], d['name'], d['claim_id'], _outpoint(d))) - claim['result'] = sorted_results - return claims - - -def _outpoint(claim): - return '{}:{}'.format(claim['txid'], claim['nout']) diff --git a/lbrynet/tests/unit/daemon/claims_comparator_cases.json b/lbrynet/tests/unit/daemon/claims_comparator_cases.json deleted file mode 100644 index 11592fbf1..000000000 --- a/lbrynet/tests/unit/daemon/claims_comparator_cases.json +++ /dev/null @@ -1,200 +0,0 @@ -{ - "cases": [ - { - "description": "sort by claim_id", - "results": [ - { - "height": 1, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "fdsafa" - }, - { - "height": 1, - "name": "res", - "claim_id": "aaa", - "nout": 0, - "txid": "w5tv8uorgt" - }, - { - "height": 1, - "name": "res", - "claim_id": "bbb", - "nout": 0, - "txid": "aecfaewcfa" - } - ], - "expected": [ - { - "height": 1, - "name": "res", - "claim_id": "aaa", - "nout": 0, - "txid": "w5tv8uorgt" - }, - { - "height": 1, - "name": "res", - "claim_id": "bbb", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "fdsafa" - } - ] - }, - { - "description": "sort by height", - "results": [ - { - "height": 1, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 3, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 2, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - } - ], - "expected": [ - { - "claim_id": "ccc", - "height": 1, - "name": "res", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "claim_id": "ccc", - "height": 2, - "name": "res", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "claim_id": "ccc", - "height": 3, - "name": "res", - "nout": 0, - "txid": "aecfaewcfa" - } - ] - }, - { - "description": "sort by name", - "results": [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res3", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res2", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - } - ], - "expected": [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res2", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res3", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - } - ] - }, - { - "description": "sort by outpoint", - "results": [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 2, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 1, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 3, - "txid": "aecfaewcfa" - } - ], - "expected": [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 1, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 2, - "txid": "aecfaewcfa" - }, - { - 
"height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 3, - "txid": "aecfaewcfa" - } - ] - } - ] -} \ No newline at end of file diff --git a/lbrynet/tests/unit/daemon/test_claims_comparator.py b/lbrynet/tests/unit/daemon/test_claims_comparator.py index 07fcbc503..416a394d5 100644 --- a/lbrynet/tests/unit/daemon/test_claims_comparator.py +++ b/lbrynet/tests/unit/daemon/test_claims_comparator.py @@ -1,26 +1,213 @@ -import json import unittest -from lbrynet.daemon.claims_comparator import arrange_results +from lbrynet.daemon.Daemon import arrange_results class ClaimsComparatorTest(unittest.TestCase): - def setUp(self): - with open('claims_comparator_cases.json') as f: - document = json.load(f) - self.cases = document['cases'] + def test_arrange_results_when_sorted_by_claim_id(self): + self.run_test( + [ + { + "height": 1, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "fdsafa" + }, + { + "height": 1, + "name": "res", + "claim_id": "aaa", + "nout": 0, + "txid": "w5tv8uorgt" + }, + { + "height": 1, + "name": "res", + "claim_id": "bbb", + "nout": 0, + "txid": "aecfaewcfa" + } + ], + [ + { + "height": 1, + "name": "res", + "claim_id": "aaa", + "nout": 0, + "txid": "w5tv8uorgt" + }, + { + "height": 1, + "name": "res", + "claim_id": "bbb", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "fdsafa" + } + ]) - def test_arrange_results(self): - for case in self.cases: - results = case['results'] - data = {'result': results} - expected = case['expected'] + def test_arrange_results_when_sorted_by_height(self): + self.run_test( + [ + { + "height": 1, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 3, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 2, + "name": "res", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + } + ], + [ + { + "claim_id": "ccc", + "height": 1, + "name": "res", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "claim_id": "ccc", + "height": 2, + "name": "res", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "claim_id": "ccc", + "height": 3, + "name": "res", + "nout": 0, + "txid": "aecfaewcfa" + } + ]) - claims = arrange_results([data]) - claim = claims[0] - actual = claim['result'] + def test_arrange_results_when_sorted_by_name(self): + self.run_test( + [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res3", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res2", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + } + ], + [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res2", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res3", + "claim_id": "ccc", + "nout": 0, + "txid": "aecfaewcfa" + } + ]) - self.assertEqual(expected, actual, case['description']) + def test_arrange_results_when_sort_by_outpoint(self): + self.run_test( + [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 2, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 1, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 3, + "txid": "aecfaewcfa" + } + ], + [ + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 1, + "txid": "aecfaewcfa" + }, + { + "height": 
1, + "name": "res1", + "claim_id": "ccc", + "nout": 2, + "txid": "aecfaewcfa" + }, + { + "height": 1, + "name": "res1", + "claim_id": "ccc", + "nout": 3, + "txid": "aecfaewcfa" + } + ]) + + def run_test(self, results, expected): + data = {'result': results} + + claims = arrange_results([data]) + claim = claims[0] + actual = claim['result'] + + self.assertEqual(expected, actual) if __name__ == '__main__': From 13353bcfe4cdb4c700b1eff1601a7d2099e2748e Mon Sep 17 00:00:00 2001 From: Sergey Rozhnov Date: Fri, 18 May 2018 18:51:28 +0400 Subject: [PATCH 39/55] refactored unit test for sort_claim_results --- lbrynet/daemon/Daemon.py | 6 +- .../unit/daemon/test_claims_comparator.py | 228 +++--------------- 2 files changed, 33 insertions(+), 201 deletions(-) diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py index c8b7e376f..ac497c37c 100644 --- a/lbrynet/daemon/Daemon.py +++ b/lbrynet/daemon/Daemon.py @@ -163,7 +163,7 @@ class AlwaysSend(object): return d -def arrange_results(claims): +def sort_claim_results(claims): for claim in claims: claim['result'].sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout'])) return claims @@ -2299,7 +2299,7 @@ class Daemon(AuthJSONRPCServer): """ d = self.session.wallet.get_name_claims() - d.addCallback(arrange_results) + d.addCallback(sort_claim_results) d.addCallback(lambda claims: self._render_response(claims)) return d @@ -2338,7 +2338,7 @@ class Daemon(AuthJSONRPCServer): """ claims = yield self.session.wallet.get_claims_for_name(name) - result = arrange_results(claims) + result = sort_claim_results(claims) defer.returnValue(result) @defer.inlineCallbacks diff --git a/lbrynet/tests/unit/daemon/test_claims_comparator.py b/lbrynet/tests/unit/daemon/test_claims_comparator.py index 416a394d5..772ecb5e5 100644 --- a/lbrynet/tests/unit/daemon/test_claims_comparator.py +++ b/lbrynet/tests/unit/daemon/test_claims_comparator.py @@ -1,213 +1,45 @@ import unittest -from lbrynet.daemon.Daemon import arrange_results +from lbrynet.daemon.Daemon import sort_claim_results class ClaimsComparatorTest(unittest.TestCase): - def test_arrange_results_when_sorted_by_claim_id(self): - self.run_test( - [ - { - "height": 1, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "fdsafa" - }, - { - "height": 1, - "name": "res", - "claim_id": "aaa", - "nout": 0, - "txid": "w5tv8uorgt" - }, - { - "height": 1, - "name": "res", - "claim_id": "bbb", - "nout": 0, - "txid": "aecfaewcfa" - } - ], - [ - { - "height": 1, - "name": "res", - "claim_id": "aaa", - "nout": 0, - "txid": "w5tv8uorgt" - }, - { - "height": 1, - "name": "res", - "claim_id": "bbb", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "fdsafa" - } - ]) + def test_sort_claim_results_when_sorted_by_claim_id(self): + results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "fdsafa"}, + {"height": 1, "name": "res", "claim_id": "aaa", "nout": 0, "txid": "w5tv8uorgt"}, + {"height": 1, "name": "res", "claim_id": "bbb", "nout": 0, "txid": "aecfaewcfa"}] + self.run_test(results, 'claim_id', ['aaa', 'bbb', 'ccc']) - def test_arrange_results_when_sorted_by_height(self): - self.run_test( - [ - { - "height": 1, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 3, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 2, - "name": "res", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - } - ], - [ - { - 
"claim_id": "ccc", - "height": 1, - "name": "res", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "claim_id": "ccc", - "height": 2, - "name": "res", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "claim_id": "ccc", - "height": 3, - "name": "res", - "nout": 0, - "txid": "aecfaewcfa" - } - ]) + def test_sort_claim_results_when_sorted_by_height(self): + results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}, + {"height": 3, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}, + {"height": 2, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}] + self.run_test(results, 'height', [1, 2, 3]) - def test_arrange_results_when_sorted_by_name(self): - self.run_test( - [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res3", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res2", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - } - ], - [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res2", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res3", - "claim_id": "ccc", - "nout": 0, - "txid": "aecfaewcfa" - } - ]) + def test_sort_claim_results_when_sorted_by_name(self): + results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}, + {"height": 1, "name": "res3", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}, + {"height": 1, "name": "res2", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}] + self.run_test(results, 'name', ['res1', 'res2', 'res3']) - def test_arrange_results_when_sort_by_outpoint(self): - self.run_test( - [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 2, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 1, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 3, - "txid": "aecfaewcfa" - } - ], - [ - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 1, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 2, - "txid": "aecfaewcfa" - }, - { - "height": 1, - "name": "res1", - "claim_id": "ccc", - "nout": 3, - "txid": "aecfaewcfa" - } - ]) + def test_sort_claim_results_when_sorted_by_txid(self): + results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "111"}, + {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "222"}, + {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "333"}] + self.run_test(results, 'txid', ['111', '222', '333']) - def run_test(self, results, expected): + def test_sort_claim_results_when_sorted_by_nout(self): + results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "aecfaewcfa"}, + {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "aecfaewcfa"}, + {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "aecfaewcfa"}] + self.run_test(results, 'nout', [1, 2, 3]) + + def run_test(self, results, field, expected): data = {'result': results} - - claims = arrange_results([data]) + claims = sort_claim_results([data]) claim = claims[0] actual = claim['result'] - - self.assertEqual(expected, actual) + self.assertEqual(expected, [r[field] for r in actual]) if __name__ == '__main__': From 6d4b678b96701525efce55bb3f1ae48301e0b186 Mon Sep 17 
00:00:00 2001
From: Jack Robison
Date: Tue, 22 May 2018 17:15:34 -0400
Subject: [PATCH 40/55] Revert "WIP: feature/1098/sorted-claim-results"

---
 CHANGELOG.md                                  |  1 -
 lbrynet/daemon/Daemon.py                      | 10 +---
 lbrynet/tests/unit/daemon/__init__.py         |  0
 .../unit/daemon/test_claims_comparator.py     | 46 -------------------
 4 files changed, 1 insertion(+), 56 deletions(-)
 delete mode 100644 lbrynet/tests/unit/daemon/__init__.py
 delete mode 100644 lbrynet/tests/unit/daemon/test_claims_comparator.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b95a4f1eb..b94d5fd44 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -52,7 +52,6 @@ at anytime.
   * download blockchain headers from s3 before starting the wallet when the local height is more than `s3_headers_depth` (a config setting) blocks behind
   * track successful reflector uploads in sqlite to minimize how many streams are attempted by auto re-reflect
   * increase the default `auto_re_reflect_interval` to a day
-  * `claim_list` and `claim_list_mine` in Daemon `return` sorted results
 
 ### Added
   * virtual kademlia network and mock udp transport for dht integration tests

diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py
index ac497c37c..43c67e0d7 100644
--- a/lbrynet/daemon/Daemon.py
+++ b/lbrynet/daemon/Daemon.py
@@ -163,12 +163,6 @@ class AlwaysSend(object):
         return d
 
 
-def sort_claim_results(claims):
-    for claim in claims:
-        claim['result'].sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))
-    return claims
-
-
 class Daemon(AuthJSONRPCServer):
     """
     LBRYnet daemon, a jsonrpc interface to lbry functions
@@ -2299,7 +2293,6 @@ class Daemon(AuthJSONRPCServer):
         """
 
         d = self.session.wallet.get_name_claims()
-        d.addCallback(sort_claim_results)
         d.addCallback(lambda claims: self._render_response(claims))
         return d
 
@@ -2338,8 +2331,7 @@ class Daemon(AuthJSONRPCServer):
         """
 
         claims = yield self.session.wallet.get_claims_for_name(name)
-        result = sort_claim_results(claims)
-        defer.returnValue(result)
+        defer.returnValue(claims)
 
     @defer.inlineCallbacks
     def jsonrpc_claim_list_by_channel(self, page=0, page_size=10, uri=None, uris=[]):

diff --git a/lbrynet/tests/unit/daemon/__init__.py b/lbrynet/tests/unit/daemon/__init__.py
deleted file mode 100644
index e69de29bb..000000000

diff --git a/lbrynet/tests/unit/daemon/test_claims_comparator.py b/lbrynet/tests/unit/daemon/test_claims_comparator.py
deleted file mode 100644
index 772ecb5e5..000000000
--- a/lbrynet/tests/unit/daemon/test_claims_comparator.py
+++ /dev/null
@@ -1,46 +0,0 @@
-import unittest
-
-from lbrynet.daemon.Daemon import sort_claim_results
-
-
-class ClaimsComparatorTest(unittest.TestCase):
-    def test_sort_claim_results_when_sorted_by_claim_id(self):
-        results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "fdsafa"},
-                   {"height": 1, "name": "res", "claim_id": "aaa", "nout": 0, "txid": "w5tv8uorgt"},
-                   {"height": 1, "name": "res", "claim_id": "bbb", "nout": 0, "txid": "aecfaewcfa"}]
-        self.run_test(results, 'claim_id', ['aaa', 'bbb', 'ccc'])
-
-    def test_sort_claim_results_when_sorted_by_height(self):
-        results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
-                   {"height": 3, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
-                   {"height": 2, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}]
-        self.run_test(results, 'height', [1, 2, 3])
-
-    def test_sort_claim_results_when_sorted_by_name(self):
-        results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
-                   {"height": 1, "name": "res3", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
-                   {"height": 1, "name": "res2", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}]
-        self.run_test(results, 'name', ['res1', 'res2', 'res3'])
-
-    def test_sort_claim_results_when_sorted_by_txid(self):
-        results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "111"},
-                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "222"},
-                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "333"}]
-        self.run_test(results, 'txid', ['111', '222', '333'])
-
-    def test_sort_claim_results_when_sorted_by_nout(self):
-        results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "aecfaewcfa"},
-                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "aecfaewcfa"},
-                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "aecfaewcfa"}]
-        self.run_test(results, 'nout', [1, 2, 3])
-
-    def run_test(self, results, field, expected):
-        data = {'result': results}
-        claims = sort_claim_results([data])
-        claim = claims[0]
-        actual = claim['result']
-        self.assertEqual(expected, [r[field] for r in actual])
-
-
-if __name__ == '__main__':
-    unittest.main()

From 187f920961502da516ffab4aa887e763ad251e11 Mon Sep 17 00:00:00 2001
From: Jack Robison
Date: Mon, 28 May 2018 10:57:48 -0400
Subject: [PATCH 41/55] disable Daemon headers check for now

---
 lbrynet/daemon/auth/server.py | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py
index 93c9f504c..4aa46ac1f 100644
--- a/lbrynet/daemon/auth/server.py
+++ b/lbrynet/daemon/auth/server.py
@@ -231,9 +231,9 @@ class AuthJSONRPCServer(AuthorizedBase):
 
     def _render(self, request):
         time_in = utils.now()
-        if not self._check_headers(request):
-            self._render_error(Failure(InvalidHeaderError()), request, None)
-            return server.NOT_DONE_YET
+        # if not self._check_headers(request):
+        #     self._render_error(Failure(InvalidHeaderError()), request, None)
+        #     return server.NOT_DONE_YET
         session = request.getSession()
         session_id = session.uid
         finished_deferred = request.notifyFinish()

From 1b027fae8775027ca93cf458b19609f5c416eb06 Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Mon, 28 May 2018 18:07:23 -0400
Subject: [PATCH 42/55] add some extra linux meta data to analytics

---
 lbrynet/core/system_info.py | 4 ++++
 setup.py                    | 1 +
 2 files changed, 5 insertions(+)

diff --git a/lbrynet/core/system_info.py b/lbrynet/core/system_info.py
index 95cd74bc9..abdca9145 100644
--- a/lbrynet/core/system_info.py
+++ b/lbrynet/core/system_info.py
@@ -2,6 +2,7 @@ import platform
 import json
 import subprocess
 import os
+import distro
 from urllib2 import urlopen, URLError
 
 from lbryschema import __version__ as lbryschema_version
@@ -36,6 +37,9 @@ def get_platform(get_ip=True):
         "lbryschema_version": lbryschema_version,
         "build": build_type.BUILD,  # CI server sets this during build step
     }
+    if p["os_system"] == "Linux":
+        p["distro"] = distro.info()
+        p["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown')
 
     # TODO: remove this from get_platform and add a get_external_ip function using treq
     if get_ip:

diff --git a/setup.py b/setup.py
index d4e4c0f43..7076b14b5 100644
--- a/setup.py
+++ b/setup.py
@@ -17,6 +17,7 @@ from setuptools import setup, find_packages
 requires = [
     'Twisted',
     'appdirs',
+    'distro',
     'base58',
     'envparse',
     'jsonrpc',

From 89729860cb287dc943d596d4eaf4f90ac74e09a0 Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Mon, 28 May 2018 18:19:33 -0400
Subject: [PATCH 43/55] added analytics change to CHANGELOG

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b94d5fd44..a1fd594db 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -58,6 +58,7 @@ at anytime.
   * integration tests for bootstrapping the dht
   * configurable `concurrent_announcers` and `s3_headers_depth` settings
   * `peer_ping` command
+  * linux distro and desktop name added to analytics
 
 ### Removed
   * `announce_all` argument from `blob_announce`

From d950a62200338cf1791d8717a0180862deeef319 Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Mon, 28 May 2018 18:47:56 -0400
Subject: [PATCH 44/55] include the new linux analytics in track events

---
 lbrynet/analytics.py | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/lbrynet/analytics.py b/lbrynet/analytics.py
index 6cd1f1e69..c759e242c 100644
--- a/lbrynet/analytics.py
+++ b/lbrynet/analytics.py
@@ -185,7 +185,7 @@ class Manager(object):
 
     @staticmethod
     def _make_context(platform, wallet):
-        return {
+        context= {
             'app': {
                 'name': 'lbrynet',
                 'version': platform['lbrynet_version'],
@@ -206,6 +206,10 @@ class Manager(object):
                 'version': '1.0.0'
             },
         }
+        if 'desktop' in platform and 'distro' in platform:
+            context['os']['desktop'] = platform['desktop']
+            context['os']['distro'] = platform['distro']
+        return context
 
     @staticmethod
     def _if_deferred(maybe_deferred, callback, *args, **kwargs):

From 09e272a642ceaa986a1230eadf2acf0e4e73aeef Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Tue, 29 May 2018 12:45:42 -0400
Subject: [PATCH 45/55] style fix

---
 lbrynet/analytics.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lbrynet/analytics.py b/lbrynet/analytics.py
index c759e242c..cec87199c 100644
--- a/lbrynet/analytics.py
+++ b/lbrynet/analytics.py
@@ -185,7 +185,7 @@ class Manager(object):
 
     @staticmethod
     def _make_context(platform, wallet):
-        context= {
+        context = {
             'app': {
                 'name': 'lbrynet',
                 'version': platform['lbrynet_version'],

From 11b882879a449f23e29b619c6e3e50a1d1c4cbc3 Mon Sep 17 00:00:00 2001
From: Sergey Rozhnov
Date: Fri, 25 May 2018 11:09:13 +0400
Subject: [PATCH 46/55] predictable result sorting for `claim_list` and
 `claim_list_mine`

---
 CHANGELOG.md                                  |  1 +
 lbrynet/daemon/Daemon.py                      |  8 +++-
 .../unit/daemon/test_claims_comparator.py     | 43 +++++++++++++++++++
 3 files changed, 51 insertions(+), 1 deletion(-)
 create mode 100644 lbrynet/tests/unit/daemon/test_claims_comparator.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
index b94d5fd44..e8590e6ee 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -52,6 +52,7 @@ at anytime.
   * download blockchain headers from s3 before starting the wallet when the local height is more than `s3_headers_depth` (a config setting) blocks behind
   * track successful reflector uploads in sqlite to minimize how many streams are attempted by auto re-reflect
   * increase the default `auto_re_reflect_interval` to a day
+  * predictable result sorting for `claim_list` and `claim_list_mine`
 
 ### Added
   * virtual kademlia network and mock udp transport for dht integration tests

diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py
index 43c67e0d7..77f4790d2 100644
--- a/lbrynet/daemon/Daemon.py
+++ b/lbrynet/daemon/Daemon.py
@@ -163,6 +163,11 @@ class AlwaysSend(object):
         return d
 
 
+def sort_claim_results(claims):
+    claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))
+    return claims
+
+
 class Daemon(AuthJSONRPCServer):
     """
     LBRYnet daemon, a jsonrpc interface to lbry functions
@@ -2330,7 +2335,8 @@ class Daemon(AuthJSONRPCServer):
         }
         """
 
-        claims = yield self.session.wallet.get_claims_for_name(name)
+        claims = yield self.session.wallet.get_claims_for_name(name)  # type: dict
+        sort_claim_results(claims['claims'])
         defer.returnValue(claims)
 
     @defer.inlineCallbacks

diff --git a/lbrynet/tests/unit/daemon/test_claims_comparator.py b/lbrynet/tests/unit/daemon/test_claims_comparator.py
new file mode 100644
index 000000000..4a4333aba
--- /dev/null
+++ b/lbrynet/tests/unit/daemon/test_claims_comparator.py
@@ -0,0 +1,43 @@
+import unittest
+
+from lbrynet.daemon.Daemon import sort_claim_results
+
+
+class ClaimsComparatorTest(unittest.TestCase):
+    def test_sort_claim_results_when_sorted_by_claim_id(self):
+        results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "fdsafa"},
+                   {"height": 1, "name": "res", "claim_id": "aaa", "nout": 0, "txid": "w5tv8uorgt"},
+                   {"height": 1, "name": "res", "claim_id": "bbb", "nout": 0, "txid": "aecfaewcfa"}]
+        self.run_test(results, 'claim_id', ['aaa', 'bbb', 'ccc'])
+
+    def test_sort_claim_results_when_sorted_by_height(self):
+        results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
+                   {"height": 3, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
+                   {"height": 2, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}]
+        self.run_test(results, 'height', [1, 2, 3])
+
+    def test_sort_claim_results_when_sorted_by_name(self):
+        results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
+                   {"height": 1, "name": "res3", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
+                   {"height": 1, "name": "res2", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}]
+        self.run_test(results, 'name', ['res1', 'res2', 'res3'])
+
+    def test_sort_claim_results_when_sorted_by_txid(self):
+        results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "111"},
+                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "222"},
+                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "333"}]
+        self.run_test(results, 'txid', ['111', '222', '333'])
+
+    def test_sort_claim_results_when_sorted_by_nout(self):
+        results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "aecfaewcfa"},
+                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "aecfaewcfa"},
+                   {"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "aecfaewcfa"}]
+        self.run_test(results, 'nout', [1, 2, 3])
+
+    def run_test(self, results, field, expected):
+        actual = sort_claim_results(results)
+        self.assertEqual(expected, [r[field] for r in actual])
+
+
+if __name__ == '__main__':
+    unittest.main()

From 9b1e687b9bacbbfd3b46589a436deaeda9df95bf Mon Sep 17 00:00:00 2001
From: Jack Robison
Date: Tue, 29 May 2018 14:36:00 -0400
Subject: [PATCH 47/55] pylint

---
 lbrynet/daemon/auth/server.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lbrynet/daemon/auth/server.py b/lbrynet/daemon/auth/server.py
index 4aa46ac1f..a0d365a35 100644
--- a/lbrynet/daemon/auth/server.py
+++ b/lbrynet/daemon/auth/server.py
@@ -13,7 +13,7 @@ from txjsonrpc import jsonrpclib
 from traceback import format_exc
 
 from lbrynet import conf
-from lbrynet.core.Error import InvalidAuthenticationToken, InvalidHeaderError
+from lbrynet.core.Error import InvalidAuthenticationToken
 from lbrynet.core import utils
 from lbrynet.daemon.auth.util import APIKey, get_auth_message
 from lbrynet.daemon.auth.client import LBRY_SECRET

From 6a344539a0942aadf127612e2b11540a3218d564 Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Tue, 29 May 2018 14:49:06 -0400
Subject: [PATCH 48/55] moved import distro into conditional statement that
 only runs on Linux

---
 lbrynet/core/system_info.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lbrynet/core/system_info.py b/lbrynet/core/system_info.py
index abdca9145..3e81e8011 100644
--- a/lbrynet/core/system_info.py
+++ b/lbrynet/core/system_info.py
@@ -2,7 +2,6 @@ import platform
 import json
 import subprocess
 import os
-import distro
 from urllib2 import urlopen, URLError
 
 from lbryschema import __version__ as lbryschema_version
@@ -38,6 +37,7 @@ def get_platform(get_ip=True):
         "build": build_type.BUILD,  # CI server sets this during build step
     }
     if p["os_system"] == "Linux":
+        import distro
         p["distro"] = distro.info()
         p["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown')
 

From 02bcc98a6ad5adb66ade29a71e6fb6e7994a70fc Mon Sep 17 00:00:00 2001
From: Jack Robison
Date: Tue, 29 May 2018 15:56:58 -0400
Subject: [PATCH 49/55] Bump version 0.20.0rc9 --> 0.20.0rc10

Signed-off-by: Jack Robison

---
 lbrynet/__init__.py | 2 +-
 requirements.txt    | 4 ++--
 setup.py            | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/lbrynet/__init__.py b/lbrynet/__init__.py
index 5dbc21c5d..63cd39148 100644
--- a/lbrynet/__init__.py
+++ b/lbrynet/__init__.py
@@ -1,6 +1,6 @@
 import logging
 
-__version__ = "0.20.0rc9"
+__version__ = "0.20.0rc10"
 version = tuple(__version__.split('.'))
 
 logging.getLogger(__name__).addHandler(logging.NullHandler())

diff --git a/requirements.txt b/requirements.txt
index 37725800c..7ac7daacf 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -12,8 +12,8 @@ GitPython==2.1.3
 jsonrpc==1.2
 jsonrpclib==0.1.7
 keyring==10.4.0
-git+https://github.com/lbryio/lbryschema.git@v0.0.15#egg=lbryschema
-git+https://github.com/lbryio/lbryum.git@v3.2.1#egg=lbryum
+git+https://github.com/lbryio/lbryschema.git@v0.0.16rc2#egg=lbryschema
+git+https://github.com/lbryio/lbryum.git@v3.2.2rc1#egg=lbryum
 miniupnpc==1.9
 pbkdf2==1.3
 pyyaml==3.12

diff --git a/setup.py b/setup.py
index 7076b14b5..08bea2635 100644
--- a/setup.py
+++ b/setup.py
@@ -21,8 +21,8 @@ requires = [
     'base58',
     'envparse',
     'jsonrpc',
-    'lbryschema==0.0.15',
-    'lbryum==3.2.1',
+    'lbryschema==0.0.16rc2',
+    'lbryum==3.2.2rc1',
     'miniupnpc',
     'pyyaml',
     'requests',

From 513f5d0568e48ffbc9d09dfd0fff364f9289893d Mon Sep 17 00:00:00 2001
From: Akinwale Ariwodola
Date: Thu, 24 May 2018 11:32:58 +0100
Subject: [PATCH 50/55] added certifi package for Twisted SSL verification on
 Windows

---
 lbrynet/daemon/Daemon.py | 6 ++++++
 requirements.txt         | 1 +
 2 files changed, 7 insertions(+)

diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py
index 0304e458a..b553b62bf 100644
--- a/lbrynet/daemon/Daemon.py
+++ b/lbrynet/daemon/Daemon.py
@@ -1,7 +1,9 @@
 import binascii
+import certifi
 import logging.handlers
 import mimetypes
 import os
+import sys
 import base58
 import requests
 import urllib
@@ -231,6 +233,10 @@ class Daemon(AuthJSONRPCServer):
 
     @defer.inlineCallbacks
     def setup(self):
+        # Set SSL_CERT_FILE env variable for Twisted SSL verification on Windows
+        if 'win' in sys.platform:
+            os.environ['SSL_CERT_FILE'] = certifi.where()
+
         reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
 
         configure_loggly_handler()

diff --git a/requirements.txt b/requirements.txt
index 7ac7daacf..cad0bf752 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,3 +1,4 @@
+certifi==2018.4.16
 Twisted==16.6.0
 cryptography==2.2.2
 appdirs==1.4.3

From 03968f377ec9d491163990c88fff57b8e78c9ab1 Mon Sep 17 00:00:00 2001
From: Victor Shyba
Date: Thu, 24 May 2018 17:12:47 -0300
Subject: [PATCH 51/55] change fix position to before anything else starts

---
 lbrynet/daemon/Daemon.py        | 5 -----
 lbrynet/daemon/DaemonControl.py | 6 ++++++
 2 files changed, 6 insertions(+), 5 deletions(-)

diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py
index b553b62bf..a9c44996d 100644
--- a/lbrynet/daemon/Daemon.py
+++ b/lbrynet/daemon/Daemon.py
@@ -1,5 +1,4 @@
 import binascii
-import certifi
 import logging.handlers
 import mimetypes
 import os
@@ -233,10 +232,6 @@ class Daemon(AuthJSONRPCServer):
 
     @defer.inlineCallbacks
     def setup(self):
-        # Set SSL_CERT_FILE env variable for Twisted SSL verification on Windows
-        if 'win' in sys.platform:
-            os.environ['SSL_CERT_FILE'] = certifi.where()
-
         reactor.addSystemEventTrigger('before', 'shutdown', self._shutdown)
 
         configure_loggly_handler()

diff --git a/lbrynet/daemon/DaemonControl.py b/lbrynet/daemon/DaemonControl.py
index 23cd04450..2b8f40260 100644
--- a/lbrynet/daemon/DaemonControl.py
+++ b/lbrynet/daemon/DaemonControl.py
@@ -1,3 +1,9 @@
+import certifi
+# Set SSL_CERT_FILE env variable for Twisted SSL verification on Windows
+# This needs to happen before anything else
+if 'win' in sys.platform:
+    os.environ['SSL_CERT_FILE'] = certifi.where()
+
 from lbrynet.core import log_support
 
 import argparse

From 97547e904ead2af4fa100a7e071a535a91a06301 Mon Sep 17 00:00:00 2001
From: Akinwale Ariwodola
Date: Thu, 24 May 2018 21:28:09 +0100
Subject: [PATCH 52/55] add os and sys imports

---
 lbrynet/daemon/DaemonControl.py | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/lbrynet/daemon/DaemonControl.py b/lbrynet/daemon/DaemonControl.py
index 2b8f40260..42d5cae76 100644
--- a/lbrynet/daemon/DaemonControl.py
+++ b/lbrynet/daemon/DaemonControl.py
@@ -1,4 +1,7 @@
 import certifi
+import os
+import sys
+
 # Set SSL_CERT_FILE env variable for Twisted SSL verification on Windows
 # This needs to happen before anything else
 if 'win' in sys.platform:

From c4550ef5b662a3004fbd46eadf7de2f97b11bfd9 Mon Sep 17 00:00:00 2001
From: Akinwale Ariwodola
Date: Thu, 24 May 2018 22:19:42 +0100
Subject: [PATCH 53/55] remove unused import in lbrynet.daemon.Daemon

---
 lbrynet/daemon/Daemon.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/lbrynet/daemon/Daemon.py b/lbrynet/daemon/Daemon.py
index a9c44996d..0304e458a 100644
--- a/lbrynet/daemon/Daemon.py
+++ b/lbrynet/daemon/Daemon.py
@@ -2,7 +2,6 @@ import binascii
 import logging.handlers
 import mimetypes
 import os
-import sys
 import base58
 import requests
 import urllib

From 0d35621a9a730341021f8869bfe938f821eaab9b Mon Sep 17 00:00:00 2001
From: Akinwale Ariwodola
Date: Thu, 24 May 2018 22:55:02 +0100
Subject: [PATCH 54/55] updated changelog

---
 CHANGELOG.md | 1 +
 1 file changed, 1 insertion(+)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index bcc8008b0..200c87d6a 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -62,6 +62,7 @@ at anytime.
   * configurable `concurrent_announcers` and `s3_headers_depth` settings
   * `peer_ping` command
   * linux distro and desktop name added to analytics
+  * certifi module for Twisted SSL verification on Windows
 
 ### Removed
   * `announce_all` argument from `blob_announce`

From 3d17c92bd3028888e7786589f085a937e699ba91 Mon Sep 17 00:00:00 2001
From: Lex Berezhny
Date: Tue, 29 May 2018 22:36:25 -0400
Subject: [PATCH 55/55] moved import certifi to run only on windows

---
 lbrynet/daemon/DaemonControl.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/lbrynet/daemon/DaemonControl.py b/lbrynet/daemon/DaemonControl.py
index 42d5cae76..8d73c9ce0 100644
--- a/lbrynet/daemon/DaemonControl.py
+++ b/lbrynet/daemon/DaemonControl.py
@@ -1,10 +1,10 @@
-import certifi
 import os
 import sys
 
 # Set SSL_CERT_FILE env variable for Twisted SSL verification on Windows
 # This needs to happen before anything else
 if 'win' in sys.platform:
+    import certifi
     os.environ['SSL_CERT_FILE'] = certifi.where()
 
 from lbrynet.core import log_support
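
A note on the database migration near the top of this series: it pairs a read-only validation pass (rebuild each stream descriptor from sqlite rows, run validate_descriptor on it) with a cascading delete that removes the stream's rows and its blob files. The following is a compressed, runnable sketch of that shape against an in-memory database; the schema is reduced to two tables and is_valid stands in for validate_descriptor, so treat all names here as hypothetical rather than the migration's real schema.

import sqlite3

def is_valid(blob_rows):
    # stand-in for validate_descriptor(): treat a stream with a NULL blob_hash as broken
    return all(blob_hash is not None for _, blob_hash in blob_rows)

connection = sqlite3.connect(":memory:")
cursor = connection.cursor()
cursor.execute("create table stream (stream_hash text primary key)")
cursor.execute("create table stream_blob (stream_hash text, position integer, blob_hash text)")
cursor.executemany("insert into stream values (?)", [("good",), ("bad",)])
cursor.executemany("insert into stream_blob values (?, ?, ?)",
                   [("good", 0, "b0"), ("bad", 0, None)])

# group blob rows per stream, the same setdefault() pattern do_migration uses
blobs_by_stream = {}
for stream_hash, position, blob_hash in cursor.execute(
        "select stream_hash, position, blob_hash from stream_blob order by position").fetchall():
    blobs_by_stream.setdefault(stream_hash, []).append((position, blob_hash))

# validate each stream and cascade-delete the ones that fail, then commit once
for stream_hash, blob_rows in blobs_by_stream.items():
    if not is_valid(blob_rows):
        cursor.execute("delete from stream_blob where stream_hash=?", (stream_hash,))
        cursor.execute("delete from stream where stream_hash=?", (stream_hash,))
connection.commit()

assert [r[0] for r in cursor.execute("select stream_hash from stream")] == [("good")]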
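
A note on the claim-sorting thread (patches 38, 39, and 46): the ordering they converge on sorts in place by the composite key (height, name, claim_id, txid, nout), so ties on one field fall through to the next. The function below is the one patch 46 ships, verbatim; the sample records are hypothetical, chosen so the first two rows tie on everything except nout.

def sort_claim_results(claims):
    claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))
    return claims

# hypothetical records: identical except for height and nout
claims = [
    {'height': 2, 'name': 'a', 'claim_id': 'c1', 'txid': 't1', 'nout': 0},
    {'height': 1, 'name': 'a', 'claim_id': 'c1', 'txid': 't1', 'nout': 1},
    {'height': 1, 'name': 'a', 'claim_id': 'c1', 'txid': 't1', 'nout': 0},
]
# height sorts first; the two height-1 rows are then ordered by nout
assert [(c['height'], c['nout']) for c in sort_claim_results(claims)] == [(1, 0), (1, 1), (2, 0)]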
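
A note on the certifi thread (patches 50 through 55): the shape DaemonControl.py ends up with sets SSL_CERT_FILE before importing anything that might build a Twisted SSL context, and keeps the platform-specific dependency import inside the branch that uses it, the same treatment patch 48 gives distro. A standalone sketch of that ordering follows. One caveat worth flagging: the shipped guard 'win' in sys.platform is also true on macOS, where sys.platform is 'darwin'; the stricter sys.platform.startswith('win') used below is an editorial suggestion, not what the series ships.

import os
import sys

# platform-gated dependency import, as in patches 48 and 55
if sys.platform.startswith('win'):  # assumption: stricter than "'win' in sys.platform", which matches 'darwin' too
    import certifi
    os.environ['SSL_CERT_FILE'] = certifi.where()

# only now import modules that may open HTTPS connections,
# e.g. from lbrynet.core import log_support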