Merge branch 'master' into feature/file_list_sorting

Commit 23df1d0316 by Lex Berezhny, 2018-05-29 22:47:09 -04:00 (committed by GitHub)
38 changed files with 595 additions and 451 deletions


@@ -25,6 +25,13 @@ at anytime.
*
### Changed
* check headers file integrity on startup, removing/truncating the file to force re-download when necessary
* support partial headers file download from S3
* replaced txrequests with treq
* changed cryptography version to 2.2.2
* removed the pycrypto dependency, replacing all of its uses with cryptography
* full verification of streams only during migration instead of every startup
* database batching functions for starting up the file manager
* several internal dht functions to use inlineCallbacks
* `DHTHashAnnouncer` and `Node` manage functions to use `LoopingCall`s instead of scheduling with `callLater`.
* `store` kademlia rpc method to block on the call finishing and to return storing peer information
@@ -44,13 +51,19 @@ at anytime.
* regenerate api keys on startup if using authentication
* support both positional and keyword args for api calls
* `peer_list` to return a list of dictionaries instead of a list of lists, added peer node ids to the results
* download blockchain headers from s3 before starting the wallet when the local height is more than `s3_headers_depth` (a config setting) blocks behind
* track successful reflector uploads in sqlite to minimize how many streams are attempted by auto re-reflect
* increase the default `auto_re_reflect_interval` to a day
* predictable result sorting for `claim_list` and `claim_list_mine`
### Added
* virtual kademlia network and mock udp transport for dht integration tests
* integration tests for bootstrapping the dht
-* configurable `concurrent_announcers` setting
+* configurable `concurrent_announcers` and `s3_headers_depth` settings
* `peer_ping` command
* `--sort` option in `file_list`
* linux distro and desktop name added to analytics
* certifi module for Twisted SSL verification on Windows
### Removed
* `announce_all` argument from `blob_announce`
@@ -58,6 +71,10 @@ at anytime.
* `AuthJSONRPCServer.auth_required` decorator
* unused `--wallet` argument to `lbrynet-daemon`, which was used to support `PTCWallet`.
## [0.19.3] - 2018-05-04
### Changed
* download blockchain headers from s3 before starting the wallet when the local height is more than s3_headers_depth (a config setting) blocks behind (https://github.com/lbryio/lbry/pull/1177)
* un-deprecated report_bug command (https://github.com/lbryio/lbry/commit/f8e418fb4448a3ed1531657f8b3c608fb568af85)
## [0.19.2] - 2018-03-28
### Fixed


@@ -35,7 +35,7 @@ LBRY is a protocol that provides a fully decentralized network for the discovery
It utilizes the [LBRY blockchain](https://github.com/lbryio/lbrycrd) as a global namespace and database of digital content. Blockchain entries contain searchable content metadata, identities, and rights and access rules.
-LBRY also provides a data network consists of peers uploading and downloading data from other peers, possibly in exchange for payments, and a distributed hash table, used by peers to discover other peers.
+LBRY also provides a data network that consists of peers uploading and downloading data from other peers, possibly in exchange for payments, and a distributed hash table, used by peers to discover other peers.
## Contributions


@@ -1,6 +1,6 @@
import logging
-__version__ = "0.20.0rc9"
+__version__ = "0.20.0rc10"
version = tuple(__version__.split('.'))
logging.getLogger(__name__).addHandler(logging.NullHandler())


@@ -1,8 +1,8 @@
import collections
import logging
import treq
from twisted.internet import defer, task
from requests import auth
from txrequests import Session
from lbrynet import conf
from lbrynet.core import looping_call_manager, utils, system_info
@@ -185,7 +185,7 @@ class Manager(object):
@staticmethod
def _make_context(platform, wallet):
-return {
+context = {
'app': {
'name': 'lbrynet',
'version': platform['lbrynet_version'],
@@ -206,6 +206,10 @@ class Manager(object):
'version': '1.0.0'
},
}
if 'desktop' in platform and 'distro' in platform:
context['os']['desktop'] = platform['desktop']
context['os']['distro'] = platform['distro']
return context
@staticmethod
def _if_deferred(maybe_deferred, callback, *args, **kwargs):
@@ -216,8 +220,8 @@ class Manager(object):
class Api(object):
-def __init__(self, session, url, write_key, enabled):
+def __init__(self, cookies, url, write_key, enabled):
-self.session = session
+self.cookies = cookies
self.url = url
self._write_key = write_key
self._enabled = enabled
@@ -232,14 +236,17 @@ class Api(object):
# timeout will have expired.
#
# by forcing the connection to close, we will disable the keep-alive.
def update_cookies(response):
self.cookies.update(response.cookies())
return response
assert endpoint[0] == '/'
-headers = {"Connection": "close"}
+headers = {b"Connection": b"close"}
-return self.session.post(
-self.url + endpoint,
-json=data,
-auth=auth.HTTPBasicAuth(self._write_key, ''),
-headers=headers
-)
+d = treq.post(self.url + endpoint, auth=(self._write_key, ''), json=data,
+headers=headers, cookies=self.cookies)
+d.addCallback(update_cookies)
+return d
def track(self, event):
"""Send a single tracking event"""
@@ -257,11 +264,10 @@ class Api(object):
@classmethod
def new_instance(cls, enabled=None):
"""Initialize an instance using values from the configuration"""
session = Session()
if enabled is None:
enabled = conf.settings['share_usage_data']
return cls(
-session,
+{},
conf.settings['ANALYTICS_ENDPOINT'],
utils.deobfuscate(conf.settings['ANALYTICS_TOKEN']),
enabled,

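For context on the analytics change above: the session-based txrequests client is replaced by treq, with segment cookies kept in a plain dict that is refreshed from each response. A minimal sketch of that pattern, outside the lbrynet codebase (the function name `post_event` and its arguments are illustrative, not from the diff):

import treq
from twisted.internet import defer


@defer.inlineCallbacks
def post_event(url, write_key, data, cookies):
    # force the connection closed so a stale keep-alive socket is never reused
    headers = {b"Connection": b"close"}
    response = yield treq.post(url, auth=(write_key, ''), json=data,
                               headers=headers, cookies=cookies)
    # carry the analytics session cookies forward, mirroring update_cookies() above
    cookies.update(response.cookies())
    defer.returnValue(response)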

@@ -236,6 +236,7 @@ FIXED_SETTINGS = {
'SLACK_WEBHOOK': ('nUE0pUZ6Yl9bo29epl5moTSwnl5wo20ip2IlqzywMKZiIQSFZR5'
'AHx4mY0VmF0WQZ1ESEP9kMHZlp1WzJwWOoKN3ImR1M2yUAaMyqGZ='),
'WALLET_TYPES': [LBRYUM_WALLET, LBRYCRD_WALLET],
'HEADERS_FILE_SHA256_CHECKSUM': (366295, 'b0c8197153a33ccbc52fb81a279588b6015b68b7726f73f6a2b81f7e25bfe4b9')
}
ADJUSTABLE_SETTINGS = {
@@ -280,10 +281,10 @@ ADJUSTABLE_SETTINGS = {
'peer_port': (int, 3333),
'pointtrader_server': (str, 'http://127.0.0.1:2424'),
'reflector_port': (int, 5566),
-# if reflect_uploads is True, send files to reflector (after publishing as well as a
-# periodic check in the event the initial upload failed or was disconnected part way through
+# if reflect_uploads is True, send files to reflector after publishing (as well as a periodic check in the
+# event the initial upload failed or was disconnected part way through, provided the auto_re_reflect_interval > 0)
'reflect_uploads': (bool, True),
-'auto_re_reflect_interval': (int, 3600),
+'auto_re_reflect_interval': (int, 86400), # set to 0 to disable
'reflector_servers': (list, [('reflector2.lbry.io', 5566)], server_list),
'run_reflector_server': (bool, False),
'sd_download_timeout': (int, 3),
@@ -294,7 +295,8 @@ ADJUSTABLE_SETTINGS = {
'use_keyring': (bool, False),
'wallet': (str, LBRYUM_WALLET),
'blockchain_name': (str, 'lbrycrd_main'),
-'lbryum_servers': (list, [('lbryum8.lbry.io', 50001), ('lbryum9.lbry.io', 50001)], server_list)
+'lbryum_servers': (list, [('lbryum8.lbry.io', 50001), ('lbryum9.lbry.io', 50001)], server_list),
's3_headers_depth': (int, 96 * 10) # download headers from s3 when the local height is more than 10 chunks behind
}

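`ADJUSTABLE_SETTINGS` entries are `(type, default[, validator])` tuples, and the rest of this commit reads the two new knobs back through `conf.settings[...]` (`s3_headers_depth` in the wallet, `auto_re_reflect_interval` in the reflector query). A hedged sketch of how they are consumed, assuming settings have been initialized; the helper names are illustrative only:

from lbrynet import conf


def headers_behind_enough(local_height, remote_height):
    # a depth of 0 disables the S3 fast-sync path entirely
    depth = conf.settings['s3_headers_depth']
    return bool(depth) and remote_height > (local_height + depth)


def re_reflect_cutoff(now_seconds):
    # streams last reflected before this timestamp get re-queued for upload
    return now_seconds - conf.settings['auto_re_reflect_interval']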

@@ -1,24 +1,30 @@
import os
from collections import defaultdict, deque
import datetime
import logging
from decimal import Decimal
import treq
from zope.interface import implements
from twisted.internet import threads, reactor, defer, task
from twisted.python.failure import Failure
from twisted.internet.error import ConnectionAborted
from hashlib import sha256
from lbryum import wallet as lbryum_wallet
from lbryum.network import Network
from lbryum.simple_config import SimpleConfig
from lbryum.constants import COIN
from lbryum.commands import Commands
from lbryum.errors import InvalidPassword
from lbryum.constants import HEADERS_URL, HEADER_SIZE
from lbryschema.uri import parse_lbry_uri
from lbryschema.claim import ClaimDict
from lbryschema.error import DecodeError
from lbryschema.decode import smart_decode
from lbrynet.txlbryum.factory import StratumClient
from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHandler, IWallet
from lbrynet.core.utils import DeferredDict
from lbrynet.core.client.ClientRequest import ClientRequest
@@ -86,18 +92,112 @@ class Wallet(object):
self._batch_count = 20
self._pending_claim_checker = task.LoopingCall(self.fetch_and_save_heights_for_pending_claims)
-def start(self):
-log.info("Starting wallet.")
+@defer.inlineCallbacks
+def fetch_headers_from_s3(self):
local_header_size = self.local_header_file_size()
resume_header = {"Range": "bytes={}-".format(local_header_size)}
response = yield treq.get(HEADERS_URL, headers=resume_header)
got_406 = response.code == 406 # our file is bigger
final_size_after_download = response.length + local_header_size
if got_406:
log.warning("s3 is more out of date than we are")
# should have something to download and a final length divisible by the header size
elif final_size_after_download and not final_size_after_download % HEADER_SIZE:
s3_height = (final_size_after_download / HEADER_SIZE) - 1
local_height = self.local_header_file_height()
if s3_height > local_height:
if local_header_size:
log.info("Resuming download of %i bytes from s3", response.length)
with open(os.path.join(self.config.path, "blockchain_headers"), "a+b") as headers_file:
yield treq.collect(response, headers_file.write)
else:
with open(os.path.join(self.config.path, "blockchain_headers"), "wb") as headers_file:
yield treq.collect(response, headers_file.write)
log.info("fetched headers from s3 (s3 height: %i), now verifying integrity after download.", s3_height)
self._check_header_file_integrity()
else:
log.warning("s3 is more out of date than we are")
else:
log.error("invalid size for headers from s3")
-def start_manage():
+def local_header_file_height(self):
return max((self.local_header_file_size() / HEADER_SIZE) - 1, 0)
def local_header_file_size(self):
headers_path = os.path.join(self.config.path, "blockchain_headers")
if os.path.isfile(headers_path):
return os.stat(headers_path).st_size
return 0
@defer.inlineCallbacks
def get_remote_height(self, server, port):
connected = defer.Deferred()
client = StratumClient(connected)
reactor.connectTCP(server, port, client)
yield connected
remote_height = yield client.blockchain_block_get_server_height()
client.client.transport.loseConnection()
defer.returnValue(remote_height)
@defer.inlineCallbacks
def should_download_headers_from_s3(self):
from lbrynet import conf
if conf.settings['blockchain_name'] != "lbrycrd_main":
defer.returnValue(False)
self._check_header_file_integrity()
s3_headers_depth = conf.settings['s3_headers_depth']
if not s3_headers_depth:
defer.returnValue(False)
local_height = self.local_header_file_height()
for server_url in self.config.get('default_servers'):
port = int(self.config.get('default_servers')[server_url]['t'])
try:
remote_height = yield self.get_remote_height(server_url, port)
log.info("%s:%i height: %i, local height: %s", server_url, port, remote_height, local_height)
if remote_height > (local_height + s3_headers_depth):
defer.returnValue(True)
except Exception as err:
log.warning("error requesting remote height from %s:%i - %s", server_url, port, err)
defer.returnValue(False)
def _check_header_file_integrity(self):
# TODO: temporary workaround for usability. move to txlbryum and check headers instead of file integrity
from lbrynet import conf
if conf.settings['blockchain_name'] != "lbrycrd_main":
return
hashsum = sha256()
checksum_height, checksum = conf.settings['HEADERS_FILE_SHA256_CHECKSUM']
checksum_length_in_bytes = checksum_height * HEADER_SIZE
if self.local_header_file_size() < checksum_length_in_bytes:
return
headers_path = os.path.join(self.config.path, "blockchain_headers")
with open(headers_path, "rb") as headers_file:
hashsum.update(headers_file.read(checksum_length_in_bytes))
current_checksum = hashsum.hexdigest()
if current_checksum != checksum:
msg = "Expected checksum {}, got {}".format(checksum, current_checksum)
log.warning("Wallet file corrupted, checksum mismatch. " + msg)
log.warning("Deleting header file so it can be downloaded again.")
os.unlink(headers_path)
elif (self.local_header_file_size() % HEADER_SIZE) != 0:
log.warning("Header file is good up to checkpoint height, but incomplete. Truncating to checkpoint.")
with open(headers_path, "rb+") as headers_file:
headers_file.truncate(checksum_length_in_bytes)
@defer.inlineCallbacks
def start(self):
should_download_headers = yield self.should_download_headers_from_s3()
if should_download_headers:
try:
yield self.fetch_headers_from_s3()
except Exception as err:
log.error("failed to fetch headers from s3: %s", err)
log.info("Starting wallet.")
yield self._start()
self.stopped = False
self.manage()
self._pending_claim_checker.start(30)
-return True
+defer.returnValue(True)
d = self._start()
d.addCallback(lambda _: start_manage())
return d
@staticmethod
def log_stop_error(err):

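The headers file used by the new wallet code is a flat array of fixed-size block headers, so heights are pure arithmetic on the file size (`height = size / HEADER_SIZE - 1`), and a partial S3 download is requested with an HTTP Range header starting at the current size. A small standalone sketch of that arithmetic, using Python 2 integer division as the diff does; the helper names are illustrative:

import os
from lbryum.constants import HEADER_SIZE  # fixed serialized size of one block header


def local_header_height(headers_path):
    # an empty or missing file corresponds to height 0, as in local_header_file_height()
    size = os.stat(headers_path).st_size if os.path.isfile(headers_path) else 0
    return max((size / HEADER_SIZE) - 1, 0)


def resume_range_header(headers_path):
    # request only the bytes past what is already on disk; S3 answers 406 if we are ahead of it
    size = os.stat(headers_path).st_size if os.path.isfile(headers_path) else 0
    return {"Range": "bytes={}-".format(size)}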

@@ -6,8 +6,7 @@ import os
import sys
import traceback
-from txrequests import Session
+import treq
from requests.exceptions import ConnectionError
from twisted.internet import defer
import twisted.python.log
@@ -35,13 +34,13 @@ TRACE = 5
class HTTPSHandler(logging.Handler):
-def __init__(self, url, fqdn=False, localname=None, facility=None, session=None):
+def __init__(self, url, fqdn=False, localname=None, facility=None, cookies=None):
logging.Handler.__init__(self)
self.url = url
self.fqdn = fqdn
self.localname = localname
self.facility = facility
-self.session = session if session is not None else Session()
+self.cookies = cookies or {}
def get_full_message(self, record):
if record.exc_info:
@@ -52,10 +51,8 @@ class HTTPSHandler(logging.Handler):
@defer.inlineCallbacks
def _emit(self, record):
payload = self.format(record)
-try:
-yield self.session.post(self.url, data=payload)
+response = yield treq.post(self.url, data=payload, cookies=self.cookies)
+self.cookies.update(response.cookies())
except ConnectionError:
pass
def emit(self, record):
return self._emit(record)


@@ -36,8 +36,12 @@ def get_platform(get_ip=True):
"lbryschema_version": lbryschema_version,
"build": build_type.BUILD, # CI server sets this during build step
}
if p["os_system"] == "Linux":
import distro
p["distro"] = distro.info()
p["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown')
-# TODO: remove this from get_platform and add a get_external_ip function using txrequests
+# TODO: remove this from get_platform and add a get_external_ip function using treq
if get_ip:
try:
response = json.loads(urlopen("https://api.lbry.io/ip").read())


@@ -1,12 +1,12 @@
"""
Utility for creating Crypt Streams, which are encrypted blobs and associated metadata.
"""
import os
import logging
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from twisted.internet import interfaces, defer
from zope.interface import implements
from Crypto import Random
from Crypto.Cipher import AES
from lbrynet.cryptstream.CryptBlob import CryptStreamBlobMaker
@@ -101,13 +101,13 @@ class CryptStreamCreator(object):
@staticmethod
def random_iv_generator():
while 1:
-yield Random.new().read(AES.block_size)
+yield os.urandom(AES.block_size / 8)
def setup(self):
"""Create the symmetric key if it wasn't provided"""
if self.key is None:
-self.key = Random.new().read(AES.block_size)
+self.key = os.urandom(AES.block_size / 8)
return defer.succeed(True)

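Note on the pycrypto-to-cryptography swap above: the meaning of `AES.block_size` changes. pycrypto's `Crypto.Cipher.AES.block_size` is 16 (bytes), while cryptography's `algorithms.AES.block_size` is 128 (bits), hence the new `/ 8`. A quick sketch confirming the new expression still yields 16 random bytes:

import os
from cryptography.hazmat.primitives.ciphers.algorithms import AES

# cryptography reports the AES block size in bits (128), so divide by 8 to get bytes
iv = os.urandom(AES.block_size // 8)
assert len(iv) == 16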

@@ -166,6 +166,11 @@ class AlwaysSend(object):
return d
def sort_claim_results(claims):
claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))
return claims
class Daemon(AuthJSONRPCServer):
"""
LBRYnet daemon, a jsonrpc interface to lbry functions
@@ -202,7 +207,7 @@ class Daemon(AuthJSONRPCServer):
self.connected_to_internet = True
self.connection_status_code = None
self.platform = None
-self.current_db_revision = 7
+self.current_db_revision = 9
self.db_revision_file = conf.settings.get_db_revision_filename()
self.session = None
self._session_id = conf.settings.get_session_id()
@@ -2361,7 +2366,8 @@ class Daemon(AuthJSONRPCServer):
}
"""
-claims = yield self.session.wallet.get_claims_for_name(name)
+claims = yield self.session.wallet.get_claims_for_name(name) # type: dict
sort_claim_results(claims['claims'])
defer.returnValue(claims)
@defer.inlineCallbacks

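The new `sort_claim_results()` above backs the "predictable result sorting for `claim_list` and `claim_list_mine`" changelog entry: it sorts claims in place on the tuple `(height, name, claim_id, txid, nout)`, so repeated calls return them in a stable order. A small illustrative run using the same key (the claim values are made up):

claims = [
    {'height': 200, 'name': 'b', 'claim_id': 'ff', 'txid': 'aa', 'nout': 0},
    {'height': 100, 'name': 'z', 'claim_id': '01', 'txid': 'bb', 'nout': 1},
    {'height': 100, 'name': 'a', 'claim_id': '02', 'txid': 'cc', 'nout': 0},
]
# ties on height fall back to name, then claim_id, txid and nout
claims.sort(key=lambda d: (d['height'], d['name'], d['claim_id'], d['txid'], d['nout']))
assert [c['name'] for c in claims] == ['a', 'z', 'b']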

@@ -1,3 +1,12 @@
import os
import sys
# Set SSL_CERT_FILE env variable for Twisted SSL verification on Windows
# This needs to happen before anything else
if 'win' in sys.platform:
import certifi
os.environ['SSL_CERT_FILE'] = certifi.where()
from lbrynet.core import log_support
import argparse


@@ -1,8 +1,9 @@
import time
import requests
import logging
import json
from twisted.internet import defer, threads
import treq
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from lbrynet.core.Error import InvalidExchangeRateResponse
@@ -52,9 +53,10 @@ class MarketFeed(object):
def is_online(self):
return self._online
@defer.inlineCallbacks
def _make_request(self):
-r = requests.get(self.url, self.params, timeout=self.REQUESTS_TIMEOUT)
-return r.text
+response = yield treq.get(self.url, params=self.params, timeout=self.REQUESTS_TIMEOUT)
+defer.returnValue((yield response.content()))
def _handle_response(self, response):
return NotImplementedError
@@ -75,7 +77,7 @@
self._online = False
def _update_price(self):
-d = threads.deferToThread(self._make_request)
+d = self._make_request()
d.addCallback(self._handle_response)
d.addCallback(self._subtract_fee)
d.addCallback(self._save_price)


@@ -13,7 +13,7 @@ from txjsonrpc import jsonrpclib
from traceback import format_exc
from lbrynet import conf
-from lbrynet.core.Error import InvalidAuthenticationToken, InvalidHeaderError
+from lbrynet.core.Error import InvalidAuthenticationToken
from lbrynet.core import utils
from lbrynet.daemon.auth.util import APIKey, get_auth_message
from lbrynet.daemon.auth.client import LBRY_SECRET
@@ -231,9 +231,9 @@ class AuthJSONRPCServer(AuthorizedBase):
def _render(self, request):
time_in = utils.now()
-if not self._check_headers(request):
-self._render_error(Failure(InvalidHeaderError()), request, None)
-return server.NOT_DONE_YET
+# if not self._check_headers(request):
+# self._render_error(Failure(InvalidHeaderError()), request, None)
+# return server.NOT_DONE_YET
session = request.getSession()
session_id = session.uid
finished_deferred = request.notifyFinish()


@@ -16,6 +16,10 @@ def migrate_db(db_dir, start, end):
from lbrynet.database.migrator.migrate5to6 import do_migration
elif current == 6:
from lbrynet.database.migrator.migrate6to7 import do_migration
elif current == 7:
from lbrynet.database.migrator.migrate7to8 import do_migration
elif current == 8:
from lbrynet.database.migrator.migrate8to9 import do_migration
else:
raise Exception("DB migration of version {} to {} is not available".format(current,
current+1))

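The dispatcher above now knows about revisions 7→8 and 8→9, matching the Daemon's new `current_db_revision = 9`. Presumably `migrate_db` is invoked once per revision step by a surrounding loop; a hedged sketch of that stepping logic — only the 7→8 and 8→9 imports come from the diff, the loop itself is an assumption:

def run_migrations(db_dir, start, end):
    # apply one migration per revision step, e.g. 7 -> 8 -> 9
    for current in range(start, end):
        if current == 7:
            from lbrynet.database.migrator.migrate7to8 import do_migration
        elif current == 8:
            from lbrynet.database.migrator.migrate8to9 import do_migration
        else:
            raise Exception("DB migration of version {} to {} is not available".format(current, current + 1))
        do_migration(db_dir)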

@@ -0,0 +1,21 @@
import sqlite3
import os
def do_migration(db_dir):
db_path = os.path.join(db_dir, "lbrynet.sqlite")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
cursor.executescript(
"""
create table reflected_stream (
sd_hash text not null,
reflector_address text not null,
timestamp integer,
primary key (sd_hash, reflector_address)
);
"""
)
connection.commit()
connection.close()


@@ -0,0 +1,54 @@
import sqlite3
import logging
import os
from lbrynet.core.Error import InvalidStreamDescriptorError
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, format_sd_info, format_blobs, validate_descriptor
from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
log = logging.getLogger(__name__)
def do_migration(db_dir):
db_path = os.path.join(db_dir, "lbrynet.sqlite")
blob_dir = os.path.join(db_dir, "blobfiles")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
query = "select stream_name, stream_key, suggested_filename, sd_hash, stream_hash from stream"
streams = cursor.execute(query).fetchall()
blobs = cursor.execute("select s.stream_hash, s.position, s.iv, b.blob_hash, b.blob_length from stream_blob s "
"left outer join blob b ON b.blob_hash=s.blob_hash order by s.position").fetchall()
blobs_by_stream = {}
for stream_hash, position, iv, blob_hash, blob_length in blobs:
blobs_by_stream.setdefault(stream_hash, []).append(CryptBlobInfo(blob_hash, position, blob_length or 0, iv))
for stream_name, stream_key, suggested_filename, sd_hash, stream_hash in streams:
sd_info = format_sd_info(
EncryptedFileStreamType, stream_name, stream_key,
suggested_filename, stream_hash, format_blobs(blobs_by_stream[stream_hash])
)
try:
validate_descriptor(sd_info)
except InvalidStreamDescriptorError as err:
log.warning("Stream for descriptor %s is invalid (%s), cleaning it up",
sd_hash, err.message)
blob_hashes = [blob.blob_hash for blob in blobs_by_stream[stream_hash]]
delete_stream(cursor, stream_hash, sd_hash, blob_hashes, blob_dir)
connection.commit()
connection.close()
def delete_stream(transaction, stream_hash, sd_hash, blob_hashes, blob_dir):
transaction.execute("delete from content_claim where stream_hash=? ", (stream_hash,))
transaction.execute("delete from file where stream_hash=? ", (stream_hash, ))
transaction.execute("delete from stream_blob where stream_hash=?", (stream_hash, ))
transaction.execute("delete from stream where stream_hash=? ", (stream_hash, ))
transaction.execute("delete from blob where blob_hash=?", (sd_hash, ))
for blob_hash in blob_hashes:
transaction.execute("delete from blob where blob_hash=?", (blob_hash, ))
file_path = os.path.join(blob_dir, blob_hash)
if os.path.isfile(file_path):
os.unlink(file_path)


@@ -157,6 +157,13 @@ class SQLiteStorage(object):
amount integer not null,
address text not null
);
create table if not exists reflected_stream (
sd_hash text not null,
reflector_address text not null,
timestamp integer,
primary key (sd_hash, reflector_address)
);
""" """
def __init__(self, db_dir, reactor=None): def __init__(self, db_dir, reactor=None):
@ -545,7 +552,7 @@ class SQLiteStorage(object):
) )
return self.db.runInteraction(_save_support) return self.db.runInteraction(_save_support)
def get_supports(self, claim_id): def get_supports(self, *claim_ids):
def _format_support(outpoint, supported_id, amount, address): def _format_support(outpoint, supported_id, amount, address):
return { return {
"txid": outpoint.split(":")[0], "txid": outpoint.split(":")[0],
@ -556,10 +563,15 @@ class SQLiteStorage(object):
} }
def _get_supports(transaction): def _get_supports(transaction):
if len(claim_ids) == 1:
bind = "=?"
else:
bind = "in ({})".format(','.join('?' for _ in range(len(claim_ids))))
return [
_format_support(*support_info)
for support_info in transaction.execute(
-"select * from support where claim_id=?", (claim_id, )
+"select * from support where claim_id {}".format(bind),
tuple(claim_ids)
).fetchall()
]
@@ -676,51 +688,82 @@ class SQLiteStorage(object):
@defer.inlineCallbacks
def get_content_claim(self, stream_hash, include_supports=True):
-def _get_content_claim(transaction):
-claim_id = transaction.execute(
-"select claim.claim_outpoint from content_claim "
-"inner join claim on claim.claim_outpoint=content_claim.claim_outpoint and content_claim.stream_hash=? "
-"order by claim.rowid desc", (stream_hash, )
+def _get_claim_from_stream_hash(transaction):
+claim_info = transaction.execute(
+"select c.*, "
+"case when c.channel_claim_id is not null then "
+"(select claim_name from claim where claim_id==c.channel_claim_id) "
"else null end as channel_name from content_claim "
"inner join claim c on c.claim_outpoint=content_claim.claim_outpoint "
"and content_claim.stream_hash=? order by c.rowid desc", (stream_hash,)
).fetchone()
-if not claim_id:
+if not claim_info:
return None
-return claim_id[0]
+channel_name = claim_info[-1]
result = _format_claim_response(*claim_info[:-1])
if channel_name:
result['channel_name'] = channel_name
return result
-content_claim_outpoint = yield self.db.runInteraction(_get_content_claim)
-result = None
-if content_claim_outpoint:
-result = yield self.get_claim(content_claim_outpoint, include_supports)
+result = yield self.db.runInteraction(_get_claim_from_stream_hash)
+if result and include_supports:
+supports = yield self.get_supports(result['claim_id'])
+result['supports'] = supports
result['effective_amount'] = float(
sum([support['amount'] for support in supports]) + result['amount']
)
defer.returnValue(result)
@defer.inlineCallbacks
-def get_claim(self, claim_outpoint, include_supports=True):
-def _claim_response(outpoint, claim_id, name, amount, height, serialized, channel_id, address, claim_sequence):
-r = {
-"name": name,
-"claim_id": claim_id,
-"address": address,
-"claim_sequence": claim_sequence,
-"value": ClaimDict.deserialize(serialized.decode('hex')).claim_dict,
-"height": height,
-"amount": float(Decimal(amount) / Decimal(COIN)),
-"nout": int(outpoint.split(":")[1]),
-"txid": outpoint.split(":")[0],
-"channel_claim_id": channel_id,
-"channel_name": None
-}
-return r
+def get_claims_from_stream_hashes(self, stream_hashes, include_supports=True):
+def _batch_get_claim(transaction):
+results = {}
+bind = "({})".format(','.join('?' for _ in range(len(stream_hashes))))
+claim_infos = transaction.execute(
+"select content_claim.stream_hash, c.*, "
+"case when c.channel_claim_id is not null then "
+"(select claim_name from claim where claim_id==c.channel_claim_id) "
+"else null end as channel_name from content_claim "
+"inner join claim c on c.claim_outpoint=content_claim.claim_outpoint "
+"and content_claim.stream_hash in {} order by c.rowid desc".format(bind),
+tuple(stream_hashes)
+).fetchall()
+for claim_info in claim_infos:
+channel_name = claim_info[-1]
+stream_hash = claim_info[0]
result = _format_claim_response(*claim_info[1:-1])
if channel_name:
result['channel_name'] = channel_name
results[stream_hash] = result
return results
claims = yield self.db.runInteraction(_batch_get_claim)
if include_supports:
all_supports = {}
for support in (yield self.get_supports(*[claim['claim_id'] for claim in claims.values()])):
all_supports.setdefault(support['claim_id'], []).append(support)
for stream_hash in claims.keys():
claim = claims[stream_hash]
supports = all_supports.get(claim['claim_id'], [])
claim['supports'] = supports
claim['effective_amount'] = float(
sum([support['amount'] for support in supports]) + claim['amount']
)
claims[stream_hash] = claim
defer.returnValue(claims)
@defer.inlineCallbacks
def get_claim(self, claim_outpoint, include_supports=True):
def _get_claim(transaction):
-claim_info = transaction.execute(
-"select * from claim where claim_outpoint=?", (claim_outpoint, )
-).fetchone()
-result = _claim_response(*claim_info)
-if result['channel_claim_id']:
-channel_name_result = transaction.execute(
-"select claim_name from claim where claim_id=?", (result['channel_claim_id'], )
-).fetchone()
-if channel_name_result:
-result['channel_name'] = channel_name_result[0]
+claim_info = transaction.execute("select c.*, "
+"case when c.channel_claim_id is not null then "
+"(select claim_name from claim where claim_id==c.channel_claim_id) "
+"else null end as channel_name from claim c where claim_outpoint = ?",
+(claim_outpoint,)).fetchone()
+channel_name = claim_info[-1]
+result = _format_claim_response(*claim_info[:-1])
+if channel_name:
+result['channel_name'] = channel_name
return result
result = yield self.db.runInteraction(_get_claim)
@@ -765,3 +808,42 @@ class SQLiteStorage(object):
(height, outpoint)
)
return self.db.runInteraction(_save_claim_heights)
# # # # # # # # # reflector functions # # # # # # # # #
def update_reflected_stream(self, sd_hash, reflector_address, success=True):
if success:
return self.db.runOperation(
"insert or replace into reflected_stream values (?, ?, ?)",
(sd_hash, reflector_address, self.clock.seconds())
)
return self.db.runOperation(
"delete from reflected_stream where sd_hash=? and reflector_address=?",
(sd_hash, reflector_address)
)
def get_streams_to_re_reflect(self):
return self.run_and_return_list(
"select s.sd_hash from stream s "
"left outer join reflected_stream r on s.sd_hash=r.sd_hash "
"where r.timestamp is null or r.timestamp < ?",
self.clock.seconds() - conf.settings['auto_re_reflect_interval']
)
# Helper functions
def _format_claim_response(outpoint, claim_id, name, amount, height, serialized, channel_id, address, claim_sequence):
r = {
"name": name,
"claim_id": claim_id,
"address": address,
"claim_sequence": claim_sequence,
"value": ClaimDict.deserialize(serialized.decode('hex')).claim_dict,
"height": height,
"amount": float(Decimal(amount) / Decimal(COIN)),
"nout": int(outpoint.split(":")[1]),
"txid": outpoint.split(":")[0],
"channel_claim_id": channel_id,
"channel_name": None
}
return r

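Both `get_supports(*claim_ids)` and `get_claims_from_stream_hashes()` above build their SQL `in (...)` clause by joining one `?` placeholder per value, which keeps the query parameterized for any batch size. A minimal self-contained sketch of the same pattern against a throwaway table (the table and column names here are illustrative, not lbrynet's schema):

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table support (claim_id text, amount integer)")
conn.executemany("insert into support values (?, ?)", [("a", 1), ("b", 2), ("c", 3)])

claim_ids = ("a", "c")
# one "?" placeholder per value, exactly like the "in ({})" binding built in get_supports()
bind = "in ({})".format(",".join("?" for _ in claim_ids)) if len(claim_ids) > 1 else "=?"
rows = conn.execute("select claim_id, amount from support where claim_id {}".format(bind),
                    tuple(claim_ids)).fetchall()
assert sorted(rows) == [("a", 1), ("c", 3)]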

@@ -56,10 +56,7 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
self.channel_name = None
self.metadata = None
-@defer.inlineCallbacks
+def set_claim_info(self, claim_info):
def get_claim_info(self, include_supports=True):
claim_info = yield self.storage.get_content_claim(self.stream_hash, include_supports)
if claim_info:
self.claim_id = claim_info['claim_id']
self.txid = claim_info['txid']
self.nout = claim_info['nout']
@@ -69,6 +66,12 @@ class ManagedEncryptedFileDownloader(EncryptedFileSaver):
self.channel_name = claim_info['channel_name']
self.metadata = claim_info['value']['stream']['metadata']
@defer.inlineCallbacks
def get_claim_info(self, include_supports=True):
claim_info = yield self.storage.get_content_claim(self.stream_hash, include_supports)
if claim_info:
self.set_claim_info(claim_info)
defer.returnValue(claim_info)
@property


@@ -6,12 +6,11 @@ import logging
from twisted.internet import defer, task, reactor
from twisted.python.failure import Failure
from lbrynet.core.Error import InvalidStreamDescriptorError
from lbrynet.reflector.reupload import reflect_file
from lbrynet.core.PaymentRateManager import NegotiatedPaymentRateManager
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
-from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, get_sd_info, validate_descriptor
+from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, get_sd_info
from lbrynet.cryptstream.client.CryptStreamDownloader import AlreadyStoppedError
from lbrynet.cryptstream.client.CryptStreamDownloader import CurrentlyStoppingError
from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
@@ -31,7 +30,7 @@ class EncryptedFileManager(object):
def __init__(self, session, sd_identifier):
-self.auto_re_reflect = conf.settings['reflect_uploads']
+self.auto_re_reflect = conf.settings['reflect_uploads'] and conf.settings['auto_re_reflect_interval'] > 0
self.auto_re_reflect_interval = conf.settings['auto_re_reflect_interval']
self.session = session
self.storage = session.storage
@@ -96,25 +95,14 @@ class EncryptedFileManager(object):
suggested_file_name=suggested_file_name
)
-@defer.inlineCallbacks
+def _start_lbry_file(self, file_info, payment_rate_manager, claim_info):
def _start_lbry_file(self, file_info, payment_rate_manager):
lbry_file = self._get_lbry_file(
file_info['row_id'], file_info['stream_hash'], payment_rate_manager, file_info['sd_hash'],
file_info['key'], file_info['stream_name'], file_info['file_name'], file_info['download_directory'],
file_info['suggested_file_name']
)
-yield lbry_file.get_claim_info()
-try:
+if claim_info:
+lbry_file.set_claim_info(claim_info)
# verify the stream is valid (we might have downloaded an invalid stream
# in the past when the validation check didn't work)
stream_info = yield get_sd_info(self.storage, file_info['stream_hash'], include_blobs=True)
validate_descriptor(stream_info)
except InvalidStreamDescriptorError as err:
log.warning("Stream for descriptor %s is invalid (%s), cleaning it up",
lbry_file.sd_hash, err.message)
yield lbry_file.delete_data()
yield self.session.storage.delete_stream(lbry_file.stream_hash)
else:
try:
# restore will raise an Exception if status is unknown
lbry_file.restore(file_info['status'])
@ -128,19 +116,18 @@ class EncryptedFileManager(object):
@defer.inlineCallbacks
def _start_lbry_files(self):
files = yield self.session.storage.get_all_lbry_files()
claim_infos = yield self.session.storage.get_claims_from_stream_hashes([file['stream_hash'] for file in files])
b_prm = self.session.base_payment_rate_manager
payment_rate_manager = NegotiatedPaymentRateManager(b_prm, self.session.blob_tracker)
log.info("Starting %i files", len(files))
dl = []
for file_info in files:
-dl.append(self._start_lbry_file(file_info, payment_rate_manager))
+claim_info = claim_infos.get(file_info['stream_hash'])
self._start_lbry_file(file_info, payment_rate_manager, claim_info)
yield defer.DeferredList(dl)
log.info("Started %i lbry files", len(self.lbry_files)) log.info("Started %i lbry files", len(self.lbry_files))
if self.auto_re_reflect is True: if self.auto_re_reflect is True:
safe_start_looping_call(self.lbry_file_reflector, self.auto_re_reflect_interval) safe_start_looping_call(self.lbry_file_reflector, self.auto_re_reflect_interval / 10)
@defer.inlineCallbacks @defer.inlineCallbacks
def _stop_lbry_file(self, lbry_file): def _stop_lbry_file(self, lbry_file):
@ -253,7 +240,9 @@ class EncryptedFileManager(object):
def reflect_lbry_files(self): def reflect_lbry_files(self):
sem = defer.DeferredSemaphore(self.CONCURRENT_REFLECTS) sem = defer.DeferredSemaphore(self.CONCURRENT_REFLECTS)
ds = [] ds = []
sd_hashes_to_reflect = yield self.storage.get_streams_to_re_reflect()
for lbry_file in self.lbry_files:
if lbry_file.sd_hash in sd_hashes_to_reflect:
ds.append(sem.run(reflect_file, lbry_file))
yield defer.DeferredList(ds)

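With the changes above, re-reflecting is both rate-limited and filtered: the looping call now fires at one tenth of `auto_re_reflect_interval`, only streams returned by `get_streams_to_re_reflect()` are uploaded, and a `DeferredSemaphore` caps concurrent uploads at `CONCURRENT_REFLECTS`. A hedged sketch of that semaphore-limited fan-out; `reflect_one` is a stand-in callable, not the real `reflect_file`:

from twisted.internet import defer


@defer.inlineCallbacks
def reflect_selected(lbry_files, sd_hashes_to_reflect, reflect_one, concurrent=5):
    # allow at most `concurrent` uploads in flight at once, like CONCURRENT_REFLECTS
    sem = defer.DeferredSemaphore(concurrent)
    ds = [sem.run(reflect_one, lbry_file)
          for lbry_file in lbry_files
          if lbry_file.sd_hash in sd_hashes_to_reflect]
    results = yield defer.DeferredList(ds)
    defer.returnValue(results)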

@@ -1,10 +0,0 @@
"""
A client library for sending and receiving payments on the point trader network.
The point trader network is a simple payment system used solely for testing lbrynet-console. A user
creates a public key, registers it with the point trader server, and receives free points for
registering. The public key is used to spend points, and also used as an address to which points
are sent. To spend points, the public key signs a message containing the amount and the destination
public key and sends it to the point trader server. To check for payments, the recipient sends a
signed message asking the point trader server for its balance.
"""


@@ -1,230 +0,0 @@
from lbrynet import conf
from twisted.web.client import Agent, FileBodyProducer, Headers, ResponseDone
from twisted.internet import threads, defer, protocol
from Crypto.Hash import SHA
from Crypto.PublicKey import RSA
from Crypto.Signature import PKCS1_PSS
from StringIO import StringIO
import time
import json
import binascii
class BeginningPrinter(protocol.Protocol):
def __init__(self, finished):
self.finished = finished
self.data = ""
def dataReceived(self, bytes):
self.data = self.data + bytes
def connectionLost(self, reason):
if reason.check(ResponseDone) is not None:
self.finished.callback(str(self.data))
else:
self.finished.errback(reason)
def read_body(response):
d = defer.Deferred()
response.deliverBody(BeginningPrinter(d))
return d
def get_body(response):
if response.code != 200:
print "\n\n\n\nbad error code\n\n\n\n"
raise ValueError(response.phrase)
else:
return read_body(response)
def get_body_from_request(path, data):
from twisted.internet import reactor
jsondata = FileBodyProducer(StringIO(json.dumps(data)))
agent = Agent(reactor)
d = agent.request(
'POST', conf.settings['pointtrader_server'] + path,
Headers({'Content-Type': ['application/json']}), jsondata)
d.addCallback(get_body)
return d
def print_response(response):
pass
def print_error(err):
print err.getTraceback()
return err
def register_new_account(private_key):
data = {}
data['pub_key'] = private_key.publickey().exportKey()
def get_success_from_body(body):
r = json.loads(body)
if not 'success' in r or r['success'] is False:
return False
return True
d = get_body_from_request('/register/', data)
d.addCallback(get_success_from_body)
return d
def send_points(private_key, recipient_public_key, amount):
encoded_public_key = private_key.publickey().exportKey()
timestamp = time.time()
h = SHA.new()
h.update(encoded_public_key)
h.update(recipient_public_key)
h.update(str(amount))
h.update(str(timestamp))
signer = PKCS1_PSS.new(private_key)
signature = binascii.hexlify(signer.sign(h))
data = {}
data['sender_pub_key'] = encoded_public_key
data['recipient_pub_key'] = recipient_public_key
data['amount'] = amount
data['timestamp'] = timestamp
data['signature'] = signature
def get_success_from_body(body):
r = json.loads(body)
if not 'success' in r or r['success'] is False:
return False
return True
d = get_body_from_request('/send-points/', data)
d.addCallback(get_success_from_body)
return d
def get_recent_transactions(private_key):
encoded_public_key = private_key.publickey().exportKey()
timestamp = time.time()
h = SHA.new()
h.update(encoded_public_key)
h.update(str(timestamp))
signer = PKCS1_PSS.new(private_key)
signature = binascii.hexlify(signer.sign(h))
data = {}
data['pub_key'] = encoded_public_key
data['timestamp'] = timestamp
data['signature'] = signature
data['end_time'] = 0
data['start_time'] = 120
def get_transactions_from_body(body):
r = json.loads(body)
if "transactions" not in r:
raise ValueError("Invalid response: no 'transactions' field")
else:
return r['transactions']
d = get_body_from_request('/get-transactions/', data)
d.addCallback(get_transactions_from_body)
return d
def get_balance(private_key):
encoded_public_key = private_key.publickey().exportKey()
timestamp = time.time()
h = SHA.new()
h.update(encoded_public_key)
h.update(str(timestamp))
signer = PKCS1_PSS.new(private_key)
signature = binascii.hexlify(signer.sign(h))
data = {}
data['pub_key'] = encoded_public_key
data['timestamp'] = timestamp
data['signature'] = signature
def get_balance_from_body(body):
r = json.loads(body)
if not 'balance' in r:
raise ValueError("Invalid response: no 'balance' field")
else:
return float(r['balance'])
d = get_body_from_request('/get-balance/', data)
d.addCallback(get_balance_from_body)
return d
def run_full_test():
keys = []
def save_key(private_key):
keys.append(private_key)
return private_key
def check_balances_and_transactions(unused, bal1, bal2, num_transactions):
def assert_balance_is(actual, expected):
assert abs(actual - expected) < .05
print "correct balance. actual:", str(actual), "expected:", str(expected)
return True
def assert_transaction_length_is(transactions, expected_length):
assert len(transactions) == expected_length
print "correct transaction length"
return True
d1 = get_balance(keys[0])
d1.addCallback(assert_balance_is, bal1)
d2 = get_balance(keys[1])
d2.addCallback(assert_balance_is, bal2)
d3 = get_recent_transactions(keys[0])
d3.addCallback(assert_transaction_length_is, num_transactions)
d4 = get_recent_transactions(keys[1])
d4.addCallback(assert_transaction_length_is, num_transactions)
dl = defer.DeferredList([d1, d2, d3, d4])
return dl
def do_transfer(unused, amount):
d = send_points(keys[0], keys[1].publickey().exportKey(), amount)
return d
d1 = threads.deferToThread(RSA.generate, 4096)
d1.addCallback(save_key)
d1.addCallback(register_new_account)
d2 = threads.deferToThread(RSA.generate, 4096)
d2.addCallback(save_key)
d2.addCallback(register_new_account)
dlist = defer.DeferredList([d1, d2])
dlist.addCallback(check_balances_and_transactions, 1000, 1000, 0)
dlist.addCallback(do_transfer, 50)
dlist.addCallback(check_balances_and_transactions, 950, 1050, 1)
dlist.addCallback(do_transfer, 75)
dlist.addCallback(check_balances_and_transactions, 875, 1125, 2)
dlist.addErrback(print_error)
if __name__ == "__main__":
from twisted.internet import reactor
reactor.callLater(1, run_full_test)
reactor.callLater(25, reactor.stop)
reactor.run()


@@ -55,6 +55,16 @@ class EncryptedFileReflectorClient(Protocol):
d.addCallback(lambda _: self.send_next_request())
d.addErrback(self.response_failure_handler)
def store_result(self, result):
if not self.needed_blobs or len(self.reflected_blobs) == len(self.needed_blobs):
reflected = True
else:
reflected = False
d = self.blob_manager.storage.update_reflected_stream(self.sd_hash, self.transport.getPeer().host, reflected)
d.addCallback(lambda _: result)
return d
def connectionLost(self, reason):
# make sure blob file readers get closed
self.set_not_uploading()
@@ -68,15 +78,17 @@
else:
log.info('Finished sending reflector %i blobs for %s',
len(self.reflected_blobs), self.stream_descriptor)
-self.factory.finished_deferred.callback(self.reflected_blobs)
+result = self.reflected_blobs
elif reason.check(error.ConnectionLost):
log.warning("Stopped reflecting %s after sending %i blobs",
self.stream_descriptor, len(self.reflected_blobs))
-self.factory.finished_deferred.callback(self.reflected_blobs)
+result = self.reflected_blobs
else:
log.info('Reflector finished for %s: %s', self.stream_descriptor,
reason)
-self.factory.finished_deferred.callback(reason)
+result = reason
self.factory.finished_deferred.addCallback(self.store_result)
self.factory.finished_deferred.callback(result)
# IConsumer stuff


@@ -4,11 +4,9 @@ import os
import platform
import shutil
import sys
import random
import unittest
-from Crypto import Random
+from hashlib import md5
from Crypto.Hash import MD5
from lbrynet import conf
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.core.Session import Session
@@ -22,7 +20,6 @@ from twisted.internet import defer, threads, task
from twisted.trial.unittest import TestCase
from twisted.python.failure import Failure
from lbrynet.dht.node import Node
from lbrynet.core.PeerManager import PeerManager
from lbrynet.core.RateLimiter import DummyRateLimiter, RateLimiter
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
@@ -99,9 +96,6 @@ class LbryUploader(object):
from twisted.internet import reactor
self.reactor = reactor
logging.debug("Starting the uploader")
Random.atfork()
r = random.Random()
r.seed("start_lbry_uploader")
wallet = FakeWallet()
peer_manager = PeerManager()
peer_finder = FakePeerFinder(5553, peer_manager, 1)
@@ -115,7 +109,7 @@ class LbryUploader(object):
node_id="abcd", peer_finder=peer_finder, hash_announcer=hash_announcer,
peer_port=5553, dht_node_port=4445, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker,
-dht_node_class=Node, is_generous=self.is_generous, external_ip="127.0.0.1")
+dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1")
self.lbry_file_manager = EncryptedFileManager(self.session, self.sd_identifier)
if self.ul_rate_limit is not None:
self.session.rate_limiter.set_ul_limit(self.ul_rate_limit)
@ -192,10 +186,6 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,
logging.debug("Starting the uploader") logging.debug("Starting the uploader")
Random.atfork()
r = random.Random()
r.seed("start_lbry_reuploader")
wallet = FakeWallet()
peer_port = 5553 + n
@@ -207,7 +197,7 @@ def start_lbry_reuploader(sd_hash, kill_event, dead_event,
db_dir, blob_dir = mk_db_and_blob_dir()
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
-node_id="abcd" + str(n), dht_node_port=4446,
+node_id="abcd" + str(n), dht_node_port=4446, dht_node_class=FakeNode,
peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=peer_port,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
@ -298,7 +288,6 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero
logging.debug("Starting the uploader") logging.debug("Starting the uploader")
Random.atfork()
wallet = FakeWallet() wallet = FakeWallet()
peer_manager = PeerManager() peer_manager = PeerManager()
@ -315,7 +304,7 @@ def start_blob_uploader(blob_hash_queue, kill_event, dead_event, slow, is_genero
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh", session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="efgh",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_finder=peer_finder, hash_announcer=hash_announcer, dht_node_class=FakeNode,
blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446, blob_dir=blob_dir, peer_port=peer_port, dht_node_port=4446,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
@ -497,7 +486,7 @@ class TestTransfer(TestCase):
blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, blob_dir=blob_dir, peer_port=5553, dht_node_port=4445,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
dht_node_class=Node, is_generous=self.is_generous, external_ip="127.0.0.1") dht_node_class=FakeNode, is_generous=self.is_generous, external_ip="127.0.0.1")
self.lbry_file_manager = EncryptedFileManager( self.lbry_file_manager = EncryptedFileManager(
self.session, sd_identifier) self.session, sd_identifier)
@ -516,7 +505,7 @@ class TestTransfer(TestCase):
def check_md5_sum(): def check_md5_sum():
f = open(os.path.join(db_dir, 'test_file')) f = open(os.path.join(db_dir, 'test_file'))
hashsum = MD5.new() hashsum = md5()
hashsum.update(f.read()) hashsum.update(f.read())
self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be") self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")
@ -582,7 +571,7 @@ class TestTransfer(TestCase):
self.session = Session( self.session = Session(
conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd", conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, node_id="abcd",
peer_finder=peer_finder, hash_announcer=hash_announcer, peer_finder=peer_finder, hash_announcer=hash_announcer,
blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, blob_dir=blob_dir, peer_port=5553, dht_node_port=4445, dht_node_class=FakeNode,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1") is_generous=conf.ADJUSTABLE_SETTINGS['is_generous_host'][1], external_ip="127.0.0.1")
@ -662,7 +651,7 @@ class TestTransfer(TestCase):
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir,
node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, dht_node_class=FakeNode,
hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553, hash_announcer=hash_announcer, blob_dir=blob_dir, peer_port=5553,
use_upnp=False, rate_limiter=rate_limiter, wallet=wallet, use_upnp=False, rate_limiter=rate_limiter, wallet=wallet,
blob_tracker_class=DummyBlobAvailabilityTracker, blob_tracker_class=DummyBlobAvailabilityTracker,
@ -689,7 +678,7 @@ class TestTransfer(TestCase):
def check_md5_sum(): def check_md5_sum():
f = open(os.path.join(db_dir, 'test_file')) f = open(os.path.join(db_dir, 'test_file'))
hashsum = MD5.new() hashsum = md5()
hashsum.update(f.read()) hashsum.update(f.read())
self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be") self.assertEqual(hashsum.hexdigest(), "4ca2aafb4101c1e42235aad24fbb83be")
@ -769,7 +758,7 @@ class TestTransfer(TestCase):
sd_identifier = StreamDescriptorIdentifier() sd_identifier = StreamDescriptorIdentifier()
db_dir, blob_dir = mk_db_and_blob_dir() db_dir, blob_dir = mk_db_and_blob_dir()
self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, self.session = Session(conf.ADJUSTABLE_SETTINGS['data_rate'][1], db_dir=db_dir, dht_node_class=FakeNode,
node_id="abcd", peer_finder=peer_finder, dht_node_port=4445, node_id="abcd", peer_finder=peer_finder, dht_node_port=4445,
hash_announcer=hash_announcer, blob_dir=blob_dir, hash_announcer=hash_announcer, blob_dir=blob_dir,
peer_port=5553, use_upnp=False, rate_limiter=rate_limiter, peer_port=5553, use_upnp=False, rate_limiter=rate_limiter,
@ -812,7 +801,7 @@ class TestTransfer(TestCase):
def check_md5_sum(): def check_md5_sum():
f = open('test_file') f = open('test_file')
hashsum = MD5.new() hashsum = md5()
hashsum.update(f.read()) hashsum.update(f.read())
self.assertEqual(hashsum.hexdigest(), "e5941d615f53312fd66638239c1f90d5") self.assertEqual(hashsum.hexdigest(), "e5941d615f53312fd66638239c1f90d5")
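The checksum assertions in this file swap pycrypto's MD5.new() for the standard library's hashlib.md5(). A minimal sketch of the equivalent check, assuming the same read-whole-file pattern as the tests (file_md5 is a hypothetical helper name, not part of the test suite):

```python
# Minimal sketch: hashlib.md5 as a drop-in replacement for Crypto.Hash.MD5
# in these md5-sum checks.
from hashlib import md5

def file_md5(path):
    hashsum = md5()
    with open(path, 'rb') as f:
        hashsum.update(f.read())
    return hashsum.hexdigest()

# e.g. self.assertEqual(file_md5(os.path.join(db_dir, 'test_file')),
#                       "4ca2aafb4101c1e42235aad24fbb83be")
```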
View file
@ -2,7 +2,7 @@ import os
import shutil import shutil
import tempfile import tempfile
from Crypto.Hash import MD5 from hashlib import md5
from twisted.trial.unittest import TestCase from twisted.trial.unittest import TestCase
from twisted.internet import defer, threads from twisted.internet import defer, threads
@ -127,7 +127,7 @@ class TestStreamify(TestCase):
self.assertTrue(lbry_file.sd_hash, sd_hash) self.assertTrue(lbry_file.sd_hash, sd_hash)
yield lbry_file.start() yield lbry_file.start()
f = open('test_file') f = open('test_file')
hashsum = MD5.new() hashsum = md5()
hashsum.update(f.read()) hashsum.update(f.read())
self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b") self.assertEqual(hashsum.hexdigest(), "68959747edc73df45e45db6379dd7b3b")
View file
@ -1,7 +1,10 @@
import base64
import struct import struct
import io import io
from Crypto.PublicKey import RSA from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from twisted.internet import defer, error from twisted.internet import defer, error
from twisted.python.failure import Failure from twisted.python.failure import Failure
@ -9,11 +12,18 @@ from lbrynet.core.client.ClientRequest import ClientRequest
from lbrynet.core.Error import RequestCanceledError from lbrynet.core.Error import RequestCanceledError
from lbrynet.core import BlobAvailability from lbrynet.core import BlobAvailability
from lbrynet.core.utils import generate_id from lbrynet.core.utils import generate_id
from lbrynet.dht.node import Node as RealNode
from lbrynet.daemon import ExchangeRateManager as ERM from lbrynet.daemon import ExchangeRateManager as ERM
from lbrynet import conf from lbrynet import conf
from util import debug_kademlia_packet from util import debug_kademlia_packet
KB = 2**10 KB = 2**10
PUBLIC_EXPONENT = 65537 # http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
def decode_rsa_key(pem_key):
decoded = base64.b64decode(''.join(pem_key.splitlines()[1:-1]))
return serialization.load_der_public_key(decoded, default_backend())
class FakeLBRYFile(object): class FakeLBRYFile(object):
@ -24,15 +34,9 @@ class FakeLBRYFile(object):
self.file_name = 'fake_lbry_file' self.file_name = 'fake_lbry_file'
class Node(object): class Node(RealNode):
def __init__(self, peer_finder=None, peer_manager=None, dht_node_port=None, peer_port=3333, **kwargs): def joinNetwork(self, known_node_addresses=None):
self.peer_finder = peer_finder return defer.succeed(None)
self.peer_manager = peer_manager
self.peerPort = peer_port
self.udpPort = dht_node_port
def joinNetwork(self, *args):
return defer.succeed(True)
def stop(self): def stop(self):
return defer.succeed(None) return defer.succeed(None)
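The DHT test double is now a thin subclass of the real Node rather than a hand-rolled stand-in, so it keeps the real attributes and only stubs the network-facing calls. A self-contained sketch of the pattern from the hunk above:

```python
# The mock inherits everything from the real Node and only overrides the
# methods that would touch the network.
from twisted.internet import defer
from lbrynet.dht.node import Node as RealNode


class Node(RealNode):
    def joinNetwork(self, known_node_addresses=None):
        return defer.succeed(None)

    def stop(self):
        return defer.succeed(None)
```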
@ -142,9 +146,10 @@ class PointTraderKeyQueryHandler(object):
if self.query_identifiers[0] in queries: if self.query_identifiers[0] in queries:
new_encoded_pub_key = queries[self.query_identifiers[0]] new_encoded_pub_key = queries[self.query_identifiers[0]]
try: try:
RSA.importKey(new_encoded_pub_key) decode_rsa_key(new_encoded_pub_key)
except (ValueError, TypeError, IndexError): except (ValueError, TypeError, IndexError):
return defer.fail(Failure(ValueError("Client sent an invalid public key"))) value_error = ValueError("Client sent an invalid public key: {}".format(new_encoded_pub_key))
return defer.fail(Failure(value_error))
self.public_key = new_encoded_pub_key self.public_key = new_encoded_pub_key
self.wallet.set_public_key_for_peer(self.peer, self.public_key) self.wallet.set_public_key_for_peer(self.peer, self.public_key)
fields = {'public_key': self.wallet.encoded_public_key} fields = {'public_key': self.wallet.encoded_public_key}
@ -157,8 +162,10 @@ class PointTraderKeyQueryHandler(object):
class Wallet(object): class Wallet(object):
def __init__(self): def __init__(self):
self.private_key = RSA.generate(1024) self.private_key = rsa.generate_private_key(public_exponent=PUBLIC_EXPONENT,
self.encoded_public_key = self.private_key.publickey().exportKey() key_size=1024, backend=default_backend())
self.encoded_public_key = self.private_key.public_key().public_bytes(serialization.Encoding.PEM,
serialization.PublicFormat.PKCS1)
self._config = None self._config = None
self.network = None self.network = None
self.wallet = None self.wallet = None
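Key handling in the mock Wallet now goes through the cryptography package instead of pycrypto. A standalone sketch of the round trip shown above (1024-bit key, PKCS1 PEM export, then the decode_rsa_key-style re-import), assuming Python 2 string semantics as in the surrounding code:

```python
# Sketch of the pycrypto -> cryptography swap: generate a key, export the
# public half as PKCS1 PEM, then re-load it by stripping the PEM armor and
# parsing the base64 body as DER.
import base64

from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives import serialization
from cryptography.hazmat.primitives.asymmetric import rsa

PUBLIC_EXPONENT = 65537

private_key = rsa.generate_private_key(
    public_exponent=PUBLIC_EXPONENT, key_size=1024, backend=default_backend())
encoded_public_key = private_key.public_key().public_bytes(
    serialization.Encoding.PEM, serialization.PublicFormat.PKCS1)


def decode_rsa_key(pem_key):
    decoded = base64.b64decode(''.join(pem_key.splitlines()[1:-1]))
    return serialization.load_der_public_key(decoded, default_backend())


public_key = decode_rsa_key(encoded_public_key)
```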
@ -392,6 +399,7 @@ create_stream_sd_file = {
def mock_conf_settings(obj, settings={}): def mock_conf_settings(obj, settings={}):
conf.initialize_settings(False)
original_settings = conf.settings original_settings = conf.settings
conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS) conf.settings = conf.Config(conf.FIXED_SETTINGS, conf.ADJUSTABLE_SETTINGS)
conf.settings.installation_id = conf.settings.get_installation_id() conf.settings.installation_id = conf.settings.get_installation_id()
View file
@ -116,7 +116,7 @@ class MocServerProtocolFactory(ServerFactory):
class TestIntegrationConnectionManager(unittest.TestCase): class TestIntegrationConnectionManager(unittest.TestCase):
def setUp(self): def setUp(self):
conf.initialize_settings() conf.initialize_settings(False)
self.TEST_PEER = Peer(LOCAL_HOST, PEER_PORT) self.TEST_PEER = Peer(LOCAL_HOST, PEER_PORT)
self.downloader = MocDownloader() self.downloader = MocDownloader()
View file
@ -34,7 +34,7 @@ class DHTHashAnnouncerTest(unittest.TestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def setUp(self): def setUp(self):
from lbrynet.conf import initialize_settings from lbrynet.conf import initialize_settings
initialize_settings() initialize_settings(False)
self.num_blobs = 10 self.num_blobs = 10
self.blobs_to_announce = [] self.blobs_to_announce = []
for i in range(0, self.num_blobs): for i in range(0, self.num_blobs):
View file
@ -17,7 +17,7 @@ from lbrynet.core.cryptoutils import get_lbry_hash_obj
class BlobManagerTest(unittest.TestCase): class BlobManagerTest(unittest.TestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def setUp(self): def setUp(self):
conf.initialize_settings() conf.initialize_settings(False)
self.blob_dir = tempfile.mkdtemp() self.blob_dir = tempfile.mkdtemp()
self.db_dir = tempfile.mkdtemp() self.db_dir = tempfile.mkdtemp()
self.bm = DiskBlobManager(self.blob_dir, SQLiteStorage(self.db_dir)) self.bm = DiskBlobManager(self.blob_dir, SQLiteStorage(self.db_dir))
View file
@ -5,11 +5,13 @@ from lbrynet.blob.blob_file import MAX_BLOB_SIZE
from lbrynet.tests.mocks import mock_conf_settings from lbrynet.tests.mocks import mock_conf_settings
from Crypto import Random from cryptography.hazmat.primitives.ciphers.algorithms import AES
from Crypto.Cipher import AES
import random import random
import string import string
import StringIO import StringIO
import os
AES_BLOCK_SIZE_BYTES = AES.block_size / 8
class MocBlob(object): class MocBlob(object):
def __init__(self): def __init__(self):
@ -44,8 +46,8 @@ class TestCryptBlob(unittest.TestCase):
# max blob size is 2*2**20 -1 ( -1 due to required padding in the end ) # max blob size is 2*2**20 -1 ( -1 due to required padding in the end )
blob = MocBlob() blob = MocBlob()
blob_num = 0 blob_num = 0
key = Random.new().read(AES.block_size) key = os.urandom(AES_BLOCK_SIZE_BYTES)
iv = Random.new().read(AES.block_size) iv = os.urandom(AES_BLOCK_SIZE_BYTES)
maker = CryptBlob.CryptStreamBlobMaker(key, iv, blob_num, blob) maker = CryptBlob.CryptStreamBlobMaker(key, iv, blob_num, blob)
write_size = size_of_data write_size = size_of_data
string_to_encrypt = random_string(size_of_data) string_to_encrypt = random_string(size_of_data)
@ -54,7 +56,7 @@ class TestCryptBlob(unittest.TestCase):
done, num_bytes = maker.write(string_to_encrypt) done, num_bytes = maker.write(string_to_encrypt)
yield maker.close() yield maker.close()
self.assertEqual(size_of_data, num_bytes) self.assertEqual(size_of_data, num_bytes)
expected_encrypted_blob_size = ((size_of_data / AES.block_size) + 1) * AES.block_size expected_encrypted_blob_size = ((size_of_data / AES_BLOCK_SIZE_BYTES) + 1) * AES_BLOCK_SIZE_BYTES
self.assertEqual(expected_encrypted_blob_size, len(blob.data)) self.assertEqual(expected_encrypted_blob_size, len(blob.data))
if size_of_data < MAX_BLOB_SIZE-1: if size_of_data < MAX_BLOB_SIZE-1:
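With pycrypto removed, random keys and IVs come from os.urandom, and the block size is taken from cryptography's AES, which reports it in bits rather than bytes. A short sketch of the setup used in this test:

```python
# cryptography's AES.block_size is in bits (128); divide by 8 to get the byte
# count expected by os.urandom and by the blob padding arithmetic.
import os
from cryptography.hazmat.primitives.ciphers.algorithms import AES

AES_BLOCK_SIZE_BYTES = AES.block_size / 8  # 16 under Python 2 integer division

key = os.urandom(AES_BLOCK_SIZE_BYTES)
iv = os.urandom(AES_BLOCK_SIZE_BYTES)
```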
View file
@ -0,0 +1,43 @@
import unittest
from lbrynet.daemon.Daemon import sort_claim_results
class ClaimsComparatorTest(unittest.TestCase):
def test_sort_claim_results_when_sorted_by_claim_id(self):
results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "fdsafa"},
{"height": 1, "name": "res", "claim_id": "aaa", "nout": 0, "txid": "w5tv8uorgt"},
{"height": 1, "name": "res", "claim_id": "bbb", "nout": 0, "txid": "aecfaewcfa"}]
self.run_test(results, 'claim_id', ['aaa', 'bbb', 'ccc'])
def test_sort_claim_results_when_sorted_by_height(self):
results = [{"height": 1, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
{"height": 3, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
{"height": 2, "name": "res", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}]
self.run_test(results, 'height', [1, 2, 3])
def test_sort_claim_results_when_sorted_by_name(self):
results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
{"height": 1, "name": "res3", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"},
{"height": 1, "name": "res2", "claim_id": "ccc", "nout": 0, "txid": "aecfaewcfa"}]
self.run_test(results, 'name', ['res1', 'res2', 'res3'])
def test_sort_claim_results_when_sorted_by_txid(self):
results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "111"},
{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "222"},
{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "333"}]
self.run_test(results, 'txid', ['111', '222', '333'])
def test_sort_claim_results_when_sorted_by_nout(self):
results = [{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 2, "txid": "aecfaewcfa"},
{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 1, "txid": "aecfaewcfa"},
{"height": 1, "name": "res1", "claim_id": "ccc", "nout": 3, "txid": "aecfaewcfa"}]
self.run_test(results, 'nout', [1, 2, 3])
def run_test(self, results, field, expected):
actual = sort_claim_results(results)
self.assertEqual(expected, [r[field] for r in actual])
if __name__ == '__main__':
unittest.main()
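These tests only pin down the ordering per field. One implementation consistent with them, offered purely as an illustrative sketch (the real sort_claim_results in lbrynet.daemon.Daemon may be written differently), sorts on a composite key:

```python
# Illustrative sketch only: order claims by height, then name, claim_id,
# txid and nout, which satisfies every case in ClaimsComparatorTest.
def sort_claim_results(claims):
    return sorted(claims, key=lambda c: (c['height'], c['name'], c['claim_id'],
                                         c['txid'], c['nout']))
```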
View file
@ -85,7 +85,7 @@ class StorageTest(unittest.TestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def setUp(self): def setUp(self):
conf.initialize_settings() conf.initialize_settings(False)
self.db_dir = tempfile.mkdtemp() self.db_dir = tempfile.mkdtemp()
self.storage = SQLiteStorage(self.db_dir) self.storage = SQLiteStorage(self.db_dir)
yield self.storage.setup() yield self.storage.setup()
@ -163,6 +163,25 @@ class BlobStorageTests(StorageTest):
self.assertEqual(blob_hashes, []) self.assertEqual(blob_hashes, [])
class SupportsStorageTests(StorageTest):
@defer.inlineCallbacks
def test_supports_storage(self):
claim_ids = [random_lbry_hash() for _ in range(10)]
random_supports = [{"txid": random_lbry_hash(), "nout":i, "address": "addr{}".format(i), "amount": i}
for i in range(20)]
expected_supports = {}
for idx, claim_id in enumerate(claim_ids):
yield self.storage.save_supports(claim_id, random_supports[idx*2:idx*2+2])
for random_support in random_supports[idx*2:idx*2+2]:
random_support['claim_id'] = claim_id
expected_supports.setdefault(claim_id, []).append(random_support)
supports = yield self.storage.get_supports(claim_ids[0])
self.assertEqual(supports, expected_supports[claim_ids[0]])
all_supports = yield self.storage.get_supports(*claim_ids)
for support in all_supports:
self.assertIn(support, expected_supports[support['claim_id']])
class StreamStorageTests(StorageTest): class StreamStorageTests(StorageTest):
@defer.inlineCallbacks @defer.inlineCallbacks
def test_store_stream(self, stream_hash=None): def test_store_stream(self, stream_hash=None):
View file
@ -1,5 +1,5 @@
# -*- coding: utf-8 -*- # -*- coding: utf-8 -*-
from Crypto.Cipher import AES from cryptography.hazmat.primitives.ciphers.algorithms import AES
import mock import mock
from twisted.trial import unittest from twisted.trial import unittest
from twisted.internet import defer from twisted.internet import defer
@ -18,7 +18,7 @@ MB = 2**20
def iv_generator(): def iv_generator():
while True: while True:
yield '3' * AES.block_size yield '3' * (AES.block_size / 8)
class CreateEncryptedFileTest(unittest.TestCase): class CreateEncryptedFileTest(unittest.TestCase):
@ -47,7 +47,7 @@ class CreateEncryptedFileTest(unittest.TestCase):
@defer.inlineCallbacks @defer.inlineCallbacks
def create_file(self, filename): def create_file(self, filename):
handle = mocks.GenFile(3*MB, '1') handle = mocks.GenFile(3*MB, '1')
key = '2'*AES.block_size key = '2' * (AES.block_size / 8)
out = yield EncryptedFileCreator.create_lbry_file(self.session, self.file_manager, filename, handle, out = yield EncryptedFileCreator.create_lbry_file(self.session, self.file_manager, filename, handle,
key, iv_generator()) key, iv_generator())
defer.returnValue(out) defer.returnValue(out)
View file
@ -1,6 +1,6 @@
import mock import mock
from twisted.trial import unittest from twisted.trial import unittest
from lbrynet import conf
from lbrynet.tests.mocks import mock_conf_settings from lbrynet.tests.mocks import mock_conf_settings
from lbrynet.daemon.auth import server from lbrynet.daemon.auth import server
@ -10,6 +10,7 @@ class AuthJSONRPCServerTest(unittest.TestCase):
# and add useful general utilities like this # and add useful general utilities like this
# onto it. # onto it.
def setUp(self): def setUp(self):
conf.initialize_settings(False)
self.server = server.AuthJSONRPCServer(use_authentication=False) self.server = server.AuthJSONRPCServer(use_authentication=False)
def test_get_server_port(self): def test_get_server_port(self):
View file
@ -6,7 +6,7 @@ from twisted.internet import defer, error
from twisted.protocols.basic import LineOnlyReceiver from twisted.protocols.basic import LineOnlyReceiver
from errors import RemoteServiceException, ProtocolException, ServiceException from errors import RemoteServiceException, ProtocolException, ServiceException
log = logging.getLogger() log = logging.getLogger(__name__)
class StratumClientProtocol(LineOnlyReceiver): class StratumClientProtocol(LineOnlyReceiver):
@ -29,12 +29,22 @@ class StratumClientProtocol(LineOnlyReceiver):
try: try:
self.transport.setTcpNoDelay(True) self.transport.setTcpNoDelay(True)
self.transport.setTcpKeepAlive(True) self.transport.setTcpKeepAlive(True)
if hasattr(socket, "TCP_KEEPIDLE"):
self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE,
120) # Seconds before sending keepalive probes 120) # Seconds before sending keepalive probes
else:
log.debug("TCP_KEEPIDLE not available")
if hasattr(socket, "TCP_KEEPINTVL"):
self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL,
1) # Interval in seconds between keepalive probes 1) # Interval in seconds between keepalive probes
else:
log.debug("TCP_KEEPINTVL not available")
if hasattr(socket, "TCP_KEEPCNT"):
self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, self.transport.socket.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT,
5) # Failed keepalive probes before declaring other end dead 5) # Failed keepalive probes before declaring other end dead
else:
log.debug("TCP_KEEPCNT not available")
except Exception as err: except Exception as err:
# Supported only by the socket transport, # Supported only by the socket transport,
# but there's really no better place in code to trigger this. # but there's really no better place in code to trigger this.
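The keepalive tuning is now applied only when the platform exposes the relevant constants, since TCP_KEEPIDLE, TCP_KEEPINTVL and TCP_KEEPCNT are Linux-specific. A sketch of the guard pattern on a bare socket (apply_keepalive_opts is a hypothetical helper, not part of the protocol class):

```python
# Each option is set only if the socket module defines it on this platform;
# otherwise it is simply skipped, as the protocol above does with a debug log.
import socket


def apply_keepalive_opts(sock, idle=120, interval=1, count=5):
    if hasattr(socket, "TCP_KEEPIDLE"):
        sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPIDLE, idle)
    if hasattr(socket, "TCP_KEEPINTVL"):
        sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPINTVL, interval)
    if hasattr(socket, "TCP_KEEPCNT"):
        sock.setsockopt(socket.SOL_TCP, socket.TCP_KEEPCNT, count)
```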
View file
@ -105,3 +105,6 @@ class StratumClient(ClientFactory):
def blockchain_address_get_history(self, address): def blockchain_address_get_history(self, address):
return self._rpc('blockchain.address.get_history', [address]) return self._rpc('blockchain.address.get_history', [address])
def blockchain_block_get_server_height(self):
return self._rpc('blockchain.block.get_server_height', [])
View file
@ -1,5 +1,6 @@
certifi==2018.4.16
Twisted==16.6.0 Twisted==16.6.0
cryptography==2.0.3 cryptography==2.2.2
appdirs==1.4.3 appdirs==1.4.3
argparse==1.2.1 argparse==1.2.1
docopt==0.6.2 docopt==0.6.2
@ -12,19 +13,18 @@ GitPython==2.1.3
jsonrpc==1.2 jsonrpc==1.2
jsonrpclib==0.1.7 jsonrpclib==0.1.7
keyring==10.4.0 keyring==10.4.0
git+https://github.com/lbryio/lbryschema.git@v0.0.15#egg=lbryschema git+https://github.com/lbryio/lbryschema.git@v0.0.16rc2#egg=lbryschema
git+https://github.com/lbryio/lbryum.git@v3.2.1#egg=lbryum git+https://github.com/lbryio/lbryum.git@v3.2.2rc1#egg=lbryum
miniupnpc==1.9 miniupnpc==1.9
pbkdf2==1.3 pbkdf2==1.3
pycrypto==2.6.1
pyyaml==3.12 pyyaml==3.12
PyGithub==1.34 PyGithub==1.34
qrcode==5.2.2 qrcode==5.2.2
requests==2.9.1 requests==2.9.1
txrequests==0.9.5
service_identity==16.0.0 service_identity==16.0.0
six>=1.9.0 six>=1.9.0
slowaes==0.1a1 slowaes==0.1a1
txJSON-RPC==0.5 txJSON-RPC==0.5
wsgiref==0.1.2 wsgiref==0.1.2
zope.interface==4.3.3 zope.interface==4.3.3
treq==17.8.0
View file
@ -14,7 +14,7 @@ from lbrynet.core import log_support, Wallet, Peer
from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.core.StreamDescriptor import BlobStreamDescriptorReader from lbrynet.core.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.core.BlobManager import DiskBlobManager from lbrynet.core.BlobManager import DiskBlobManager
from lbrynet.dht.hashannouncer import DummyHashAnnouncer from lbrynet.database.storage import SQLiteStorage
log = logging.getLogger() log = logging.getLogger()
@ -45,13 +45,13 @@ def main(args=None):
@defer.inlineCallbacks @defer.inlineCallbacks
def download_it(peer, timeout, blob_hash): def download_it(peer, timeout, blob_hash):
tmp_dir = yield threads.deferToThread(tempfile.mkdtemp) tmp_dir = yield threads.deferToThread(tempfile.mkdtemp)
announcer = DummyHashAnnouncer() storage = SQLiteStorage(tmp_dir, reactor)
tmp_blob_manager = DiskBlobManager(announcer, tmp_dir, tmp_dir) yield storage.setup()
tmp_blob_manager = DiskBlobManager(tmp_dir, storage)
config = {'auto_connect': True} config = {'auto_connect': True}
if conf.settings['lbryum_wallet_dir']: if conf.settings['lbryum_wallet_dir']:
config['lbryum_path'] = conf.settings['lbryum_wallet_dir'] config['lbryum_path'] = conf.settings['lbryum_wallet_dir']
storage = Wallet.InMemoryStorage()
wallet = Wallet.LBRYumWallet(storage, config) wallet = Wallet.LBRYumWallet(storage, config)
downloader = SinglePeerDownloader() downloader = SinglePeerDownloader()
@ -76,8 +76,9 @@ def download_it(peer, timeout, blob_hash):
pass pass
if info: if info:
break break
time.sleep(
0.1) # there's some kind of race condition where it sometimes doesnt write the blob to disk in time # there's some kind of race condition where it sometimes doesnt write the blob to disk in time
time.sleep(0.1)
if info is not None: if info is not None:
pprint(info) pprint(info)
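DiskBlobManager now takes a blob directory plus a SQLiteStorage instance (which must be set up before use) instead of a hash announcer and two directories. A self-contained sketch of the new wiring in this script:

```python
# Sketch of the updated construction: SQLiteStorage replaces the dummy hash
# announcer, and it has to be set up before it is handed to DiskBlobManager.
import tempfile

from twisted.internet import defer, reactor, threads

from lbrynet.core.BlobManager import DiskBlobManager
from lbrynet.database.storage import SQLiteStorage


@defer.inlineCallbacks
def make_blob_manager():
    tmp_dir = yield threads.deferToThread(tempfile.mkdtemp)
    storage = SQLiteStorage(tmp_dir, reactor)
    yield storage.setup()
    defer.returnValue(DiskBlobManager(tmp_dir, storage))
```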
View file
@ -17,18 +17,18 @@ from setuptools import setup, find_packages
requires = [ requires = [
'Twisted', 'Twisted',
'appdirs', 'appdirs',
'distro',
'base58', 'base58',
'envparse', 'envparse',
'jsonrpc', 'jsonrpc',
'lbryschema==0.0.15', 'lbryschema==0.0.16rc2',
'lbryum==3.2.1', 'lbryum==3.2.2rc1',
'miniupnpc', 'miniupnpc',
'pycrypto',
'pyyaml', 'pyyaml',
'requests', 'requests',
'txrequests',
'txJSON-RPC', 'txJSON-RPC',
'zope.interface', 'zope.interface',
'treq',
'docopt' 'docopt'
] ]