Merge branch 'master' into patch-1

Alyssa Callahan, 2018-11-07 10:18:58 -05:00 (committed by GitHub)
commit 9fbb9f2e98
216 changed files with 6241 additions and 1270 deletions


@@ -9,7 +9,7 @@
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
ignore=CVS,schema
# Add files or directories matching the regex patterns to the
# blacklist. The regex matches against base names, not paths.


@@ -10,9 +10,8 @@ jobs:
name: "pylint lbrynet"
install:
- pip install pylint
- pip install git+https://github.com/lbryio/torba.git
- pip install git+https://github.com/lbryio/lbryschema.git
- pip install -e .
- pip install git+https://github.com/lbryio/torba.git#egg=torba[server]
- pip install -e .[wallet-server]
script: pylint lbrynet
- &tests
@@ -20,11 +19,7 @@ jobs:
name: "Unit Tests w/ Python 3.7"
install:
- pip install coverage
- pip install git+https://github.com/lbryio/electrumx.git#lbryumx
- pip install git+https://github.com/lbryio/orchstr8.git
- pip install git+https://github.com/lbryio/lbryschema.git
- pip install git+https://github.com/lbryio/lbryumx.git
- pip install git+https://github.com/lbryio/torba.git
- pip install git+https://github.com/lbryio/torba.git#egg=torba[server]
- pip install -e .[test]
script:
- HOME=/tmp coverage run -p --source=lbrynet -m unittest discover -v tests.unit.wallet
@@ -49,10 +44,6 @@ jobs:
- name: "Integration Tests"
install:
- pip install tox-travis coverage
- pushd .. && git clone https://github.com/lbryio/electrumx.git --branch lbryumx && popd
- pushd .. && git clone https://github.com/lbryio/orchstr8.git && popd
- pushd .. && git clone https://github.com/lbryio/lbryschema.git && popd
- pushd .. && git clone https://github.com/lbryio/lbryumx.git && cd lbryumx && git checkout afd34f323dd94c516108a65240f7d17aea8efe85 && cd .. && popd
- pushd .. && git clone https://github.com/lbryio/torba.git && popd
script: tox
after_success:
@@ -83,11 +74,10 @@ jobs:
install:
- pip3 install pyinstaller
- pip3 install git+https://github.com/lbryio/torba.git
- pip3 install git+https://github.com/lbryio/lbryschema.git
- python scripts/set_build.py
- pip3 install -e .
script:
- pyinstaller -F -n lbrynet lbrynet/cli.py
- pyinstaller -F -n lbrynet lbrynet/extras/cli.py
- chmod +x dist/lbrynet
- zip -j dist/lbrynet-${OS}.zip dist/lbrynet
- ./dist/lbrynet --version
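
The .travis.yml hunks above replace the separately installed electrumx, orchstr8, lbryschema, and lbryumx dependencies with package extras: torba is installed with its server extra (torba[server]) and lbrynet itself with the wallet-server or test extras. A minimal sketch of how such extras can be declared in setup.py; the dependency lists below are assumptions for illustration, not the project's actual setup:

    # hypothetical setup.py fragment enabling "pip install -e .[wallet-server]"
    from setuptools import setup, find_packages

    setup(
        name='lbrynet',
        packages=find_packages(),
        extras_require={
            'wallet-server': ['torba[server]', 'msgpack'],  # assumed contents
            'test': ['coverage', 'mock'],                   # assumed contents
        },
    )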


@@ -100,7 +100,7 @@
<div class="md-flex__cell md-flex__cell--shrink">
<a href="/" title="LBRY" class="md-header-nav__button md-logo">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" width="24" height="24">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" alt="LBRY logo" width="24" height="24">
</a>
</div>
@@ -196,7 +196,7 @@
<label class="md-nav__title md-nav__title--site" for="drawer">
<span class="md-nav__button md-logo">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" width="48" height="48">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" alt="LBRY logo" width="48" height="48">
</span>
LBRY


@@ -7,7 +7,7 @@ from cryptography.hazmat.primitives.ciphers import Cipher, modes
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.padding import PKCS7
from cryptography.hazmat.backends import default_backend
from lbrynet.core.BlobInfo import BlobInfo
from lbrynet.p2p.BlobInfo import BlobInfo
from lbrynet.blob.blob_file import MAX_BLOB_SIZE
log = logging.getLogger(__name__)


@@ -6,7 +6,7 @@ import logging
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from twisted.internet import defer
from lbrynet.cryptstream.CryptBlob import CryptStreamBlobMaker
from lbrynet.blob.CryptBlob import CryptStreamBlobMaker
log = logging.getLogger(__name__)


@@ -9,9 +9,9 @@ from binascii import hexlify
from twisted.internet import defer
from twisted.protocols.basic import FileSender
from lbrynet.core.StreamDescriptor import BlobStreamDescriptorWriter, EncryptedFileStreamType
from lbrynet.core.StreamDescriptor import format_sd_info, get_stream_hash, validate_descriptor
from lbrynet.cryptstream.CryptStreamCreator import CryptStreamCreator
from lbrynet.p2p.StreamDescriptor import BlobStreamDescriptorWriter, EncryptedFileStreamType
from lbrynet.p2p.StreamDescriptor import format_sd_info, get_stream_hash, validate_descriptor
from lbrynet.blob.CryptStreamCreator import CryptStreamCreator
log = logging.getLogger(__name__)


@@ -5,14 +5,14 @@ import logging
from binascii import hexlify, unhexlify
from twisted.internet import defer
from lbrynet import conf
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.core.HTTPBlobDownloader import HTTPBlobDownloader
from lbrynet.core.utils import short_hash
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaver
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileDownloader
from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.core.StreamDescriptor import save_sd_info
from lbrynet.extras.daemon import conf
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.p2p.HTTPBlobDownloader import HTTPBlobDownloader
from lbrynet.p2p.utils import short_hash
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaver
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileDownloader
from lbrynet.blob.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.p2p.StreamDescriptor import save_sd_info
log = logging.getLogger(__name__)


@@ -7,15 +7,14 @@ from binascii import hexlify, unhexlify
from twisted.internet import defer, task, reactor
from twisted.python.failure import Failure
from lbrynet.reflector.reupload import reflect_file
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, get_sd_info
from lbrynet.cryptstream.client.CryptStreamDownloader import AlreadyStoppedError
from lbrynet.cryptstream.client.CryptStreamDownloader import CurrentlyStoppingError
from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet import conf
from lbrynet.extras.reflector.reupload import reflect_file
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamType, get_sd_info
from lbrynet.blob.client.CryptStreamDownloader import AlreadyStoppedError
from lbrynet.blob.client.CryptStreamDownloader import CurrentlyStoppingError
from lbrynet.p2p.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet.extras.daemon import conf
log = logging.getLogger(__name__)


@@ -3,8 +3,8 @@ import os
from twisted.internet import defer, threads
from twisted.web.client import FileBodyProducer
from twisted.python.failure import Failure
from lbrynet.core.Error import DownloadCanceledError, InvalidDataError, InvalidBlobHashError
from lbrynet.core.utils import is_valid_blobhash
from lbrynet.p2p.Error import DownloadCanceledError, InvalidDataError, InvalidBlobHashError
from lbrynet.p2p.utils import is_valid_blobhash
from lbrynet.blob.writer import HashBlobWriter
from lbrynet.blob.reader import HashBlobReader


@@ -1,6 +1,6 @@
import binascii
from twisted.internet import defer
from lbrynet.cryptstream.CryptBlob import StreamBlobDecryptor
from lbrynet.blob.CryptBlob import StreamBlobDecryptor
class CryptBlobHandler:


@@ -1,10 +1,10 @@
from binascii import unhexlify
import logging
from lbrynet.core.client.BlobRequester import BlobRequester
from lbrynet.core.client.ConnectionManager import ConnectionManager
from lbrynet.core.client.DownloadManager import DownloadManager
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.cryptstream.client.CryptBlobHandler import CryptBlobHandler
from lbrynet.p2p.client.BlobRequester import BlobRequester
from lbrynet.p2p.client.ConnectionManager import ConnectionManager
from lbrynet.p2p.client.DownloadManager import DownloadManager
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.blob.client.CryptBlobHandler import CryptBlobHandler
from twisted.internet import defer
from twisted.python.failure import Failure


@@ -3,11 +3,11 @@ import logging
import traceback
from binascii import hexlify, unhexlify
from lbrynet.core.StreamDescriptor import save_sd_info
from lbrynet.cryptstream.client.CryptStreamDownloader import CryptStreamDownloader
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.core.Error import FileOpenError
from lbrynet.lbry_file.client.EncryptedFileMetadataHandler import EncryptedFileMetadataHandler
from lbrynet.p2p.StreamDescriptor import save_sd_info
from lbrynet.blob.client.CryptStreamDownloader import CryptStreamDownloader
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.p2p.Error import FileOpenError
from lbrynet.blob.client.EncryptedFileMetadataHandler import EncryptedFileMetadataHandler
from twisted.internet import defer, threads


@@ -1,6 +1,6 @@
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType
from lbrynet.core.StreamDescriptor import EncryptedFileStreamDescriptorValidator
from lbrynet.core.DownloadOption import DownloadOption, DownloadOptionChoice
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamType
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamDescriptorValidator
from lbrynet.p2p.DownloadOption import DownloadOption, DownloadOptionChoice
def add_lbry_file_to_sd_identifier(sd_identifier):


@@ -3,7 +3,7 @@ import logging
from io import BytesIO
from twisted.internet import defer
from twisted.web.client import FileBodyProducer
from lbrynet.core.cryptoutils import get_lbry_hash_obj
from lbrynet.p2p.cryptoutils import get_lbry_hash_obj
log = logging.getLogger(__name__)


@@ -1,8 +1,8 @@
import logging
from io import BytesIO
from twisted.python.failure import Failure
from lbrynet.core.Error import DownloadCanceledError, InvalidDataError
from lbrynet.core.cryptoutils import get_lbry_hash_obj
from lbrynet.p2p.Error import DownloadCanceledError, InvalidDataError
from lbrynet.p2p.cryptoutils import get_lbry_hash_obj
log = logging.getLogger(__name__)


@@ -1,8 +0,0 @@
"""
Classes and functions for dealing with Crypt Streams.
Crypt Streams are encrypted blobs and metadata tying those blobs together. At least some of the
metadata is generally stored in a Stream Descriptor File, for example containing a public key
used to bind blobs to the stream and a symmetric key used to encrypt the blobs. The list of blobs
may or may not be present.
"""


@@ -2,8 +2,8 @@ import binascii
import logging
from twisted.internet import defer, task
from lbrynet.core import utils
from lbrynet import conf
from lbrynet.p2p import utils
from lbrynet.extras.daemon import conf
log = logging.getLogger(__name__)


@@ -7,7 +7,7 @@
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from lbrynet.core.utils import generate_id
from lbrynet.p2p.utils import generate_id
from . import constants


@@ -5,9 +5,9 @@ from functools import reduce
from twisted.internet import defer, error, task
from lbrynet.core.utils import generate_id, DeferredDict
from lbrynet.core.call_later_manager import CallLaterManager
from lbrynet.core.PeerManager import PeerManager
from lbrynet.p2p.utils import generate_id, DeferredDict
from lbrynet.p2p.call_later_manager import CallLaterManager
from lbrynet.p2p.PeerManager import PeerManager
from .error import TimeoutError
from . import constants
from . import routingtable
@@ -580,7 +580,7 @@ class Node(MockKademliaHelper):
"""
return generate_id()
# from lbrynet.core.utils import profile_deferred
# from lbrynet.p2p.utils import profile_deferred
# @profile_deferred()
@defer.inlineCallbacks
def _iterativeFind(self, key, startupShortlist=None, rpc='findNode', exclude=None):


@@ -2,8 +2,7 @@ import binascii
import logging
from twisted.internet import defer
from lbrynet import conf
from lbrynet.extras.daemon import conf
log = logging.getLogger(__name__)


@@ -118,7 +118,20 @@ class KademliaProtocol(protocol.DatagramProtocol):
return args
return args + ({b'protocolVersion': self._protocolVersion},)
@defer.inlineCallbacks
def sendRPC(self, contact, method, args):
for _ in range(constants.rpcAttempts):
try:
response = yield self._sendRPC(contact, method, args)
return response
except TimeoutError:
if contact.contact_is_good:
log.debug("RETRY %s ON %s", method, contact)
continue
else:
raise
def _sendRPC(self, contact, method, args):
"""
Sends an RPC to the specified contact
@@ -153,11 +166,12 @@
df = defer.Deferred()
def _remove_contact(failure): # remove the contact from the routing table and track the failure
contact.update_last_failed()
try:
self._node.removeContact(contact)
if not contact.contact_is_good:
self._node.removeContact(contact)
except (ValueError, IndexError):
pass
contact.update_last_failed()
return failure
def _update_contact(result): # refresh the contact in the routing table
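
The sendRPC added above wraps _sendRPC in a bounded retry loop: a timed-out call is retried up to constants.rpcAttempts times, but only while the contact still looks good; otherwise the TimeoutError propagates so the caller can evict the peer. A minimal asyncio sketch of the same pattern, where RPC_ATTEMPTS, send_once, and contact_is_good are stand-ins rather than the project's API:

    import asyncio

    RPC_ATTEMPTS = 3  # stand-in for constants.rpcAttempts

    async def send_rpc(contact, method, args):
        for _ in range(RPC_ATTEMPTS):
            try:
                return await send_once(contact, method, args)  # assumed transport call
            except asyncio.TimeoutError:
                if contact.contact_is_good:
                    continue  # healthy peer: retry
                raise         # bad peer: let the caller remove it
        # like the diff above, fall through quietly once the attempts are exhausted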


@@ -20,11 +20,11 @@ from docopt import docopt
from textwrap import dedent
from lbrynet import __name__ as lbrynet_name
from lbrynet.daemon.Daemon import Daemon
from lbrynet.daemon.DaemonControl import start as daemon_main
from lbrynet.daemon.DaemonConsole import main as daemon_console
from lbrynet.daemon.auth.client import LBRYAPIClient
from lbrynet.core.system_info import get_platform
from lbrynet.extras.daemon.Daemon import Daemon
from lbrynet.extras.daemon.DaemonControl import start as daemon_main
from lbrynet.extras.daemon.DaemonConsole import main as daemon_console
from lbrynet.extras.daemon.auth.client import LBRYAPIClient
from lbrynet.p2p.system_info import get_platform
async def execute_command(method, params, conf_path=None):


@@ -1,7 +1,7 @@
import logging
from twisted.internet import defer
from lbrynet.core.Error import ComponentStartConditionNotMet
from lbrynet.p2p.Error import ComponentStartConditionNotMet
log = logging.getLogger(__name__)
@@ -60,7 +60,8 @@ class ComponentManager:
try:
component = self.get_component(condition.component)
result = condition.evaluate(component)
except Exception as err:
except Exception:
log.exception('failed to evaluate condition:')
result = False
return result, "" if result else condition.message


@@ -7,30 +7,30 @@ import binascii
from hashlib import sha256
from types import SimpleNamespace
from twisted.internet import defer, threads, reactor, error, task
import lbryschema
import lbrynet.schema
from aioupnp import __version__ as aioupnp_version
from aioupnp.upnp import UPnP
from aioupnp.fault import UPnPError
from lbrynet import conf
from lbrynet.core.utils import DeferredDict
from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.core.RateLimiter import RateLimiter
from lbrynet.core.BlobManager import DiskBlobManager
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.network import Network
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.daemon.Component import Component
from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager
from lbrynet.database.storage import SQLiteStorage
from lbrynet.extras.daemon import conf
from lbrynet.p2p.utils import DeferredDict
from lbrynet.p2p.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.p2p.RateLimiter import RateLimiter
from lbrynet.p2p.BlobManager import DiskBlobManager
from lbrynet.p2p.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet import Network
from lbrynet.p2p.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.p2p.server.ServerProtocol import ServerProtocolFactory
from .Component import Component
from .ExchangeRateManager import ExchangeRateManager
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.dht import node, hashannouncer
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.reflector import ServerFactory as reflector_server_factory
from lbrynet.blob.EncryptedFileManager import EncryptedFileManager
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaverFactory
from lbrynet.blob.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.extras.reflector import ServerFactory as reflector_server_factory
from lbrynet.core.utils import generate_id
from lbrynet.p2p.utils import generate_id
log = logging.getLogger(__name__)
@@ -95,7 +95,7 @@ class ConfigSettings:
@staticmethod
def get_external_ip():
from lbrynet.core.system_info import get_platform
from lbrynet.p2p.system_info import get_platform
platform = get_platform(get_ip=True)
return platform['ip']
@@ -159,7 +159,7 @@ class DatabaseComponent(Component):
'Your database is revision %i, expected %i' %
(old_revision, self.get_current_db_revision()))
if old_revision < self.get_current_db_revision():
from lbrynet.database.migrator import dbmigrator
from lbrynet.extras.daemon.migrator import dbmigrator
log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
yield threads.deferToThread(
dbmigrator.migrate_db, GCS('data_dir'), old_revision, self.get_current_db_revision()
@@ -353,7 +353,7 @@ class WalletComponent(Component):
conf.settings.ensure_wallet_dir()
log.info("Starting torba wallet")
storage = self.component_manager.get_component(DATABASE_COMPONENT)
lbryschema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
lbrynet.schema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
self.wallet_manager = yield f2d(LbryWalletManager.from_lbrynet_config(conf.settings, storage))
self.wallet_manager.old_db = storage
yield f2d(self.wallet_manager.start())


@@ -14,41 +14,41 @@ from twisted.internet import defer, reactor
from twisted.internet.task import LoopingCall
from twisted.python.failure import Failure
from torba.constants import COIN
from torba.baseaccount import SingleKey, HierarchicalDeterministic
from torba.client.constants import COIN
from torba.client.baseaccount import SingleKey, HierarchicalDeterministic
from lbryschema.claim import ClaimDict
from lbryschema.uri import parse_lbry_uri
from lbryschema.error import URIParseError, DecodeError
from lbryschema.validator import validate_claim_id
from lbryschema.address import decode_address
from lbryschema.decode import smart_decode
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.error import URIParseError, DecodeError
from lbrynet.schema.validator import validate_claim_id
from lbrynet.schema.address import decode_address
from lbrynet.schema.decode import smart_decode
# TODO: importing this when internet is disabled raises a socket.gaierror
from lbrynet.core.system_info import get_lbrynet_version
from lbrynet import conf
from lbrynet.reflector import reupload
from lbrynet.daemon.Components import d2f, f2d
from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT, RATE_LIMITER_COMPONENT
from lbrynet.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, PAYMENT_RATE_COMPONENT, UPNP_COMPONENT
from lbrynet.daemon.ComponentManager import RequiredCondition
from lbrynet.daemon.Downloader import GetStream
from lbrynet.daemon.Publisher import Publisher
from lbrynet.daemon.auth.server import AuthJSONRPCServer
from lbrynet.core import utils, system_info
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.core.Error import InsufficientFundsError, UnknownNameError
from lbrynet.core.Error import DownloadDataTimeout, DownloadSDTimeout
from lbrynet.core.Error import NullFundsError, NegativeFundsError
from lbrynet.core.Error import ResolveError
from lbrynet.p2p.system_info import get_lbrynet_version
from lbrynet.extras.daemon import conf
from lbrynet.extras.reflector import reupload
from .Components import d2f, f2d
from .Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from .Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT, RATE_LIMITER_COMPONENT
from .Components import EXCHANGE_RATE_MANAGER_COMPONENT, PAYMENT_RATE_COMPONENT, UPNP_COMPONENT
from .ComponentManager import RequiredCondition
from .Downloader import GetStream
from .Publisher import Publisher
from .auth.server import AuthJSONRPCServer
from lbrynet.p2p import utils, system_info
from lbrynet.p2p.StreamDescriptor import download_sd_blob
from lbrynet.p2p.Error import InsufficientFundsError, UnknownNameError
from lbrynet.p2p.Error import DownloadDataTimeout, DownloadSDTimeout
from lbrynet.p2p.Error import NullFundsError, NegativeFundsError
from lbrynet.p2p.Error import ResolveError
from lbrynet.dht.error import TimeoutError
from lbrynet.core.Peer import Peer
from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.p2p.Peer import Peer
from lbrynet.p2p.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.p2p.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet.account import Account as LBCAccount
from lbrynet.extras.wallet.dewies import dewies_to_lbc, lbc_to_dewies
log = logging.getLogger(__name__)
requires = AuthJSONRPCServer.requires
@@ -820,7 +820,7 @@ class Daemon(AuthJSONRPCServer):
'ip': (str) remote ip, if available,
'lbrynet_version': (str) lbrynet_version,
'lbryum_version': (str) lbryum_version,
'lbryschema_version': (str) lbryschema_version,
'lbrynet.schema_version': (str) lbrynet.schema_version,
'os_release': (str) os release string
'os_system': (str) os name
'platform': (str) platform string
@@ -2271,7 +2271,7 @@ class Daemon(AuthJSONRPCServer):
}
}
# this will be used to verify the format with lbryschema
# this will be used to verify the format with lbrynet.schema
claim_copy = deepcopy(claim_dict)
if sources is not None:
claim_dict['stream']['source'] = sources
@@ -2292,7 +2292,7 @@
raise Exception("no source provided to publish")
try:
ClaimDict.load_dict(claim_copy)
# the metadata to use in the claim can be serialized by lbryschema
# the metadata to use in the claim can be serialized by lbrynet.schema
except DecodeError as err:
# there was a problem with a metadata field, raise an error here rather than
# waiting to find out when we go to publish the claim (after having made the stream)


@@ -6,12 +6,11 @@ import logging.handlers
from twisted.internet import defer, reactor, threads
from aiohttp import client_exceptions
from lbrynet import analytics
from lbrynet import conf
from lbrynet.core import utils
from lbrynet.core import log_support
from lbrynet.daemon.auth.client import LBRYAPIClient
from lbrynet.daemon.Daemon import Daemon
from lbrynet.extras.daemon import analytics, conf
from lbrynet.p2p import utils
from lbrynet.p2p import log_support
from .auth.client import LBRYAPIClient
from .Daemon import Daemon
log = logging.getLogger(__name__)


@@ -7,16 +7,16 @@ if 'win' in sys.platform:
import certifi
os.environ['SSL_CERT_FILE'] = certifi.where()
from lbrynet.core import log_support
from lbrynet.p2p import log_support
import argparse
import logging.handlers
from twisted.internet import reactor
from lbrynet import conf
from lbrynet.core import utils, system_info
from lbrynet.daemon.Daemon import Daemon
from lbrynet.extras.daemon import conf
from lbrynet.p2p import utils, system_info
from .Daemon import Daemon
log = logging.getLogger(__name__)


@@ -3,17 +3,17 @@ import os
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from lbrynet.daemon.Components import f2d
from lbryschema.fee import Fee
from .Components import f2d
from lbrynet.schema.fee import Fee
from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, InvalidStreamDescriptorError
from lbrynet.core.Error import DownloadDataTimeout, DownloadCanceledError, DownloadSDTimeout
from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet import conf
from torba.constants import COIN
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.p2p.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, InvalidStreamDescriptorError
from lbrynet.p2p.Error import DownloadDataTimeout, DownloadCanceledError, DownloadSDTimeout
from lbrynet.p2p.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet.p2p.StreamDescriptor import download_sd_blob
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.extras.daemon import conf
from torba.client.constants import COIN
from lbrynet.extras.wallet.dewies import dewies_to_lbc
INITIALIZING_CODE = 'initializing'
DOWNLOAD_METADATA_CODE = 'downloading_metadata'


@@ -6,7 +6,7 @@ import treq
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from lbrynet.core.Error import InvalidExchangeRateResponse, CurrencyConversionError
from lbrynet.p2p.Error import InvalidExchangeRateResponse, CurrencyConversionError
log = logging.getLogger(__name__)


@@ -3,7 +3,7 @@ import logging
import mimetypes
import os
from lbrynet.file_manager.EncryptedFileCreator import create_lbry_file
from lbrynet.blob.EncryptedFileCreator import create_lbry_file
log = logging.getLogger(__name__)


@@ -4,8 +4,8 @@ import logging
import treq
from twisted.internet import defer, task
from lbrynet import conf
from lbrynet.core import looping_call_manager, utils, system_info
from lbrynet.extras.daemon import conf
from lbrynet.p2p import looping_call_manager, utils, system_info
# Things We Track
SERVER_STARTUP = 'Server Startup'


@@ -3,8 +3,8 @@ import aiohttp
import logging
from urllib.parse import urlparse
from lbrynet import conf
from lbrynet.daemon.auth.keyring import Keyring, APIKey
from lbrynet.extras.daemon import conf
from .keyring import Keyring, APIKey
log = logging.getLogger(__name__)
USER_AGENT = "AuthServiceProxy/0.1"


@@ -3,7 +3,7 @@ import logging
from twisted.web import server, guard, resource
from twisted.cred import portal
from lbrynet import conf
from lbrynet.extras.daemon import conf
from .auth import PasswordChecker, HttpPasswordRealm
from ..auth.keyring import Keyring


@@ -13,7 +13,7 @@ from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from twisted.internet import ssl
import logging
from lbrynet import conf
from lbrynet.extras.daemon import conf
log = logging.getLogger(__name__)


@@ -14,16 +14,16 @@ from twisted.internet.error import ConnectionDone, ConnectionLost
from txjsonrpc import jsonrpclib
from traceback import format_exc
from lbrynet import conf, analytics
from lbrynet.core.Error import InvalidAuthenticationToken
from lbrynet.core import utils
from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.core.looping_call_manager import LoopingCallManager
from lbrynet.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon import analytics, conf
from lbrynet.p2p.Error import InvalidAuthenticationToken
from lbrynet.p2p import utils
from lbrynet.p2p.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.p2p.looping_call_manager import LoopingCallManager
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from .keyring import APIKey, Keyring
from .undecorated import undecorated
from .factory import AuthJSONRPCResource
from lbrynet.daemon.json_response_encoder import JSONResponseEncoder
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
log = logging.getLogger(__name__)
EMPTY_PARAMS = [{}]


@@ -7,15 +7,15 @@ import sys
import yaml
import envparse
from appdirs import user_data_dir, user_config_dir
from lbrynet.core import utils
from lbrynet.core.Error import InvalidCurrencyError, NoSuchDirectoryError
from lbrynet.p2p import utils
from lbrynet.p2p.Error import InvalidCurrencyError, NoSuchDirectoryError
from lbrynet.androidhelpers.paths import (
android_internal_storage_dir,
android_app_internal_storage_dir
)
try:
from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
from .winpaths import get_path, FOLDERID, UserHandle
except (ImportError, ValueError, NameError):
# Android platform: NameError: name 'c_wchar' is not defined
pass


@@ -1,11 +1,15 @@
import logging
from decimal import Decimal
from binascii import hexlify
from datetime import datetime
from json import JSONEncoder
from ecdsa import BadSignatureError
from lbrynet.wallet.transaction import Transaction, Output
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.wallet.ledger import MainNetLedger
from lbrynet.extras.wallet import MainNetLedger
from lbrynet.extras.wallet.transaction import Transaction, Output
from lbrynet.extras.wallet.dewies import dewies_to_lbc
log = logging.getLogger(__name__)
class JSONResponseEncoder(JSONEncoder):
@@ -75,6 +79,12 @@ class JSONResponseEncoder(JSONEncoder):
)
except BadSignatureError:
output['valid_signature'] = False
except ValueError:
log.exception(
'txo.id: %s, txo.channel.id:%s, output: %s',
txo.id, txo.channel.id, output
)
output['valid_signature'] = False
if txo.script.is_claim_name:
output['type'] = 'claim'


@@ -7,21 +7,21 @@ def migrate_db(db_dir, start, end):
current = start
while current < end:
if current == 1:
from lbrynet.database.migrator.migrate1to2 import do_migration
from .migrate1to2 import do_migration
elif current == 2:
from lbrynet.database.migrator.migrate2to3 import do_migration
from .migrate2to3 import do_migration
elif current == 3:
from lbrynet.database.migrator.migrate3to4 import do_migration
from .migrate3to4 import do_migration
elif current == 4:
from lbrynet.database.migrator.migrate4to5 import do_migration
from .migrate4to5 import do_migration
elif current == 5:
from lbrynet.database.migrator.migrate5to6 import do_migration
from .migrate5to6 import do_migration
elif current == 6:
from lbrynet.database.migrator.migrate6to7 import do_migration
from .migrate6to7 import do_migration
elif current == 7:
from lbrynet.database.migrator.migrate7to8 import do_migration
from .migrate7to8 import do_migration
elif current == 8:
from lbrynet.database.migrator.migrate8to9 import do_migration
from .migrate8to9 import do_migration
else:
raise Exception("DB migration of version {} to {} is not available".format(current,
current+1))


@@ -2,8 +2,8 @@ import sqlite3
import os
import json
import logging
from lbryschema.decode import smart_decode
from lbrynet import conf
from lbrynet.schema.decode import smart_decode
from lbrynet.extras.daemon import conf
log = logging.getLogger(__name__)


@@ -2,9 +2,9 @@ import sqlite3
import logging
import os
from lbrynet.core.Error import InvalidStreamDescriptorError
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, format_sd_info, format_blobs, validate_descriptor
from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
from lbrynet.p2p.Error import InvalidStreamDescriptorError
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamType, format_sd_info, format_blobs, validate_descriptor
from lbrynet.blob.CryptBlob import CryptBlobInfo
log = logging.getLogger(__name__)


@@ -7,12 +7,12 @@ from decimal import Decimal
from twisted.internet import defer, task, threads
from twisted.enterprise import adbapi
from lbryschema.claim import ClaimDict
from lbryschema.decode import smart_decode
from lbrynet import conf
from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.decode import smart_decode
from lbrynet.extras.daemon import conf
from lbrynet.blob.CryptBlob import CryptBlobInfo
from lbrynet.dht.constants import dataExpireTimeout
from torba.constants import COIN
from torba.client.constants import COIN
log = logging.getLogger(__name__)


@@ -1,4 +1,5 @@
from __future__ import print_function
# Copyright (c) 2014 Michael Kropat
import sys
import ctypes
from ctypes import windll, wintypes
@@ -156,7 +157,7 @@ def get_path(folderid, user_handle=UserHandle.common):
if __name__ == '__main__':
if len(sys.argv) < 2 or sys.argv[1] in ['-?', '/?']:
print('python knownpaths.py FOLDERID {current|common}')
print('python winpaths.py FOLDERID {current|common}')
sys.exit(0)
try:


@@ -63,7 +63,6 @@ If the transfer was not successful (False), the blob is re-added to the needed_blobs queue
Blob requests continue for each of the blobs the client has queued to send; when completed,
the client disconnects.
"""
from lbrynet.reflector.server.server import ReflectorServerFactory as ServerFactory
from lbrynet.reflector.client.client import EncryptedFileReflectorClientFactory as ClientFactory
from lbrynet.reflector.client.blob import BlobReflectorClientFactory as BlobClientFactory
from .server.server import ReflectorServerFactory as ServerFactory
from .client.client import EncryptedFileReflectorClientFactory as ClientFactory
from .client.blob import BlobReflectorClientFactory as BlobClientFactory
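
The reflector docstring excerpted above defines the client loop: each queued blob is offered to the server, an unsuccessful transfer puts the blob back on needed_blobs, and the client disconnects once its queue drains. A rough non-twisted sketch of that loop; send_blob and disconnect are assumed methods, and a real client would also cap its retries:

    from collections import deque

    async def reflect_blobs(connection, blobs):
        needed_blobs = deque(blobs)
        while needed_blobs:
            blob = needed_blobs.popleft()
            sent = await connection.send_blob(blob)  # assumed: True on success
            if not sent:
                needed_blobs.append(blob)            # re-add on failure, per docstring
        await connection.disconnect()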


@@ -5,7 +5,7 @@ from twisted.protocols.basic import FileSender
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet import defer, error
from lbrynet.reflector.common import IncompleteResponse, REFLECTOR_V2
from lbrynet.extras.reflector.common import IncompleteResponse, REFLECTOR_V2
log = logging.getLogger(__name__)


@@ -6,8 +6,8 @@ from twisted.protocols.basic import FileSender
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet import defer, error
from lbrynet.reflector.common import IncompleteResponse, ReflectorRequestError
from lbrynet.reflector.common import REFLECTOR_V1, REFLECTOR_V2
from lbrynet.extras.reflector.common import IncompleteResponse, ReflectorRequestError
from lbrynet.extras.reflector.common import REFLECTOR_V1, REFLECTOR_V2
log = logging.getLogger(__name__)


@@ -1,8 +1,8 @@
import random
from twisted.internet import reactor, defer
from lbrynet import conf
from lbrynet.reflector import ClientFactory, BlobClientFactory
from lbrynet.extras.daemon import conf
from . import ClientFactory, BlobClientFactory
def _is_ip(host):


@@ -3,12 +3,12 @@ import json
from twisted.python import failure
from twisted.internet import error, defer
from twisted.internet.protocol import Protocol, ServerFactory
from lbrynet.core.utils import is_valid_blobhash
from lbrynet.core.Error import DownloadCanceledError, InvalidBlobHashError
from lbrynet.core.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.core.StreamDescriptor import save_sd_info
from lbrynet.reflector.common import REFLECTOR_V1, REFLECTOR_V2
from lbrynet.reflector.common import ReflectorRequestError, ReflectorClientVersionError
from lbrynet.p2p.utils import is_valid_blobhash
from lbrynet.p2p.Error import DownloadCanceledError, InvalidBlobHashError
from lbrynet.p2p.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.p2p.StreamDescriptor import save_sd_info
from lbrynet.extras.reflector.common import REFLECTOR_V1, REFLECTOR_V2
from lbrynet.extras.reflector.common import ReflectorRequestError, ReflectorClientVersionError
log = logging.getLogger(__name__)


@@ -4,6 +4,8 @@ __node_bin__ = ''
__node_url__ = (
'https://github.com/lbryio/lbrycrd/releases/download/v0.12.2.1/lbrycrd-linux.zip'
)
__electrumx__ = 'lbryumx.coin.LBCRegTest'
__spvserver__ = 'lbrynet.extras.wallet.server.coin.LBCRegTest'
from .ledger import MainNetLedger, RegTestLedger
from .manager import LbryWalletManager
from .network import Network


@@ -1,11 +1,11 @@
import json
import logging
from torba.baseaccount import BaseAccount
from torba.basetransaction import TXORef
from torba.client.baseaccount import BaseAccount
from torba.client.basetransaction import TXORef
from lbryschema.claim import ClaimDict
from lbryschema.signer import SECP256k1, get_signer
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.signer import SECP256k1, get_signer
log = logging.getLogger(__name__)


@@ -1,7 +1,7 @@
import six
import struct
import binascii
from torba.hash import double_sha256
from torba.client.hash import double_sha256
class InvalidProofError(Exception):


@@ -1,4 +1,4 @@
from torba.basedatabase import BaseDatabase
from torba.client.basedatabase import BaseDatabase
class WalletDatabase(BaseDatabase):
@@ -64,7 +64,7 @@ class WalletDatabase(BaseDatabase):
if channel_ids:
channels = {
txo.claim_id: txo for txo in
(await super().get_utxos(
(await self.get_claims(
my_account=my_account,
claim_id__in=channel_ids
))


@@ -1,6 +1,6 @@
import re
import textwrap
from torba.constants import COIN
from torba.client.constants import COIN
def lbc_to_dewies(lbc):


@@ -2,9 +2,9 @@ import struct
from typing import Optional
from binascii import hexlify, unhexlify
from torba.baseheader import BaseHeaders
from torba.util import ArithUint256
from torba.hash import sha512, double_sha256, ripemd160
from torba.client.baseheader import BaseHeaders
from torba.client.util import ArithUint256
from torba.client.hash import sha512, double_sha256, ripemd160
class Headers(BaseHeaders):


@@ -2,9 +2,9 @@ import asyncio
import logging
from binascii import unhexlify
from lbryschema.error import URIParseError
from lbryschema.uri import parse_lbry_uri
from torba.baseledger import BaseLedger
from lbrynet.schema.error import URIParseError
from lbrynet.schema.uri import parse_lbry_uri
from torba.client.baseledger import BaseLedger
from .resolve import Resolver
from .account import Account


@@ -9,10 +9,10 @@ from typing import Optional
from twisted.internet import defer
from lbryschema.schema import SECP256k1
from torba.basemanager import BaseWalletManager
from lbrynet.schema.schema import SECP256k1
from torba.client.basemanager import BaseWalletManager
from lbryschema.claim import ClaimDict
from lbrynet.schema.claim import ClaimDict
from .ledger import MainNetLedger
from .account import BaseAccount, generate_certificate


@@ -1,4 +1,4 @@
from torba.basenetwork import BaseNetwork
from torba.client.basenetwork import BaseNetwork
class Network(BaseNetwork):


@@ -3,12 +3,12 @@ import logging
from ecdsa import BadSignatureError
from binascii import unhexlify, hexlify
from lbrynet.core.Error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
from lbryschema.address import is_address
from lbryschema.claim import ClaimDict
from lbryschema.decode import smart_decode
from lbryschema.error import DecodeError
from lbryschema.uri import parse_lbry_uri
from lbrynet.p2p.Error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
from lbrynet.schema.address import is_address
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.decode import smart_decode
from lbrynet.schema.error import DecodeError
from lbrynet.schema.uri import parse_lbry_uri
from .claim_proofs import verify_proof, InvalidProofError
log = logging.getLogger(__name__)
@@ -235,7 +235,7 @@ class Resolver:
# these results can include those where `signature_is_valid` is False. if they are skipped,
# page indexing becomes tricky, as the number of results isn't known until after having
# processed them.
# TODO: fix ^ in lbryschema
# TODO: fix ^ in lbrynet.schema
async def iter_validate_channel_claims():
formatted_claims = []


@@ -1,5 +1,5 @@
from torba.basescript import BaseInputScript, BaseOutputScript, Template
from torba.basescript import PUSH_SINGLE, PUSH_INTEGER, OP_DROP, OP_2DROP, PUSH_SUBSCRIPT, OP_VERIFY
from torba.client.basescript import BaseInputScript, BaseOutputScript, Template
from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, OP_DROP, OP_2DROP, PUSH_SUBSCRIPT, OP_VERIFY
class InputScript(BaseInputScript):


@@ -0,0 +1,174 @@
import hashlib
import struct
import msgpack
from torba.server.hash import hash_to_hex_str
from torba.server.block_processor import BlockProcessor
from lbrynet.schema.proto.claim_pb2 import Claim
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.decode import smart_decode
from .model import NameClaim, ClaimInfo, ClaimUpdate, ClaimSupport
class LBRYBlockProcessor(BlockProcessor):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.env.coin.NET == "regtest":
self.prefetcher.polling_delay = 0.5
self.should_validate_signatures = self.env.boolean('VALIDATE_CLAIM_SIGNATURES', False)
self.logger.info("LbryumX Block Processor - Validating signatures: {}".format(self.should_validate_signatures))
def advance_blocks(self, blocks):
# save height, advance blocks as usual, then hook our claim tx processing
height = self.height + 1
super().advance_blocks(blocks)
pending_undo = []
for index, block in enumerate(blocks):
undo = self.advance_claim_txs(block.transactions, height + index)
pending_undo.append((height+index, undo,))
self.db.write_undo(pending_undo)
def advance_claim_txs(self, txs, height):
# TODO: generate claim undo info!
undo_info = []
add_undo = undo_info.append
update_inputs = set()
for tx, txid in txs:
update_inputs.clear()
if tx.has_claims:
for index, output in enumerate(tx.outputs):
claim = output.claim
if isinstance(claim, NameClaim):
add_undo(self.advance_claim_name_transaction(output, height, txid, index))
elif isinstance(claim, ClaimUpdate):
update_input = self.db.get_update_input(claim, tx.inputs)
if update_input:
update_inputs.add(update_input)
add_undo(self.advance_update_claim(output, height, txid, index))
else:
info = (hash_to_hex_str(txid), hash_to_hex_str(claim.claim_id),)
self.logger.error("REJECTED: {} updating {}".format(*info))
elif isinstance(claim, ClaimSupport):
self.advance_support(claim, txid, index, height, output.value)
for txin in tx.inputs:
if txin not in update_inputs:
abandoned_claim_id = self.db.abandon_spent(txin.prev_hash, txin.prev_idx)
if abandoned_claim_id:
add_undo((abandoned_claim_id, self.db.get_claim_info(abandoned_claim_id)))
return undo_info
def advance_update_claim(self, output, height, txid, nout):
claim_id = output.claim.claim_id
claim_info = self.claim_info_from_output(output, txid, nout, height)
old_claim_info = self.db.get_claim_info(claim_id)
self.db.put_claim_id_for_outpoint(old_claim_info.txid, old_claim_info.nout, None)
if old_claim_info.cert_id:
self.db.remove_claim_from_certificate_claims(old_claim_info.cert_id, claim_id)
if claim_info.cert_id:
self.db.put_claim_id_signed_by_cert_id(claim_info.cert_id, claim_id)
self.db.put_claim_info(claim_id, claim_info)
self.db.put_claim_id_for_outpoint(txid, nout, claim_id)
return claim_id, old_claim_info
def advance_claim_name_transaction(self, output, height, txid, nout):
claim_id = claim_id_hash(txid, nout)
claim_info = self.claim_info_from_output(output, txid, nout, height)
if claim_info.cert_id:
self.db.put_claim_id_signed_by_cert_id(claim_info.cert_id, claim_id)
self.db.put_claim_info(claim_id, claim_info)
self.db.put_claim_for_name(claim_info.name, claim_id)
self.db.put_claim_id_for_outpoint(txid, nout, claim_id)
return claim_id, None
def backup_from_undo_info(self, claim_id, undo_claim_info):
"""
Undo information holds a claim state **before** a transaction changes it
There are 4 possibilities when processing it, of which only 3 are valid ones:
1. the claim is known and the undo info has info, it was an update
2. the claim is known and the undo info doesn't hold any info, it was claimed
3. the claim is unknown and the undo info has info, it was abandoned
4. the claim is unknown and the undo info doesn't hold info, error!
"""
undo_claim_info = ClaimInfo(*undo_claim_info) if undo_claim_info else None
current_claim_info = self.db.get_claim_info(claim_id)
if current_claim_info and undo_claim_info:
# update, remove current claim
self.db.remove_claim_id_for_outpoint(current_claim_info.txid, current_claim_info.nout)
if current_claim_info.cert_id:
self.db.remove_claim_from_certificate_claims(current_claim_info.cert_id, claim_id)
elif current_claim_info and not undo_claim_info:
# claim, abandon it
self.db.abandon_spent(current_claim_info.txid, current_claim_info.nout)
elif not current_claim_info and undo_claim_info:
# abandon, reclaim it (happens below)
pass
else:
# should never happen, unless the database got into an inconsistent state
raise Exception("Unexpected situation occurred on backup, this means the database is inconsistent. "
"Please report. Resetting the data folder (reindex) solves it for now.")
if undo_claim_info:
self.db.put_claim_info(claim_id, undo_claim_info)
if undo_claim_info.cert_id:
cert_id = self._checksig(undo_claim_info.name, undo_claim_info.value, undo_claim_info.address)
self.db.put_claim_id_signed_by_cert_id(cert_id, claim_id)
self.db.put_claim_for_name(undo_claim_info.name, claim_id)
self.db.put_claim_id_for_outpoint(undo_claim_info.txid, undo_claim_info.nout, claim_id)
def backup_txs(self, txs):
self.logger.info("Reorg at height {} with {} transactions.".format(self.height, len(txs)))
undo_info = msgpack.loads(self.db.claim_undo_db.get(struct.pack(">I", self.height)), use_list=False)
for claim_id, undo_claim_info in reversed(undo_info):
self.backup_from_undo_info(claim_id, undo_claim_info)
return super().backup_txs(txs)
def backup_blocks(self, raw_blocks):
self.db.batched_flush_claims()
super().backup_blocks(raw_blocks=raw_blocks)
self.db.batched_flush_claims()
def shutdown(self):
self.db.shutdown()
async def flush(self, flush_utxos):
self.db.batched_flush_claims()
return await super().flush(flush_utxos)
def advance_support(self, claim_support, txid, nout, height, amount):
# TODO: check for more controller claim rules, like takeover or ordering
pass
def claim_info_from_output(self, output, txid, nout, height):
amount = output.value
address = self.coin.address_from_script(output.pk_script)
name, value, cert_id = output.claim.name, output.claim.value, None
assert txid and address
cert_id = self._checksig(name, value, address)
return ClaimInfo(name, value, txid, nout, amount, address, height, cert_id)
def _checksig(self, name, value, address):
try:
parse_lbry_uri(name.decode()) # skip invalid names
cert_id = Claim.FromString(value).publisherSignature.certificateId[::-1] or None
if not self.should_validate_signatures:
return cert_id
if cert_id:
cert_claim = self.db.get_claim_info(cert_id)
if cert_claim:
certificate = smart_decode(cert_claim.value)
claim_dict = smart_decode(value)
claim_dict.validate_signature(address, certificate)
return cert_id
except Exception:
pass
def claim_id_hash(txid, n):
# TODO: This should be in lbryschema
packed = txid + struct.pack('>I', n)
md = hashlib.new('ripemd160')
md.update(hashlib.sha256(packed).digest())
return md.digest()
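
claim_id_hash above derives a claim id by hashing the txid plus the big-endian output index with SHA-256 and then RIPEMD-160. A quick usage sketch; reversing the bytes for display is an assumption inferred from the certificateId[::-1] handling earlier in this file:

    from binascii import hexlify, unhexlify

    txid = unhexlify('00' * 32)            # placeholder 32-byte txid
    raw_id = claim_id_hash(txid, 0)        # 20 raw bytes
    print(hexlify(raw_id[::-1]).decode())  # assumed display convention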


@@ -0,0 +1,139 @@
import struct
from torba.server.script import ScriptPubKey, _match_ops, OpCodes
from torba.server.util import cachedproperty
from torba.server.hash import hash_to_hex_str, HASHX_LEN
from hashlib import sha256
from torba.server.coins import Coin, CoinError
from .opcodes import decode_claim_script, opcodes as lbry_opcodes
class LBC(Coin):
from .session import LBRYElectrumX
from .block_processor import LBRYBlockProcessor
from .tx import LBRYDeserializer
from .daemon import LBCDaemon
from .db import LBRYDB
DAEMON = LBCDaemon
SESSIONCLS = LBRYElectrumX
BLOCK_PROCESSOR = LBRYBlockProcessor
DB = LBRYDB
DESERIALIZER = LBRYDeserializer
NAME = "LBRY"
SHORTNAME = "LBC"
NET = "mainnet"
BASIC_HEADER_SIZE = 112
CHUNK_SIZE = 96
XPUB_VERBYTES = bytes.fromhex("019C354f")
XPRV_VERBYTES = bytes.fromhex("019C3118")
P2PKH_VERBYTE = bytes.fromhex("55")
P2SH_VERBYTES = bytes.fromhex("7A")
WIF_BYTE = bytes.fromhex("1C")
GENESIS_HASH = ('9c89283ba0f3227f6c03b70216b9f665'
'f0118d5e0fa729cedf4fb34d6a34f463')
TX_COUNT = 2716936
TX_COUNT_HEIGHT = 329554
TX_PER_BLOCK = 1
RPC_PORT = 9245
REORG_LIMIT = 200
PEERS = [
]
@classmethod
def genesis_block(cls, block):
'''Check the Genesis block is the right one for this coin.
Return the block less its unspendable coinbase.
'''
header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH:
raise CoinError('genesis block has hash {} expected {}'
.format(header_hex_hash, cls.GENESIS_HASH))
return block
@classmethod
def electrum_header(cls, header, height):
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
return {
'version': version,
'prev_block_hash': hash_to_hex_str(header[4:36]),
'merkle_root': hash_to_hex_str(header[36:68]),
'claim_trie_root': hash_to_hex_str(header[68:100]),
'timestamp': timestamp,
'bits': bits,
'nonce': nonce,
'block_height': height,
}
@cachedproperty
def address_handlers(self):
return ScriptPubKey.PayToHandlers(
address=self.P2PKH_address_from_hash160,
script_hash=self.P2SH_address_from_hash160,
pubkey=self.P2PKH_address_from_pubkey,
unspendable=lambda: None,
strange=self.claim_address_handler,
)
@classmethod
def address_from_script(cls, script):
'''Given a pk_script, return the address it pays to, or None.'''
return ScriptPubKey.pay_to(cls.address_handlers, script)
@classmethod
def claim_address_handler(cls, script):
'''Parse a claim script, returns the address
'''
decoded = decode_claim_script(script)
if not decoded:
return None
ops = []
for op, data, _ in decoded[1]:
if not data:
ops.append(op)
else:
ops.append((op, data,))
match = _match_ops
TO_ADDRESS_OPS = [OpCodes.OP_DUP, OpCodes.OP_HASH160, -1,
OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]
TO_P2SH_OPS = [OpCodes.OP_HASH160, -1, OpCodes.OP_EQUAL]
TO_PUBKEY_OPS = [-1, OpCodes.OP_CHECKSIG]
if match(ops, TO_ADDRESS_OPS):
return cls.P2PKH_address_from_hash160(ops[2][-1])
if match(ops, TO_P2SH_OPS):
return cls.P2SH_address_from_hash160(ops[1][-1])
if match(ops, TO_PUBKEY_OPS):
return cls.P2PKH_address_from_pubkey(ops[0][-1])
if ops and ops[0] == OpCodes.OP_RETURN:
return None
return None
@classmethod
def hashX_from_script(cls, script):
'''
Overrides electrumx hashX from script by extracting addresses from claim scripts.
'''
if script and script[0] == OpCodes.OP_RETURN:
return None
if script[0] in [
lbry_opcodes.OP_CLAIM_NAME,
lbry_opcodes.OP_SUPPORT_CLAIM,
lbry_opcodes.OP_UPDATE_CLAIM
]:
return cls.address_to_hashX(cls.claim_address_handler(script))
else:
return sha256(script).digest()[:HASHX_LEN]
class LBCRegTest(LBC):
NET = "regtest"
GENESIS_HASH = '6e3fcf1299d4ec5d79c3a4c91d624a4acf9e2e173d95a1a0504f677669687556'
XPUB_VERBYTES = bytes.fromhex('043587cf')
XPRV_VERBYTES = bytes.fromhex('04358394')
P2PKH_VERBYTE = bytes.fromhex("6f")
P2SH_VERBYTES = bytes.fromhex("c4")
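
electrum_header above pins down the 112-byte LBRY header layout: version in bytes [0:4], previous block hash in [4:36], merkle root in [36:68], claim trie root in [68:100], and timestamp, bits, and nonce packed little-endian in [100:112]. A small self-check with an arbitrary dummy header:

    import struct

    header = (struct.pack('<I', 1) + b'\x11' * 32 + b'\x22' * 32 + b'\x33' * 32
              + struct.pack('<III', 1541600000, 0x1d00ffff, 42))
    assert len(header) == 112  # BASIC_HEADER_SIZE
    version, = struct.unpack('<I', header[:4])
    timestamp, bits, nonce = struct.unpack('<III', header[100:112])
    assert (version, timestamp, nonce) == (1, 1541600000, 42)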


@@ -0,0 +1,60 @@
from aiorpcx import RPCError
from functools import wraps
from torba.server.daemon import Daemon, DaemonError
def handles_errors(decorated_function):
@wraps(decorated_function)
async def wrapper(*args, **kwargs):
try:
return await decorated_function(*args, **kwargs)
except DaemonError as daemon_error:
raise RPCError(1, daemon_error.args[0])
return wrapper
class LBCDaemon(Daemon):
@handles_errors
async def getrawtransaction(self, hex_hash, verbose=False):
return await super().getrawtransaction(hex_hash=hex_hash, verbose=verbose)
@handles_errors
async def getclaimbyid(self, claim_id):
'''Given a claim id, retrieves claim information.'''
return await self._send_single('getclaimbyid', (claim_id,))
@handles_errors
async def getclaimsbyids(self, claim_ids):
'''Given a list of claim ids, batches calls to retrieve claim information.'''
return await self._send_vector('getclaimbyid', ((claim_id,) for claim_id in claim_ids))
@handles_errors
async def getclaimsforname(self, name):
'''Given a name, retrieves all claims matching that name.'''
return await self._send_single('getclaimsforname', (name,))
@handles_errors
async def getclaimsfortx(self, txid):
'''Given a txid, returns the claims it makes.'''
return await self._send_single('getclaimsfortx', (txid,))
@handles_errors
async def getnameproof(self, name, block_hash=None):
'''Given a name and optional block_hash, returns a name proof and winner, if any.'''
return await self._send_single('getnameproof', (name, block_hash,) if block_hash else (name,))
@handles_errors
async def getvalueforname(self, name):
'''Given a name, returns the winning claim value.'''
return await self._send_single('getvalueforname', (name,))
@handles_errors
async def claimname(self, name, hexvalue, amount):
'''Claim a name, used for functional tests only.'''
return await self._send_single('claimname', (name, hexvalue, float(amount)))
@handles_errors
async def generate(self, number_of_blocks):
'''Generates regtest blocks, used for functional tests only.'''
return await self._send_single('generate', (int(number_of_blocks),))
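
Every RPC on LBCDaemon above is wrapped by handles_errors, so a lbrycrd DaemonError reaches callers as an aiorpcx RPCError with code 1. A hypothetical call site, assuming an already-constructed daemon instance:

    from aiorpcx import RPCError

    async def winning_value(daemon, name):
        try:
            return await daemon.getvalueforname(name)
        except RPCError as e:
            # the lbrycrd error message arrives here via handles_errors
            print('daemon rejected the call:', e)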


@@ -0,0 +1,212 @@
import msgpack
import struct
import time
from torba.server.hash import hash_to_hex_str
from torba.server.db import DB
from .model import ClaimInfo
class LBRYDB(DB):
def __init__(self, *args, **kwargs):
self.claim_cache = {}
self.claims_for_name_cache = {}
self.claims_signed_by_cert_cache = {}
self.outpoint_to_claim_id_cache = {}
self.claims_db = self.names_db = self.signatures_db = self.outpoint_to_claim_id_db = self.claim_undo_db = None
# stores deletes not yet flushed to disk
self.pending_abandons = {}
super().__init__(*args, **kwargs)
def shutdown(self):
self.batched_flush_claims()
self.claims_db.close()
self.names_db.close()
self.signatures_db.close()
self.outpoint_to_claim_id_db.close()
self.claim_undo_db.close()
# electrumx ones
self.utxo_db.close()
self.history.close_db()
async def _open_dbs(self, for_sync, compacting):
await super()._open_dbs(for_sync=for_sync, compacting=compacting)
def log_reason(message, is_for_sync):
reason = 'sync' if is_for_sync else 'serving'
self.logger.info('{} for {}'.format(message, reason))
if self.claims_db:
if self.claims_db.for_sync == for_sync:
return
log_reason('closing claim DBs to re-open', for_sync)
self.claims_db.close()
self.names_db.close()
self.signatures_db.close()
self.outpoint_to_claim_id_db.close()
self.claim_undo_db.close()
self.claims_db = self.db_class('claims', for_sync)
self.names_db = self.db_class('names', for_sync)
self.signatures_db = self.db_class('signatures', for_sync)
self.outpoint_to_claim_id_db = self.db_class('outpoint_claim_id', for_sync)
self.claim_undo_db = self.db_class('claim_undo', for_sync)
log_reason('opened claim DBs', self.claims_db.for_sync)
def flush_dbs(self, flush_data, flush_utxos, estimate_txs_remaining):
# flush claims together with utxos as they are parsed together
self.batched_flush_claims()
return super().flush_dbs(flush_data, flush_utxos, estimate_txs_remaining)
def batched_flush_claims(self):
with self.claims_db.write_batch() as claims_batch:
with self.names_db.write_batch() as names_batch:
with self.signatures_db.write_batch() as signed_claims_batch:
with self.outpoint_to_claim_id_db.write_batch() as outpoint_batch:
self.flush_claims(claims_batch, names_batch, signed_claims_batch,
outpoint_batch)
def flush_claims(self, batch, names_batch, signed_claims_batch, outpoint_batch):
flush_start = time.time()
write_claim, write_name, write_cert = batch.put, names_batch.put, signed_claims_batch.put
write_outpoint = outpoint_batch.put
delete_claim, delete_outpoint, delete_name = batch.delete, outpoint_batch.delete, names_batch.delete
delete_cert = signed_claims_batch.delete
for claim_id, outpoints in self.pending_abandons.items():
claim = self.get_claim_info(claim_id)
self.remove_claim_for_name(claim.name, claim_id)
if claim.cert_id:
self.remove_claim_from_certificate_claims(claim.cert_id, claim_id)
self.remove_certificate(claim_id)
self.claim_cache[claim_id] = None
for txid, tx_index in outpoints:
self.put_claim_id_for_outpoint(txid, tx_index, None)
for key, claim in self.claim_cache.items():
if claim:
write_claim(key, claim)
else:
delete_claim(key)
for name, claims in self.claims_for_name_cache.items():
if not claims:
delete_name(name)
else:
write_name(name, msgpack.dumps(claims))
for cert_id, claims in self.claims_signed_by_cert_cache.items():
if not claims:
delete_cert(cert_id)
else:
write_cert(cert_id, msgpack.dumps(claims))
for key, claim_id in self.outpoint_to_claim_id_cache.items():
if claim_id:
write_outpoint(key, claim_id)
else:
delete_outpoint(key)
self.logger.info('flushed at height {:,d} with {:,d} claims, {:,d} outpoints, {:,d} names '
'and {:,d} certificates added while {:,d} were abandoned in {:.1f}s, committing...'
.format(self.db_height,
len(self.claim_cache), len(self.outpoint_to_claim_id_cache),
len(self.claims_for_name_cache),
len(self.claims_signed_by_cert_cache), len(self.pending_abandons),
time.time() - flush_start))
self.claim_cache = {}
self.claims_for_name_cache = {}
self.claims_signed_by_cert_cache = {}
self.outpoint_to_claim_id_cache = {}
self.pending_abandons = {}
def assert_flushed(self, flush_data):
super().assert_flushed(flush_data)
assert not self.claim_cache
assert not self.claims_for_name_cache
assert not self.claims_signed_by_cert_cache
assert not self.outpoint_to_claim_id_cache
assert not self.pending_abandons
def abandon_spent(self, tx_hash, tx_idx):
claim_id = self.get_claim_id_from_outpoint(tx_hash, tx_idx)
if claim_id:
self.logger.info("[!] Abandon: {}".format(hash_to_hex_str(claim_id)))
self.pending_abandons.setdefault(claim_id, []).append((tx_hash, tx_idx,))
return claim_id
def put_claim_id_for_outpoint(self, tx_hash, tx_idx, claim_id):
self.logger.info("[+] Adding outpoint: {}:{} for {}.".format(hash_to_hex_str(tx_hash), tx_idx,
hash_to_hex_str(claim_id) if claim_id else None))
self.outpoint_to_claim_id_cache[tx_hash + struct.pack('>I', tx_idx)] = claim_id
def remove_claim_id_for_outpoint(self, tx_hash, tx_idx):
self.logger.info("[-] Remove outpoint: {}:{}.".format(hash_to_hex_str(tx_hash), tx_idx))
self.outpoint_to_claim_id_cache[tx_hash + struct.pack('>I', tx_idx)] = None
def get_claim_id_from_outpoint(self, tx_hash, tx_idx):
key = tx_hash + struct.pack('>I', tx_idx)
return self.outpoint_to_claim_id_cache.get(key) or self.outpoint_to_claim_id_db.get(key)
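# Hypothetical illustration (not part of the diff): an outpoint key is the raw
# tx hash followed by the output index packed as a 4-byte big-endian integer,
# so every key is exactly 36 bytes:
#
#     key = b'\x00' * 32 + struct.pack('>I', 5)
#     assert len(key) == 36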
def get_claims_for_name(self, name):
if name in self.claims_for_name_cache:
return self.claims_for_name_cache[name]
db_claims = self.names_db.get(name)
return msgpack.loads(db_claims) if db_claims else {}
def put_claim_for_name(self, name, claim_id):
self.logger.info("[+] Adding claim {} for name {}.".format(hash_to_hex_str(claim_id), name))
claims = self.get_claims_for_name(name)
claims.setdefault(claim_id, max(claims.values() or [0]) + 1)
self.claims_for_name_cache[name] = claims
def remove_claim_for_name(self, name, claim_id):
self.logger.info("[-] Removing claim from name: {} - {}".format(hash_to_hex_str(claim_id), name))
claims = self.get_claims_for_name(name)
claim_n = claims.pop(claim_id)
for _claim_id, number in claims.items():
if number > claim_n:
claims[_claim_id] = number - 1
self.claims_for_name_cache[name] = claims
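# Worked example (assumed data, not from the diff): with sequence numbers
# {A: 1, B: 2, C: 3}, removing B pops its sequence (2) and shifts every higher
# sequence down by one, leaving {A: 1, C: 2}.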
def get_signed_claim_ids_by_cert_id(self, cert_id):
if cert_id in self.claims_signed_by_cert_cache:
return self.claims_signed_by_cert_cache[cert_id]
db_claims = self.signatures_db.get(cert_id)
return msgpack.loads(db_claims, use_list=True) if db_claims else []
def put_claim_id_signed_by_cert_id(self, cert_id, claim_id):
self.logger.info("[+] Adding signature: {} - {}".format(hash_to_hex_str(claim_id), hash_to_hex_str(cert_id)))
certs = self.get_signed_claim_ids_by_cert_id(cert_id)
certs.append(claim_id)
self.claims_signed_by_cert_cache[cert_id] = certs
def remove_certificate(self, cert_id):
self.logger.info("[-] Removing certificate: {}".format(hash_to_hex_str(cert_id)))
self.claims_signed_by_cert_cache[cert_id] = []
def remove_claim_from_certificate_claims(self, cert_id, claim_id):
self.logger.info("[-] Removing signature: {} - {}".format(hash_to_hex_str(claim_id), hash_to_hex_str(cert_id)))
certs = self.get_signed_claim_ids_by_cert_id(cert_id)
if claim_id in certs:
certs.remove(claim_id)
self.claims_signed_by_cert_cache[cert_id] = certs
def get_claim_info(self, claim_id):
serialized = self.claim_cache.get(claim_id) or self.claims_db.get(claim_id)
return ClaimInfo.from_serialized(serialized) if serialized else None
def put_claim_info(self, claim_id, claim_info):
self.logger.info("[+] Adding claim info for: {}".format(hash_to_hex_str(claim_id)))
self.claim_cache[claim_id] = claim_info.serialized
def get_update_input(self, claim, inputs):
claim_id = claim.claim_id
claim_info = self.get_claim_info(claim_id)
if not claim_info:
return False
for input in inputs:
if input.prev_hash == claim_info.txid and input.prev_idx == claim_info.nout:
return input
return False
def write_undo(self, pending_undo):
with self.claim_undo_db.write_batch() as writer:
for height, undo_info in pending_undo:
writer.put(struct.pack(">I", height), msgpack.dumps(undo_info))

View file

@ -0,0 +1,47 @@
from collections import namedtuple
import msgpack
from torba.server.util import cachedproperty
# Classes representing data and their serializers, if any.
class ClaimInfo(namedtuple("ClaimInfo", "name value txid nout amount address height cert_id")):
'''Claim information as it is stored in the database'''
@classmethod
def from_serialized(cls, serialized):
return cls(*msgpack.loads(serialized))
@property
def serialized(self):
return msgpack.dumps(self)
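# Hypothetical round-trip sketch (not part of the diff): msgpack packs the
# namedtuple as a plain array and from_serialized() rebuilds it positionally:
#
#     info = ClaimInfo(b'name', b'value', b'txid', 0, 1, b'addr', 100, None)
#     assert ClaimInfo.from_serialized(info.serialized) == info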
class NameClaim(namedtuple("NameClaim", "name value")):
pass
class ClaimUpdate(namedtuple("ClaimUpdate", "name claim_id value")):
pass
class ClaimSupport(namedtuple("ClaimSupport", "name claim_id")):
pass
class LBRYTx(namedtuple("Tx", "version inputs outputs locktime")):
'''Transaction that can contain a claim, update, or support in its outputs.'''
@cachedproperty
def is_coinbase(self):
return self.inputs[0].is_coinbase
@cachedproperty
def has_claims(self):
for output in self.outputs:
if output.claim:
return True
return False
class TxClaimOutput(namedtuple("TxClaimOutput", "value pk_script claim")):
pass

View file

@ -0,0 +1,126 @@
import struct
from torba.server.enum import Enumeration
from .model import NameClaim, ClaimSupport, ClaimUpdate
# TODO: Take this to lbryschema (it's also on lbryum and lbryum-server)
opcodes = Enumeration("Opcodes", [
("OP_0", 0), ("OP_PUSHDATA1", 76), "OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE", "OP_RESERVED",
"OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7",
"OP_8", "OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
"OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF", "OP_ELSE", "OP_ENDIF",
"OP_VERIFY",
"OP_RETURN", "OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP", "OP_2OVER",
"OP_2ROT", "OP_2SWAP",
"OP_IFDUP", "OP_DEPTH", "OP_DROP", "OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL",
"OP_ROT",
"OP_SWAP", "OP_TUCK", "OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE", "OP_INVERT",
"OP_AND",
"OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY", "OP_RESERVED1", "OP_RESERVED2", "OP_1ADD",
"OP_1SUB", "OP_2MUL",
"OP_2DIV", "OP_NEGATE", "OP_ABS", "OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL",
"OP_DIV",
"OP_MOD", "OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR",
"OP_NUMEQUAL", "OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN",
"OP_GREATERTHAN", "OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
"OP_WITHIN", "OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160",
"OP_HASH256", "OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
"OP_CHECKMULTISIGVERIFY", "OP_NOP1", "OP_NOP2", "OP_NOP3", "OP_NOP4", "OP_NOP5",
"OP_CLAIM_NAME",
"OP_SUPPORT_CLAIM", "OP_UPDATE_CLAIM",
("OP_SINGLEBYTE_END", 0xF0),
("OP_DOUBLEBYTE_BEGIN", 0xF000),
"OP_PUBKEY", "OP_PUBKEYHASH",
("OP_INVALIDOPCODE", 0xFFFF),
])
def script_GetOp(script_bytes):
i = 0
while i < len(script_bytes):
vch = None
opcode = script_bytes[i]
i += 1
if opcode <= opcodes.OP_PUSHDATA4:
nSize = opcode
if opcode == opcodes.OP_PUSHDATA1:
nSize = script_bytes[i]
i += 1
elif opcode == opcodes.OP_PUSHDATA2:
(nSize,) = struct.unpack_from('<H', script_bytes, i)
i += 2
elif opcode == opcodes.OP_PUSHDATA4:
(nSize,) = struct.unpack_from('<I', script_bytes, i)
i += 4
if i + nSize > len(script_bytes):
vch = b"_INVALID_" + script_bytes[i:]
i = len(script_bytes)
else:
vch = script_bytes[i:i + nSize]
i += nSize
yield (opcode, vch, i)
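# Example (assumed input, not from the diff): a script that pushes the four
# bytes b'test' with a direct-length opcode yields one (opcode, data, offset)
# tuple:
#
#     assert list(script_GetOp(b'\x04test')) == [(4, b'test', 5)]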
def decode_claim_script(bytes_script):
try:
decoded_script = list(script_GetOp(bytes_script))
except Exception as e:
print(e)
return None
if len(decoded_script) <= 6:
return False
op = 0
claim_type = decoded_script[op][0]
if claim_type == opcodes.OP_UPDATE_CLAIM:
if len(decoded_script) <= 7:
return False
if claim_type not in [
opcodes.OP_CLAIM_NAME,
opcodes.OP_SUPPORT_CLAIM,
opcodes.OP_UPDATE_CLAIM
]:
return False
op += 1
value = None
claim_id = None
claim = None
if not 0 <= decoded_script[op][0] <= opcodes.OP_PUSHDATA4:
return False
name = decoded_script[op][1]
op += 1
if not 0 <= decoded_script[op][0] <= opcodes.OP_PUSHDATA4:
return False
if decoded_script[0][0] in [
opcodes.OP_SUPPORT_CLAIM,
opcodes.OP_UPDATE_CLAIM
]:
claim_id = decoded_script[op][1]
if len(claim_id) != 20:
return False
else:
value = decoded_script[op][1]
op += 1
if decoded_script[0][0] == opcodes.OP_UPDATE_CLAIM:
value = decoded_script[op][1]
op += 1
if decoded_script[op][0] != opcodes.OP_2DROP:
return False
op += 1
if decoded_script[op][0] != opcodes.OP_DROP and decoded_script[0][0] == opcodes.OP_CLAIM_NAME:
return False
elif decoded_script[op][0] != opcodes.OP_2DROP and decoded_script[0][0] == opcodes.OP_UPDATE_CLAIM:
return False
op += 1
if decoded_script[0][0] == opcodes.OP_CLAIM_NAME:
if name is None or value is None:
return False
claim = NameClaim(name, value)
elif decoded_script[0][0] == opcodes.OP_UPDATE_CLAIM:
if name is None or value is None or claim_id is None:
return False
claim = ClaimUpdate(name, claim_id, value)
elif decoded_script[0][0] == opcodes.OP_SUPPORT_CLAIM:
if name is None or claim_id is None:
return False
claim = ClaimSupport(name, claim_id)
return claim, decoded_script[op:]
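# Illustrative layout (an assumption based on this decoder, not part of the
# diff): a minimal name-claim script is
#
#     OP_CLAIM_NAME <name> <value> OP_2DROP OP_DROP <standard pk_script>
#
# so b'\xb5' + b'\x04test' + b'\x05hello' + b'\x6d\x75' followed by a P2PKH
# script should decode to NameClaim(name=b'test', value=b'hello') plus the
# remaining pk_script operations.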

View file

@ -0,0 +1,395 @@
import math
from binascii import unhexlify, hexlify
from aiorpcx import RPCError
from torba.server.hash import hash_to_hex_str
from torba.server.session import ElectrumX
from torba.server import util
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.error import URIParseError
from .block_processor import LBRYBlockProcessor
from .db import LBRYDB
class LBRYElectrumX(ElectrumX):
PROTOCOL_MIN = (0, 0) # temporary, for supporting 0.10 protocol
max_errors = math.inf # don't disconnect people for errors! let them happen...
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# fixme: this is a rebase hack, we need to go through ChainState instead later
self.daemon = self.session_mgr.daemon
self.bp: LBRYBlockProcessor = self.session_mgr.bp
self.db: LBRYDB = self.bp.db
# fixme: lbryum specific subscribe
self.subscribe_height = False
def set_request_handlers(self, ptuple):
super().set_request_handlers(ptuple)
handlers = {
'blockchain.transaction.get_height': self.transaction_get_height,
'blockchain.claimtrie.getclaimbyid': self.claimtrie_getclaimbyid,
'blockchain.claimtrie.getclaimsforname': self.claimtrie_getclaimsforname,
'blockchain.claimtrie.getclaimsbyids': self.claimtrie_getclaimsbyids,
'blockchain.claimtrie.getvalue': self.claimtrie_getvalue,
'blockchain.claimtrie.getnthclaimforname': self.claimtrie_getnthclaimforname,
'blockchain.claimtrie.getclaimsintx': self.claimtrie_getclaimsintx,
'blockchain.claimtrie.getclaimssignedby': self.claimtrie_getclaimssignedby,
'blockchain.claimtrie.getclaimssignedbynthtoname': self.claimtrie_getclaimssignedbynthtoname,
'blockchain.claimtrie.getvalueforuri': self.claimtrie_getvalueforuri,
'blockchain.claimtrie.getvaluesforuris': self.claimtrie_getvalueforuris,
'blockchain.claimtrie.getclaimssignedbyid': self.claimtrie_getclaimssignedbyid,
'blockchain.block.get_server_height': self.get_server_height,
'blockchain.block.get_block': self.get_block,
}
# fixme: methods we use but shouldn't be using anymore. To be removed when torba goes out
handlers.update({
'blockchain.numblocks.subscribe': self.numblocks_subscribe,
'blockchain.utxo.get_address': self.utxo_get_address,
'blockchain.transaction.broadcast':
self.transaction_broadcast_1_0,
'blockchain.transaction.get': self.transaction_get,
})
self.request_handlers.update(handlers)
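# Illustrative request shape (an assumption, standard Electrum JSON-RPC
# framing; not part of the diff): a client invokes one of the handlers above
# with a newline-terminated JSON object such as
#
#     {"id": 1, "method": "blockchain.claimtrie.getclaimbyid",
#      "params": ["<40-character hex claim id>"]}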
async def utxo_get_address(self, tx_hash, index):
# fixme: lbryum
# Used only for electrum client command-line requests. We no
# longer index by address, so we need to request the raw
# transaction; as a result this works for any TXO, not just UTXOs.
self.assert_tx_hash(tx_hash)
try:
index = int(index)
if index < 0:
raise ValueError
except ValueError:
raise RPCError(1, "index has to be >= 0 and integer")
raw_tx = await self.daemon_request('getrawtransaction', tx_hash)
if not raw_tx:
return None
raw_tx = util.hex_to_bytes(raw_tx)
tx = self.coin.DESERIALIZER(raw_tx).read_tx()
if index >= len(tx.outputs):
return None
return self.coin.address_from_script(tx.outputs[index].pk_script)
async def transaction_broadcast_1_0(self, raw_tx):
# fixme: lbryum
# An ugly API: current Electrum clients only pass the raw
# transaction in hex and expect error messages to be returned in
# the result field. And the server shouldn't be doing the client's
# user interface job here.
try:
return await self.transaction_broadcast(raw_tx)
except RPCError as e:
return e.message
async def numblocks_subscribe(self):
# fixme workaround for lbryum
'''Subscribe to get height of new blocks.'''
self.subscribe_height = True
return self.bp.height
async def notify(self, height, touched):
# fixme workaround for lbryum
await super().notify(height, touched)
if self.subscribe_height and height != self.notified_height:
self.send_notification('blockchain.numblocks.subscribe', (height,))
async def transaction_get(self, tx_hash, verbose=False):
# fixme: workaround for lbryum sending the height instead of True/False.
# fixme: lbryum_server ignored that and always used False, but this is out of spec
if verbose not in (True, False):
verbose = False
return await self.daemon_request('getrawtransaction', tx_hash, verbose)
async def get_block(self, block_hash):
return await self.daemon.deserialised_block(block_hash)
async def get_server_height(self):
return self.bp.height
async def transaction_get_height(self, tx_hash):
self.assert_tx_hash(tx_hash)
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
if transaction_info and 'hex' in transaction_info and 'confirmations' in transaction_info:
# an unconfirmed transaction from lbrycrdd will not have a 'confirmations' field
return self.db.db_height - transaction_info['confirmations']
elif transaction_info and 'hex' in transaction_info:
return -1
return None
async def claimtrie_getclaimssignedby(self, name):
winning_claim = await self.daemon.getvalueforname(name)
if winning_claim:
return await self.claimtrie_getclaimssignedbyid(winning_claim['claimId'])
async def claimtrie_getclaimssignedbyid(self, certificate_id):
claim_ids = self.get_claim_ids_signed_by(certificate_id)
return await self.batched_formatted_claims_from_daemon(claim_ids)
def get_claim_ids_signed_by(self, certificate_id):
raw_certificate_id = unhexlify(certificate_id)[::-1]
raw_claim_ids = self.db.get_signed_claim_ids_by_cert_id(raw_certificate_id)
return list(map(hash_to_hex_str, raw_claim_ids))
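# Note (illustrative, not part of the diff): claim ids travel over the RPC as
# reversed hex, so unhexlify(...)[::-1] recovers the raw 20-byte id used as a
# database key, and hash_to_hex_str() performs the inverse conversion.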
def get_signed_claims_with_name_for_channel(self, channel_id, name):
claim_ids_for_name = list(self.db.get_claims_for_name(name.encode('ISO-8859-1')).keys())
claim_ids_for_name = set(map(hash_to_hex_str, claim_ids_for_name))
channel_claim_ids = set(self.get_claim_ids_signed_by(channel_id))
return claim_ids_for_name.intersection(channel_claim_ids)
async def claimtrie_getclaimssignedbynthtoname(self, name, n):
n = int(n)
for claim_id, sequence in self.db.get_claims_for_name(name.encode('ISO-8859-1')).items():
if n == sequence:
return await self.claimtrie_getclaimssignedbyid(hash_to_hex_str(claim_id))
async def claimtrie_getclaimsintx(self, txid):
# TODO: this needs further discussion.
# The lbryum-server implementation is wrong; we still need to pin down what this command is expected to return
claim_ids = [claim['claimId'] for claim in (await self.daemon.getclaimsfortx(txid)) if 'claimId' in claim]
return await self.batched_formatted_claims_from_daemon(claim_ids)
async def claimtrie_getvalue(self, name, block_hash=None):
proof = await self.daemon.getnameproof(name, block_hash)
result = {'proof': proof, 'supports': []}
if proof_has_winning_claim(proof):
tx_hash, nout = proof['txhash'], int(proof['nOut'])
transaction_info = await self.daemon.getrawtransaction(tx_hash, True)
result['transaction'] = transaction_info['hex']
result['height'] = (self.db.db_height - transaction_info['confirmations']) + 1
raw_claim_id = self.db.get_claim_id_from_outpoint(unhexlify(tx_hash)[::-1], nout)
sequence = self.db.get_claims_for_name(name.encode('ISO-8859-1')).get(raw_claim_id)
if sequence:
claim_id = hexlify(raw_claim_id[::-1]).decode()
claim_info = await self.daemon.getclaimbyid(claim_id)
if not claim_info or not claim_info.get('value'):
claim_info = await self.slow_get_claim_by_id_using_name(claim_id)
result['claim_sequence'] = sequence
result['claim_id'] = claim_id
supports = self.format_supports_from_daemon(claim_info.get('supports', [])) # fixme: lbrycrd#124
result['supports'] = supports
else:
self.logger.warning('tx has no claims in db: %s %s', tx_hash, nout)
return result
async def claimtrie_getnthclaimforname(self, name, n):
n = int(n)
for claim_id, sequence in self.db.get_claims_for_name(name.encode('ISO-8859-1')).items():
if n == sequence:
return await self.claimtrie_getclaimbyid(hash_to_hex_str(claim_id))
async def claimtrie_getclaimsforname(self, name):
claims = await self.daemon.getclaimsforname(name)
if claims:
claims['claims'] = [self.format_claim_from_daemon(claim, name) for claim in claims['claims']]
claims['supports_without_claims'] = claims['supports without claims']
del claims['supports without claims']
claims['last_takeover_height'] = claims['nLastTakeoverHeight']
del claims['nLastTakeoverHeight']
return claims
return {}
async def batched_formatted_claims_from_daemon(self, claim_ids):
claims = await self.daemon.getclaimsbyids(claim_ids)
result = []
for claim, claim_id in zip(claims, claim_ids):
if claim and claim.get('value'):
result.append(self.format_claim_from_daemon(claim))
else:
recovered_claim = await self.slow_get_claim_by_id_using_name(claim_id)
if recovered_claim:
result.append(self.format_claim_from_daemon(recovered_claim))
return result
def format_claim_from_daemon(self, claim, name=None):
'''Changes the returned claim data to the format expected by lbrynet and adds missing fields.'''
if not claim:
return {}
name = name or claim['name']
claim_id = claim['claimId']
raw_claim_id = unhexlify(claim_id)[::-1]
if not self.db.get_claim_info(raw_claim_id):
#raise RPCError("Lbrycrd has {} but not lbryumx, please submit a bug report.".format(claim_id))
return {}
address = self.db.get_claim_info(raw_claim_id).address.decode()
sequence = self.db.get_claims_for_name(name.encode('ISO-8859-1')).get(raw_claim_id)
if not sequence:
return {}
supports = self.format_supports_from_daemon(claim.get('supports', [])) # fixme: lbrycrd#124
amount = get_from_possible_keys(claim, 'amount', 'nAmount')
height = get_from_possible_keys(claim, 'height', 'nHeight')
effective_amount = get_from_possible_keys(claim, 'effective amount', 'nEffectiveAmount')
valid_at_height = get_from_possible_keys(claim, 'valid at height', 'nValidAtHeight')
return {
"name": name,
"claim_id": claim['claimId'],
"txid": claim['txid'],
"nout": claim['n'],
"amount": amount,
"depth": self.db.db_height - height,
"height": height,
"value": hexlify(claim['value'].encode('ISO-8859-1')).decode(),
"claim_sequence": sequence, # from index
"address": address, # from index
"supports": supports, # fixme: to be included in lbrycrd#124
"effective_amount": effective_amount,
"valid_at_height": valid_at_height # TODO PR lbrycrd to include it
}
def format_supports_from_daemon(self, supports):
return [[support['txid'], support['n'], get_from_possible_keys(support, 'amount', 'nAmount')] for
support in supports]
async def claimtrie_getclaimbyid(self, claim_id):
self.assert_claim_id(claim_id)
claim = await self.daemon.getclaimbyid(claim_id)
if not claim or not claim.get('value'):
claim = await self.slow_get_claim_by_id_using_name(claim_id)
return self.format_claim_from_daemon(claim)
async def claimtrie_getclaimsbyids(self, *claim_ids):
claims = await self.batched_formatted_claims_from_daemon(claim_ids)
return dict(zip(claim_ids, claims))
def assert_tx_hash(self, value):
'''Raise an RPCError if the value is not a valid transaction
hash.'''
try:
if len(util.hex_to_bytes(value)) == 32:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a transaction hash')
def assert_claim_id(self, value):
'''Raise an RPCError if the value is not a valid claim id
hash.'''
try:
if len(util.hex_to_bytes(value)) == 20:
return
except Exception:
pass
raise RPCError(1, f'{value} should be a claim id hash')
async def slow_get_claim_by_id_using_name(self, claim_id):
# TODO: temporary workaround for a lbrycrd bug on indexing. Should be removed when it gets stable
raw_claim_id = unhexlify(claim_id)[::-1]
claim = self.db.get_claim_info(raw_claim_id)
if claim:
name = claim.name.decode('ISO-8859-1')
claims = await self.daemon.getclaimsforname(name)
for claim in claims['claims']:
if claim['claimId'] == claim_id:
claim['name'] = name
self.logger.warning(
'Recovered a claim missing from lbrycrd index: %s %s', name, claim_id
)
return claim
async def claimtrie_getvalueforuri(self, block_hash, uri, known_certificates=None):
# TODO: this thing is huge, refactor
CLAIM_ID = "claim_id"
WINNING = "winning"
SEQUENCE = "sequence"
try:
parsed_uri = parse_lbry_uri(uri)
except URIParseError as err:
return {'error': err.message}
result = {}
if parsed_uri.is_channel:
certificate = None
# TODO: this is also done in the else branch, refactor
if parsed_uri.claim_id:
certificate_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if certificate_info and certificate_info['name'] == parsed_uri.name:
certificate = {'resolution_type': CLAIM_ID, 'result': certificate_info}
elif parsed_uri.claim_sequence:
certificate_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if certificate_info:
certificate = {'resolution_type': SEQUENCE, 'result': certificate_info}
else:
certificate_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if certificate_info:
certificate = {'resolution_type': WINNING, 'result': certificate_info}
if certificate and 'claim_id' not in certificate['result']:
return result
if certificate and not parsed_uri.path:
result['certificate'] = certificate
channel_id = certificate['result']['claim_id']
claims_in_channel = await self.claimtrie_getclaimssignedbyid(channel_id)
result['unverified_claims_in_channel'] = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims_in_channel if claim}
elif certificate:
result['certificate'] = certificate
channel_id = certificate['result']['claim_id']
claim_ids_matching_name = self.get_signed_claims_with_name_for_channel(channel_id, parsed_uri.path)
claims = await self.batched_formatted_claims_from_daemon(claim_ids_matching_name)
claims_in_channel = {claim['claim_id']: (claim['name'], claim['height'])
for claim in claims}
result['unverified_claims_for_name'] = claims_in_channel
else:
claim = None
if parsed_uri.claim_id:
claim_info = await self.claimtrie_getclaimbyid(parsed_uri.claim_id)
if claim_info and claim_info['name'] == parsed_uri.name:
claim = {'resolution_type': CLAIM_ID, 'result': claim_info}
elif parsed_uri.claim_sequence:
claim_info = await self.claimtrie_getnthclaimforname(parsed_uri.name, parsed_uri.claim_sequence)
if claim_info:
claim = {'resolution_type': SEQUENCE, 'result': claim_info}
else:
claim_info = await self.claimtrie_getvalue(parsed_uri.name, block_hash)
if claim_info:
claim = {'resolution_type': WINNING, 'result': claim_info}
if (claim and
# is not an unclaimed winning name
(claim['resolution_type'] != WINNING or proof_has_winning_claim(claim['result']['proof']))):
raw_claim_id = unhexlify(claim['result']['claim_id'])[::-1]
raw_certificate_id = self.db.get_claim_info(raw_claim_id).cert_id
if raw_certificate_id:
certificate_id = hash_to_hex_str(raw_certificate_id)
certificate = await self.claimtrie_getclaimbyid(certificate_id)
if certificate:
certificate = {'resolution_type': CLAIM_ID,
'result': certificate}
result['certificate'] = certificate
result['claim'] = claim
return result
async def claimtrie_getvalueforuris(self, block_hash, *uris):
MAX_BATCH_URIS = 500
if len(uris) > MAX_BATCH_URIS:
raise Exception("Exceeds max batch uris of {}".format(MAX_BATCH_URIS))
return {uri: await self.claimtrie_getvalueforuri(block_hash, uri) for uri in uris}
# TODO: get it all concurrently when lbrycrd pending changes goes into a stable release
#async def getvalue(uri):
# value = await self.claimtrie_getvalueforuri(block_hash, uri)
# return uri, value,
#return dict([await asyncio.gather(*tuple(getvalue(uri) for uri in uris))][0])
def proof_has_winning_claim(proof):
return {'txhash', 'nOut'}.issubset(proof.keys())
def get_from_possible_keys(dictionary, *keys):
for key in keys:
if key in dictionary:
return dictionary[key]
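# Worked examples (assumed values, not from the diff):
#
#     proof_has_winning_claim({'txhash': 'aa', 'nOut': 0})          # -> True
#     proof_has_winning_claim({})                                   # -> False
#     get_from_possible_keys({'nAmount': 5}, 'amount', 'nAmount')   # -> 5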

View file

@ -0,0 +1,21 @@
from torba.server.tx import Deserializer
from .opcodes import decode_claim_script
from .model import TxClaimOutput, LBRYTx
class LBRYDeserializer(Deserializer):
def _read_output(self):
value = self._read_le_int64()
script = self._read_varbytes() # pk_script
claim = decode_claim_script(script)
claim = claim[0] if claim else None
return TxClaimOutput(value, script, claim)
def read_tx(self):
return LBRYTx(
self._read_le_int32(), # version
self._read_inputs(), # inputs
self._read_outputs(), # outputs
self._read_le_uint32() # locktime
)
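# Usage sketch (assumes `raw_tx` holds a serialized LBRY transaction as bytes;
# not part of the diff):
#
#     tx = LBRYDeserializer(raw_tx).read_tx()
#     if tx.has_claims:
#         claims = [output.claim for output in tx.outputs if output.claim]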

View file

@ -2,12 +2,12 @@ import struct
from binascii import hexlify, unhexlify
from typing import List, Iterable, Optional
from lbryschema.decode import smart_decode
from lbrynet.schema.decode import smart_decode
from .account import Account
from torba.basetransaction import BaseTransaction, BaseInput, BaseOutput
from torba.hash import hash160
from torba.client.basetransaction import BaseTransaction, BaseInput, BaseOutput
from torba.client.hash import hash160
from lbryschema.claim import ClaimDict
from lbrynet.schema.claim import ClaimDict
from .script import InputScript, OutputScript

View file

@ -1,7 +0,0 @@
"""
Classes and functions used to create and download LBRY Files.
LBRY Files are Crypt Streams created from any regular file. The whole file is read
at the time that the LBRY File is created, so all constituent blobs are known and
included in the stream descriptor file.
"""

View file

@ -1,722 +0,0 @@
"""
Interfaces which are implemented by various classes within LBRYnet.
"""
from zope.interface import Interface
class IPeerFinder(Interface):
"""
Used to find peers by sha384 hashes which they claim to be associated with.
"""
def find_peers_for_blob(self, blob_hash):
"""
Look for peers claiming to be associated with a sha384 hashsum.
@param blob_hash: The sha384 hashsum to use to look up peers.
@type blob_hash: string, hex encoded
@return: a Deferred object which fires with a list of Peer objects
@rtype: Deferred which fires with [Peer]
"""
class IRequestSender(Interface):
"""
Used to connect to a peer, send requests to it, and return the responses to those requests.
"""
def add_request(self, request):
"""
Add a request to the next message that will be sent to the peer
@param request: a request to be sent to the peer in the next message
@type request: ClientRequest
@return: Deferred object which will callback with the response to this request, a dict
@rtype: Deferred which fires with dict
"""
def add_blob_request(self, blob_request):
"""Add a request for a blob to the next message that will be sent to the peer.
This will cause the protocol to call blob_request.write(data)
for all incoming data, after the response message has been
parsed out, until blob_request.finished_deferred fires.
@param blob_request: the request for the blob
@type blob_request: ClientBlobRequest
@return: Deferred object which will callback with the response to this request
@rtype: Deferred which fires with dict
"""
class IRequestCreator(Interface):
"""
Send requests, via an IRequestSender, to peers.
"""
def send_next_request(self, peer, protocol):
"""Create a Request object for the peer and then give the protocol that request.
@param peer: the Peer object which the request will be sent to.
@type peer: Peer
@param protocol: the protocol to pass the request to.
@type protocol: object which implements IRequestSender
@return: Deferred object which will callback with True or
False depending on whether a Request was sent
@rtype: Deferred which fires with boolean
"""
def get_new_peers(self):
"""
Get some new peers which the request creator wants to send requests to.
@return: Deferred object which will callback with [Peer]
@rtype: Deferred which fires with [Peer]
"""
class IMetadataHandler(Interface):
"""
Get metadata for the IDownloadManager.
"""
def get_initial_blobs(self):
"""Return metadata about blobs that are known to be associated with
the stream at the time that the stream is set up.
@return: Deferred object which will call back with a list of BlobInfo objects
@rtype: Deferred which fires with [BlobInfo]
"""
def final_blob_num(self):
"""
If the last blob in the stream is known, return its blob_num. Otherwise, return None.
@return: integer representing the final blob num in the stream, or None
@rtype: integer or None
"""
class IDownloadManager(Interface):
"""
Manage the downloading of an associated group of blobs, referred to as a stream.
These objects keep track of metadata about the stream, are responsible for starting and stopping
other components, and handle communication between other components.
"""
def start_downloading(self):
"""
Load the initial metadata about the stream and then start the other components.
@return: Deferred which fires when the other components have been started.
@rtype: Deferred which fires with boolean
"""
def resume_downloading(self):
"""
Start the other components after they have been stopped.
@return: Deferred which fires when the other components have been started.
@rtype: Deferred which fires with boolean
"""
def pause_downloading(self):
"""
Stop the other components.
@return: Deferred which fires when the other components have been stopped.
@rtype: Deferred which fires with boolean
"""
def add_blobs_to_download(self, blobs):
"""
Add blobs to the list of blobs that should be downloaded
@param blobs: list of BlobInfos that are associated with the stream being downloaded
@type blobs: [BlobInfo]
@return: DeferredList which fires with the result of adding each previously unknown BlobInfo
to the list of known BlobInfos.
@rtype: DeferredList which fires with [(boolean, Failure/None)]
"""
def stream_position(self):
"""
Returns the blob_num of the next blob needed in the stream.
If the stream already has all of the blobs it needs, then this will return the blob_num
of the last blob in the stream plus 1.
@return: the blob_num of the next blob needed, or the last blob_num + 1.
@rtype: integer
"""
def needed_blobs(self):
"""Returns a list of BlobInfos representing all of the blobs that the
stream still needs to download.
@return: the list of BlobInfos representing blobs that the stream still needs to download.
@rtype: [BlobInfo]
"""
def final_blob_num(self):
"""
If the last blob in the stream is known, return its blob_num. If not, return None.
@return: The blob_num of the last blob in the stream, or None if it is unknown.
@rtype: integer or None
"""
def handle_blob(self, blob_num):
"""This function is called when the next blob in the stream is ready
to be handled, whatever that may mean.
@param blob_num: The blob_num of the blob that is ready to be handled.
@type blob_num: integer
@return: A Deferred which fires when the blob has been 'handled'
@rtype: Deferred which can fire with anything
"""
class IConnectionManager(Interface):
"""
Connects to peers so that IRequestCreators can send their requests.
"""
def get_next_request(self, peer, protocol):
"""Ask all IRequestCreators belonging to this object to create a
Request for peer and give it to protocol
@param peer: the peer which the request will be sent to.
@type peer: Peer
@param protocol: the protocol which the request should be sent to by the IRequestCreator.
@type protocol: IRequestSender
@return: Deferred object which will callback with True or
False depending on whether the IRequestSender should send
the request or hang up
@rtype: Deferred which fires with boolean
"""
def protocol_disconnected(self, peer, protocol):
"""
Inform the IConnectionManager that the protocol has been disconnected
@param peer: The peer which the connection was to.
@type peer: Peer
@param protocol: The protocol which was disconnected.
@type protocol: Protocol
@return: None
"""
class IProgressManager(Interface):
"""Responsible for keeping track of the progress of the download.
Specifically, it is their responsibility to decide which blobs
need to be downloaded and keep track of the progress of the
download
"""
def stream_position(self):
"""
Returns the blob_num of the next blob needed in the stream.
If the stream already has all of the blobs it needs, then this will return the blob_num
of the last blob in the stream plus 1.
@return: the blob_num of the next blob needed, or the last blob_num + 1.
@rtype: integer
"""
def needed_blobs(self):
"""Returns a list of BlobInfos representing all of the blobs that the
stream still needs to download.
@return: the list of BlobInfos representing blobs that the stream still needs to download.
@rtype: [BlobInfo]
"""
def blob_downloaded(self, blob, blob_info):
"""
Mark that a blob has been downloaded and does not need to be downloaded again
@param blob: the blob that has been downloaded.
@type blob: Blob
@param blob_info: the metadata of the blob that has been downloaded.
@type blob_info: BlobInfo
@return: None
"""
class IBlobHandler(Interface):
"""
Responsible for doing whatever should be done with blobs that have been downloaded.
"""
def blob_downloaded(self, blob, blob_info):
"""
Do whatever the downloader is supposed to do when a blob has been downloaded
@param blob: The downloaded blob
@type blob: Blob
@param blob_info: The metadata of the downloaded blob
@type blob_info: BlobInfo
@return: A Deferred which fires when the blob has been handled.
@rtype: Deferred which can fire with anything
"""
class IRateLimited(Interface):
"""
Have the ability to be throttled (temporarily stopped).
"""
def throttle_upload(self):
"""
Stop uploading data until unthrottle_upload is called.
@return: None
"""
def throttle_download(self):
"""
Stop downloading data until unthrottle_download is called.
@return: None
"""
def unthrottle_upload(self):
"""
Resume uploading data at will until throttle_upload is called.
@return: None
"""
def unthrottle_download(self):
"""
Resume downloading data at will until throttle_download is called.
@return: None
"""
class IRateLimiter(Interface):
"""
Can keep track of download and upload rates and can throttle objects which implement the
IRateLimited interface.
"""
def report_dl_bytes(self, num_bytes):
"""
Inform the IRateLimiter that num_bytes have been downloaded.
@param num_bytes: the number of bytes that have been downloaded
@type num_bytes: integer
@return: None
"""
def report_ul_bytes(self, num_bytes):
"""
Inform the IRateLimiter that num_bytes have been uploaded.
@param num_bytes: the number of bytes that have been uploaded
@type num_bytes: integer
@return: None
"""
def register_protocol(self, protocol):
"""Register an IRateLimited object with the IRateLimiter so that the
IRateLimiter can throttle it
@param protocol: An object implementing the interface IRateLimited
@type protocol: Object implementing IRateLimited
@return: None
"""
def unregister_protocol(self, protocol):
"""Unregister an IRateLimited object so that it won't be throttled any more.
@param protocol: An object implementing the interface
IRateLimited, which was previously registered with this
IRateLimiter via "register_protocol"
@type protocol: Object implementing IRateLimited
@return: None
"""
class IRequestHandler(Interface):
"""
Pass client queries on to IQueryHandlers
"""
def register_query_handler(self, query_handler, query_identifiers):
"""Register a query handler, which will be passed any queries that
match any of the identifiers in query_identifiers
@param query_handler: the object which will handle queries
matching the given query_identifiers
@type query_handler: Object implementing IQueryHandler
@param query_identifiers: A list of strings representing the query identifiers
for queries that should be passed to this handler
@type query_identifiers: [string]
@return: None
"""
def register_blob_sender(self, blob_sender):
"""
Register a blob sender which will be called after the response has
finished to see if it wants to send a blob
@param blob_sender: the object which will upload the blob to the client.
@type blob_sender: IBlobSender
@return: None
"""
class IBlobSender(Interface):
"""
Upload blobs to clients.
"""
def send_blob_if_requested(self, consumer):
"""
If a blob has been requested, write it to 'write' func of the consumer and then
callback the returned deferred when it has all been written
@param consumer: the object implementing IConsumer which the file will be written to
@type consumer: object which implements IConsumer
@return: Deferred which will fire when the blob sender is done, which will be
immediately if no blob should be sent.
@rtype: Deferred which fires with anything
"""
class IQueryHandler(Interface):
"""
Respond to requests from clients.
"""
def register_with_request_handler(self, request_handler, peer):
"""
Register with the request handler to receive queries
@param request_handler: the object implementing IRequestHandler to register with
@type request_handler: object implementing IRequestHandler
@param peer: the Peer which this query handler will be answering requests from
@type peer: Peer
@return: None
"""
def handle_queries(self, queries):
"""
Return responses to queries from the client.
@param queries: a dict representing the query_identifiers:queries that should be handled
@type queries: {string: dict}
@return: a Deferred object which will callback with a dict of query responses
@rtype: Deferred which fires with {string: dict}
"""
class IQueryHandlerFactory(Interface):
"""
Construct IQueryHandlers to handle queries from each new client that connects.
"""
def build_query_handler(self):
"""
Create an object that implements the IQueryHandler interface
@return: object that implements IQueryHandler
"""
class IStreamDownloaderOptions(Interface):
def get_downloader_options(self, sd_validator, payment_rate_manager):
"""
Return the list of options that can be used to modify IStreamDownloader behavior
@param sd_validator: object containing stream metadata, which the options may depend on
@type sd_validator: object which implements IStreamDescriptorValidator interface
@param payment_rate_manager: The payment rate manager currently in effect for the downloader
@type payment_rate_manager: PaymentRateManager
@return: [DownloadOption]
@rtype: [DownloadOption]
"""
class IStreamDownloaderFactory(Interface):
"""Construct IStreamDownloaders and provide options that will be
passed to those IStreamDownloaders.
"""
def can_download(self, sd_validator, payment_rate_manager):
"""Decide whether the downloaders created by this factory can
download the stream described by sd_validator
@param sd_validator: object containing stream metadata
@type sd_validator: object which implements IStreamDescriptorValidator interface
@param payment_rate_manager: The payment rate manager currently in effect for the downloader
@type payment_rate_manager: PaymentRateManager
@return: True if the downloaders can download the stream, False otherwise
@rtype: bool
"""
def make_downloader(self, sd_validator, options, payment_rate_manager):
"""Create an object that implements the IStreamDownloader interface
@param sd_validator: object containing stream metadata which
will be given to the IStreamDownloader
@type sd_validator: object which implements IStreamDescriptorValidator interface
@param options: a list of values that will be used by the IStreamDownloaderFactory to
construct the IStreamDownloader. the options are in the same order as they were given
by get_downloader_options.
@type options: [Object]
@param payment_rate_manager: the PaymentRateManager which the IStreamDownloader should use.
@type payment_rate_manager: PaymentRateManager
@return: a Deferred which fires with the downloader object
@rtype: Deferred which fires with IStreamDownloader
"""
def get_description(self):
"""
Return a string detailing what this downloader does with streams
@return: short description of what the IStreamDownloader does.
@rtype: string
"""
class IStreamDownloader(Interface):
"""
Use metadata and data from the network for some useful purpose.
"""
def start(self):
"""start downloading the stream
@return: a Deferred which fires when the stream is finished
downloading, or errbacks when the stream is cancelled.
@rtype: Deferred which fires with anything
"""
def insufficient_funds(self, err):
"""
Inform the stream downloader that funds are too low to finish downloading.
@return: None
"""
class IStreamDescriptorValidator(Interface):
"""
Pull metadata out of Stream Descriptor Files and perform some
validation on the metadata.
"""
def validate(self):
"""
@return: whether the stream descriptor passes validation checks
@rtype: boolean
"""
def info_to_show(self):
"""
@return: A list of tuples representing metadata that should be
presented to the user before starting the download
@rtype: [(string, string)]
"""
class IWallet(Interface):
"""Send and receive payments.
To send a payment, a payment reservation must be obtained
first. This guarantees that a payment isn't promised if it can't
be paid. When the service in question is rendered, the payment
reservation must be given to the IWallet along with the final
price. The reservation can also be canceled.
"""
def stop(self):
"""Send out any unsent payments, close any connections, and stop
checking for incoming payments.
@return: None
"""
def start(self):
"""
Set up any connections and start checking for incoming payments
@return: None
"""
def get_info_exchanger(self):
"""
Get the object that will be used to find the payment addresses of peers.
@return: The object that will be used to find the payment addresses of peers.
@rtype: An object implementing IRequestCreator
"""
def get_wallet_info_query_handler_factory(self):
"""
Get the object that will be used to give our payment address to peers.
This must return an object implementing IQueryHandlerFactory. It will be used to
create IQueryHandler objects that will be registered with an IRequestHandler.
@return: The object that will be used to give our payment address to peers.
@rtype: An object implementing IQueryHandlerFactory
"""
def reserve_points(self, peer, amount):
"""Ensure a certain amount of points are available to be sent as
payment, before the service is rendered
@param peer: The peer to which the payment will ultimately be sent
@type peer: Peer
@param amount: The amount of points to reserve
@type amount: float
@return: A ReservedPoints object which is given to send_points
once the service has been rendered
@rtype: ReservedPoints
"""
def cancel_point_reservation(self, reserved_points):
"""
Return all of the points that were reserved previously for some ReservedPoints object
@param reserved_points: ReservedPoints previously returned by reserve_points
@type reserved_points: ReservedPoints
@return: None
"""
def send_points(self, reserved_points, amount):
"""
Schedule a payment to be sent to a peer
@param reserved_points: ReservedPoints object previously returned by reserve_points.
@type reserved_points: ReservedPoints
@param amount: amount of points to actually send, must be less than or equal to the
amount reserved in reserved_points
@type amount: float
@return: Deferred which fires when the payment has been scheduled
@rtype: Deferred which fires with anything
"""
def get_balance(self):
"""
Return the balance of this wallet
@return: Deferred which fires with the balance of the wallet
@rtype: Deferred which fires with float
"""
def add_expected_payment(self, peer, amount):
"""
Increase the number of points expected to be paid by a peer
@param peer: the peer which is expected to pay the points
@type peer: Peer
@param amount: the amount of points expected to be paid
@type amount: float
@return: None
"""
class IBlobPriceModel(Interface):
"""
A blob price model
Used by INegotiationStrategy classes
"""
def calculate_price(self, blob):
"""
Calculate the price for a blob
@param blob: a blob hash
@type blob: str
@return: blob price target
@type: Decimal
"""
class INegotiationStrategy(Interface):
"""
Strategy to negotiate download payment rates
"""
def make_offer(self, peer, blobs):
"""
Make a rate offer for the given peer and blobs
@param peer: peer to make an offer to
@type: str
@param blobs: blob hashes to make an offer for
@type: list
@return: rate offer
@rtype: Offer
"""
def respond_to_offer(self, offer, peer, blobs):
"""
Respond to a rate offer given by a peer
@param offer: offer to reply to
@type: Offer
@param peer: peer that made the offer
@type: str
@param blobs: blob hashes to make an offer for
@type: list
@return: accepted, rejected, or unset offer
@rtype: Offer
"""

View file

@ -4,8 +4,8 @@ from twisted.internet import defer, task
from twisted.internet.error import ConnectingCancelledError
from twisted.web._newclient import ResponseNeverReceived
import treq
from lbrynet.core.utils import DeferredDict
from lbrynet.core.Error import DownloadCanceledError
from lbrynet.p2p.utils import DeferredDict
from lbrynet.p2p.Error import DownloadCanceledError
log = logging.getLogger(__name__)

View file

@ -1,5 +1,5 @@
from lbrynet.core.Strategy import get_default_strategy, OnlyFreeStrategy
from lbrynet import conf
from lbrynet.p2p.Strategy import get_default_strategy, OnlyFreeStrategy
from lbrynet.extras.daemon import conf
from decimal import Decimal

View file

@ -1,6 +1,6 @@
import datetime
from collections import defaultdict
from lbrynet.core import utils
from lbrynet.p2p import utils
# Do not create this object except through PeerManager
class Peer:

View file

@ -1,4 +1,4 @@
from lbrynet.core.Peer import Peer
from lbrynet.p2p.Peer import Peer
class PeerManager:

View file

@ -1,8 +1,6 @@
from zope.interface import implementer
from decimal import Decimal
from lbrynet.interfaces import IBlobPriceModel
from lbrynet import conf
from lbrynet.extras.daemon import conf
def get_default_price_model(blob_tracker, base_price, **kwargs):
@ -28,7 +26,6 @@ class MeanAvailabilityWeightedPrice:
blob_tracker (BlobAvailabilityTracker): blob availability tracker
"""
implementer(IBlobPriceModel)
def __init__(self, tracker, base_price=None, alpha=1.0):
self.blob_tracker = tracker

Some files were not shown because too many files have changed in this diff