Merge branch 'master' into master

commit 5a1f42ee54 by Alyssa Callahan, 2018-11-27 15:39:48 -05:00 (committed by GitHub)
239 changed files with 7329 additions and 2050 deletions

View file

@@ -9,7 +9,7 @@
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS
ignore=CVS,schema
# Add files or directories matching the regex patterns to the
# blacklist. The regex matches against base names, not paths.

View file

@@ -9,22 +9,19 @@ jobs:
  - stage: code quality
    name: "pylint lbrynet"
    install:
      - pip install astroid==2.0.4
      # newer astroid fails with pylint, so we pre-install an older version
      - pip install pylint
      - pip install git+https://github.com/lbryio/torba.git
      - pip install git+https://github.com/lbryio/lbryschema.git
      - pip install git+https://github.com/lbryio/torba.git#egg=torba
      - pip install -e .
    script: pylint lbrynet
  - &tests
    stage: test
    name: "Unit Tests w/ Python 3.7"
    name: "Unit Tests"
    install:
      - pip install coverage
      - pip install git+https://github.com/lbryio/electrumx.git#lbryumx
      - pip install git+https://github.com/lbryio/orchstr8.git
      - pip install git+https://github.com/lbryio/lbryschema.git
      - pip install git+https://github.com/lbryio/lbryumx.git
      - pip install git+https://github.com/lbryio/torba.git
      - pip install git+https://github.com/lbryio/torba.git#egg=torba
      - pip install -e .[test]
    script:
      - HOME=/tmp coverage run -p --source=lbrynet -m unittest discover -v tests.unit.wallet
@@ -34,25 +31,12 @@ jobs:
      - bash <(curl -s https://codecov.io/bash)
  - <<: *tests
    name: "Unit Tests w/ Python 3.6"
    python: "3.6"
  - <<: *tests
    name: "DHT Tests w/ Python 3.7"
    script: HOME=/tmp coverage run --source=lbrynet -m twisted.trial --reactor=asyncio tests.functional
  - <<: *tests
    name: "DHT Tests w/ Python 3.6"
    python: "3.6"
    name: "DHT Tests"
    script: HOME=/tmp coverage run --source=lbrynet -m twisted.trial --reactor=asyncio tests.functional
  - name: "Integration Tests"
    install:
      - pip install tox-travis coverage
      - pushd .. && git clone https://github.com/lbryio/electrumx.git --branch lbryumx && popd
      - pushd .. && git clone https://github.com/lbryio/orchstr8.git && popd
      - pushd .. && git clone https://github.com/lbryio/lbryschema.git && popd
      - pushd .. && git clone https://github.com/lbryio/lbryumx.git && cd lbryumx && git checkout afd34f323dd94c516108a65240f7d17aea8efe85 && cd .. && popd
      - pushd .. && git clone https://github.com/lbryio/torba.git && popd
    script: tox
    after_success:
@@ -65,9 +49,9 @@ jobs:
    services:
      - docker
    install:
      - docker pull cdrx/pyinstaller-windows:python3-32bit
      - docker pull lbry/pyinstaller34_32bits:py371
    script:
      - docker run -v "$(pwd):/src/lbry" cdrx/pyinstaller-windows:python3-32bit lbry/scripts/wine_build.sh
      - docker run -v "$(pwd):/src/lbry" lbry/pyinstaller34_32bits:py371 lbry/scripts/wine_build.sh
      - sudo zip -j dist/lbrynet-windows.zip dist/lbrynet.exe
    addons:
      artifacts:
@@ -79,15 +63,13 @@ jobs:
  - &build
    name: "Linux"
    python: "3.6"
    install:
      - pip3 install pyinstaller
      - pip3 install git+https://github.com/lbryio/torba.git
      - pip3 install git+https://github.com/lbryio/lbryschema.git
      - python scripts/set_build.py
      - pip3 install -e .
    script:
      - pyinstaller -F -n lbrynet lbrynet/cli.py
      - pyinstaller -F -n lbrynet lbrynet/extras/cli.py
      - chmod +x dist/lbrynet
      - zip -j dist/lbrynet-${OS}.zip dist/lbrynet
      - ./dist/lbrynet --version
@@ -104,9 +86,19 @@ jobs:
  - <<: *build
    name: "Mac"
    os: osx
    osx_image: xcode9.4
    osx_image: xcode6.4
    language: generic
    env: OS=mac
    cache: false
    install:
      - brew update
      - brew install python3
      - pip3 install pyinstaller
      - git clone https://github.com/lbryio/torba.git --depth 1
      - sed -i -e "s/'plyvel',//" torba/setup.py
      - cd torba && pip3 install -e . && cd ..
      - python scripts/set_build.py
      - pip3 install -e .
    cache:
      directories:

View file

@@ -1,17 +1,17 @@
# Installing LBRY
If only the json-rpc API server is needed the recommended way to install LBRY is to use a pre-built binary. We provide binaries for all major operating systems. See the [README](README.md).
If only the JSON-RPC API server is needed, the recommended way to install LBRY is to use a pre-built binary. We provide binaries for all major operating systems. See the [README](README.md).
These instructions are for installing LBRY from source, which is recommended if you are interested in doing development work or if LBRY is not available on your operating system (godspeed, TempleOS users).
Here's a video walkthrough of this setup which is itself hosted by the LBRY network and provided via [spee.ch](https://github.com/lbryio/spee.ch):
Here's a video walkthrough of this setup, which is itself hosted by the LBRY network and provided via [spee.ch](https://github.com/lbryio/spee.ch):
[![Setup for development](https://spee.ch/2018-10-04-17-13-54-017046806.png)](https://spee.ch/967f99344308f1e90f0620d91b6c93e4dfb240e0/lbrynet-dev-setup.mp4)
## Prerequisites
Running `lbrynet` from source requires Python 3.6 or higher (3.7 is preferred). Get the installer for your OS [here](https://www.python.org/downloads/release/python-370/).
After installing python 3 you'll need to install some additional libraries depending on your operating system.
After installing Python 3, you'll need to install some additional libraries depending on your operating system.
### Virtualenv
@@ -42,7 +42,7 @@ sudo apt-get install build-essential python3.7 python3.7-dev git python-virtuale
On Raspbian, you will also need to install `python-pyparsing`.
If you're running another Linux flavor, install the equivalent of the above packages for your system.
If you're running another Linux distro, install the equivalent of the above packages for your system.
## Installation
@@ -60,7 +60,7 @@ To install:
pip install --editable .[test] # [test] installs extras needed for running the tests
```
To verify your install, `which lbrynet` should return a path inside of the `lbry-venv` folder created by the `virtualenv` command.
To verify your installation, `which lbrynet` should return a path inside of the `lbry-venv` folder created by the `virtualenv` command.
## Run the tests
To run the unit tests from the repo directory:

View file

@@ -1,12 +1,12 @@
# <img src="https://raw.githubusercontent.com/lbryio/lbry/master/lbry.png" alt="LBRY" width="48" height="36" /> LBRY [![Build Status](https://travis-ci.org/lbryio/lbry.svg?branch=master)](https://travis-ci.org/lbryio/lbry) [![Test Coverage](https://codecov.io/gh/lbryio/lbry/branch/master/graph/badge.svg)](https://codecov.io/gh/lbryio/lbry)
LBRY is an open-source protocol providing distribution, discovery, and purchase of digital content (data) via a decentralized network. It utilizes the [LBRY blockchain](https://github.com/lbryio/lbrycrd) as a global namespace and database of digital content. Blockchain entries contain searchable content metadata, identities, and rights and access rules. LBRY also provides a data network that consists of peers uploading and downloading data from other peers, possibly in exchange for payments, and a distributed hash table, used by peers to discover other peers.
LBRY is an open-source protocol providing distribution, discovery, and purchase of digital content (data) via a decentralized peer-to-peer network. It utilizes the [LBRY blockchain](https://github.com/lbryio/lbrycrd) as a global namespace and database of digital content. Blockchain entries contain searchable content metadata, identities, rights and access rules. LBRY also provides a data network that consists of peers (seeders) uploading and downloading data from other peers, possibly in exchange for payments, as well as a distributed hash table used by peers to discover other peers.
This project provides an SDK for building applications using the LBRY protocol. The SDK is written in Python 3.7+ using Twisted. For other languages or platforms, a JSONRPC API is provided.
This project provides an SDK for building applications using the LBRY protocol. The SDK is written in Python 3.7+ using Twisted. For other languages or platforms, a JSON-RPC API is provided.
## Installation
Our [releases page](https://github.com/lbryio/lbry/releases) contains pre-built binaries of the latest release, pre-releases, and past releases, for macOS, Debian-based Linux, and Windows. [Automated travis builds](http://build.lbry.io/daemon/) are also available for testing.
Our [releases page](https://github.com/lbryio/lbry/releases) contains pre-built binaries of the latest release, pre-releases, and past releases for macOS, Debian-based Linux, and Windows. [Automated travis builds](http://build.lbry.io/daemon/) are also available for testing.
## Usage
@@ -14,13 +14,15 @@ Run `lbrynet start` to launch the API server.
By default, `lbrynet` will provide a JSON-RPC server at `http://localhost:5279`. It is easy to interact with via cURL or sane programming languages.
Our [quickstart guide](http://lbry.io/quickstart) provides a simple walkthrough and examples for learning.
Our [quickstart guide](https://lbry.tech/playground) provides a simple walkthrough and examples for learning.
With the daemon running, `lbrynet commands` will show you a list of commands.
The full API is documented [here](https://lbry.tech/api/sdk).
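For example, a minimal Python client for the default endpoint could look like this (a sketch; it assumes a daemon is already listening on `localhost:5279` and that the method name used here, `status`, exists in your SDK version):

```python
import json
from urllib import request

def lbrynet_call(method, params=None, url="http://localhost:5279"):
    # POST a JSON-RPC style payload to the lbrynet API server
    payload = json.dumps({"method": method, "params": params or {}}).encode()
    req = request.Request(url, data=payload, headers={"Content-Type": "application/json"})
    with request.urlopen(req) as resp:
        return json.loads(resp.read().decode())

print(json.dumps(lbrynet_call("status"), indent=2))
```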
## Running from source
Installing from source is also relatively painless, full instructions are in [INSTALL.md](INSTALL.md)
Installing from source is also relatively painless. Full instructions are in [INSTALL.md](INSTALL.md)
## Contributing
@@ -41,3 +43,7 @@ The primary contact for this project is [@jackrobison](mailto:jack@lbry.io).
## Additional information and links
The documentation for the API can be found [here](https://lbry.tech/api/sdk).
Daemon defaults, ports, and other settings are documented [here](https://lbry.tech/resources/daemon-settings).
Settings can be configured using a daemon-settings.yml file. An example can be found [here](https://github.com/lbryio/lbry/blob/master/example_daemon_settings.yml).

View file

@@ -100,7 +100,7 @@
<div class="md-flex__cell md-flex__cell--shrink">
<a href="/" title="LBRY" class="md-header-nav__button md-logo">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" width="24" height="24">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" alt="LBRY logo" width="24" height="24">
</a>
</div>
@@ -196,7 +196,7 @@
<label class="md-nav__title md-nav__title--site" for="drawer">
<span class="md-nav__button md-logo">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" width="48" height="48">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" alt="LBRY logo" width="48" height="48">
</span>
LBRY

View file

@@ -0,0 +1,26 @@
# This is an example daemon_settings.yml file.
# See https://lbry.tech/resources/daemon-settings for all configuration keys and values
share_usage_data: True
lbryum_servers:
- lbryumx1.lbry.io:50001
- lbryumx2.lbry.io:50001
blockchain_name: lbrycrd_main
data_dir: /home/lbry/.lbrynet
download_directory: /home/lbry/downloads
delete_blobs_on_remove: True
dht_node_port: 4444
peer_port: 3333
use_upnp: True
use_auth_http: True
use_https: True
#components_to_skip:
# - peer_protocol_server
# - hash_announcer
# - dht
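As a quick sanity check, a settings file like the one above can be parsed before handing it to the daemon (a sketch; `daemon_settings.yml` is the assumed file name and PyYAML the assumed parser):

```python
import yaml  # PyYAML

with open("daemon_settings.yml") as f:
    settings = yaml.safe_load(f)

# spot-check a few of the keys shown above
assert isinstance(settings["lbryum_servers"], list)
print(settings["data_dir"], settings["dht_node_port"])
```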

View file

@@ -1,7 +1,10 @@
import logging
from lbrynet.custom_logger import install_logger
__name__ = "lbrynet"
__version__ = "0.30.1rc1"
__version__ = "0.30.1rc5"
version = tuple(__version__.split('.'))
install_logger()
logging.getLogger(__name__).addHandler(logging.NullHandler())
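Note that `version` above is a tuple of strings, not integers, and the pre-release tag stays attached to the last component:

```python
__version__ = "0.30.1rc5"
version = tuple(__version__.split('.'))
assert version == ('0', '30', '1rc5')  # string parts; 'rc5' rides on the patch component
```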

View file

@@ -7,7 +7,7 @@ from cryptography.hazmat.primitives.ciphers import Cipher, modes
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.padding import PKCS7
from cryptography.hazmat.backends import default_backend
from lbrynet.core.BlobInfo import BlobInfo
from lbrynet.p2p.BlobInfo import BlobInfo
from lbrynet.blob.blob_file import MAX_BLOB_SIZE
log = logging.getLogger(__name__)
@@ -75,7 +75,7 @@ class StreamBlobDecryptor:
        def finish_decrypt():
            bytes_left = len(self.buff) % (AES.block_size // 8)
            if bytes_left != 0:
                log.warning(self.buff[-1 * (AES.block_size // 8):].encode('hex'))
                log.warning(binascii.hexlify(self.buff[-1 * (AES.block_size // 8):]).decode())
                raise Exception("blob %s has incorrect padding: %i bytes left" %
                                (self.blob.blob_hash, bytes_left))
            data_to_decrypt, self.buff = self.buff, b''
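The `hexlify` change above is part of the Python 3 port: `bytes.encode('hex')` no longer exists, and `binascii.hexlify(...).decode()` is the equivalent spelling. For example:

```python
import binascii

tail = b'\x00\x01\x02'
# Python 2 wrote: tail.encode('hex')
assert binascii.hexlify(tail).decode() == '000102'
```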

View file

@@ -6,7 +6,7 @@ import logging
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from twisted.internet import defer
from lbrynet.cryptstream.CryptBlob import CryptStreamBlobMaker
from lbrynet.blob.CryptBlob import CryptStreamBlobMaker
log = logging.getLogger(__name__)

View file

@@ -2,16 +2,16 @@
Utilities for turning plain files into LBRY Files.
"""
import logging
import os
import logging
from binascii import hexlify
from twisted.internet import defer
from twisted.protocols.basic import FileSender
from lbrynet.core.StreamDescriptor import BlobStreamDescriptorWriter, EncryptedFileStreamType
from lbrynet.core.StreamDescriptor import format_sd_info, get_stream_hash, validate_descriptor
from lbrynet.cryptstream.CryptStreamCreator import CryptStreamCreator
from lbrynet.p2p.StreamDescriptor import BlobStreamDescriptorWriter, EncryptedFileStreamType
from lbrynet.p2p.StreamDescriptor import format_sd_info, get_stream_hash, validate_descriptor
from lbrynet.blob.CryptStreamCreator import CryptStreamCreator
log = logging.getLogger(__name__)

View file

@@ -6,13 +6,13 @@ from binascii import hexlify, unhexlify
from twisted.internet import defer
from lbrynet import conf
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.core.HTTPBlobDownloader import HTTPBlobDownloader
from lbrynet.core.utils import short_hash
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaver
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileDownloader
from lbrynet.file_manager.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.core.StreamDescriptor import save_sd_info
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.p2p.HTTPBlobDownloader import HTTPBlobDownloader
from lbrynet.utils import short_hash
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaver
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileDownloader
from lbrynet.blob.EncryptedFileStatusReport import EncryptedFileStatusReport
from lbrynet.p2p.StreamDescriptor import save_sd_info
log = logging.getLogger(__name__)

View file

@@ -7,15 +7,14 @@ from binascii import hexlify, unhexlify
from twisted.internet import defer, task, reactor
from twisted.python.failure import Failure
from lbrynet.reflector.reupload import reflect_file
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, get_sd_info
from lbrynet.cryptstream.client.CryptStreamDownloader import AlreadyStoppedError
from lbrynet.cryptstream.client.CryptStreamDownloader import CurrentlyStoppingError
from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet import conf
from lbrynet.extras.reflector.reupload import reflect_file
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloader
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamType, get_sd_info
from lbrynet.blob.client.CryptStreamDownloader import AlreadyStoppedError
from lbrynet.blob.client.CryptStreamDownloader import CurrentlyStoppingError
from lbrynet.utils import safe_start_looping_call, safe_stop_looping_call
log = logging.getLogger(__name__)

View file

@@ -1,4 +0,0 @@
from .blob_file import BlobFile
from .creator import BlobFileCreator
from .writer import HashBlobWriter
from .reader import HashBlobReader

View file

@@ -1,10 +1,10 @@
import logging
import os
import logging
from twisted.internet import defer, threads
from twisted.web.client import FileBodyProducer
from twisted.python.failure import Failure
from lbrynet.core.Error import DownloadCanceledError, InvalidDataError, InvalidBlobHashError
from lbrynet.core.utils import is_valid_blobhash
from lbrynet.cryptoutils import get_lbry_hash_obj
from lbrynet.p2p.Error import DownloadCanceledError, InvalidDataError, InvalidBlobHashError
from lbrynet.blob.writer import HashBlobWriter
from lbrynet.blob.reader import HashBlobReader
@@ -12,6 +12,24 @@ log = logging.getLogger(__name__)
MAX_BLOB_SIZE = 2 * 2 ** 20
# digest_size is in bytes, and blob hashes are hex encoded
blobhash_length = get_lbry_hash_obj().digest_size * 2
def is_valid_hashcharacter(char):
    return char in "0123456789abcdef"

def is_valid_blobhash(blobhash):
    """Checks whether the blobhash is the correct length and contains only
    valid characters (0-9, a-f)
    @param blobhash: string, the blobhash to check
    @return: True/False
    """
    return len(blobhash) == blobhash_length and all(is_valid_hashcharacter(l) for l in blobhash)
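# A quick sketch of the validator above, assuming the usual LBRY blob hash
# (a hex-encoded sha384 digest, so blobhash_length is 96):
assert is_valid_blobhash("ab" * 48)      # 96 lowercase hex characters
assert not is_valid_blobhash("AB" * 48)  # uppercase hex is rejected
assert not is_valid_blobhash("ab" * 47)  # wrong length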
class BlobFile:
"""

View file

@@ -1,6 +1,6 @@
import binascii
from twisted.internet import defer
from lbrynet.cryptstream.CryptBlob import StreamBlobDecryptor
from lbrynet.blob.CryptBlob import StreamBlobDecryptor
class CryptBlobHandler:

View file

@@ -1,13 +1,14 @@
from binascii import unhexlify
import logging
from lbrynet.core.client.BlobRequester import BlobRequester
from lbrynet.core.client.ConnectionManager import ConnectionManager
from lbrynet.core.client.DownloadManager import DownloadManager
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.cryptstream.client.CryptBlobHandler import CryptBlobHandler
from binascii import unhexlify
from twisted.internet import defer
from twisted.python.failure import Failure
from lbrynet.p2p.client.BlobRequester import BlobRequester
from lbrynet.p2p.client.ConnectionManager import ConnectionManager
from lbrynet.p2p.client.DownloadManager import DownloadManager
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.blob.client.CryptBlobHandler import CryptBlobHandler
log = logging.getLogger(__name__)

View file

@@ -2,14 +2,14 @@ import os
import logging
import traceback
from binascii import hexlify, unhexlify
from lbrynet.core.StreamDescriptor import save_sd_info
from lbrynet.cryptstream.client.CryptStreamDownloader import CryptStreamDownloader
from lbrynet.core.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.core.Error import FileOpenError
from lbrynet.lbry_file.client.EncryptedFileMetadataHandler import EncryptedFileMetadataHandler
from twisted.internet import defer, threads
from lbrynet.p2p.StreamDescriptor import save_sd_info
from lbrynet.blob.client.CryptStreamDownloader import CryptStreamDownloader
from lbrynet.p2p.client.StreamProgressManager import FullStreamProgressManager
from lbrynet.p2p.Error import FileOpenError
from lbrynet.blob.client.EncryptedFileMetadataHandler import EncryptedFileMetadataHandler
log = logging.getLogger(__name__)
@@ -44,8 +44,19 @@ class EncryptedFileDownloader(CryptStreamDownloader):
                                             self.blob_manager, download_manager)

    def _start(self):
        def check_start_succeeded(success):
            if success:
                self.starting = False
                self.stopped = False
                self.completed = False
                return True
            else:
                return self._start_failed()

        self.download_manager = self._get_download_manager()
        d = self._setup_output()
        d.addCallback(lambda _: CryptStreamDownloader._start(self))
        d.addCallback(lambda _: self.download_manager.start_downloading())
        d.addCallbacks(check_start_succeeded)
        return d

    def _setup_output(self):

View file

@@ -1,6 +1,6 @@
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType
from lbrynet.core.StreamDescriptor import EncryptedFileStreamDescriptorValidator
from lbrynet.core.DownloadOption import DownloadOption, DownloadOptionChoice
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamType
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamDescriptorValidator
from lbrynet.p2p.DownloadOption import DownloadOption, DownloadOptionChoice
def add_lbry_file_to_sd_identifier(sd_identifier):

View file

@@ -3,7 +3,7 @@ import logging
from io import BytesIO
from twisted.internet import defer
from twisted.web.client import FileBodyProducer
from lbrynet.core.cryptoutils import get_lbry_hash_obj
from lbrynet.cryptoutils import get_lbry_hash_obj
log = logging.getLogger(__name__)

View file

@@ -1,8 +1,8 @@
import logging
from io import BytesIO
from twisted.python.failure import Failure
from lbrynet.core.Error import DownloadCanceledError, InvalidDataError
from lbrynet.core.cryptoutils import get_lbry_hash_obj
from lbrynet.p2p.Error import DownloadCanceledError, InvalidDataError
from lbrynet.cryptoutils import get_lbry_hash_obj
log = logging.getLogger(__name__)

View file

@@ -7,15 +7,15 @@ import sys
import yaml
import envparse
from appdirs import user_data_dir, user_config_dir
from lbrynet.core import utils
from lbrynet.core.Error import InvalidCurrencyError, NoSuchDirectoryError
from lbrynet import utils
from lbrynet.p2p.Error import InvalidCurrencyError, NoSuchDirectoryError
from lbrynet.androidhelpers.paths import (
    android_internal_storage_dir,
    android_app_internal_storage_dir
)

try:
    from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
    from lbrynet.winpaths import get_path, FOLDERID, UserHandle
except (ImportError, ValueError, NameError):
    # Android platform: NameError: name 'c_wchar' is not defined
    pass

View file

@@ -1,59 +0,0 @@
import platform
import json
import subprocess
import os
from six.moves.urllib import request
from six.moves.urllib.error import URLError
from lbryschema import __version__ as lbryschema_version
from lbrynet import build_type, __version__ as lbrynet_version
from lbrynet.conf import ROOT_DIR
import logging.handlers
log = logging.getLogger(__name__)
def get_lbrynet_version() -> str:
    if build_type.BUILD == "dev":
        try:
            with open(os.devnull, 'w') as devnull:
                git_dir = ROOT_DIR + '/.git'
                return subprocess.check_output(
                    ['git', '--git-dir='+git_dir, 'describe', '--dirty', '--always'],
                    stderr=devnull
                ).decode().strip().lstrip('v')
        except (subprocess.CalledProcessError, OSError):
            log.debug("failed to get version from git")
    return lbrynet_version


def get_platform(get_ip: bool = True) -> dict:
    p = {
        "processor": platform.processor(),
        "python_version": platform.python_version(),
        "platform": platform.platform(),
        "os_release": platform.release(),
        "os_system": platform.system(),
        "lbrynet_version": get_lbrynet_version(),
        "lbryschema_version": lbryschema_version,
        "build": build_type.BUILD,  # CI server sets this during build step
    }
    if p["os_system"] == "Linux":
        try:
            import distro
            p["distro"] = distro.info()
            p["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown')
        except ModuleNotFoundError:
            pass

    # TODO: remove this from get_platform and add a get_external_ip function using treq
    if get_ip:
        try:
            response = json.loads(request.urlopen("https://api.lbry.io/ip").read())
            if not response['success']:
                raise URLError("failed to get external ip")
            p['ip'] = response['data']['ip']
        except (URLError, AssertionError):
            p['ip'] = "Could not determine IP"

    return p

View file

@@ -1,8 +0,0 @@
"""
Classes and functions for dealing with Crypt Streams.
Crypt Streams are encrypted blobs and metadata tying those blobs together. At least some of the
metadata is generally stored in a Stream Descriptor File, for example containing a public key
used to bind blobs to the stream and a symmetric key used to encrypt the blobs. The list of blobs
may or may not be present.
"""

View file

@@ -102,5 +102,8 @@ class Logger(logging.Logger):
        self._log(TRACE, msg, args, **kwargs)


logging.setLoggerClass(Logger)
logging.addLevelName(TRACE, 'TRACE')
def install_logger():
    current = logging.getLoggerClass()
    if current is not Logger:
        logging.setLoggerClass(Logger)
        logging.addLevelName(TRACE, 'TRACE')
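The guard above makes installation idempotent, so importing `lbrynet` repeatedly (or calling `install_logger()` twice) does not re-register the class (a sketch in the module's own terms):

```python
install_logger()
install_logger()  # second call is a no-op thanks to the getLoggerClass() check
assert logging.getLoggerClass() is Logger
```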

View file

@@ -1 +0,0 @@
from . import Components # register Component classes

View file

@@ -1,5 +1,5 @@
from collections import UserDict
from . import constants
from lbrynet.dht import constants
class DictDataStore(UserDict):
@@ -44,7 +44,7 @@ class DictDataStore(UserDict):
                del self[key]

    def hasPeersForBlob(self, key):
        return True if key in self and len(tuple(self.filter_bad_and_expired_peers(key))) else False
        return bool(key in self and len(tuple(self.filter_bad_and_expired_peers(key))))
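# The bool() rewrite above is behavior-preserving; for any value the two
# spellings agree (a minimal check):
for value in (0, 1, [], [("peer", 1)]):
    assert (True if value else False) == bool(value)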
def addPeerToBlob(self, contact, key, compact_address, lastPublished, originallyPublished, originalPublisherID):
if key in self:

View file

@@ -1,4 +1,4 @@
from .error import DecodeError
from lbrynet.dht.error import DecodeError
def bencode(data):

View file

@@ -13,14 +13,12 @@ class DecodeError(Exception):
    Should be raised by an C{Encoding} implementation if decode operation
    fails
    """
    pass


class BucketFull(Exception):
    """
    Raised when the bucket is full
    """
    pass
class UnknownRemoteException(Exception):

View file

@@ -10,7 +10,6 @@ class IDataStore(Interface):
    def keys(self):
        """ Return a list of the keys in this data store """
        pass

    def removeExpiredPeers(self):
        pass

View file

@@ -1,8 +1,8 @@
import logging
from twisted.internet import defer
from .distance import Distance
from .error import TimeoutError
from . import constants
from lbrynet.dht.distance import Distance
from lbrynet.dht.error import TimeoutError
from lbrynet.dht import constants
log = logging.getLogger(__name__)
@@ -67,7 +67,7 @@ class _IterativeFind:
    def getContactTriples(self, result):
        if self.is_find_value_request:
            contact_triples = result['contacts']
            contact_triples = result[b'contacts']
        else:
            contact_triples = result
        for contact_tup in contact_triples:
@@ -112,11 +112,11 @@
                # We are looking for a value, and the remote node didn't have it
                # - mark it as the closest "empty" node, if it is
                # TODO: store to this peer after finding the value as per the kademlia spec
                if 'closestNodeNoValue' in self.find_value_result:
                if b'closestNodeNoValue' in self.find_value_result:
                    if self.is_closer(contact):
                        self.find_value_result['closestNodeNoValue'] = contact
                        self.find_value_result[b'closestNodeNoValue'] = contact
                else:
                    self.find_value_result['closestNodeNoValue'] = contact
                    self.find_value_result[b'closestNodeNoValue'] = contact
            contactTriples = self.getContactTriples(result)
            for contactTriple in contactTriples:
                if (contactTriple[1], contactTriple[2]) in ((c.address, c.port) for c in self.already_contacted):
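The `b'contacts'`/`b'closestNodeNoValue'` changes above follow from the Python 3 port: decoded bencode messages arrive keyed by `bytes`, so `str` keys no longer match (a minimal illustration):

```python
result = {b'contacts': []}  # shape of a decoded findValue response post-port
assert 'contacts' not in result
assert b'contacts' in result
```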

View file

@@ -1,8 +1,8 @@
import logging
from . import constants
from .distance import Distance
from .error import BucketFull
from lbrynet.dht import constants
from lbrynet.dht.distance import Distance
from lbrynet.dht.error import BucketFull
log = logging.getLogger(__name__)

View file

@@ -7,7 +7,7 @@
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from . import msgtypes
from lbrynet.dht import msgtypes
class MessageTranslator:

View file

@@ -7,8 +7,8 @@
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net
from lbrynet.core.utils import generate_id
from . import constants
from lbrynet.utils import generate_id
from lbrynet.dht import constants
class Message:

View file

@@ -5,17 +5,12 @@ from functools import reduce
from twisted.internet import defer, error, task
from lbrynet.core.utils import generate_id, DeferredDict
from lbrynet.core.call_later_manager import CallLaterManager
from lbrynet.core.PeerManager import PeerManager
from .error import TimeoutError
from . import constants
from . import routingtable
from . import datastore
from . import protocol
from .peerfinder import DHTPeerFinder
from .contact import ContactManager
from .iterativefind import iterativeFind
from lbrynet.utils import generate_id, DeferredDict
from lbrynet.dht.call_later_manager import CallLaterManager
from lbrynet.dht.error import TimeoutError
from lbrynet.dht import constants, routingtable, datastore, protocol
from lbrynet.dht.contact import ContactManager
from lbrynet.dht.iterativefind import iterativeFind
log = logging.getLogger(__name__)
@@ -83,8 +78,8 @@ class Node(MockKademliaHelper):
    def __init__(self, node_id=None, udpPort=4000, dataStore=None,
                 routingTableClass=None, networkProtocol=None,
                 externalIP=None, peerPort=3333, listenUDP=None,
                 callLater=None, resolve=None, clock=None, peer_finder=None,
                 peer_manager=None, interface='', externalUDPPort=None):
                 callLater=None, resolve=None, clock=None,
                 interface='', externalUDPPort=None):
"""
@param dataStore: The data store to use. This must be class inheriting
from the C{DataStore} interface (or providing the
@@ -124,20 +119,13 @@
        else:
            self._routingTable = routingTableClass(self.node_id, self.clock.seconds)

        # Initialize this node's network access mechanisms
        if networkProtocol is None:
            self._protocol = protocol.KademliaProtocol(self)
        else:
            self._protocol = networkProtocol
        # Initialize the data storage mechanism used by this node
        self._protocol = networkProtocol or protocol.KademliaProtocol(self)
        self.token_secret = self._generateID()
        self.old_token_secret = None
        self.externalIP = externalIP
        self.peerPort = peerPort
        self.externalUDPPort = externalUDPPort or self.port
        self._dataStore = dataStore or datastore.DictDataStore(self.clock.seconds)
        self.peer_manager = peer_manager or PeerManager()
        self.peer_finder = peer_finder or DHTPeerFinder(self, self.peer_manager)
        self._join_deferred = None

    #def __del__(self):
@@ -228,10 +216,10 @@
        # find the closest peers to us
        closest = yield self._iterativeFind(self.node_id, shortlist if not self.contacts else None)
        yield _ping_contacts(closest)
        # # query random hashes in our bucket key ranges to fill or split them
        # random_ids_in_range = self._routingTable.getRefreshList()
        # while random_ids_in_range:
        #     yield self.iterativeFindNode(random_ids_in_range.pop())
        # query random hashes in our bucket key ranges to fill or split them
        random_ids_in_range = self._routingTable.getRefreshList()
        while random_ids_in_range:
            yield self.iterativeFindNode(random_ids_in_range.pop())
        defer.returnValue(None)

    @defer.inlineCallbacks
@@ -255,7 +243,7 @@
        yield _iterative_join()

    @defer.inlineCallbacks
    def start(self, known_node_addresses=None):
    def start(self, known_node_addresses=None, block_on_join=False):
        """ Causes the Node to attempt to join the DHT network by contacting the
        known DHT nodes. This can be called multiple times if the previous attempt
        has failed or if the Node has lost all the contacts.
@@ -270,8 +258,11 @@
        self.start_listening()
        yield self._protocol._listening
        # TODO: Refresh all k-buckets further away than this node's closest neighbour
        yield self.joinNetwork(known_node_addresses or [])
        self.start_looping_calls()
        d = self.joinNetwork(known_node_addresses or [])
        d.addCallback(lambda _: self.start_looping_calls())
        d.addCallback(lambda _: log.info("Joined the dht"))
        if block_on_join:
            yield d

    def start_looping_calls(self):
        self.safe_start_looping_call(self._change_token_lc, constants.tokenSecretChangeInterval)
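# Sketch of the new start() contract: joining is fire-and-forget by default,
# and callers opt in to waiting with block_on_join=True (hypothetical usage,
# assuming a configured Node instance and a reachable bootstrap address):
from twisted.internet import defer

@defer.inlineCallbacks
def boot(node):
    yield node.start(known_node_addresses=[('dht.example.com', 4444)], block_on_join=True)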
@@ -580,7 +571,7 @@
        """
        return generate_id()

    # from lbrynet.core.utils import profile_deferred
    # from lbrynet.p2p.utils import profile_deferred
    # @profile_deferred()
    @defer.inlineCallbacks
    def _iterativeFind(self, key, startupShortlist=None, rpc='findNode', exclude=None):

View file

@@ -3,12 +3,8 @@ import errno
from binascii import hexlify
from twisted.internet import protocol, defer
from .error import BUILTIN_EXCEPTIONS, UnknownRemoteException, TimeoutError, TransportNotConnected
from . import constants
from . import encoding
from . import msgtypes
from . import msgformat
from lbrynet.dht import constants, encoding, msgformat, msgtypes
from lbrynet.dht.error import BUILTIN_EXCEPTIONS, UnknownRemoteException, TimeoutError, TransportNotConnected
log = logging.getLogger(__name__)
@@ -118,7 +114,20 @@ class KademliaProtocol(protocol.DatagramProtocol):
            return args
        return args + ({b'protocolVersion': self._protocolVersion},)

    @defer.inlineCallbacks
    def sendRPC(self, contact, method, args):
        for _ in range(constants.rpcAttempts):
            try:
                response = yield self._sendRPC(contact, method, args)
                return response
            except TimeoutError:
                if contact.contact_is_good:
                    log.debug("RETRY %s ON %s", method, contact)
                    continue
                else:
                    raise

    def _sendRPC(self, contact, method, args):
        """
        Sends an RPC to the specified contact
@@ -153,11 +162,12 @@
        df = defer.Deferred()

        def _remove_contact(failure):  # remove the contact from the routing table and track the failure
            contact.update_last_failed()
            try:
                self._node.removeContact(contact)
                if not contact.contact_is_good:
                    self._node.removeContact(contact)
            except (ValueError, IndexError):
                pass
            contact.update_last_failed()
            return failure

        def _update_contact(result):  # refresh the contact in the routing table
@@ -422,7 +432,7 @@
            else:
                result = func()
        except Exception as e:
            log.exception("error handling request for %s:%i %s", senderContact.address, senderContact.port, method)
            log.error("error handling request for %s:%i %s", senderContact.address, senderContact.port, method)
            df.errback(e)
        else:
            df.callback(result)

View file

@@ -6,14 +6,12 @@
# may be created by processing this file with epydoc: http://epydoc.sf.net
import random
from binascii import unhexlify
from twisted.internet import defer
from . import constants
from . import kbucket
from .error import TimeoutError
from .distance import Distance
import logging
from twisted.internet import defer
from lbrynet.dht import constants, kbucket
from lbrynet.dht.error import TimeoutError
from lbrynet.dht.distance import Distance
log = logging.getLogger(__name__)
@@ -215,7 +213,7 @@ class TreeRoutingTable:
        now = int(self._getTime())
        for bucket in self._buckets[startIndex:]:
            if force or now - bucket.lastAccessed >= constants.refreshTimeout:
                searchID = self._randomIDInBucketRange(bucketIndex)
                searchID = self.midpoint_id_in_bucket_range(bucketIndex)
                refreshIDs.append(searchID)
            bucketIndex += 1
        return refreshIDs
@@ -263,22 +261,25 @@
            i += 1
        return i

    def _randomIDInBucketRange(self, bucketIndex):
    def random_id_in_bucket_range(self, bucketIndex):
        """ Returns a random ID in the specified k-bucket's range
        @param bucketIndex: The index of the k-bucket to use
        @type bucketIndex: int
        """
        idValue = random.randrange(
            self._buckets[bucketIndex].rangeMin, self._buckets[bucketIndex].rangeMax)
        randomID = hex(idValue)[2:]
        if randomID[-1] == 'L':
            randomID = randomID[:-1]
        if len(randomID) % 2 != 0:
            randomID = '0' + randomID
        randomID = unhexlify(randomID)
        randomID = ((constants.key_bits // 8) - len(randomID)) * b'\x00' + randomID
        return randomID
        random_id = int(random.randrange(self._buckets[bucketIndex].rangeMin, self._buckets[bucketIndex].rangeMax))
        return random_id.to_bytes(constants.key_bits // 8, 'big')

    def midpoint_id_in_bucket_range(self, bucketIndex):
        """ Returns the middle ID in the specified k-bucket's range
        @param bucketIndex: The index of the k-bucket to use
        @type bucketIndex: int
        """
        half = int((self._buckets[bucketIndex].rangeMax - self._buckets[bucketIndex].rangeMin) // 2)
        return int(self._buckets[bucketIndex].rangeMin + half).to_bytes(constants.key_bits // 8, 'big')

    def _splitBucket(self, oldBucketIndex):
        """ Splits the specified k-bucket into two new buckets which together

View file

@@ -20,26 +20,29 @@ from docopt import docopt
from textwrap import dedent
from lbrynet import __name__ as lbrynet_name
from lbrynet.daemon.Daemon import Daemon
from lbrynet.daemon.DaemonControl import start as daemon_main
from lbrynet.daemon.DaemonConsole import main as daemon_console
from lbrynet.daemon.auth.client import LBRYAPIClient
from lbrynet.core.system_info import get_platform
from lbrynet.extras.daemon.Daemon import Daemon
from lbrynet.extras.daemon.DaemonControl import start as daemon_main
from lbrynet.extras.daemon.DaemonConsole import main as daemon_console
from lbrynet.extras.daemon.auth.client import LBRYAPIClient
from lbrynet.extras.system_info import get_platform
async def execute_command(method, params, conf_path=None):
    # this checks if the daemon is running or not
    api = None
    try:
        api = await LBRYAPIClient.get_client(conf_path)
        await api.status()
    except (ClientConnectorError, ConnectionError):
        await api.session.close()
        if api:
            await api.session.close()
        print("Could not connect to daemon. Are you sure it's running?")
        return 1

    # this actually executes the method
    resp = await api.call(method, params)
    try:
        resp = await api.call(method, params)
        await api.session.close()
        print(json.dumps(resp["result"], indent=2))
    except KeyError:
@@ -58,11 +61,13 @@ def print_help():
        lbrynet [--conf <config file>] <command> [<args>]

        EXAMPLES
        lbrynet commands                # list available commands
        lbrynet status                  # get daemon status
        lbrynet --conf ~/l1.conf status # like above but using ~/l1.conf as config file
        lbrynet resolve_name what       # resolve a name
        lbrynet help resolve_name       # get help for a command
        lbrynet start                   # starts the daemon. The daemon needs to be running for commands to work
        lbrynet help                    # display this message
        lbrynet help <command_name>     # get help for a command (doesn't need the daemon to be running)
        lbrynet commands                # list available commands
        lbrynet status                  # get the running status of the daemon
        lbrynet --conf ~/l1.conf        # use ~/l1.conf as config file
        lbrynet resolve what            # resolve a name
        """))
@@ -135,7 +140,7 @@ def main(argv=None):
    elif method in ['version', '--version', '-v']:
        print("{lbrynet_name} {lbrynet_version}".format(
            lbrynet_name=lbrynet_name, **get_platform(get_ip=False)
            lbrynet_name=lbrynet_name, **get_platform()
        ))
        return 0

View file

@@ -1,7 +1,7 @@
import logging
from twisted.internet import defer
from twisted._threads import AlreadyQuit
from .ComponentManager import ComponentManager
from lbrynet.extras.daemon.ComponentManager import ComponentManager
log = logging.getLogger(__name__)

View file

@@ -1,7 +1,8 @@
import logging
from twisted.internet import defer
from lbrynet.core.Error import ComponentStartConditionNotMet
from lbrynet.p2p.Error import ComponentStartConditionNotMet
from lbrynet.extras.daemon.PeerManager import PeerManager
from lbrynet.extras.daemon.PeerFinder import DHTPeerFinder
log = logging.getLogger(__name__)
@@ -33,13 +34,15 @@ class RequiredCondition(metaclass=RequiredConditionType):
class ComponentManager:
    default_component_classes = {}

    def __init__(self, reactor=None, analytics_manager=None, skip_components=None, **override_components):
    def __init__(self, reactor=None, analytics_manager=None, skip_components=None,
                 peer_manager=None, peer_finder=None, **override_components):
        self.skip_components = skip_components or []
        self.reactor = reactor
        self.component_classes = {}
        self.components = set()
        self.analytics_manager = analytics_manager
        self.peer_manager = peer_manager or PeerManager()
        self.peer_finder = peer_finder or DHTPeerFinder(self)

        for component_name, component_class in self.default_component_classes.items():
            if component_name in override_components:
@@ -60,7 +63,8 @@ class ComponentManager:
        try:
            component = self.get_component(condition.component)
            result = condition.evaluate(component)
        except Exception as err:
        except Exception:
            log.exception('failed to evaluate condition:')
            result = False
        return result, "" if result else condition.message
@@ -113,10 +117,10 @@
        :return: (defer.Deferred)
        """
        for component_name, cb in callbacks.items():
            if component_name not in self.component_classes:
                raise NameError("unknown component: %s" % component_name)
            if component_name not in self.skip_components:
                raise NameError("unknown component: %s" % component_name)
            if not callable(cb):
                raise ValueError("%s is not callable" % cb)

View file

@@ -2,35 +2,39 @@ import os
import asyncio
import logging
import treq
import json
import math
import binascii
from hashlib import sha256
from types import SimpleNamespace
from twisted.internet import defer, threads, reactor, error, task
import lbryschema
from aioupnp import __version__ as aioupnp_version
from aioupnp.upnp import UPnP
from aioupnp.fault import UPnPError
from lbrynet import conf
from lbrynet.core.utils import DeferredDict
from lbrynet.core.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.core.RateLimiter import RateLimiter
from lbrynet.core.BlobManager import DiskBlobManager
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.network import Network
from lbrynet.core.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.core.server.ServerProtocol import ServerProtocolFactory
from lbrynet.daemon.Component import Component
from lbrynet.daemon.ExchangeRateManager import ExchangeRateManager
from lbrynet.database.storage import SQLiteStorage
from lbrynet.dht import node, hashannouncer
from lbrynet.file_manager.EncryptedFileManager import EncryptedFileManager
from lbrynet.lbry_file.client.EncryptedFileDownloader import EncryptedFileSaverFactory
from lbrynet.lbry_file.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.reflector import ServerFactory as reflector_server_factory
from lbrynet.core.utils import generate_id
import lbrynet.schema
from lbrynet import conf
from lbrynet.blob.EncryptedFileManager import EncryptedFileManager
from lbrynet.blob.client.EncryptedFileDownloader import EncryptedFileSaverFactory
from lbrynet.blob.client.EncryptedFileOptions import add_lbry_file_to_sd_identifier
from lbrynet.dht.node import Node
from lbrynet.extras.daemon.Component import Component
from lbrynet.extras.daemon.ExchangeRateManager import ExchangeRateManager
from lbrynet.extras.daemon.storage import SQLiteStorage
from lbrynet.extras.daemon.HashAnnouncer import DHTHashAnnouncer
from lbrynet.extras.reflector.server.server import ReflectorServerFactory
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet import Network
from lbrynet.utils import DeferredDict, generate_id
from lbrynet.p2p.PaymentRateManager import OnlyFreePaymentsManager
from lbrynet.p2p.RateLimiter import RateLimiter
from lbrynet.p2p.BlobManager import DiskBlobManager
from lbrynet.p2p.StreamDescriptor import StreamDescriptorIdentifier, EncryptedFileStreamType
from lbrynet.p2p.server.BlobRequestHandler import BlobRequestHandlerFactory
from lbrynet.p2p.server.ServerProtocol import ServerProtocolFactory
log = logging.getLogger(__name__)
@@ -42,7 +46,6 @@ HEADERS_COMPONENT = "blockchain_headers"
WALLET_COMPONENT = "wallet"
DHT_COMPONENT = "dht"
HASH_ANNOUNCER_COMPONENT = "hash_announcer"
STREAM_IDENTIFIER_COMPONENT = "stream_identifier"
FILE_MANAGER_COMPONENT = "file_manager"
PEER_PROTOCOL_SERVER_COMPONENT = "peer_protocol_server"
REFLECTOR_COMPONENT = "reflector"
@@ -94,10 +97,19 @@ class ConfigSettings:
        return conf.settings.node_id

    @staticmethod
    def get_external_ip():
        from lbrynet.core.system_info import get_platform
        platform = get_platform(get_ip=True)
        return platform['ip']
    @defer.inlineCallbacks
    def get_external_ip():  # used if upnp is disabled or non-functioning
        try:
            buf = []
            response = yield treq.get("https://api.lbry.io/ip")
            yield treq.collect(response, buf.append)
            parsed = json.loads(b"".join(buf).decode())
            if parsed['success']:
                return parsed['data']['ip']
            return
        except Exception as err:
            return
# Shorthand for common ConfigSettings methods
@@ -159,7 +171,7 @@ class DatabaseComponent(Component):
'Your database is revision %i, expected %i' %
(old_revision, self.get_current_db_revision()))
if old_revision < self.get_current_db_revision():
from lbrynet.database.migrator import dbmigrator
from lbrynet.extras.daemon.migrator import dbmigrator
log.info("Upgrading your databases (revision %i to %i)", old_revision, self.get_current_db_revision())
yield threads.deferToThread(
dbmigrator.migrate_db, GCS('data_dir'), old_revision, self.get_current_db_revision()
@@ -190,7 +202,7 @@ class HeadersComponent(Component):
self.headers_file = os.path.join(self.headers_dir, 'headers')
self.old_file = os.path.join(conf.settings['lbryum_wallet_dir'], 'blockchain_headers')
self._downloading_headers = None
self._headers_progress_percent = None
self._headers_progress_percent = 0
@property
def component(self):
@@ -336,7 +348,7 @@ class WalletComponent(Component):
return self.wallet_manager
def get_status(self):
if self.wallet_manager:
if self.wallet_manager and self.running:
local_height = self.wallet_manager.network.get_local_height()
remote_height = self.wallet_manager.network.get_server_height()
best_hash = self.wallet_manager.get_best_blockhash()
@@ -353,7 +365,7 @@
conf.settings.ensure_wallet_dir()
log.info("Starting torba wallet")
storage = self.component_manager.get_component(DATABASE_COMPONENT)
lbryschema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
lbrynet.schema.BLOCKCHAIN_NAME = conf.settings['blockchain_name']
self.wallet_manager = yield f2d(LbryWalletManager.from_lbrynet_config(conf.settings, storage))
self.wallet_manager.old_db = storage
yield f2d(self.wallet_manager.start())
@@ -366,7 +378,7 @@
class BlobComponent(Component):
component_name = BLOB_COMPONENT
depends_on = [DATABASE_COMPONENT, DHT_COMPONENT]
depends_on = [DATABASE_COMPONENT]
def __init__(self, component_manager):
super().__init__(component_manager)
@@ -378,8 +390,12 @@
    def start(self):
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        self.blob_manager = DiskBlobManager(CS.get_blobfiles_dir(), storage, dht_node._dataStore)
        datastore = None
        if DHT_COMPONENT not in self.component_manager.skip_components:
            dht_node = self.component_manager.get_component(DHT_COMPONENT)
            if dht_node:
                datastore = dht_node._dataStore
        self.blob_manager = DiskBlobManager(CS.get_blobfiles_dir(), storage, datastore)
        return self.blob_manager.setup()

    def stop(self):
@@ -424,20 +440,22 @@ class DHTComponent(Component):
        node_id = CS.get_node_id()
        if node_id is None:
            node_id = generate_id()
        external_ip = self.upnp_component.external_ip
        if not external_ip:
            log.warning("UPnP component failed to get external ip")
            external_ip = yield CS.get_external_ip()
            if not external_ip:
                log.warning("failed to get external ip")

        self.dht_node = node.Node(
        self.dht_node = Node(
            node_id=node_id,
            udpPort=GCS('dht_node_port'),
            externalUDPPort=self.external_udp_port,
            externalIP=self.upnp_component.external_ip,
            externalIP=external_ip,
            peerPort=self.external_peer_port
        )

        self.dht_node.start_listening()
        yield self.dht_node._protocol._listening
        d = self.dht_node.joinNetwork(GCS('known_dht_nodes'))
        d.addCallback(lambda _: self.dht_node.start_looping_calls())
        d.addCallback(lambda _: log.info("Joined the dht"))
        yield self.dht_node.start(GCS('known_dht_nodes'), block_on_join=False)
        log.info("Started the dht")

    @defer.inlineCallbacks
@@ -461,7 +479,7 @@ class HashAnnouncerComponent(Component):
def start(self):
storage = self.component_manager.get_component(DATABASE_COMPONENT)
dht_node = self.component_manager.get_component(DHT_COMPONENT)
self.hash_announcer = hashannouncer.DHTHashAnnouncer(dht_node, storage)
self.hash_announcer = DHTHashAnnouncer(dht_node, storage)
yield self.hash_announcer.start()
@defer.inlineCallbacks
@@ -494,41 +512,6 @@
        return defer.succeed(None)


class StreamIdentifierComponent(Component):
    component_name = STREAM_IDENTIFIER_COMPONENT
    depends_on = [DHT_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
        self.sd_identifier = StreamDescriptorIdentifier()

    @property
    def component(self):
        return self.sd_identifier

    @defer.inlineCallbacks
    def start(self):
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        rate_limiter = self.component_manager.get_component(RATE_LIMITER_COMPONENT)
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)

        add_lbry_file_to_sd_identifier(self.sd_identifier)
        file_saver_factory = EncryptedFileSaverFactory(
            dht_node.peer_finder,
            rate_limiter,
            blob_manager,
            storage,
            wallet,
            GCS('download_directory')
        )
        yield self.sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)

    def stop(self):
        pass


class PaymentRateComponent(Component):
    component_name = PAYMENT_RATE_COMPONENT
@@ -549,8 +532,8 @@ class PaymentRateComponent(Component):
class FileManagerComponent(Component):
    component_name = FILE_MANAGER_COMPONENT
    depends_on = [DHT_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT,
                  STREAM_IDENTIFIER_COMPONENT, PAYMENT_RATE_COMPONENT]
    depends_on = [RATE_LIMITER_COMPONENT, BLOB_COMPONENT, DATABASE_COMPONENT, WALLET_COMPONENT,
                  PAYMENT_RATE_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
@@ -569,15 +552,26 @@
    @defer.inlineCallbacks
    def start(self):
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        rate_limiter = self.component_manager.get_component(RATE_LIMITER_COMPONENT)
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        storage = self.component_manager.get_component(DATABASE_COMPONENT)
        wallet = self.component_manager.get_component(WALLET_COMPONENT)
        sd_identifier = self.component_manager.get_component(STREAM_IDENTIFIER_COMPONENT)

        sd_identifier = StreamDescriptorIdentifier()
        add_lbry_file_to_sd_identifier(sd_identifier)
        file_saver_factory = EncryptedFileSaverFactory(
            self.component_manager.peer_finder,
            rate_limiter,
            blob_manager,
            storage,
            wallet,
            GCS('download_directory')
        )
        yield sd_identifier.add_stream_downloader_factory(EncryptedFileStreamType, file_saver_factory)

        payment_rate_manager = self.component_manager.get_component(PAYMENT_RATE_COMPONENT)
        log.info('Starting the file manager')
        self.file_manager = EncryptedFileManager(dht_node.peer_finder, rate_limiter, blob_manager, wallet,
        self.file_manager = EncryptedFileManager(self.component_manager.peer_finder, rate_limiter, blob_manager, wallet,
                                                 payment_rate_manager, storage, sd_identifier)
        yield self.file_manager.setup()
        log.info('Done setting up file manager')
@@ -589,7 +583,7 @@
class PeerProtocolServerComponent(Component):
    component_name = PEER_PROTOCOL_SERVER_COMPONENT
    depends_on = [UPNP_COMPONENT, DHT_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT, WALLET_COMPONENT,
    depends_on = [UPNP_COMPONENT, RATE_LIMITER_COMPONENT, BLOB_COMPONENT, WALLET_COMPONENT,
                  PAYMENT_RATE_COMPONENT]

    def __init__(self, component_manager):
@@ -618,7 +612,7 @@ class PeerProtocolServerComponent(Component):
        }
        server_factory = ServerProtocolFactory(
            self.component_manager.get_component(RATE_LIMITER_COMPONENT), query_handlers,
            self.component_manager.get_component(DHT_COMPONENT).peer_manager
            self.component_manager.peer_manager
        )

        try:
@@ -642,7 +636,7 @@
class ReflectorComponent(Component):
    component_name = REFLECTOR_COMPONENT
    depends_on = [DHT_COMPONENT, BLOB_COMPONENT, FILE_MANAGER_COMPONENT]
    depends_on = [BLOB_COMPONENT, FILE_MANAGER_COMPONENT]

    def __init__(self, component_manager):
        super().__init__(component_manager)
@@ -656,10 +650,9 @@
    @defer.inlineCallbacks
    def start(self):
        log.info("Starting reflector server")
        dht_node = self.component_manager.get_component(DHT_COMPONENT)
        blob_manager = self.component_manager.get_component(BLOB_COMPONENT)
        file_manager = self.component_manager.get_component(FILE_MANAGER_COMPONENT)
        reflector_factory = reflector_server_factory(dht_node.peer_manager, blob_manager, file_manager)
        reflector_factory = ReflectorServerFactory(self.component_manager.peer_manager, blob_manager, file_manager)

        try:
            self.reflector_server = yield reactor.listenTCP(self.reflector_server_port, reflector_factory)
            log.info('Started reflector on port %s', self.reflector_server_port)
@@ -695,10 +688,12 @@ class UPnPComponent:
    @defer.inlineCallbacks
    def _setup_redirects(self):
        upnp_redirects = yield DeferredDict({
            "UDP": from_future(self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")),
            "TCP": from_future(self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port"))
        })
        d = {}
        if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
            d["TCP"] = from_future(self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port"))
        if DHT_COMPONENT not in self.component_manager.skip_components:
            d["UDP"] = from_future(self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port"))
        upnp_redirects = yield DeferredDict(d)
        self.upnp_redirects.update(upnp_redirects)
@@ -710,33 +705,41 @@
            log.info("found upnp gateway: %s", self.upnp.gateway.manufacturer_string)
        except Exception as err:
            log.warning("upnp discovery failed: %s", err)
            return
            self.upnp = None

        # update the external ip
        try:
            external_ip = yield from_future(self.upnp.get_external_ip())
            if external_ip == "0.0.0.0":
                log.warning("upnp doesn't know the external ip address (returned 0.0.0.0), using fallback")
                external_ip = CS.get_external_ip()
            if self.external_ip and self.external_ip != external_ip:
                log.info("external ip changed from %s to %s", self.external_ip, external_ip)
            elif not self.external_ip:
                log.info("got external ip: %s", external_ip)
            self.external_ip = external_ip
        except (asyncio.TimeoutError, UPnPError):
            pass
        if not self.upnp_redirects:  # setup missing redirects

        external_ip = None
        if self.upnp:
            try:
                upnp_redirects = yield DeferredDict({
                    "UDP": from_future(self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")),
                    "TCP": from_future(self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port"))
                })
                external_ip = yield from_future(self.upnp.get_external_ip())
                if external_ip != "0.0.0.0" and not self.external_ip:
                    log.info("got external ip from UPnP: %s", external_ip)
            except (asyncio.TimeoutError, UPnPError):
                pass
        if external_ip == "0.0.0.0" or not external_ip:
            log.warning("unable to get external ip from UPnP, checking lbry.io fallback")
            external_ip = yield CS.get_external_ip()
        if self.external_ip and self.external_ip != external_ip:
            log.info("external ip changed from %s to %s", self.external_ip, external_ip)
        self.external_ip = external_ip
        assert self.external_ip is not None  # TODO: handle going/starting offline

        if not self.upnp_redirects and self.upnp:  # setup missing redirects
            try:
                log.info("add UPnP port mappings")
                d = {}
                if PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
                    d["TCP"] = from_future(self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port"))
                if DHT_COMPONENT not in self.component_manager.skip_components:
                    d["UDP"] = from_future(self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port"))
                upnp_redirects = yield DeferredDict(d)
                log.info("set up redirects: %s", upnp_redirects)
                self.upnp_redirects.update(upnp_redirects)
            except (asyncio.TimeoutError, UPnPError):
                self.upnp = None
                return self._maintain_redirects()
        else:  # check existing redirects are still active
        elif self.upnp:  # check existing redirects are still active
            found = set()
            mappings = yield from_future(self.upnp.get_redirects())
            for mapping in mappings:
@@ -744,7 +747,7 @@
                if proto in self.upnp_redirects and mapping['NewExternalPort'] == self.upnp_redirects[proto]:
                    if mapping['NewInternalClient'] == self.upnp.lan_address:
                        found.add(proto)
            if 'UDP' not in found:
            if 'UDP' not in found and DHT_COMPONENT not in self.component_manager.skip_components:
                try:
                    udp_port = yield from_future(
                        self.upnp.get_next_mapping(self._int_dht_node_port, "UDP", "LBRY DHT port")
@@ -753,7 +756,7 @@
                    log.info("refreshed upnp redirect for dht port: %i", udp_port)
                except (asyncio.TimeoutError, UPnPError):
                    del self.upnp_redirects['UDP']
            if 'TCP' not in found:
            if 'TCP' not in found and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components:
                try:
                    tcp_port = yield from_future(
                        self.upnp.get_next_mapping(self._int_peer_port, "TCP", "LBRY peer port")
@ -762,22 +765,28 @@ class UPnPComponent(Component):
log.info("refreshed upnp redirect for peer port: %i", tcp_port)
except (asyncio.TimeoutError, UPnPError):
del self.upnp_redirects['TCP']
if 'TCP' in self.upnp_redirects and 'UDP' in self.upnp_redirects:
log.debug("upnp redirects are still active")
if ('TCP' in self.upnp_redirects
and PEER_PROTOCOL_SERVER_COMPONENT not in self.component_manager.skip_components) and (
'UDP' in self.upnp_redirects and DHT_COMPONENT not in self.component_manager.skip_components):
if self.upnp_redirects:
log.debug("upnp redirects are still active")
@defer.inlineCallbacks
def start(self):
log.info("detecting external ip")
if not self.use_upnp:
self.external_ip = CS.get_external_ip()
self.external_ip = yield CS.get_external_ip()
return
success = False
yield self._maintain_redirects()
if self.upnp:
if not self.upnp_redirects:
if not self.upnp_redirects and not all([x in self.component_manager.skip_components for x in
(DHT_COMPONENT, PEER_PROTOCOL_SERVER_COMPONENT)]):
log.error("failed to setup upnp, debugging infomation: %s", self.upnp.zipped_debugging_info)
else:
success = True
log.debug("set up upnp port redirects for gateway: %s", self.upnp.gateway.manufacturer_string)
if self.upnp_redirects:
log.debug("set up upnp port redirects for gateway: %s", self.upnp.gateway.manufacturer_string)
else:
log.error("failed to setup upnp")
self.component_manager.analytics_manager.send_upnp_setup_success_fail(success, self.get_status())
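The component above repeatedly hands asyncio coroutines (the UPnP client) to Twisted's inlineCallbacks flow through from_future. A minimal standalone sketch of that bridge, assuming the asyncio reactor is installed; the from_future helper below is modeled on Twisted's Deferred.fromFuture and the coroutine is a toy stand-in for the real UPnP query, not the daemon's actual helper:

    import asyncio
    from twisted.internet import asyncioreactor
    asyncioreactor.install(asyncio.get_event_loop())
    from twisted.internet import defer, reactor

    def from_future(coro):
        # wrap an asyncio coroutine in a Deferred on the shared event loop
        return defer.Deferred.fromFuture(asyncio.ensure_future(coro))

    async def get_external_ip():
        await asyncio.sleep(0.1)   # stands in for the real UPnP/IGD round trip
        return "203.0.113.7"       # documentation-range address, not a real IP

    @defer.inlineCallbacks
    def maintain():
        ip = yield from_future(get_external_ip())
        print("external ip:", ip)
        reactor.stop()

    maintain()
    reactor.run()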

View file

@@ -14,41 +14,36 @@ from twisted.internet import defer, reactor
from twisted.internet.task import LoopingCall
from twisted.python.failure import Failure
from torba.constants import COIN
from torba.baseaccount import SingleKey, HierarchicalDeterministic
from torba.client.baseaccount import SingleKey, HierarchicalDeterministic
from lbryschema.claim import ClaimDict
from lbryschema.uri import parse_lbry_uri
from lbryschema.error import URIParseError, DecodeError
from lbryschema.validator import validate_claim_id
from lbryschema.address import decode_address
from lbryschema.decode import smart_decode
# TODO: importing this when internet is disabled raises a socket.gaierror
from lbrynet.core.system_info import get_lbrynet_version
from lbrynet import conf
from lbrynet.reflector import reupload
from lbrynet.daemon.Components import d2f, f2d
from lbrynet.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from lbrynet.daemon.Components import STREAM_IDENTIFIER_COMPONENT, FILE_MANAGER_COMPONENT, RATE_LIMITER_COMPONENT
from lbrynet.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, PAYMENT_RATE_COMPONENT, UPNP_COMPONENT
from lbrynet.daemon.ComponentManager import RequiredCondition
from lbrynet.daemon.Downloader import GetStream
from lbrynet.daemon.Publisher import Publisher
from lbrynet.daemon.auth.server import AuthJSONRPCServer
from lbrynet.core import utils, system_info
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.core.Error import InsufficientFundsError, UnknownNameError
from lbrynet.core.Error import DownloadDataTimeout, DownloadSDTimeout
from lbrynet.core.Error import NullFundsError, NegativeFundsError
from lbrynet.core.Error import ResolveError
from lbrynet import conf, utils, __version__
from lbrynet.dht.error import TimeoutError
from lbrynet.core.Peer import Peer
from lbrynet.core.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.core.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.wallet.account import Account as LBCAccount
from lbrynet.wallet.manager import LbryWalletManager
from lbrynet.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.blob.blob_file import is_valid_blobhash
from lbrynet.extras import system_info
from lbrynet.extras.reflector import reupload
from lbrynet.extras.daemon.Components import d2f, f2d
from lbrynet.extras.daemon.Components import WALLET_COMPONENT, DATABASE_COMPONENT, DHT_COMPONENT, BLOB_COMPONENT
from lbrynet.extras.daemon.Components import FILE_MANAGER_COMPONENT, RATE_LIMITER_COMPONENT
from lbrynet.extras.daemon.Components import EXCHANGE_RATE_MANAGER_COMPONENT, PAYMENT_RATE_COMPONENT, UPNP_COMPONENT
from lbrynet.extras.daemon.ComponentManager import RequiredCondition
from lbrynet.extras.daemon.Downloader import GetStream
from lbrynet.extras.daemon.Publisher import Publisher
from lbrynet.extras.daemon.auth.server import AuthJSONRPCServer
from lbrynet.extras.wallet import LbryWalletManager
from lbrynet.extras.wallet.account import Account as LBCAccount
from lbrynet.extras.wallet.dewies import dewies_to_lbc, lbc_to_dewies
from lbrynet.p2p.StreamDescriptor import download_sd_blob
from lbrynet.p2p.Error import InsufficientFundsError, UnknownNameError, DownloadDataTimeout, DownloadSDTimeout
from lbrynet.p2p.Error import NullFundsError, NegativeFundsError, ResolveError
from lbrynet.p2p.Peer import Peer
from lbrynet.p2p.SinglePeerDownloader import SinglePeerDownloader
from lbrynet.p2p.client.StandaloneBlobDownloader import StandaloneBlobDownloader
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.error import URIParseError, DecodeError
from lbrynet.schema.validator import validate_claim_id
from lbrynet.schema.address import decode_address
from lbrynet.schema.decode import smart_decode
log = logging.getLogger(__name__)
requires = AuthJSONRPCServer.requires
@@ -211,7 +206,6 @@ class Daemon(AuthJSONRPCServer):
DATABASE_COMPONENT: "storage",
DHT_COMPONENT: "dht_node",
WALLET_COMPONENT: "wallet_manager",
STREAM_IDENTIFIER_COMPONENT: "sd_identifier",
FILE_MANAGER_COMPONENT: "file_manager",
EXCHANGE_RATE_MANAGER_COMPONENT: "exchange_rate_manager",
PAYMENT_RATE_COMPONENT: "payment_rate_manager",
@@ -221,7 +215,7 @@ class Daemon(AuthJSONRPCServer):
}
def __init__(self, analytics_manager=None, component_manager=None):
to_skip = list(conf.settings['components_to_skip'])
to_skip = conf.settings['components_to_skip']
if 'reflector' not in to_skip and not conf.settings['run_reflector_server']:
to_skip.append('reflector')
looping_calls = {
@@ -242,7 +236,6 @@ class Daemon(AuthJSONRPCServer):
self.storage = None
self.dht_node = None
self.wallet_manager: LbryWalletManager = None
self.sd_identifier = None
self.file_manager = None
self.exchange_rate_manager = None
self.payment_rate_manager = None
@@ -306,7 +299,7 @@ class Daemon(AuthJSONRPCServer):
rate_manager = rate_manager or self.payment_rate_manager
timeout = timeout or 30
downloader = StandaloneBlobDownloader(
blob_hash, self.blob_manager, self.dht_node.peer_finder, self.rate_limiter,
blob_hash, self.blob_manager, self.component_manager.peer_finder, self.rate_limiter,
rate_manager, self.wallet_manager, timeout
)
return downloader.download()
@@ -373,8 +366,8 @@ class Daemon(AuthJSONRPCServer):
self.analytics_manager.send_download_started(download_id, name, claim_dict)
self.analytics_manager.send_new_download_start(download_id, name, claim_dict)
self.streams[sd_hash] = GetStream(
self.sd_identifier, self.wallet_manager, self.exchange_rate_manager, self.blob_manager,
self.dht_node.peer_finder, self.rate_limiter, self.payment_rate_manager, self.storage,
self.file_manager.sd_identifier, self.wallet_manager, self.exchange_rate_manager, self.blob_manager,
self.component_manager.peer_finder, self.rate_limiter, self.payment_rate_manager, self.storage,
conf.settings['max_key_fee'], conf.settings['disable_max_key_fee'], conf.settings['data_rate'],
timeout
)
@@ -433,7 +426,7 @@ class Daemon(AuthJSONRPCServer):
if blob:
return self.blob_manager.get_blob(blob[0])
return download_sd_blob(
sd_hash.decode(), self.blob_manager, self.dht_node.peer_finder, self.rate_limiter,
sd_hash.decode(), self.blob_manager, self.component_manager.peer_finder, self.rate_limiter,
self.payment_rate_manager, self.wallet_manager, timeout=conf.settings['peer_search_timeout'],
download_mirrors=conf.settings['download_mirrors']
)
@@ -451,7 +444,7 @@ class Daemon(AuthJSONRPCServer):
Get total stream size in bytes from an sd blob
"""
d = self.sd_identifier.get_metadata_for_sd_blob(sd_blob)
d = self.file_manager.sd_identifier.get_metadata_for_sd_blob(sd_blob)
d.addCallback(lambda metadata: metadata.validator.info_to_show())
d.addCallback(lambda info: int(dict(info)['stream_size']))
return d
@@ -469,9 +462,7 @@ class Daemon(AuthJSONRPCServer):
"""
Calculate estimated LBC cost for a stream given its size in bytes
"""
cost = self._get_est_cost_from_stream_size(size)
resolved = await self.wallet_manager.resolve(uri)
if uri in resolved and 'claim' in resolved[uri]:
@@ -536,7 +527,6 @@ class Daemon(AuthJSONRPCServer):
sd blob will be downloaded to determine the stream size
"""
if size is not None:
return self.get_est_cost_using_known_size(uri, size)
return self.get_est_cost_from_uri(uri)
@@ -852,7 +842,7 @@ class Daemon(AuthJSONRPCServer):
message,
conf.settings.installation_id,
platform_name,
get_lbrynet_version()
__version__
)
return self._render_response(True)
@@ -1225,7 +1215,7 @@ class Daemon(AuthJSONRPCServer):
)
if self.ledger.network.is_connected:
await self.ledger.update_account(account)
await self.ledger.subscribe_account(account)
self.default_wallet.save()
@@ -1260,7 +1250,7 @@ class Daemon(AuthJSONRPCServer):
)
if self.ledger.network.is_connected:
await self.ledger.update_account(account)
await self.ledger.subscribe_account(account)
self.default_wallet.save()
@@ -1449,7 +1439,7 @@ class Daemon(AuthJSONRPCServer):
return self.get_account_or_error(account_id).get_max_gap()
@requires("wallet")
def jsonrpc_account_fund(self, to_account, from_account, amount='0.0',
def jsonrpc_account_fund(self, to_account=None, from_account=None, amount='0.0',
everything=False, outputs=1, broadcast=False):
"""
Transfer some amount (or --everything) to an account from another
@@ -1458,8 +1448,8 @@ class Daemon(AuthJSONRPCServer):
be used together with --everything).
Usage:
account_fund (<to_account> | --to_account=<to_account>)
(<from_account> | --from_account=<from_account>)
account_fund [<to_account> | --to_account=<to_account>]
[<from_account> | --from_account=<from_account>]
(<amount> | --amount=<amount> | --everything)
[<outputs> | --outputs=<outputs>]
[--broadcast]
@@ -1476,8 +1466,8 @@ class Daemon(AuthJSONRPCServer):
(map) transaction performing requested action
"""
to_account = self.get_account_or_error(to_account, 'to_account')
from_account = self.get_account_or_error(from_account, 'from_account')
to_account = self.get_account_or_default(to_account, 'to_account')
from_account = self.get_account_or_default(from_account, 'from_account')
amount = self.get_dewies_or_error('amount', amount) if amount else None
if not isinstance(outputs, int):
raise ValueError("--outputs must be an integer.")
@@ -1488,6 +1478,35 @@ class Daemon(AuthJSONRPCServer):
outputs=outputs, broadcast=broadcast
)
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_account_send(self, amount, addresses, account_id=None, broadcast=False):
"""
Send the same number of credits to multiple addresses.
Usage:
account_send <amount> <addresses>... [--account_id=<account_id>] [--broadcast]
Options:
--account_id=<account_id> : (str) account to fund the transaction
--broadcast : (bool) actually broadcast the transaction, default: false.
Returns:
"""
amount = self.get_dewies_or_error("amount", amount)
if not amount:
raise NullFundsError
elif amount < 0:
raise NegativeFundsError()
for address in addresses:
decode_address(address)
account = self.get_account_or_default(account_id)
result = await account.send_to_addresses(amount, addresses, broadcast)
self.analytics_manager.send_credits_sent()
return result
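The usage line above translates directly to the CLI; a hypothetical invocation (the addresses are placeholders, not real LBC addresses, and the single amount is sent to each listed address):

    lbrynet account_send 0.1 bPlaceholderAddr1 bPlaceholderAddr2 --broadcast

Without --broadcast the transaction is only built and its reserved inputs are released again, which makes a dry run cheap.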
@requires(WALLET_COMPONENT)
def jsonrpc_address_is_mine(self, address, account_id=None):
"""
@@ -1777,8 +1796,8 @@ class Daemon(AuthJSONRPCServer):
results[resolved_uri] = resolved[resolved_uri]
return results
@requires(STREAM_IDENTIFIER_COMPONENT, WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
DHT_COMPONENT, RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT, DATABASE_COMPONENT,
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT, DATABASE_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_get(self, uri, file_name=None, timeout=None):
"""
@@ -1837,7 +1856,8 @@ class Daemon(AuthJSONRPCServer):
raise ResolveError(
"Failed to resolve stream at lbry://{}".format(uri.replace("lbry://", ""))
)
if 'error' in resolved:
raise ResolveError(f"error resolving stream: {resolved['error']}")
txid, nout, name = resolved['txid'], resolved['nout'], resolved['name']
claim_dict = ClaimDict.load_dict(resolved['value'])
sd_hash = claim_dict.source_hash.decode()
@@ -1963,7 +1983,7 @@ class Daemon(AuthJSONRPCServer):
response = yield self._render_response(result)
defer.returnValue(response)
@requires(STREAM_IDENTIFIER_COMPONENT, WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
@requires(WALLET_COMPONENT, EXCHANGE_RATE_MANAGER_COMPONENT, BLOB_COMPONENT,
DHT_COMPONENT, RATE_LIMITER_COMPONENT, PAYMENT_RATE_COMPONENT, DATABASE_COMPONENT,
conditions=[WALLET_IS_UNLOCKED])
def jsonrpc_stream_cost_estimate(self, uri, size=None):
@@ -2207,19 +2227,14 @@ class Daemon(AuthJSONRPCServer):
available = await account.get_balance()
if amount >= available:
# TODO: add check for existing claim balance
#balance = yield self.wallet.get_max_usable_balance_for_claim(name)
#max_bid_amount = balance - MAX_UPDATE_FEE_ESTIMATE
#if balance <= MAX_UPDATE_FEE_ESTIMATE:
raise InsufficientFundsError(
"Insufficient funds, please deposit additional LBC. Minimum additional LBC needed {}"
.format(round((amount - available) / COIN + 0.01, 2))
)
# .format(MAX_UPDATE_FEE_ESTIMATE - balance))
#elif bid > max_bid_amount:
# raise InsufficientFundsError(
# "Please lower the bid value, the maximum amount you can specify for this claim is {}."
# .format(max_bid_amount))
existing_claims = await account.get_claims(claim_name=name)
if len(existing_claims) == 1:
available += existing_claims[0].get_estimator(self.ledger).effective_amount
if amount >= available:
raise InsufficientFundsError(
f"Please lower the bid value, the maximum amount "
f"you can specify for this claim is {dewies_to_lbc(available)}."
)
metadata = metadata or {}
if fee is not None:
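The reworked check folds the deposit locked in an existing claim of the same name back into the spendable balance before rejecting a bid. A worked example with made-up dewies amounts (1 LBC == 100,000,000 dewies; ValueError stands in for the daemon's InsufficientFundsError to keep the snippet standalone):

    available = 50_000_000                  # 0.5 LBC spendable balance
    existing_claim_effective = 100_000_000  # 1.0 LBC locked in the old claim
    bid = 120_000_000                       # 1.2 LBC requested

    if bid >= available:                        # 1.2 >= 0.5, keep checking
        available += existing_claim_effective   # updating frees the old deposit: 1.5 LBC
        if bid >= available:                    # 1.2 >= 1.5 is False, so this bid is allowed
            raise ValueError("maximum bid for this claim is 1.5 LBC")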
@@ -2271,7 +2286,7 @@ class Daemon(AuthJSONRPCServer):
}
}
# this will be used to verify the format with lbryschema
# this will be used to verify the format with lbrynet.schema
claim_copy = deepcopy(claim_dict)
if sources is not None:
claim_dict['stream']['source'] = sources
@@ -2292,7 +2307,7 @@ class Daemon(AuthJSONRPCServer):
raise Exception("no source provided to publish")
try:
ClaimDict.load_dict(claim_copy)
# the metadata to use in the claim can be serialized by lbryschema
# the metadata to use in the claim can be serialized by lbrynet.schema
except DecodeError as err:
# there was a problem with a metadata field, raise an error here rather than
# waiting to find out when we go to publish the claim (after having made the stream)
@@ -2321,7 +2336,7 @@ class Daemon(AuthJSONRPCServer):
)
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
async def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None, account_id=None):
async def jsonrpc_claim_abandon(self, claim_id=None, txid=None, nout=None, account_id=None, blocking=True):
"""
Abandon a name and reclaim credits from the claim
@@ -2329,12 +2344,14 @@ class Daemon(AuthJSONRPCServer):
claim_abandon [<claim_id> | --claim_id=<claim_id>]
[<txid> | --txid=<txid>] [<nout> | --nout=<nout>]
[--account_id=<account_id>]
[--blocking]
Options:
--claim_id=<claim_id> : (str) claim_id of the claim to abandon
--txid=<txid> : (str) txid of the claim to abandon
--nout=<nout> : (int) nout of the claim to abandon
--account_id=<account_id> : (str) id of the account to use
--blocking : (bool) wait until abandon is in mempool
Returns:
(dict) Dictionary containing result of the claim
@@ -2354,6 +2371,8 @@ class Daemon(AuthJSONRPCServer):
tx = await self.wallet_manager.abandon_claim(claim_id, txid, nout, account)
self.analytics_manager.send_claim_action('abandon')
if blocking:
await self.ledger.wait(tx)
return {"success": True, "tx": tx}
@requires(WALLET_COMPONENT, conditions=[WALLET_IS_UNLOCKED])
@@ -2813,7 +2832,7 @@ class Daemon(AuthJSONRPCServer):
Delete a blob
Usage:
blob_delete (<blob_hash> | --blob_hash=<blob_hash)
blob_delete (<blob_hash> | --blob_hash=<blob_hash>)
Options:
--blob_hash=<blob_hash> : (str) blob hash of the blob to delete
@@ -2849,7 +2868,7 @@ class Daemon(AuthJSONRPCServer):
(list) List of contact dictionaries {'host': <peer ip>, 'port': <peer port>, 'node_id': <peer node id>}
"""
if not utils.is_valid_blobhash(blob_hash):
if not is_valid_blobhash(blob_hash):
raise Exception("invalid blob hash")
finished_deferred = self.dht_node.iterativeFindValue(unhexlify(blob_hash))
@@ -3125,6 +3144,7 @@ class Daemon(AuthJSONRPCServer):
result['buckets'] = {}
for i in range(len(self.dht_node._routingTable._buckets)):
result['buckets'][i] = []
for contact in self.dht_node._routingTable._buckets[i]._contacts:
blobs = list(hosts.pop(contact)) if contact in hosts else []
blob_hashes.update(blobs)
@@ -3134,7 +3154,7 @@ class Daemon(AuthJSONRPCServer):
"node_id": hexlify(contact.id).decode(),
"blobs": blobs,
}
result['buckets'].setdefault(i, []).append(host)
result['buckets'][i].append(host)
contact_set.add(hexlify(contact.id).decode())
result['contacts'] = list(contact_set)

View file

@@ -6,12 +6,10 @@ import logging.handlers
from twisted.internet import defer, reactor, threads
from aiohttp import client_exceptions
from lbrynet import analytics
from lbrynet import conf
from lbrynet.core import utils
from lbrynet.core import log_support
from lbrynet.daemon.auth.client import LBRYAPIClient
from lbrynet.daemon.Daemon import Daemon
from lbrynet import utils, conf, log_support
from lbrynet.extras.daemon import analytics
from lbrynet.extras.daemon.auth.client import LBRYAPIClient
from lbrynet.extras.daemon.Daemon import Daemon
log = logging.getLogger(__name__)

View file

@@ -7,16 +7,14 @@ if 'win' in sys.platform:
import certifi
os.environ['SSL_CERT_FILE'] = certifi.where()
from lbrynet.core import log_support
import argparse
import logging.handlers
from twisted.internet import reactor
from lbrynet import conf
from lbrynet.core import utils, system_info
from lbrynet.daemon.Daemon import Daemon
from lbrynet import utils, conf, log_support
from lbrynet.extras import system_info
from lbrynet.extras.daemon.Daemon import Daemon
log = logging.getLogger(__name__)
@@ -54,7 +52,7 @@ def start(argv=None, conf_path=None):
conf.settings.update({'use_auth_http': args.useauth}, data_types=(conf.TYPE_CLI,))
if args.version:
version = system_info.get_platform(get_ip=False)
version = system_info.get_platform()
version['installation_id'] = conf.settings.installation_id
print(utils.json_dumps_pretty(version))
return

View file

@@ -1,19 +1,17 @@
import logging
import os
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from lbrynet.daemon.Components import f2d
from lbryschema.fee import Fee
from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, InvalidStreamDescriptorError
from lbrynet.core.Error import DownloadDataTimeout, DownloadCanceledError, DownloadSDTimeout
from lbrynet.core.utils import safe_start_looping_call, safe_stop_looping_call
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.file_manager.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from lbrynet import conf
from torba.constants import COIN
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.schema.fee import Fee
from lbrynet.p2p.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed, InvalidStreamDescriptorError
from lbrynet.p2p.Error import DownloadDataTimeout, DownloadCanceledError
from lbrynet.p2p.StreamDescriptor import download_sd_blob
from lbrynet.blob.EncryptedFileDownloader import ManagedEncryptedFileDownloaderFactory
from torba.client.constants import COIN
from lbrynet.extras.wallet.dewies import dewies_to_lbc
from lbrynet.extras.daemon.Components import f2d
INITIALIZING_CODE = 'initializing'
DOWNLOAD_METADATA_CODE = 'downloading_metadata'
@@ -34,8 +32,11 @@ log = logging.getLogger(__name__)
class GetStream:
def __init__(self, sd_identifier, wallet, exchange_rate_manager, blob_manager, peer_finder, rate_limiter,
payment_rate_manager, storage, max_key_fee, disable_max_key_fee, data_rate=None, timeout=None):
payment_rate_manager, storage, max_key_fee, disable_max_key_fee, data_rate=None, timeout=None,
reactor=None):
if not reactor:
from twisted.internet import reactor
self.reactor = reactor
self.timeout = timeout or conf.settings['download_timeout']
self.data_rate = data_rate or conf.settings['data_rate']
self.max_key_fee = max_key_fee or conf.settings['max_key_fee'][1]
@@ -53,44 +54,17 @@ class GetStream:
self.sd_identifier = sd_identifier
self.storage = storage
self.downloader = None
self.checker = LoopingCall(self.check_status)
# fired when the download is complete
self.finished_deferred = None
# fired after the metadata and the first data blob have been downloaded
self.data_downloading_deferred = defer.Deferred(None)
self.wrote_data = False
@property
def download_path(self):
return os.path.join(self.download_directory, self.downloader.file_name)
def _check_status(self, status):
if status.num_completed > 0 and not self.data_downloading_deferred.called:
self.data_downloading_deferred.callback(True)
if self.data_downloading_deferred.called:
safe_stop_looping_call(self.checker)
else:
log.debug("Waiting for stream data (%i seconds)", self.timeout_counter)
def check_status(self):
"""
Check if we've got the first data blob in the stream yet
"""
self.timeout_counter += 1
if self.timeout_counter > self.timeout:
if not self.data_downloading_deferred.called:
if self.downloader:
err = DownloadDataTimeout(self.sd_hash)
else:
err = DownloadSDTimeout(self.sd_hash)
self.data_downloading_deferred.errback(err)
safe_stop_looping_call(self.checker)
elif self.downloader:
d = self.downloader.status()
d.addCallback(self._check_status)
else:
log.debug("Waiting for stream descriptor (%i seconds)", self.timeout_counter)
def convert_max_fee(self):
currency, amount = self.max_key_fee['currency'], self.max_key_fee['amount']
return self.exchange_rate_manager.convert_currency(currency, "LBC", amount)
@@ -151,18 +125,13 @@ class GetStream:
else:
defer.returnValue(None)
@defer.inlineCallbacks
def finish(self, results, name):
self.set_status(DOWNLOAD_STOPPED_CODE, name)
log.info("Finished downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6],
self.download_path)
safe_stop_looping_call(self.checker)
status = yield self.downloader.status()
self._check_status(status)
defer.returnValue(self.download_path)
return defer.succeed(self.download_path)
def fail(self, err):
safe_stop_looping_call(self.checker)
raise err
@defer.inlineCallbacks
@@ -194,8 +163,10 @@ class GetStream:
self.downloader = yield self._create_downloader(sd_blob, file_name=file_name)
yield self.pay_key_fee(key_fee, name)
yield self.storage.save_content_claim(self.downloader.stream_hash, "%s:%i" % (txid, nout))
log.info("Downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6], self.download_path)
self.finished_deferred = self.downloader.start()
self.downloader.download_manager.progress_manager.wrote_first_data.addCallback(
self.data_downloading_deferred.callback
)
self.finished_deferred.addCallbacks(lambda result: self.finish(result, name), self.fail)
@defer.inlineCallbacks
@@ -211,16 +182,19 @@ class GetStream:
"""
self.set_status(INITIALIZING_CODE, name)
key_fee = yield self._initialize(stream_info)
safe_start_looping_call(self.checker, 1)
self.set_status(DOWNLOAD_METADATA_CODE, name)
try:
sd_blob = yield self._download_sd_blob()
yield self._download(sd_blob, name, key_fee, txid, nout, file_name)
self.set_status(DOWNLOAD_RUNNING_CODE, name)
yield self.data_downloading_deferred
log.info("Downloading lbry://%s (%s) --> %s", name, self.sd_hash[:6], self.download_path)
self.data_downloading_deferred.addTimeout(self.timeout, self.reactor)
try:
yield self.data_downloading_deferred
self.wrote_data = True
except defer.TimeoutError:
raise DownloadDataTimeout("data download timed out")
except (DownloadDataTimeout, InvalidStreamDescriptorError) as err:
safe_stop_looping_call(self.checker)
raise err
defer.returnValue((self.downloader, self.finished_deferred))
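The LoopingCall status checker above is replaced by Twisted's Deferred.addTimeout, which cancels the pending deferred when the clock runs out and surfaces the cancellation as defer.TimeoutError. A standalone toy of just that pattern, not the daemon's actual downloader:

    from twisted.internet import defer, reactor

    @defer.inlineCallbacks
    def wait_for_first_blob():
        first_data = defer.Deferred()      # fired when the first data blob is written
        first_data.addTimeout(2, reactor)  # errbacks with defer.TimeoutError after 2s
        try:
            yield first_data
        except defer.TimeoutError:
            print("data download timed out")
        reactor.stop()

    wait_for_first_blob()
    reactor.run()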

View file

@@ -6,7 +6,7 @@ import treq
from twisted.internet import defer
from twisted.internet.task import LoopingCall
from lbrynet.core.Error import InvalidExchangeRateResponse, CurrencyConversionError
from lbrynet.p2p.Error import InvalidExchangeRateResponse, CurrencyConversionError
log = logging.getLogger(__name__)

View file

@@ -2,8 +2,7 @@ import binascii
import logging
from twisted.internet import defer, task
from lbrynet.core import utils
from lbrynet import conf
from lbrynet import utils, conf
log = logging.getLogger(__name__)

View file

@@ -4,7 +4,6 @@ import logging
from twisted.internet import defer
from lbrynet import conf
log = logging.getLogger(__name__)
@@ -19,13 +18,12 @@ class DHTPeerFinder(DummyPeerFinder):
"""This class finds peers which have announced to the DHT that they have certain blobs"""
#implements(IPeerFinder)
def __init__(self, dht_node, peer_manager):
def __init__(self, component_manager):
"""
dht_node - an instance of dht.Node class
peer_manager - an instance of PeerManager class
component_manager - an instance of ComponentManager
"""
self.dht_node = dht_node
self.peer_manager = peer_manager
self.component_manager = component_manager
self.peer_manager = component_manager.peer_manager
self.peers = {}
self._ongoing_searchs = {}
@@ -40,19 +38,30 @@ class DHTPeerFinder(DummyPeerFinder):
Returns:
list of peers for the blob
"""
self.peers.setdefault(blob_hash, {(self.dht_node.externalIP, self.dht_node.peerPort,)})
if "dht" in self.component_manager.skip_components:
return defer.succeed([])
if not self.component_manager.all_components_running("dht"):
return defer.succeed([])
dht_node = self.component_manager.get_component("dht")
self.peers.setdefault(blob_hash, {(dht_node.externalIP, dht_node.peerPort,)})
if not blob_hash in self._ongoing_searchs or self._ongoing_searchs[blob_hash].called:
self._ongoing_searchs[blob_hash] = self._execute_peer_search(blob_hash, timeout)
peers = set(self._filter_self(blob_hash) if filter_self else self.peers[blob_hash])
self._ongoing_searchs[blob_hash] = self._execute_peer_search(dht_node, blob_hash, timeout)
def _filter_self(blob_hash):
my_host, my_port = dht_node.externalIP, dht_node.peerPort
return {(host, port) for host, port in self.peers[blob_hash] if (host, port) != (my_host, my_port)}
peers = set(_filter_self(blob_hash) if filter_self else self.peers[blob_hash])
return defer.succeed([self.peer_manager.get_peer(*peer) for peer in peers])
@defer.inlineCallbacks
def _execute_peer_search(self, blob_hash, timeout):
def _execute_peer_search(self, dht_node, blob_hash, timeout):
bin_hash = binascii.unhexlify(blob_hash)
finished_deferred = self.dht_node.iterativeFindValue(bin_hash, exclude=self.peers[blob_hash])
finished_deferred = dht_node.iterativeFindValue(bin_hash, exclude=self.peers[blob_hash])
timeout = timeout or conf.settings['peer_search_timeout']
if timeout:
finished_deferred.addTimeout(timeout, self.dht_node.clock)
finished_deferred.addTimeout(timeout, dht_node.clock)
try:
peer_list = yield finished_deferred
self.peers[blob_hash].update({(host, port) for _, host, port in peer_list})
@@ -60,7 +69,3 @@ class DHTPeerFinder(DummyPeerFinder):
log.debug("DHT timed out while looking peers for blob %s after %s seconds", blob_hash, timeout)
finally:
del self._ongoing_searchs[blob_hash]
def _filter_self(self, blob_hash):
my_host, my_port = self.dht_node.externalIP, self.dht_node.peerPort
return {(host, port) for host, port in self.peers[blob_hash] if (host, port) != (my_host, my_port)}
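The self-filtering the closure performs is plain set arithmetic over (host, port) pairs; a toy illustration with made-up endpoints:

    peers = {("10.0.0.5", 3333), ("203.0.113.7", 4444)}
    my_host, my_port = "203.0.113.7", 4444  # this node's own externalIP/peerPort
    others = {(h, p) for h, p in peers if (h, p) != (my_host, my_port)}
    assert others == {("10.0.0.5", 3333)}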

View file

@@ -1,4 +1,4 @@
from lbrynet.core.Peer import Peer
from lbrynet.p2p.Peer import Peer
class PeerManager:

View file

@@ -3,7 +3,7 @@ import logging
import mimetypes
import os
from lbrynet.file_manager.EncryptedFileCreator import create_lbry_file
from lbrynet.blob.EncryptedFileCreator import create_lbry_file
log = logging.getLogger(__name__)

View file

@@ -0,0 +1 @@
from lbrynet.extras.daemon import Components # register Component classes

View file

@@ -4,8 +4,8 @@ import logging
import treq
from twisted.internet import defer, task
from lbrynet import conf
from lbrynet.core import looping_call_manager, utils, system_info
from lbrynet import conf, utils
from lbrynet.extras import looping_call_manager, system_info
# Things We Track
SERVER_STARTUP = 'Server Startup'
@@ -136,8 +136,8 @@ class Manager:
def _get_looping_calls(self):
return [
('send_heartbeat', self._send_heartbeat, 60),
('update_tracked_metrics', self._update_tracked_metrics, 300),
('send_heartbeat', self._send_heartbeat, 300),
('update_tracked_metrics', self._update_tracked_metrics, 600),
]
def _setup_looping_calls(self):

View file

@@ -3,7 +3,7 @@ from zope.interface import implementer
from twisted.cred import portal, checkers, credentials, error as cred_error
from twisted.internet import defer
from twisted.web import resource
from .keyring import Keyring
from lbrynet.extras.daemon.auth.keyring import Keyring
log = logging.getLogger(__name__)

View file

@@ -4,7 +4,7 @@ import logging
from urllib.parse import urlparse
from lbrynet import conf
from lbrynet.daemon.auth.keyring import Keyring, APIKey
from lbrynet.extras.daemon.auth.keyring import Keyring, APIKey
log = logging.getLogger(__name__)
USER_AGENT = "AuthServiceProxy/0.1"

View file

@@ -4,8 +4,8 @@ from twisted.web import server, guard, resource
from twisted.cred import portal
from lbrynet import conf
from .auth import PasswordChecker, HttpPasswordRealm
from ..auth.keyring import Keyring
from lbrynet.extras.daemon.auth.auth import PasswordChecker, HttpPasswordRealm
from lbrynet.extras.daemon.auth.keyring import Keyring
log = logging.getLogger(__name__)
@@ -42,7 +42,7 @@ class AuthJSONRPCResource(resource.Resource):
realm = HttpPasswordRealm(self)
portal_to_realm = portal.Portal(realm, [checker, ])
root = guard.HTTPAuthSessionWrapper(
portal_to_realm, [guard.BasicCredentialFactory('Login to lbrynet api'), ]
portal_to_realm, [guard.BasicCredentialFactory(b'Login to lbrynet api'), ]
)
else:
log.info("Using non-authenticated API")

View file

@@ -3,6 +3,7 @@ import datetime
import hmac
import hashlib
import base58
import logging
from OpenSSL.crypto import FILETYPE_PEM
from ssl import create_default_context, SSLContext
from cryptography.hazmat.backends import default_backend
@@ -12,7 +13,7 @@ from cryptography.x509.name import NameOID, NameAttribute
from cryptography.hazmat.primitives.asymmetric import rsa
from cryptography.hazmat.primitives import serialization
from twisted.internet import ssl
import logging
from lbrynet import conf
log = logging.getLogger(__name__)

View file

@@ -1,10 +1,9 @@
import asyncio
import logging
from six.moves.urllib import parse as urlparse
from urllib import parse as urlparse
import json
import inspect
import signal
from functools import wraps
from twisted.web import server
from twisted.internet import defer
@@ -14,16 +13,17 @@ from twisted.internet.error import ConnectionDone, ConnectionLost
from txjsonrpc import jsonrpclib
from traceback import format_exc
from lbrynet import conf, analytics
from lbrynet.core.Error import InvalidAuthenticationToken
from lbrynet.core import utils
from lbrynet.core.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.core.looping_call_manager import LoopingCallManager
from lbrynet.daemon.ComponentManager import ComponentManager
from .keyring import APIKey, Keyring
from .undecorated import undecorated
from .factory import AuthJSONRPCResource
from lbrynet.daemon.json_response_encoder import JSONResponseEncoder
from lbrynet import conf, utils
from lbrynet.extras.daemon import analytics
from lbrynet.p2p.Error import InvalidAuthenticationToken
from lbrynet.p2p.Error import ComponentsNotStarted, ComponentStartConditionNotMet
from lbrynet.extras.looping_call_manager import LoopingCallManager
from lbrynet.extras.daemon.ComponentManager import ComponentManager
from lbrynet.extras.daemon.auth.keyring import APIKey, Keyring
from lbrynet.extras.daemon.auth.undecorated import undecorated
from lbrynet.extras.daemon.auth.factory import AuthJSONRPCResource
from lbrynet.extras.daemon.json_response_encoder import JSONResponseEncoder
log = logging.getLogger(__name__)
EMPTY_PARAMS = [{}]
@@ -206,7 +206,6 @@ class AuthJSONRPCServer(AuthorizedBase):
self.announced_startup = False
self.sessions = {}
self.server = None
self.keyring = Keyring.generate_and_save()
@defer.inlineCallbacks
def start_listening(self):
@@ -284,7 +283,10 @@ class AuthJSONRPCServer(AuthorizedBase):
return d
def get_server_factory(self):
return AuthJSONRPCResource(self).getServerFactory(self.keyring, self._use_authentication, self._use_https)
keyring = None
if self._use_authentication or self._use_https:
keyring = Keyring.generate_and_save()
return AuthJSONRPCResource(self).getServerFactory(keyring, self._use_authentication, self._use_https)
def _set_headers(self, request, data, update_secret=False):
if conf.settings['allowed_origin']:

View file

@@ -1,11 +1,15 @@
import logging
from decimal import Decimal
from binascii import hexlify
from datetime import datetime
from json import JSONEncoder
from ecdsa import BadSignatureError
from lbrynet.wallet.transaction import Transaction, Output
from lbrynet.wallet.dewies import dewies_to_lbc
from lbrynet.wallet.ledger import MainNetLedger
from lbrynet.extras.wallet import MainNetLedger
from lbrynet.extras.wallet.transaction import Transaction, Output
from lbrynet.extras.wallet.dewies import dewies_to_lbc
log = logging.getLogger(__name__)
class JSONResponseEncoder(JSONEncoder):
@@ -75,6 +79,12 @@ class JSONResponseEncoder(JSONEncoder):
)
except BadSignatureError:
output['valid_signature'] = False
except ValueError:
log.exception(
'txo.id: %s, txo.channel.id: %s, output: %s',
txo.id, txo.channel.id, output
)
output['valid_signature'] = False
if txo.script.is_claim_name:
output['type'] = 'claim'

View file

@@ -7,21 +7,21 @@ def migrate_db(db_dir, start, end):
current = start
while current < end:
if current == 1:
from lbrynet.database.migrator.migrate1to2 import do_migration
from .migrate1to2 import do_migration
elif current == 2:
from lbrynet.database.migrator.migrate2to3 import do_migration
from .migrate2to3 import do_migration
elif current == 3:
from lbrynet.database.migrator.migrate3to4 import do_migration
from .migrate3to4 import do_migration
elif current == 4:
from lbrynet.database.migrator.migrate4to5 import do_migration
from .migrate4to5 import do_migration
elif current == 5:
from lbrynet.database.migrator.migrate5to6 import do_migration
from .migrate5to6 import do_migration
elif current == 6:
from lbrynet.database.migrator.migrate6to7 import do_migration
from .migrate6to7 import do_migration
elif current == 7:
from lbrynet.database.migrator.migrate7to8 import do_migration
from .migrate7to8 import do_migration
elif current == 8:
from lbrynet.database.migrator.migrate8to9 import do_migration
from .migrate8to9 import do_migration
else:
raise Exception("DB migration of version {} to {} is not available".format(current,
current+1))

View file

@@ -2,8 +2,8 @@ import sqlite3
import os
import json
import logging
from lbryschema.decode import smart_decode
from lbrynet import conf
from lbrynet.schema.decode import smart_decode
log = logging.getLogger(__name__)

View file

@@ -2,9 +2,9 @@ import sqlite3
import logging
import os
from lbrynet.core.Error import InvalidStreamDescriptorError
from lbrynet.core.StreamDescriptor import EncryptedFileStreamType, format_sd_info, format_blobs, validate_descriptor
from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
from lbrynet.p2p.Error import InvalidStreamDescriptorError
from lbrynet.p2p.StreamDescriptor import EncryptedFileStreamType, format_sd_info, format_blobs, validate_descriptor
from lbrynet.blob.CryptBlob import CryptBlobInfo
log = logging.getLogger(__name__)

View file

@@ -6,13 +6,13 @@ from binascii import hexlify, unhexlify
from decimal import Decimal
from twisted.internet import defer, task, threads
from twisted.enterprise import adbapi
from torba.client.constants import COIN
from lbryschema.claim import ClaimDict
from lbryschema.decode import smart_decode
from lbrynet import conf
from lbrynet.cryptstream.CryptBlob import CryptBlobInfo
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.decode import smart_decode
from lbrynet.blob.CryptBlob import CryptBlobInfo
from lbrynet.dht.constants import dataExpireTimeout
from torba.constants import COIN
log = logging.getLogger(__name__)
@@ -418,7 +418,7 @@ class SQLiteStorage:
def check_if_stream_exists(self, stream_hash):
d = self.db.runQuery("select stream_hash from stream where stream_hash=?", (stream_hash, ))
d.addCallback(lambda r: True if len(r) else False)
d.addCallback(lambda r: bool(len(r)))
return d
def get_blob_num_by_hash(self, stream_hash, blob_hash):
@@ -573,16 +573,13 @@ class SQLiteStorage:
}
def _get_supports(transaction):
if len(claim_ids) == 1:
bind = "=?"
else:
bind = "in ({})".format(','.join('?' for _ in range(len(claim_ids))))
return [
_format_support(*support_info)
for support_info in transaction.execute(
f"select * from support where claim_id {bind}",
for support_info in _batched_select(
transaction,
"select * from support where claim_id in {}",
tuple(claim_ids)
).fetchall()
)
]
return self.db.runInteraction(_get_supports)
@@ -756,28 +753,27 @@ class SQLiteStorage:
def get_claims_from_stream_hashes(self, stream_hashes, include_supports=True):
def _batch_get_claim(transaction):
results = {}
bind = "({})".format(','.join('?' for _ in range(len(stream_hashes))))
claim_infos = transaction.execute(
claim_infos = _batched_select(
transaction,
"select content_claim.stream_hash, c.* from content_claim "
"inner join claim c on c.claim_outpoint=content_claim.claim_outpoint "
"and content_claim.stream_hash in {} order by c.rowid desc".format(bind),
tuple(stream_hashes)
).fetchall()
"and content_claim.stream_hash in {} order by c.rowid desc",
stream_hashes)
channel_id_infos = {}
for claim_info in claim_infos:
if claim_info[7]:
streams = channel_id_infos.get(claim_info[7], [])
streams.append(claim_info[0])
channel_id_infos[claim_info[7]] = streams
for claim_info in claim_infos:
stream_hash = claim_info[0]
result = _format_claim_response(*claim_info[1:])
results[stream_hash] = result
bind = "({})".format(','.join('?' for _ in range(len(channel_id_infos))))
for claim_id, channel_name in transaction.execute(
f"select claim_id, claim_name from claim where claim_id in {bind}",
tuple(channel_id_infos.keys())
).fetchall():
channel_names = _batched_select(
transaction,
"select claim_id, claim_name from claim where claim_id in {}",
tuple(channel_id_infos.keys())
)
for claim_id, channel_name in channel_names:
for stream_hash in channel_id_infos[claim_id]:
results[stream_hash]['channel_name'] = channel_name
return results
@@ -892,3 +888,11 @@ def _format_claim_response(outpoint, claim_id, name, amount, height, serialized,
"channel_name": None
}
return r
def _batched_select(transaction, query, parameters):
for start_index in range(0, len(parameters), 900):
current_batch = parameters[start_index:start_index+900]
bind = "({})".format(','.join(['?'] * len(current_batch)))
for result in transaction.execute(query.format(bind), current_batch):
yield result
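_batched_select works around SQLite's cap on bound host parameters (999 by default), so an `in (...)` list with thousands of claim ids is split into chunks of 900. A self-contained demonstration against an in-memory database, reusing the helper as written above:

    import sqlite3

    def _batched_select(transaction, query, parameters):
        for start_index in range(0, len(parameters), 900):
            current_batch = parameters[start_index:start_index + 900]
            bind = "({})".format(','.join(['?'] * len(current_batch)))
            for result in transaction.execute(query.format(bind), current_batch):
                yield result

    db = sqlite3.connect(":memory:")
    db.execute("create table support (claim_id text, amount integer)")
    db.executemany("insert into support values (?, ?)",
                   [("claim%d" % i, i) for i in range(2000)])
    ids = tuple("claim%d" % i for i in range(1500))  # too many for one IN (...) list
    rows = list(_batched_select(db, "select * from support where claim_id in {}", ids))
    assert len(rows) == 1500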

View file

@@ -63,7 +63,3 @@ If the transfer was not successful (False), the blob is re-added to the needed_b
Blob requests continue for each of the blobs the client has queued to send, when completed
the client disconnects.
"""
from lbrynet.reflector.server.server import ReflectorServerFactory as ServerFactory
from lbrynet.reflector.client.client import EncryptedFileReflectorClientFactory as ClientFactory
from lbrynet.reflector.client.blob import BlobReflectorClientFactory as BlobClientFactory

View file

@@ -5,7 +5,7 @@ from twisted.protocols.basic import FileSender
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet import defer, error
from lbrynet.reflector.common import IncompleteResponse, REFLECTOR_V2
from lbrynet.extras.reflector.common import IncompleteResponse, REFLECTOR_V2
log = logging.getLogger(__name__)

View file

@@ -6,8 +6,8 @@ from twisted.protocols.basic import FileSender
from twisted.internet.protocol import Protocol, ClientFactory
from twisted.internet import defer, error
from lbrynet.reflector.common import IncompleteResponse, ReflectorRequestError
from lbrynet.reflector.common import REFLECTOR_V1, REFLECTOR_V2
from lbrynet.extras.reflector.common import IncompleteResponse, ReflectorRequestError
from lbrynet.extras.reflector.common import REFLECTOR_V1, REFLECTOR_V2
log = logging.getLogger(__name__)

View file

@@ -2,7 +2,8 @@ import random
from twisted.internet import reactor, defer
from lbrynet import conf
from lbrynet.reflector import ClientFactory, BlobClientFactory
from lbrynet.extras.reflector.client.client import EncryptedFileReflectorClientFactory
from lbrynet.extras.reflector.client.blob import BlobReflectorClientFactory
def _is_ip(host):
@@ -26,7 +27,7 @@ def resolve(host):
@defer.inlineCallbacks
def _reflect_stream(blob_manager, stream_hash, sd_hash, reflector_server):
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
factory = ClientFactory(blob_manager, stream_hash, sd_hash)
factory = EncryptedFileReflectorClientFactory(blob_manager, stream_hash, sd_hash)
ip = yield resolve(reflector_address)
yield reactor.connectTCP(ip, reflector_port, factory)
result = yield factory.finished_deferred
@@ -40,7 +41,7 @@ def _reflect_blobs(blob_manager, blob_hashes, reflector_server):
@defer.inlineCallbacks
def _reflect_blobs(blob_manager, blob_hashes, reflector_server):
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
factory = BlobClientFactory(blob_manager, blob_hashes)
factory = BlobReflectorClientFactory(blob_manager, blob_hashes)
ip = yield resolve(reflector_address)
yield reactor.connectTCP(ip, reflector_port, factory)
result = yield factory.finished_deferred

View file

@@ -3,12 +3,12 @@ import json
from twisted.python import failure
from twisted.internet import error, defer
from twisted.internet.protocol import Protocol, ServerFactory
from lbrynet.core.utils import is_valid_blobhash
from lbrynet.core.Error import DownloadCanceledError, InvalidBlobHashError
from lbrynet.core.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.core.StreamDescriptor import save_sd_info
from lbrynet.reflector.common import REFLECTOR_V1, REFLECTOR_V2
from lbrynet.reflector.common import ReflectorRequestError, ReflectorClientVersionError
from lbrynet.blob.blob_file import is_valid_blobhash
from lbrynet.p2p.Error import DownloadCanceledError, InvalidBlobHashError
from lbrynet.p2p.StreamDescriptor import BlobStreamDescriptorReader
from lbrynet.p2p.StreamDescriptor import save_sd_info
from lbrynet.extras.reflector.common import REFLECTOR_V1, REFLECTOR_V2
from lbrynet.extras.reflector.common import ReflectorRequestError, ReflectorClientVersionError
log = logging.getLogger(__name__)

View file

@@ -0,0 +1,30 @@
import platform
import os
import logging.handlers
from lbrynet.schema import __version__ as schema_version
from lbrynet import build_type, __version__ as lbrynet_version
log = logging.getLogger(__name__)
def get_platform() -> dict:
p = {
"processor": platform.processor(),
"python_version": platform.python_version(),
"platform": platform.platform(),
"os_release": platform.release(),
"os_system": platform.system(),
"lbrynet_version": lbrynet_version,
"lbryschema_version": schema_version,
"build": build_type.BUILD, # CI server sets this during build step
}
if p["os_system"] == "Linux":
try:
import distro
p["distro"] = distro.info()
p["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown')
except ModuleNotFoundError:
pass
return p
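For reference, the helper returns a flat dict; a sketch of calling it (values vary by machine, and this assumes lbrynet at this revision is importable):

    from lbrynet.extras import system_info
    print(system_info.get_platform())
    # e.g. {'processor': 'x86_64', 'python_version': '3.7.1',
    #       'platform': 'Linux-...', 'os_release': '...', 'os_system': 'Linux',
    #       'lbrynet_version': '...', 'lbryschema_version': '...', 'build': '...',
    #       'distro': {...}, 'desktop': '...'}   # last two keys on Linux only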

View file

@@ -0,0 +1,11 @@
__node_daemon__ = 'lbrycrdd'
__node_cli__ = 'lbrycrd-cli'
__node_bin__ = ''
__node_url__ = (
'https://github.com/lbryio/lbrycrd/releases/download/v0.12.2.1/lbrycrd-linux.zip'
)
__spvserver__ = 'lbrynet.extras.wallet.server.coin.LBCRegTest'
from lbrynet.extras.wallet.ledger import MainNetLedger, RegTestLedger
from lbrynet.extras.wallet.manager import LbryWalletManager
from lbrynet.extras.wallet.network import Network

View file

@@ -1,11 +1,11 @@
import json
import logging
from torba.baseaccount import BaseAccount
from torba.basetransaction import TXORef
from torba.client.baseaccount import BaseAccount
from torba.client.basetransaction import TXORef
from lbryschema.claim import ClaimDict
from lbryschema.signer import SECP256k1, get_signer
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.signer import SECP256k1, get_signer
log = logging.getLogger(__name__)
@@ -95,7 +95,9 @@ class Account(BaseAccount):
txid, nout = maybe_claim_id.split(':')
tx = await self.ledger.db.get_transaction(txid=txid)
if not tx:
log.warning("Claim migration failed to find a transaction for outpoint %s:%i")
log.warning(
"Claim migration failed to find a transaction for outpoint %s", maybe_claim_id
)
results['previous-corrupted'] += 1
continue
if tx.outputs[int(nout)].script.is_claim_involved:
@@ -178,3 +180,24 @@ class Account(BaseAccount):
def get_channel_count(self, **constraints):
return self.ledger.db.get_channel_count(account=self, **constraints)
async def send_to_addresses(self, amount, addresses, broadcast=False):
tx_class = self.ledger.transaction_class
tx = await tx_class.create(
inputs=[],
outputs=[
tx_class.output_class.pay_pubkey_hash(amount, self.ledger.address_to_hash160(address))
for address in addresses
],
funding_accounts=[self],
change_account=self
)
if broadcast:
await self.ledger.broadcast(tx)
else:
await self.ledger.release_outputs(
[txi.txo_ref.txo for txi in tx.inputs]
)
return tx
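A hedged sketch of driving the new helper from async code; `account` is assumed to come from a running wallet manager, and the addresses are placeholders rather than real LBC addresses:

    async def fund_addresses(account):
        tx = await account.send_to_addresses(
            100_000_000,                                 # 1.0 LBC in dewies, sent to EACH address
            ["bPlaceholderAddr1", "bPlaceholderAddr2"],  # placeholder addresses
            broadcast=False                              # dry run: reserved inputs are released
        )
        return tx.id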

View file

@@ -1,7 +1,6 @@
import six
import struct
import binascii
from torba.hash import double_sha256
from torba.client.hash import double_sha256
class InvalidProofError(Exception):
@@ -17,7 +16,7 @@ def get_hash_for_outpoint(txhash, nout, height_of_last_takeover):
# noinspection PyPep8
def verify_proof(proof, rootHash, name):
def verify_proof(proof, root_hash, name):
previous_computed_hash = None
reverse_computed_name = ''
verified_value = False
@@ -32,7 +31,7 @@ def verify_proof(proof, rootHash, name):
if previous_child_character >= child['character']:
raise InvalidProofError("children not in increasing order")
previous_child_character = child['character']
to_hash += six.int2byte(child['character'])
to_hash += bytes((child['character'],))
if 'nodeHash' in child:
if len(child['nodeHash']) != 64:
raise InvalidProofError("invalid child nodeHash")
@@ -70,7 +69,7 @@ def verify_proof(proof, rootHash, name):
previous_computed_hash = double_sha256(to_hash)
if previous_computed_hash != binascii.unhexlify(rootHash)[::-1]:
if previous_computed_hash != binascii.unhexlify(root_hash)[::-1]:
raise InvalidProofError("computed hash does not match roothash")
if 'txhash' in proof and 'nOut' in proof:
if not verified_value:
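bytes((n,)) is the plain Python 3 spelling of six.int2byte(n) for byte values 0-255, which is all the proof walker needs:

    assert bytes((0x61,)) == b'a'
    to_hash = b''
    for character in (104, 105):       # code points of 'h' and 'i'
        to_hash += bytes((character,))
    assert to_hash == b'hi'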

View file

@@ -1,4 +1,4 @@
from torba.basedatabase import BaseDatabase
from torba.client.basedatabase import BaseDatabase
class WalletDatabase(BaseDatabase):
@@ -64,7 +64,7 @@ class WalletDatabase(BaseDatabase):
if channel_ids:
channels = {
txo.claim_id: txo for txo in
(await super().get_utxos(
(await self.get_claims(
my_account=my_account,
claim_id__in=channel_ids
))
@@ -78,7 +78,7 @@ class WalletDatabase(BaseDatabase):
@staticmethod
def constrain_claims(constraints):
constraints['claim_type__any'] = {'is_claim': 1, 'is_update': 1}
constraints['claim_type__any'] = {'is_claim': 1, 'is_update': 1, 'is_support': 1}
def get_claims(self, **constraints):
self.constrain_claims(constraints)

View file

@@ -0,0 +1,33 @@
import textwrap
from torba.client.util import coins_to_satoshis, satoshis_to_coins
def lbc_to_dewies(lbc):
try:
return coins_to_satoshis(lbc)
except ValueError:
raise ValueError(textwrap.dedent(
"""
Decimal inputs require a value in the ones place and in the tenths place
separated by a period. The value provided, '{}', is not of the correct
format.
The following are examples of valid decimal inputs:
1.0
0.001
2.34500
4534.4
2323434.0000
The following are NOT valid:
83
.456
123.
""".format(lbc)
))
def dewies_to_lbc(dewies):
return satoshis_to_coins(dewies)
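For orientation, dewies relate to LBC the way satoshis relate to BTC (COIN == 100,000,000). Expected behaviour of the two wrappers, per the torba conversions they delegate to (exact return-string formatting follows satoshis_to_coins):

    # lbc_to_dewies('1.0')     -> 100000000
    # lbc_to_dewies('0.001')   -> 100000
    # dewies_to_lbc(250000000) -> '2.5'
    # lbc_to_dewies('.456')    -> ValueError, no digit in the ones place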

View file

@@ -2,9 +2,9 @@ import struct
from typing import Optional
from binascii import hexlify, unhexlify
from torba.baseheader import BaseHeaders
from torba.util import ArithUint256
from torba.hash import sha512, double_sha256, ripemd160
from torba.client.baseheader import BaseHeaders
from torba.client.util import ArithUint256
from torba.client.hash import sha512, double_sha256, ripemd160
class Headers(BaseHeaders):

View file

@@ -2,16 +2,17 @@ import asyncio
import logging
from binascii import unhexlify
from lbryschema.error import URIParseError
from lbryschema.uri import parse_lbry_uri
from torba.baseledger import BaseLedger
from .resolve import Resolver
from .account import Account
from .network import Network
from .database import WalletDatabase
from .transaction import Transaction
from .header import Headers, UnvalidatedHeaders
from lbrynet.schema.validator import validate_claim_id
from torba.client.baseledger import BaseLedger
from lbrynet.schema.error import URIParseError
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.extras.wallet.dewies import dewies_to_lbc
from lbrynet.extras.wallet.resolve import Resolver
from lbrynet.extras.wallet.account import Account
from lbrynet.extras.wallet.network import Network
from lbrynet.extras.wallet.database import WalletDatabase
from lbrynet.extras.wallet.transaction import Transaction
from lbrynet.extras.wallet.header import Headers, UnvalidatedHeaders
log = logging.getLogger(__name__)
@@ -56,11 +57,18 @@ class MainNetLedger(BaseLedger):
async def resolve(self, page, page_size, *uris):
for uri in uris:
try:
parse_lbry_uri(uri)
parsed_uri = parse_lbry_uri(uri)
if parsed_uri.claim_id:
validate_claim_id(parsed_uri.claim_id)
except URIParseError as err:
return {'error': err.args[0]}
resolutions = await self.network.get_values_for_uris(self.headers.hash().decode(), *uris)
return await self.resolver._handle_resolutions(resolutions, uris, page, page_size)
except Exception as e:
return {'error': str(e)}
try:
resolutions = await self.network.get_values_for_uris(self.headers.hash().decode(), *uris)
return await self.resolver._handle_resolutions(resolutions, uris, page, page_size)
except Exception as e:
return {'error': str(e)}
async def get_claim_by_claim_id(self, claim_id):
result = (await self.network.get_claims_by_ids(claim_id)).pop(claim_id, {})
@@ -85,9 +93,10 @@ class MainNetLedger(BaseLedger):
total_change = len((await account.change.get_addresses()))
channel_count = await account.get_channel_count()
claim_count = await account.get_claim_count()
log.info("Loaded account %s with %d receiving addresses (gap: %d), "
"%d change addresses (gap: %d), %d channels, %d certificates and %d claims.",
account.id, total_receiving, account.receiving.gap, total_change, account.change.gap,
balance = dewies_to_lbc(await account.get_balance())
log.info("Loaded account %s with %s LBC, %d receiving addresses (gap: %d), "
"%d change addresses (gap: %d), %d channels, %d certificates and %d claims. ",
account.id, balance, total_receiving, account.receiving.gap, total_change, account.change.gap,
channel_count, len(account.certificates), claim_count)

View file

@@ -9,16 +9,16 @@ from typing import Optional
from twisted.internet import defer
from lbryschema.schema import SECP256k1
from torba.basemanager import BaseWalletManager
from lbrynet.schema.schema import SECP256k1
from torba.client.basemanager import BaseWalletManager
from lbryschema.claim import ClaimDict
from lbrynet.schema.claim import ClaimDict
from .ledger import MainNetLedger
from .account import BaseAccount, generate_certificate
from .transaction import Transaction
from .database import WalletDatabase
from .dewies import dewies_to_lbc
from lbrynet.extras.wallet.ledger import MainNetLedger
from lbrynet.extras.wallet.account import BaseAccount, generate_certificate
from lbrynet.extras.wallet.transaction import Transaction
from lbrynet.extras.wallet.database import WalletDatabase
from lbrynet.extras.wallet.dewies import dewies_to_lbc
log = logging.getLogger(__name__)
@@ -137,8 +137,8 @@ class LbryWalletManager(BaseWalletManager):
'certificates': unmigrated.get('claim_certificates', {}),
'address_generator': {
'name': 'deterministic-chain',
'receiving': {'gap': 20, 'maximum_uses_per_address': 2},
'change': {'gap': 6, 'maximum_uses_per_address': 2}
'receiving': {'gap': 20, 'maximum_uses_per_address': 1},
'change': {'gap': 6, 'maximum_uses_per_address': 1}
}
}]
}, indent=4, sort_keys=True)
@@ -202,8 +202,10 @@ class LbryWalletManager(BaseWalletManager):
return manager
async def _migrate_addresses(self, receiving_addresses: set, change_addresses: set):
migrated_receiving = set((await self.default_account.receiving.generate_keys(0, len(receiving_addresses))))
migrated_change = set((await self.default_account.change.generate_keys(0, len(change_addresses))))
async with self.default_account.receiving.address_generator_lock:
migrated_receiving = set((await self.default_account.receiving._generate_keys(0, len(receiving_addresses))))
async with self.default_account.change.address_generator_lock:
migrated_change = set((await self.default_account.change._generate_keys(0, len(change_addresses))))
receiving_addresses = set(map(self.default_account.ledger.public_key_to_address, receiving_addresses))
change_addresses = set(map(self.default_account.ledger.public_key_to_address, change_addresses))
if not any(change_addresses.difference(migrated_change)):
@@ -239,11 +241,16 @@ class LbryWalletManager(BaseWalletManager):
async def send_claim_to_address(self, claim_id: str, destination_address: str, amount: Optional[int],
account=None):
account = account or self.default_account
claims = await account.ledger.db.get_utxos(claim_id=claim_id)
claims = await account.get_claims(
claim_name_type__any={'is_claim': 1, 'is_update': 1}, # exclude is_supports
claim_id=claim_id
)
if not claims:
raise NameError(f"Claim not found: {claim_id}")
if not amount:
amount = claims[0].get_estimator(self.ledger).effective_amount
tx = await Transaction.update(
claims[0], ClaimDict.deserialize(claims[0].script.value['claim']), amount,
claims[0], ClaimDict.deserialize(claims[0].script.values['claim']), amount,
destination_address.encode(), [account], account
)
await self.ledger.broadcast(tx)
@@ -265,9 +272,10 @@ class LbryWalletManager(BaseWalletManager):
check_cache = kwargs.get('check_cache', False) # TODO: put caching back (was force_refresh parameter)
ledger: MainNetLedger = self.default_account.ledger
results = await ledger.resolve(page, page_size, *uris)
await self.old_db.save_claims_for_resolve(
(value for value in results.values() if 'error' not in value)
).asFuture(asyncio.get_event_loop())
if 'error' not in results:
await self.old_db.save_claims_for_resolve(
(value for value in results.values() if 'error' not in value)
).asFuture(asyncio.get_event_loop())
return results
def get_claims_for_name(self, name: str):
@@ -296,40 +304,60 @@ class LbryWalletManager(BaseWalletManager):
'fee': dewies_to_lbc(tx.fee),
'date': datetime.fromtimestamp(ts).isoformat(' ')[:-3] if tx.height > 0 else None,
'confirmations': headers.height - tx.height if tx.height > 0 else 0,
'claim_info': [{
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'nout': txo.position
} for txo in tx.my_claim_outputs],
'update_info': [{
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'nout': txo.position
} for txo in tx.my_update_outputs],
'support_info': [{
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'is_tip': not txo.is_my_account,
'nout': txo.position
} for txo in tx.my_support_outputs],
'abandon_info': [{
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'nout': txo.position
} for txo in tx.my_abandon_outputs],
'claim_info': [],
'update_info': [],
'support_info': [],
'abandon_info': []
}
for txo in tx.my_claim_outputs:
item['claim_info'].append({
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'nout': txo.position
})
for txo in tx.my_update_outputs:
item['update_info'].append({
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'nout': txo.position
})
for txo in tx.my_support_outputs:
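# a support we received but did not fund with any of our own inputs is a tip from someone else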
is_tip = next(tx.my_inputs, None) is None
item['support_info'].append({
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(txo.amount if is_tip else -txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'is_tip': is_tip,
'nout': txo.position
})
for txo in tx.other_support_outputs:
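# a support we funded on a claim we do not own is a tip we sent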
is_tip = next(tx.my_inputs, None) is not None
item['support_info'].append({
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'is_tip': is_tip,
'nout': txo.position
})
for txo in tx.my_abandon_outputs:
item['abandon_info'].append({
'address': txo.get_address(account.ledger),
'balance_delta': dewies_to_lbc(-txo.amount),
'amount': dewies_to_lbc(txo.amount),
'claim_id': txo.claim_id,
'claim_name': txo.claim_name,
'nout': txo.position
})
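# the fee is inputs minus outputs, so it is only known when every spent txo is in the local db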
if all(txi.txo_ref.txo is not None for txi in tx.inputs):
item['fee'] = dewies_to_lbc(tx.fee)
else:
@@ -349,7 +377,10 @@ class LbryWalletManager(BaseWalletManager):
claim = claim.sign(
certificate.private_key, claim_address, certificate.claim_id, curve=SECP256k1
)
existing_claims = await account.get_claims(claim_name=name)
existing_claims = await account.get_claims(
claim_name_type__any={'is_claim': 1, 'is_update': 1}, # exclude is_supports
claim_name=name
)
if len(existing_claims) == 0:
tx = await Transaction.claim(
name, claim, amount, claim_address, [account], account
@@ -367,19 +398,6 @@ class LbryWalletManager(BaseWalletManager):
# TODO: release reserved tx outputs in case anything fails by this point
return tx
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
return {
"claim_id": txo.claim_id,
"name": name,
"amount": bid,
"address": address,
"txid": tx.id,
"nout": txo.position,
"value": claim_dict,
"height": -1,
"claim_sequence": -1,
}
async def support_claim(self, claim_name, claim_id, amount, account):
holding_address = await account.receiving.get_or_create_usable_address()
tx = await Transaction.support(claim_name, claim_id, amount, holding_address, [account], account)
@@ -413,6 +431,19 @@ class LbryWalletManager(BaseWalletManager):
# TODO: release reserved tx outputs in case anything fails by this point
return tx
def _old_get_temp_claim_info(self, tx, txo, address, claim_dict, name, bid):
return {
"claim_id": txo.claim_id,
"name": name,
"amount": bid,
"address": address,
"txid": tx.id,
"nout": txo.position,
"value": claim_dict,
"height": -1,
"claim_sequence": -1,
}
def get_certificates(self, private_key_accounts, exclude_without_key=True, **constraints):
return self.db.get_certificates(
private_key_accounts=private_key_accounts,

View file

@@ -1,4 +1,4 @@
from torba.basenetwork import BaseNetwork
from torba.client.basenetwork import BaseNetwork
class Network(BaseNetwork):

View file

@@ -3,14 +3,13 @@ import logging
from ecdsa import BadSignatureError
from binascii import unhexlify, hexlify
from lbrynet.core.Error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
from lbryschema.address import is_address
from lbryschema.claim import ClaimDict
from lbryschema.decode import smart_decode
from lbryschema.error import DecodeError
from lbryschema.uri import parse_lbry_uri
from .claim_proofs import verify_proof, InvalidProofError
from lbrynet.p2p.Error import UnknownNameError, UnknownClaimID, UnknownURI, UnknownOutpoint
from lbrynet.schema.address import is_address
from lbrynet.schema.claim import ClaimDict
from lbrynet.schema.decode import smart_decode
from lbrynet.schema.error import DecodeError
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.extras.wallet.claim_proofs import verify_proof, InvalidProofError
log = logging.getLogger(__name__)
@@ -57,14 +56,14 @@ class Resolver:
height, depth,
transaction_class=self.transaction_class,
hash160_to_address=self.hash160_to_address)
result['certificate'] = self.parse_and_validate_claim_result(certificate_result,
raw=raw)
result['certificate'] = await self.parse_and_validate_claim_result(certificate_result,
raw=raw)
elif certificate_resolution_type == "claim_id":
result['certificate'] = self.parse_and_validate_claim_result(certificate_response,
raw=raw)
result['certificate'] = await self.parse_and_validate_claim_result(certificate_response,
raw=raw)
elif certificate_resolution_type == "sequence":
result['certificate'] = self.parse_and_validate_claim_result(certificate_response,
raw=raw)
result['certificate'] = await self.parse_and_validate_claim_result(certificate_response,
raw=raw)
else:
log.error("unknown response type: %s", certificate_resolution_type)
@@ -97,17 +96,17 @@ class Resolver:
height, depth,
transaction_class=self.transaction_class,
hash160_to_address=self.hash160_to_address)
result['claim'] = self.parse_and_validate_claim_result(claim_result,
certificate,
raw)
result['claim'] = await self.parse_and_validate_claim_result(claim_result,
certificate,
raw)
elif claim_resolution_type == "claim_id":
result['claim'] = self.parse_and_validate_claim_result(claim_response,
certificate,
raw)
result['claim'] = await self.parse_and_validate_claim_result(claim_response,
certificate,
raw)
elif claim_resolution_type == "sequence":
result['claim'] = self.parse_and_validate_claim_result(claim_response,
certificate,
raw)
result['claim'] = await self.parse_and_validate_claim_result(claim_response,
certificate,
raw)
else:
log.error("unknown response type: %s", claim_resolution_type)
@@ -137,10 +136,12 @@ class Resolver:
if claims_in_channel:
result['claims_in_channel'] = claims_in_channel
elif 'error' not in result:
result['error'] = "claim not found"
result['success'] = False
result['uri'] = str(parsed_uri)
return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
# claims with invalid signatures are only returned when resolving outside a channel
if result.get('claim', {}).get('has_signature', False):
if parsed_uri.path and not result['claim']['signature_is_valid']:
return {'error': 'claim not found', 'success': False, 'uri': str(parsed_uri)}
return result
async def get_certificate_and_validate_result(self, claim_result):
@@ -151,9 +152,9 @@ class Resolver:
if certificate_id:
certificate = await self.network.get_claims_by_ids(certificate_id.decode())
certificate = certificate.pop(certificate_id.decode()) if certificate else None
return self.parse_and_validate_claim_result(claim_result, certificate=certificate)
return await self.parse_and_validate_claim_result(claim_result, certificate=certificate)
def parse_and_validate_claim_result(self, claim_result, certificate=None, raw=False):
async def parse_and_validate_claim_result(self, claim_result, certificate=None, raw=False):
if not claim_result or 'value' not in claim_result:
return claim_result
@@ -174,7 +175,7 @@ class Resolver:
if decoded.has_signature:
if certificate is None:
log.info("fetching certificate to check claim signature")
certificate = self.network.get_claims_by_ids(decoded.certificate_id)
certificate = await self.network.get_claims_by_ids(decoded.certificate_id.decode())
if not certificate:
log.warning('Certificate %s not found', decoded.certificate_id)
claim_result['has_signature'] = True
@@ -235,7 +236,7 @@ class Resolver:
# these results can include those where `signature_is_valid` is False. if they are skipped,
# page indexing becomes tricky, as the number of results isn't known until after having
# processed them.
# TODO: fix ^ in lbryschema
# TODO: fix ^ in lbrynet.schema
async def iter_validate_channel_claims():
formatted_claims = []
@@ -244,7 +245,7 @@ class Resolver:
for claim_id in claim_ids:
claim = batch_result[claim_id]
if claim['name'] == claim_names[claim_id]:
formatted_claim = self.parse_and_validate_claim_result(claim, certificate)
formatted_claim = await self.parse_and_validate_claim_result(claim, certificate)
formatted_claim['absolute_channel_position'] = claim_positions[
claim['claim_id']]
formatted_claims.append(formatted_claim)
@@ -384,6 +385,9 @@ def validate_claim_signature_and_get_channel_name(claim, certificate_claim,
claim_address, decoded_certificate=None):
if not certificate_claim:
return False, None
if 'value' not in certificate_claim:
log.warning('Got an invalid claim while parsing certificates, please report: %s', certificate_claim)
return False, None
certificate = decoded_certificate or smart_decode(certificate_claim['value'])
if not isinstance(certificate, ClaimDict):
raise TypeError("Certificate is not a ClaimDict: %s" % str(type(certificate)))
@@ -416,6 +420,12 @@ def _decode_claim_result(claim):
if not claim['signature_is_valid']:
log.warning("lbry://%s#%s has an invalid signature",
claim['name'], claim['claim_id'])
if 'value' not in claim:
log.warning('Got an invalid claim while parsing, please report: %s', claim)
claim['hex'] = None
claim['value'] = None
claim['error'] = "Failed to parse: missing value"
return claim
try:
decoded = smart_decode(claim['value'])
claim_dict = decoded.claim_dict

View file

@@ -1,5 +1,5 @@
from torba.basescript import BaseInputScript, BaseOutputScript, Template
from torba.basescript import PUSH_SINGLE, PUSH_INTEGER, OP_DROP, OP_2DROP, PUSH_SUBSCRIPT, OP_VERIFY
from torba.client.basescript import BaseInputScript, BaseOutputScript, Template
from torba.client.basescript import PUSH_SINGLE, PUSH_INTEGER, OP_DROP, OP_2DROP, PUSH_SUBSCRIPT, OP_VERIFY
class InputScript(BaseInputScript):

View file

@@ -0,0 +1,174 @@
import hashlib
import struct
import msgpack
from torba.server.hash import hash_to_hex_str
from torba.server.block_processor import BlockProcessor
from lbrynet.schema.proto.claim_pb2 import Claim
from lbrynet.schema.uri import parse_lbry_uri
from lbrynet.schema.decode import smart_decode
from lbrynet.extras.wallet.server.model import NameClaim, ClaimInfo, ClaimUpdate, ClaimSupport
class LBRYBlockProcessor(BlockProcessor):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
if self.env.coin.NET == "regtest":
self.prefetcher.polling_delay = 0.5
self.should_validate_signatures = self.env.boolean('VALIDATE_CLAIM_SIGNATURES', False)
self.logger.info("LbryumX Block Processor - Validating signatures: {}".format(self.should_validate_signatures))
def advance_blocks(self, blocks):
# save height, advance blocks as usual, then hook our claim tx processing
height = self.height + 1
super().advance_blocks(blocks)
pending_undo = []
for index, block in enumerate(blocks):
undo = self.advance_claim_txs(block.transactions, height + index)
pending_undo.append((height+index, undo,))
self.db.write_undo(pending_undo)
def advance_claim_txs(self, txs, height):
# TODO: generate claim undo info!
undo_info = []
add_undo = undo_info.append
update_inputs = set()
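# inputs consumed by valid claim updates; any other spent claim outpoint is treated as an abandon below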
for tx, txid in txs:
update_inputs.clear()
if tx.has_claims:
for index, output in enumerate(tx.outputs):
claim = output.claim
if isinstance(claim, NameClaim):
add_undo(self.advance_claim_name_transaction(output, height, txid, index))
elif isinstance(claim, ClaimUpdate):
update_input = self.db.get_update_input(claim, tx.inputs)
if update_input:
update_inputs.add(update_input)
add_undo(self.advance_update_claim(output, height, txid, index))
else:
info = (hash_to_hex_str(txid), hash_to_hex_str(claim.claim_id),)
self.logger.error("REJECTED: {} updating {}".format(*info))
elif isinstance(claim, ClaimSupport):
self.advance_support(claim, txid, index, height, output.value)
for txin in tx.inputs:
if txin not in update_inputs:
abandoned_claim_id = self.db.abandon_spent(txin.prev_hash, txin.prev_idx)
if abandoned_claim_id:
add_undo((abandoned_claim_id, self.db.get_claim_info(abandoned_claim_id)))
return undo_info
def advance_update_claim(self, output, height, txid, nout):
claim_id = output.claim.claim_id
claim_info = self.claim_info_from_output(output, txid, nout, height)
old_claim_info = self.db.get_claim_info(claim_id)
self.db.put_claim_id_for_outpoint(old_claim_info.txid, old_claim_info.nout, None)
if old_claim_info.cert_id:
self.db.remove_claim_from_certificate_claims(old_claim_info.cert_id, claim_id)
if claim_info.cert_id:
self.db.put_claim_id_signed_by_cert_id(claim_info.cert_id, claim_id)
self.db.put_claim_info(claim_id, claim_info)
self.db.put_claim_id_for_outpoint(txid, nout, claim_id)
return claim_id, old_claim_info
def advance_claim_name_transaction(self, output, height, txid, nout):
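# a brand new claim derives its id from the outpoint that created it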
claim_id = claim_id_hash(txid, nout)
claim_info = self.claim_info_from_output(output, txid, nout, height)
if claim_info.cert_id:
self.db.put_claim_id_signed_by_cert_id(claim_info.cert_id, claim_id)
self.db.put_claim_info(claim_id, claim_info)
self.db.put_claim_for_name(claim_info.name, claim_id)
self.db.put_claim_id_for_outpoint(txid, nout, claim_id)
return claim_id, None
def backup_from_undo_info(self, claim_id, undo_claim_info):
"""
Undo information holds a claim state **before** a transaction changes it
There are 4 possibilities when processing it, of which only 3 are valid:
1. the claim is known and the undo info has info, it was an update
2. the claim is known and the undo info doesn't hold any info, it was claimed
3. the claim is unknown and the undo info has info, it was abandoned
4. the claim is unknown and the undo info doesn't hold info, error!
"""
undo_claim_info = ClaimInfo(*undo_claim_info) if undo_claim_info else None
current_claim_info = self.db.get_claim_info(claim_id)
if current_claim_info and undo_claim_info:
# update, remove current claim
self.db.remove_claim_id_for_outpoint(current_claim_info.txid, current_claim_info.nout)
if current_claim_info.cert_id:
self.db.remove_claim_from_certificate_claims(current_claim_info.cert_id, claim_id)
elif current_claim_info and not undo_claim_info:
# claim, abandon it
self.db.abandon_spent(current_claim_info.txid, current_claim_info.nout)
elif not current_claim_info and undo_claim_info:
# abandon, reclaim it (happens below)
pass
else:
# should never happen, unless the database got into an inconsistent state
raise Exception("Unexpected situation occurred on backup, this means the database is inconsistent. "
"Please report. Resetting the data folder (reindex) solves it for now.")
if undo_claim_info:
self.db.put_claim_info(claim_id, undo_claim_info)
if undo_claim_info.cert_id:
cert_id = self._checksig(undo_claim_info.name, undo_claim_info.value, undo_claim_info.address)
self.db.put_claim_id_signed_by_cert_id(cert_id, claim_id)
self.db.put_claim_for_name(undo_claim_info.name, claim_id)
self.db.put_claim_id_for_outpoint(undo_claim_info.txid, undo_claim_info.nout, claim_id)
def backup_txs(self, txs):
self.logger.info("Reorg at height {} with {} transactions.".format(self.height, len(txs)))
undo_info = msgpack.loads(self.db.claim_undo_db.get(struct.pack(">I", self.height)), use_list=False)
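# apply undo entries in reverse order so each claim is restored to its pre-block state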
for claim_id, undo_claim_info in reversed(undo_info):
self.backup_from_undo_info(claim_id, undo_claim_info)
return super().backup_txs(txs)
def backup_blocks(self, raw_blocks):
self.db.batched_flush_claims()
super().backup_blocks(raw_blocks=raw_blocks)
self.db.batched_flush_claims()
def shutdown(self):
self.db.shutdown()
async def flush(self, flush_utxos):
self.db.batched_flush_claims()
return await super().flush(flush_utxos)
def advance_support(self, claim_support, txid, nout, height, amount):
# TODO: check for more controlling claim rules, like takeover or ordering
pass
def claim_info_from_output(self, output, txid, nout, height):
amount = output.value
address = self.coin.address_from_script(output.pk_script)
name, value, cert_id = output.claim.name, output.claim.value, None
assert txid and address
cert_id = self._checksig(name, value, address)
return ClaimInfo(name, value, txid, nout, amount, address, height, cert_id)
def _checksig(self, name, value, address):
try:
parse_lbry_uri(name.decode()) # skip invalid names
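# certificateId bytes are reversed to match claim id byte order; an empty id means the claim is unsigned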
cert_id = Claim.FromString(value).publisherSignature.certificateId[::-1] or None
if not self.should_validate_signatures:
return cert_id
if cert_id:
cert_claim = self.db.get_claim_info(cert_id)
if cert_claim:
certificate = smart_decode(cert_claim.value)
claim_dict = smart_decode(value)
claim_dict.validate_signature(address, certificate)
return cert_id
except Exception:
# claims with invalid names or undecodable values are treated as unsigned
pass
def claim_id_hash(txid, n):
# TODO: This should be in lbrynet.schema
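# claim id = RIPEMD-160(SHA-256(txid + 4-byte big-endian output index))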
packed = txid + struct.pack('>I', n)
md = hashlib.new('ripemd160')
md.update(hashlib.sha256(packed).digest())
return md.digest()

View file

@@ -0,0 +1,139 @@
import struct
from torba.server.script import ScriptPubKey, _match_ops, OpCodes
from torba.server.util import cachedproperty
from torba.server.hash import hash_to_hex_str, HASHX_LEN
from hashlib import sha256
from torba.server.coins import Coin, CoinError
from lbrynet.extras.wallet.server.opcodes import decode_claim_script, opcodes as lbry_opcodes
class LBC(Coin):
from .session import LBRYElectrumX
from .block_processor import LBRYBlockProcessor
from .tx import LBRYDeserializer
from .daemon import LBCDaemon
from .db import LBRYDB
DAEMON = LBCDaemon
SESSIONCLS = LBRYElectrumX
BLOCK_PROCESSOR = LBRYBlockProcessor
DB = LBRYDB
DESERIALIZER = LBRYDeserializer
NAME = "LBRY"
SHORTNAME = "LBC"
NET = "mainnet"
BASIC_HEADER_SIZE = 112
CHUNK_SIZE = 96
XPUB_VERBYTES = bytes.fromhex("019C354f")
XPRV_VERBYTES = bytes.fromhex("019C3118")
P2PKH_VERBYTE = bytes.fromhex("55")
P2SH_VERBYTES = bytes.fromhex("7A")
WIF_BYTE = bytes.fromhex("1C")
GENESIS_HASH = ('9c89283ba0f3227f6c03b70216b9f665'
'f0118d5e0fa729cedf4fb34d6a34f463')
TX_COUNT = 2716936
TX_COUNT_HEIGHT = 329554
TX_PER_BLOCK = 1
RPC_PORT = 9245
REORG_LIMIT = 200
PEERS = [
]
@classmethod
def genesis_block(cls, block):
'''Check the Genesis block is the right one for this coin.
Return the block less its unspendable coinbase.
'''
header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH:
raise CoinError('genesis block has hash {} expected {}'
.format(header_hex_hash, cls.GENESIS_HASH))
return block
@classmethod
def electrum_header(cls, header, height):
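# 112-byte header layout: version (4) | prev block hash (32) | merkle root (32) | claim trie root (32) | timestamp, bits, nonce (4 each)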
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
return {
'version': version,
'prev_block_hash': hash_to_hex_str(header[4:36]),
'merkle_root': hash_to_hex_str(header[36:68]),
'claim_trie_root': hash_to_hex_str(header[68:100]),
'timestamp': timestamp,
'bits': bits,
'nonce': nonce,
'block_height': height,
}
@cachedproperty
def address_handlers(self):
return ScriptPubKey.PayToHandlers(
address=self.P2PKH_address_from_hash160,
script_hash=self.P2SH_address_from_hash160,
pubkey=self.P2PKH_address_from_pubkey,
unspendable=lambda: None,
strange=self.claim_address_handler,
)
@classmethod
def address_from_script(cls, script):
'''Given a pk_script, return the address it pays to, or None.'''
return ScriptPubKey.pay_to(cls.address_handlers, script)
@classmethod
def claim_address_handler(cls, script):
'''Parse a claim script and return the address it pays to, or None.'''
decoded = decode_claim_script(script)
if not decoded:
return None
ops = []
for op, data, _ in decoded[1]:
if not data:
ops.append(op)
else:
ops.append((op, data,))
match = _match_ops
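# standard pay-to templates; -1 in a pattern matches any data push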
TO_ADDRESS_OPS = [OpCodes.OP_DUP, OpCodes.OP_HASH160, -1,
OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]
TO_P2SH_OPS = [OpCodes.OP_HASH160, -1, OpCodes.OP_EQUAL]
TO_PUBKEY_OPS = [-1, OpCodes.OP_CHECKSIG]
if match(ops, TO_ADDRESS_OPS):
return cls.P2PKH_address_from_hash160(ops[2][-1])
if match(ops, TO_P2SH_OPS):
return cls.P2SH_address_from_hash160(ops[1][-1])
if match(ops, TO_PUBKEY_OPS):
return cls.P2PKH_address_from_pubkey(ops[0][-1])
if ops and ops[0] == OpCodes.OP_RETURN:
return None
return None
@classmethod
def hashX_from_script(cls, script):
'''
Overrides electrumx hashX from script by extracting addresses from claim scripts.
'''
if script and script[0] == OpCodes.OP_RETURN:
return None
if script[0] in [
lbry_opcodes.OP_CLAIM_NAME,
lbry_opcodes.OP_SUPPORT_CLAIM,
lbry_opcodes.OP_UPDATE_CLAIM
]:
return cls.address_to_hashX(cls.claim_address_handler(script))
else:
return sha256(script).digest()[:HASHX_LEN]
class LBCRegTest(LBC):
NET = "regtest"
GENESIS_HASH = '6e3fcf1299d4ec5d79c3a4c91d624a4acf9e2e173d95a1a0504f677669687556'
XPUB_VERBYTES = bytes.fromhex('043587cf')
XPRV_VERBYTES = bytes.fromhex('04358394')
P2PKH_VERBYTE = bytes.fromhex("6f")
P2SH_VERBYTES = bytes.fromhex("c4")

Some files were not shown because too many files have changed in this diff.